Dataset columns:
id: int32 (0 to 252k)
repo: string (length 7 to 55)
path: string (length 4 to 127)
func_name: string (length 1 to 88)
original_string: string (length 75 to 19.8k)
language: string (1 class)
code: string (length 75 to 19.8k)
code_tokens: list
docstring: string (length 3 to 17.3k)
docstring_tokens: list
sha: string (length 40)
url: string (length 87 to 242)
15,500
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/authentication.py
Verifier.key_bytes
def key_bytes(self): """Returns the raw verification key. :rtype: bytes """ return self.key.public_bytes( encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo )
python
def key_bytes(self): """Returns the raw verification key. :rtype: bytes """ return self.key.public_bytes( encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo )
[ "def", "key_bytes", "(", "self", ")", ":", "return", "self", ".", "key", ".", "public_bytes", "(", "encoding", "=", "serialization", ".", "Encoding", ".", "DER", ",", "format", "=", "serialization", ".", "PublicFormat", ".", "SubjectPublicKeyInfo", ")" ]
Returns the raw verification key. :rtype: bytes
[ "Returns", "the", "raw", "verification", "key", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L167-L174
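For context on the entry above: a minimal sketch (not part of the dataset) of the pyca/cryptography call that Verifier.key_bytes wraps, serializing an EC public key to DER-encoded SubjectPublicKeyInfo bytes; the curve choice is only illustrative.
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

# Generate a throwaway EC key and export its public half as DER bytes,
# mirroring the public_bytes() call in Verifier.key_bytes above.
private_key = ec.generate_private_key(ec.SECP384R1())
der_bytes = private_key.public_key().public_bytes(
    encoding=serialization.Encoding.DER,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
print(len(der_bytes))  # DER length varies with the curve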
15,501
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/authentication.py
Verifier.verify
def verify(self, signature): """Verifies the signature against the current cryptographic verifier state. :param bytes signature: The signature to verify """ prehashed_digest = self._hasher.finalize() self.key.verify( signature=signature, data=prehashed_digest, signature_algorithm=ec.ECDSA(Prehashed(self.algorithm.signing_hash_type())), )
python
def verify(self, signature): """Verifies the signature against the current cryptographic verifier state. :param bytes signature: The signature to verify """ prehashed_digest = self._hasher.finalize() self.key.verify( signature=signature, data=prehashed_digest, signature_algorithm=ec.ECDSA(Prehashed(self.algorithm.signing_hash_type())), )
[ "def", "verify", "(", "self", ",", "signature", ")", ":", "prehashed_digest", "=", "self", ".", "_hasher", ".", "finalize", "(", ")", "self", ".", "key", ".", "verify", "(", "signature", "=", "signature", ",", "data", "=", "prehashed_digest", ",", "signature_algorithm", "=", "ec", ".", "ECDSA", "(", "Prehashed", "(", "self", ".", "algorithm", ".", "signing_hash_type", "(", ")", ")", ")", ",", ")" ]
Verifies the signature against the current cryptographic verifier state. :param bytes signature: The signature to verify
[ "Verifies", "the", "signature", "against", "the", "current", "cryptographic", "verifier", "state", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L183-L193
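A hedged, standalone sketch of the prehashed-ECDSA flow that Verifier.verify relies on: the message is hashed incrementally, then the signature is checked against the finalized digest. Key and hash choices here are assumptions for illustration.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed

key = ec.generate_private_key(ec.SECP384R1())

# Hash the message incrementally, as the Verifier's internal hasher does.
hasher = hashes.Hash(hashes.SHA384())
hasher.update(b"message body")
digest = hasher.finalize()

# Sign and verify over the precomputed digest; verify() raises InvalidSignature on mismatch.
signature = key.sign(digest, ec.ECDSA(Prehashed(hashes.SHA384())))
key.public_key().verify(signature, digest, ec.ECDSA(Prehashed(hashes.SHA384())))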
15,502
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/wrapping_keys.py
WrappingKey.encrypt
def encrypt(self, plaintext_data_key, encryption_context): """Encrypts a data key using a direct wrapping key. :param bytes plaintext_data_key: Data key to encrypt :param dict encryption_context: Encryption context to use in encryption :returns: Deserialized object containing encrypted key :rtype: aws_encryption_sdk.internal.structures.EncryptedData """ if self.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC: if self.wrapping_key_type is EncryptionKeyType.PRIVATE: encrypted_key = self._wrapping_key.public_key().encrypt( plaintext=plaintext_data_key, padding=self.wrapping_algorithm.padding ) else: encrypted_key = self._wrapping_key.encrypt( plaintext=plaintext_data_key, padding=self.wrapping_algorithm.padding ) return EncryptedData(iv=None, ciphertext=encrypted_key, tag=None) serialized_encryption_context = serialize_encryption_context(encryption_context=encryption_context) iv = os.urandom(self.wrapping_algorithm.algorithm.iv_len) return encrypt( algorithm=self.wrapping_algorithm.algorithm, key=self._derived_wrapping_key, plaintext=plaintext_data_key, associated_data=serialized_encryption_context, iv=iv, )
python
def encrypt(self, plaintext_data_key, encryption_context): """Encrypts a data key using a direct wrapping key. :param bytes plaintext_data_key: Data key to encrypt :param dict encryption_context: Encryption context to use in encryption :returns: Deserialized object containing encrypted key :rtype: aws_encryption_sdk.internal.structures.EncryptedData """ if self.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC: if self.wrapping_key_type is EncryptionKeyType.PRIVATE: encrypted_key = self._wrapping_key.public_key().encrypt( plaintext=plaintext_data_key, padding=self.wrapping_algorithm.padding ) else: encrypted_key = self._wrapping_key.encrypt( plaintext=plaintext_data_key, padding=self.wrapping_algorithm.padding ) return EncryptedData(iv=None, ciphertext=encrypted_key, tag=None) serialized_encryption_context = serialize_encryption_context(encryption_context=encryption_context) iv = os.urandom(self.wrapping_algorithm.algorithm.iv_len) return encrypt( algorithm=self.wrapping_algorithm.algorithm, key=self._derived_wrapping_key, plaintext=plaintext_data_key, associated_data=serialized_encryption_context, iv=iv, )
[ "def", "encrypt", "(", "self", ",", "plaintext_data_key", ",", "encryption_context", ")", ":", "if", "self", ".", "wrapping_algorithm", ".", "encryption_type", "is", "EncryptionType", ".", "ASYMMETRIC", ":", "if", "self", ".", "wrapping_key_type", "is", "EncryptionKeyType", ".", "PRIVATE", ":", "encrypted_key", "=", "self", ".", "_wrapping_key", ".", "public_key", "(", ")", ".", "encrypt", "(", "plaintext", "=", "plaintext_data_key", ",", "padding", "=", "self", ".", "wrapping_algorithm", ".", "padding", ")", "else", ":", "encrypted_key", "=", "self", ".", "_wrapping_key", ".", "encrypt", "(", "plaintext", "=", "plaintext_data_key", ",", "padding", "=", "self", ".", "wrapping_algorithm", ".", "padding", ")", "return", "EncryptedData", "(", "iv", "=", "None", ",", "ciphertext", "=", "encrypted_key", ",", "tag", "=", "None", ")", "serialized_encryption_context", "=", "serialize_encryption_context", "(", "encryption_context", "=", "encryption_context", ")", "iv", "=", "os", ".", "urandom", "(", "self", ".", "wrapping_algorithm", ".", "algorithm", ".", "iv_len", ")", "return", "encrypt", "(", "algorithm", "=", "self", ".", "wrapping_algorithm", ".", "algorithm", ",", "key", "=", "self", ".", "_derived_wrapping_key", ",", "plaintext", "=", "plaintext_data_key", ",", "associated_data", "=", "serialized_encryption_context", ",", "iv", "=", "iv", ",", ")" ]
Encrypts a data key using a direct wrapping key. :param bytes plaintext_data_key: Data key to encrypt :param dict encryption_context: Encryption context to use in encryption :returns: Deserialized object containing encrypted key :rtype: aws_encryption_sdk.internal.structures.EncryptedData
[ "Encrypts", "a", "data", "key", "using", "a", "direct", "wrapping", "key", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/wrapping_keys.py#L61-L87
15,503
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/wrapping_keys.py
WrappingKey.decrypt
def decrypt(self, encrypted_wrapped_data_key, encryption_context): """Decrypts a wrapped, encrypted, data key. :param encrypted_wrapped_data_key: Encrypted, wrapped, data key :type encrypted_wrapped_data_key: aws_encryption_sdk.internal.structures.EncryptedData :param dict encryption_context: Encryption context to use in decryption :returns: Plaintext of data key :rtype: bytes """ if self.wrapping_key_type is EncryptionKeyType.PUBLIC: raise IncorrectMasterKeyError("Public key cannot decrypt") if self.wrapping_key_type is EncryptionKeyType.PRIVATE: return self._wrapping_key.decrypt( ciphertext=encrypted_wrapped_data_key.ciphertext, padding=self.wrapping_algorithm.padding ) serialized_encryption_context = serialize_encryption_context(encryption_context=encryption_context) return decrypt( algorithm=self.wrapping_algorithm.algorithm, key=self._derived_wrapping_key, encrypted_data=encrypted_wrapped_data_key, associated_data=serialized_encryption_context, )
python
def decrypt(self, encrypted_wrapped_data_key, encryption_context): """Decrypts a wrapped, encrypted, data key. :param encrypted_wrapped_data_key: Encrypted, wrapped, data key :type encrypted_wrapped_data_key: aws_encryption_sdk.internal.structures.EncryptedData :param dict encryption_context: Encryption context to use in decryption :returns: Plaintext of data key :rtype: bytes """ if self.wrapping_key_type is EncryptionKeyType.PUBLIC: raise IncorrectMasterKeyError("Public key cannot decrypt") if self.wrapping_key_type is EncryptionKeyType.PRIVATE: return self._wrapping_key.decrypt( ciphertext=encrypted_wrapped_data_key.ciphertext, padding=self.wrapping_algorithm.padding ) serialized_encryption_context = serialize_encryption_context(encryption_context=encryption_context) return decrypt( algorithm=self.wrapping_algorithm.algorithm, key=self._derived_wrapping_key, encrypted_data=encrypted_wrapped_data_key, associated_data=serialized_encryption_context, )
[ "def", "decrypt", "(", "self", ",", "encrypted_wrapped_data_key", ",", "encryption_context", ")", ":", "if", "self", ".", "wrapping_key_type", "is", "EncryptionKeyType", ".", "PUBLIC", ":", "raise", "IncorrectMasterKeyError", "(", "\"Public key cannot decrypt\"", ")", "if", "self", ".", "wrapping_key_type", "is", "EncryptionKeyType", ".", "PRIVATE", ":", "return", "self", ".", "_wrapping_key", ".", "decrypt", "(", "ciphertext", "=", "encrypted_wrapped_data_key", ".", "ciphertext", ",", "padding", "=", "self", ".", "wrapping_algorithm", ".", "padding", ")", "serialized_encryption_context", "=", "serialize_encryption_context", "(", "encryption_context", "=", "encryption_context", ")", "return", "decrypt", "(", "algorithm", "=", "self", ".", "wrapping_algorithm", ".", "algorithm", ",", "key", "=", "self", ".", "_derived_wrapping_key", ",", "encrypted_data", "=", "encrypted_wrapped_data_key", ",", "associated_data", "=", "serialized_encryption_context", ",", ")" ]
Decrypts a wrapped, encrypted, data key. :param encrypted_wrapped_data_key: Encrypted, wrapped, data key :type encrypted_wrapped_data_key: aws_encryption_sdk.internal.structures.EncryptedData :param dict encryption_context: Encryption context to use in decryption :returns: Plaintext of data key :rtype: bytes
[ "Decrypts", "a", "wrapped", "encrypted", "data", "key", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/wrapping_keys.py#L89-L110
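The symmetric branch of WrappingKey.encrypt/decrypt above boils down to an AES-GCM wrap with the serialized encryption context as associated data. A minimal sketch using pyca/cryptography's AESGCM; all values are illustrative.
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

wrapping_key = AESGCM(AESGCM.generate_key(bit_length=256))
iv = os.urandom(12)
aad = b"serialized-encryption-context"  # stand-in for serialize_encryption_context(...)
plaintext_data_key = os.urandom(32)

# encrypt() returns the ciphertext with the GCM tag appended; decrypt() verifies it.
wrapped = wrapping_key.encrypt(iv, plaintext_data_key, aad)
assert wrapping_key.decrypt(iv, wrapped, aad) == plaintext_data_key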
15,504
aws/aws-encryption-sdk-python
decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/key_providers/counting.py
CountingMasterKey._generate_data_key
def _generate_data_key(self, algorithm: AlgorithmSuite, encryption_context: Dict[Text, Text]) -> DataKey: """Perform the provider-specific data key generation task. :param algorithm: Algorithm on which to base data key :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use in encryption :returns: Generated data key :rtype: aws_encryption_sdk.structures.DataKey """ data_key = b"".join([chr(i).encode("utf-8") for i in range(1, algorithm.data_key_len + 1)]) return DataKey(key_provider=self.key_provider, data_key=data_key, encrypted_data_key=self._encrypted_data_key)
python
def _generate_data_key(self, algorithm: AlgorithmSuite, encryption_context: Dict[Text, Text]) -> DataKey: """Perform the provider-specific data key generation task. :param algorithm: Algorithm on which to base data key :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use in encryption :returns: Generated data key :rtype: aws_encryption_sdk.structures.DataKey """ data_key = b"".join([chr(i).encode("utf-8") for i in range(1, algorithm.data_key_len + 1)]) return DataKey(key_provider=self.key_provider, data_key=data_key, encrypted_data_key=self._encrypted_data_key)
[ "def", "_generate_data_key", "(", "self", ",", "algorithm", ":", "AlgorithmSuite", ",", "encryption_context", ":", "Dict", "[", "Text", ",", "Text", "]", ")", "->", "DataKey", ":", "data_key", "=", "b\"\"", ".", "join", "(", "[", "chr", "(", "i", ")", ".", "encode", "(", "\"utf-8\"", ")", "for", "i", "in", "range", "(", "1", ",", "algorithm", ".", "data_key_len", "+", "1", ")", "]", ")", "return", "DataKey", "(", "key_provider", "=", "self", ".", "key_provider", ",", "data_key", "=", "data_key", ",", "encrypted_data_key", "=", "self", ".", "_encrypted_data_key", ")" ]
Perform the provider-specific data key generation task. :param algorithm: Algorithm on which to base data key :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use in encryption :returns: Generated data key :rtype: aws_encryption_sdk.structures.DataKey
[ "Perform", "the", "provider", "-", "specific", "data", "key", "generation", "task", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/key_providers/counting.py#L53-L63
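A quick illustration of the deterministic "counting" data key that _generate_data_key builds; the 32-byte length is an assumption (in the code it comes from algorithm.data_key_len).
data_key_len = 32  # assumed data key length for illustration
data_key = b"".join(chr(i).encode("utf-8") for i in range(1, data_key_len + 1))
print(data_key[:4])   # b'\x01\x02\x03\x04'
print(len(data_key))  # 32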
15,505
aws/aws-encryption-sdk-python
decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/key_providers/counting.py
CountingMasterKey._encrypt_data_key
def _encrypt_data_key( self, data_key: DataKey, algorithm: AlgorithmSuite, encryption_context: Dict[Text, Text] ) -> NoReturn: """Encrypt a data key and return the ciphertext. :param data_key: Unencrypted data key :type data_key: :class:`aws_encryption_sdk.structures.RawDataKey` or :class:`aws_encryption_sdk.structures.DataKey` :param algorithm: Algorithm object which directs how this Master Key will encrypt the data key :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use in encryption :raises NotImplementedError: when called """ raise NotImplementedError("CountingMasterKey does not support encrypt_data_key")
python
def _encrypt_data_key( self, data_key: DataKey, algorithm: AlgorithmSuite, encryption_context: Dict[Text, Text] ) -> NoReturn: """Encrypt a data key and return the ciphertext. :param data_key: Unencrypted data key :type data_key: :class:`aws_encryption_sdk.structures.RawDataKey` or :class:`aws_encryption_sdk.structures.DataKey` :param algorithm: Algorithm object which directs how this Master Key will encrypt the data key :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use in encryption :raises NotImplementedError: when called """ raise NotImplementedError("CountingMasterKey does not support encrypt_data_key")
[ "def", "_encrypt_data_key", "(", "self", ",", "data_key", ":", "DataKey", ",", "algorithm", ":", "AlgorithmSuite", ",", "encryption_context", ":", "Dict", "[", "Text", ",", "Text", "]", ")", "->", "NoReturn", ":", "raise", "NotImplementedError", "(", "\"CountingMasterKey does not support encrypt_data_key\"", ")" ]
Encrypt a data key and return the ciphertext. :param data_key: Unencrypted data key :type data_key: :class:`aws_encryption_sdk.structures.RawDataKey` or :class:`aws_encryption_sdk.structures.DataKey` :param algorithm: Algorithm object which directs how this Master Key will encrypt the data key :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use in encryption :raises NotImplementedError: when called
[ "Encrypt", "a", "data", "key", "and", "return", "the", "ciphertext", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/key_providers/counting.py#L65-L78
15,506
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
validate_header
def validate_header(header, header_auth, raw_header, data_key): """Validates the header using the header authentication data. :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param header_auth: Deserialized header auth :type header_auth: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication :param bytes raw_header: Raw header bytes :param bytes data_key: Data key with which to perform validation :raises SerializationError: if header authorization fails """ _LOGGER.debug("Starting header validation") try: decrypt( algorithm=header.algorithm, key=data_key, encrypted_data=EncryptedData(header_auth.iv, b"", header_auth.tag), associated_data=raw_header, ) except InvalidTag: raise SerializationError("Header authorization failed")
python
def validate_header(header, header_auth, raw_header, data_key): """Validates the header using the header authentication data. :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param header_auth: Deserialized header auth :type header_auth: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication :param bytes raw_header: Raw header bytes :param bytes data_key: Data key with which to perform validation :raises SerializationError: if header authorization fails """ _LOGGER.debug("Starting header validation") try: decrypt( algorithm=header.algorithm, key=data_key, encrypted_data=EncryptedData(header_auth.iv, b"", header_auth.tag), associated_data=raw_header, ) except InvalidTag: raise SerializationError("Header authorization failed")
[ "def", "validate_header", "(", "header", ",", "header_auth", ",", "raw_header", ",", "data_key", ")", ":", "_LOGGER", ".", "debug", "(", "\"Starting header validation\"", ")", "try", ":", "decrypt", "(", "algorithm", "=", "header", ".", "algorithm", ",", "key", "=", "data_key", ",", "encrypted_data", "=", "EncryptedData", "(", "header_auth", ".", "iv", ",", "b\"\"", ",", "header_auth", ".", "tag", ")", ",", "associated_data", "=", "raw_header", ",", ")", "except", "InvalidTag", ":", "raise", "SerializationError", "(", "\"Header authorization failed\"", ")" ]
Validates the header using the header authentication data. :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param header_auth: Deserialized header auth :type header_auth: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication :param bytes raw_header: Raw header bytes :param bytes data_key: Data key with which to perform validation :raises SerializationError: if header authorization fails
[ "Validates", "the", "header", "using", "the", "header", "authentication", "data", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L52-L73
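What validate_header amounts to, sketched with pyca/cryptography's AESGCM: an AES-GCM decrypt of an empty ciphertext whose tag and associated data cover the raw header bytes; the key and IV sizes here are assumptions.
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

data_key = AESGCM.generate_key(bit_length=256)
iv = os.urandom(12)
raw_header = b"raw header bytes"

# Encrypting empty plaintext yields just the 16-byte GCM tag over the AAD.
tag = AESGCM(data_key).encrypt(iv, b"", raw_header)
# Decryption raises cryptography.exceptions.InvalidTag if the header was tampered with.
AESGCM(data_key).decrypt(iv, tag, raw_header)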
15,507
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
_deserialize_encrypted_data_keys
def _deserialize_encrypted_data_keys(stream): # type: (IO) -> Set[EncryptedDataKey] """Deserialize some encrypted data keys from a stream. :param stream: Stream from which to read encrypted data keys :return: Loaded encrypted data keys :rtype: set of :class:`EncryptedDataKey` """ (encrypted_data_key_count,) = unpack_values(">H", stream) encrypted_data_keys = set([]) for _ in range(encrypted_data_key_count): (key_provider_length,) = unpack_values(">H", stream) (key_provider_identifier,) = unpack_values(">{}s".format(key_provider_length), stream) (key_provider_information_length,) = unpack_values(">H", stream) (key_provider_information,) = unpack_values(">{}s".format(key_provider_information_length), stream) (encrypted_data_key_length,) = unpack_values(">H", stream) encrypted_data_key = stream.read(encrypted_data_key_length) encrypted_data_keys.add( EncryptedDataKey( key_provider=MasterKeyInfo( provider_id=to_str(key_provider_identifier), key_info=key_provider_information ), encrypted_data_key=encrypted_data_key, ) ) return encrypted_data_keys
python
def _deserialize_encrypted_data_keys(stream): # type: (IO) -> Set[EncryptedDataKey] """Deserialize some encrypted data keys from a stream. :param stream: Stream from which to read encrypted data keys :return: Loaded encrypted data keys :rtype: set of :class:`EncryptedDataKey` """ (encrypted_data_key_count,) = unpack_values(">H", stream) encrypted_data_keys = set([]) for _ in range(encrypted_data_key_count): (key_provider_length,) = unpack_values(">H", stream) (key_provider_identifier,) = unpack_values(">{}s".format(key_provider_length), stream) (key_provider_information_length,) = unpack_values(">H", stream) (key_provider_information,) = unpack_values(">{}s".format(key_provider_information_length), stream) (encrypted_data_key_length,) = unpack_values(">H", stream) encrypted_data_key = stream.read(encrypted_data_key_length) encrypted_data_keys.add( EncryptedDataKey( key_provider=MasterKeyInfo( provider_id=to_str(key_provider_identifier), key_info=key_provider_information ), encrypted_data_key=encrypted_data_key, ) ) return encrypted_data_keys
[ "def", "_deserialize_encrypted_data_keys", "(", "stream", ")", ":", "# type: (IO) -> Set[EncryptedDataKey]", "(", "encrypted_data_key_count", ",", ")", "=", "unpack_values", "(", "\">H\"", ",", "stream", ")", "encrypted_data_keys", "=", "set", "(", "[", "]", ")", "for", "_", "in", "range", "(", "encrypted_data_key_count", ")", ":", "(", "key_provider_length", ",", ")", "=", "unpack_values", "(", "\">H\"", ",", "stream", ")", "(", "key_provider_identifier", ",", ")", "=", "unpack_values", "(", "\">{}s\"", ".", "format", "(", "key_provider_length", ")", ",", "stream", ")", "(", "key_provider_information_length", ",", ")", "=", "unpack_values", "(", "\">H\"", ",", "stream", ")", "(", "key_provider_information", ",", ")", "=", "unpack_values", "(", "\">{}s\"", ".", "format", "(", "key_provider_information_length", ")", ",", "stream", ")", "(", "encrypted_data_key_length", ",", ")", "=", "unpack_values", "(", "\">H\"", ",", "stream", ")", "encrypted_data_key", "=", "stream", ".", "read", "(", "encrypted_data_key_length", ")", "encrypted_data_keys", ".", "add", "(", "EncryptedDataKey", "(", "key_provider", "=", "MasterKeyInfo", "(", "provider_id", "=", "to_str", "(", "key_provider_identifier", ")", ",", "key_info", "=", "key_provider_information", ")", ",", "encrypted_data_key", "=", "encrypted_data_key", ",", ")", ")", "return", "encrypted_data_keys" ]
Deserialize some encrypted data keys from a stream. :param stream: Stream from which to read encrypted data keys :return: Loaded encrypted data keys :rtype: set of :class:`EncryptedDataKey`
[ "Deserialize", "some", "encrypted", "data", "keys", "from", "a", "stream", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L127-L152
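A hedged sketch of the byte layout this deserializer consumes: a big-endian count, then for each key a length-prefixed provider id, provider info, and encrypted data key. Field values below are made up for illustration.
import io
import struct

provider_id = b"custom-provider"
provider_info = b"key-1"
edk_bytes = b"\x00" * 16

buf = io.BytesIO()
buf.write(struct.pack(">H", 1))  # one encrypted data key
for field in (provider_id, provider_info, edk_bytes):
    buf.write(struct.pack(">H", len(field)) + field)
buf.seek(0)

(count,) = struct.unpack(">H", buf.read(2))
print(count)  # 1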
15,508
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
_verified_iv_length
def _verified_iv_length(iv_length, algorithm_suite): # type: (int, AlgorithmSuite) -> int """Verify an IV length for an algorithm suite. :param int iv_length: IV length to verify :param AlgorithmSuite algorithm_suite: Algorithm suite to verify against :return: IV length :rtype: int :raises SerializationError: if IV length does not match algorithm suite """ if iv_length != algorithm_suite.iv_len: raise SerializationError( "Specified IV length ({length}) does not match algorithm IV length ({algorithm})".format( length=iv_length, algorithm=algorithm_suite ) ) return iv_length
python
def _verified_iv_length(iv_length, algorithm_suite): # type: (int, AlgorithmSuite) -> int """Verify an IV length for an algorithm suite. :param int iv_length: IV length to verify :param AlgorithmSuite algorithm_suite: Algorithm suite to verify against :return: IV length :rtype: int :raises SerializationError: if IV length does not match algorithm suite """ if iv_length != algorithm_suite.iv_len: raise SerializationError( "Specified IV length ({length}) does not match algorithm IV length ({algorithm})".format( length=iv_length, algorithm=algorithm_suite ) ) return iv_length
[ "def", "_verified_iv_length", "(", "iv_length", ",", "algorithm_suite", ")", ":", "# type: (int, AlgorithmSuite) -> int", "if", "iv_length", "!=", "algorithm_suite", ".", "iv_len", ":", "raise", "SerializationError", "(", "\"Specified IV length ({length}) does not match algorithm IV length ({algorithm})\"", ".", "format", "(", "length", "=", "iv_length", ",", "algorithm", "=", "algorithm_suite", ")", ")", "return", "iv_length" ]
Verify an IV length for an algorithm suite. :param int iv_length: IV length to verify :param AlgorithmSuite algorithm_suite: Algorithm suite to verify against :return: IV length :rtype: int :raises SerializationError: if IV length does not match algorithm suite
[ "Verify", "an", "IV", "length", "for", "an", "algorithm", "suite", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L185-L202
15,509
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
_verified_frame_length
def _verified_frame_length(frame_length, content_type): # type: (int, ContentType) -> int """Verify a frame length value for a message content type. :param int frame_length: Frame length to verify :param ContentType content_type: Message content type to verify against :return: frame length :rtype: int :raises SerializationError: if frame length is too large :raises SerializationError: if frame length is not zero for unframed content type """ if content_type == ContentType.FRAMED_DATA and frame_length > MAX_FRAME_SIZE: raise SerializationError( "Specified frame length larger than allowed maximum: {found} > {max}".format( found=frame_length, max=MAX_FRAME_SIZE ) ) if content_type == ContentType.NO_FRAMING and frame_length != 0: raise SerializationError("Non-zero frame length found for non-framed message") return frame_length
python
def _verified_frame_length(frame_length, content_type): # type: (int, ContentType) -> int """Verify a frame length value for a message content type. :param int frame_length: Frame length to verify :param ContentType content_type: Message content type to verify against :return: frame length :rtype: int :raises SerializationError: if frame length is too large :raises SerializationError: if frame length is not zero for unframed content type """ if content_type == ContentType.FRAMED_DATA and frame_length > MAX_FRAME_SIZE: raise SerializationError( "Specified frame length larger than allowed maximum: {found} > {max}".format( found=frame_length, max=MAX_FRAME_SIZE ) ) if content_type == ContentType.NO_FRAMING and frame_length != 0: raise SerializationError("Non-zero frame length found for non-framed message") return frame_length
[ "def", "_verified_frame_length", "(", "frame_length", ",", "content_type", ")", ":", "# type: (int, ContentType) -> int", "if", "content_type", "==", "ContentType", ".", "FRAMED_DATA", "and", "frame_length", ">", "MAX_FRAME_SIZE", ":", "raise", "SerializationError", "(", "\"Specified frame length larger than allowed maximum: {found} > {max}\"", ".", "format", "(", "found", "=", "frame_length", ",", "max", "=", "MAX_FRAME_SIZE", ")", ")", "if", "content_type", "==", "ContentType", ".", "NO_FRAMING", "and", "frame_length", "!=", "0", ":", "raise", "SerializationError", "(", "\"Non-zero frame length found for non-framed message\"", ")", "return", "frame_length" ]
Verify a frame length value for a message content type. :param int frame_length: Frame length to verify :param ContentType content_type: Message content type to verify against :return: frame length :rtype: int :raises SerializationError: if frame length is too large :raises SerializationError: if frame length is not zero for unframed content type
[ "Verify", "a", "frame", "length", "value", "for", "a", "message", "content", "type", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L205-L226
15,510
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
deserialize_header
def deserialize_header(stream): # type: (IO) -> MessageHeader """Deserializes the header from a source stream :param stream: Source data stream :type stream: io.BytesIO :returns: Deserialized MessageHeader object :rtype: :class:`aws_encryption_sdk.structures.MessageHeader` and bytes :raises NotSupportedError: if unsupported data types are found :raises UnknownIdentityError: if unknown data types are found :raises SerializationError: if IV length does not match algorithm """ _LOGGER.debug("Starting header deserialization") tee = io.BytesIO() tee_stream = TeeStream(stream, tee) version_id, message_type_id = unpack_values(">BB", tee_stream) header = dict() header["version"] = _verified_version_from_id(version_id) header["type"] = _verified_message_type_from_id(message_type_id) algorithm_id, message_id, ser_encryption_context_length = unpack_values(">H16sH", tee_stream) header["algorithm"] = _verified_algorithm_from_id(algorithm_id) header["message_id"] = message_id header["encryption_context"] = deserialize_encryption_context(tee_stream.read(ser_encryption_context_length)) header["encrypted_data_keys"] = _deserialize_encrypted_data_keys(tee_stream) (content_type_id,) = unpack_values(">B", tee_stream) header["content_type"] = _verified_content_type_from_id(content_type_id) (content_aad_length,) = unpack_values(">I", tee_stream) header["content_aad_length"] = _verified_content_aad_length(content_aad_length) (iv_length,) = unpack_values(">B", tee_stream) header["header_iv_length"] = _verified_iv_length(iv_length, header["algorithm"]) (frame_length,) = unpack_values(">I", tee_stream) header["frame_length"] = _verified_frame_length(frame_length, header["content_type"]) return MessageHeader(**header), tee.getvalue()
python
def deserialize_header(stream): # type: (IO) -> MessageHeader """Deserializes the header from a source stream :param stream: Source data stream :type stream: io.BytesIO :returns: Deserialized MessageHeader object :rtype: :class:`aws_encryption_sdk.structures.MessageHeader` and bytes :raises NotSupportedError: if unsupported data types are found :raises UnknownIdentityError: if unknown data types are found :raises SerializationError: if IV length does not match algorithm """ _LOGGER.debug("Starting header deserialization") tee = io.BytesIO() tee_stream = TeeStream(stream, tee) version_id, message_type_id = unpack_values(">BB", tee_stream) header = dict() header["version"] = _verified_version_from_id(version_id) header["type"] = _verified_message_type_from_id(message_type_id) algorithm_id, message_id, ser_encryption_context_length = unpack_values(">H16sH", tee_stream) header["algorithm"] = _verified_algorithm_from_id(algorithm_id) header["message_id"] = message_id header["encryption_context"] = deserialize_encryption_context(tee_stream.read(ser_encryption_context_length)) header["encrypted_data_keys"] = _deserialize_encrypted_data_keys(tee_stream) (content_type_id,) = unpack_values(">B", tee_stream) header["content_type"] = _verified_content_type_from_id(content_type_id) (content_aad_length,) = unpack_values(">I", tee_stream) header["content_aad_length"] = _verified_content_aad_length(content_aad_length) (iv_length,) = unpack_values(">B", tee_stream) header["header_iv_length"] = _verified_iv_length(iv_length, header["algorithm"]) (frame_length,) = unpack_values(">I", tee_stream) header["frame_length"] = _verified_frame_length(frame_length, header["content_type"]) return MessageHeader(**header), tee.getvalue()
[ "def", "deserialize_header", "(", "stream", ")", ":", "# type: (IO) -> MessageHeader", "_LOGGER", ".", "debug", "(", "\"Starting header deserialization\"", ")", "tee", "=", "io", ".", "BytesIO", "(", ")", "tee_stream", "=", "TeeStream", "(", "stream", ",", "tee", ")", "version_id", ",", "message_type_id", "=", "unpack_values", "(", "\">BB\"", ",", "tee_stream", ")", "header", "=", "dict", "(", ")", "header", "[", "\"version\"", "]", "=", "_verified_version_from_id", "(", "version_id", ")", "header", "[", "\"type\"", "]", "=", "_verified_message_type_from_id", "(", "message_type_id", ")", "algorithm_id", ",", "message_id", ",", "ser_encryption_context_length", "=", "unpack_values", "(", "\">H16sH\"", ",", "tee_stream", ")", "header", "[", "\"algorithm\"", "]", "=", "_verified_algorithm_from_id", "(", "algorithm_id", ")", "header", "[", "\"message_id\"", "]", "=", "message_id", "header", "[", "\"encryption_context\"", "]", "=", "deserialize_encryption_context", "(", "tee_stream", ".", "read", "(", "ser_encryption_context_length", ")", ")", "header", "[", "\"encrypted_data_keys\"", "]", "=", "_deserialize_encrypted_data_keys", "(", "tee_stream", ")", "(", "content_type_id", ",", ")", "=", "unpack_values", "(", "\">B\"", ",", "tee_stream", ")", "header", "[", "\"content_type\"", "]", "=", "_verified_content_type_from_id", "(", "content_type_id", ")", "(", "content_aad_length", ",", ")", "=", "unpack_values", "(", "\">I\"", ",", "tee_stream", ")", "header", "[", "\"content_aad_length\"", "]", "=", "_verified_content_aad_length", "(", "content_aad_length", ")", "(", "iv_length", ",", ")", "=", "unpack_values", "(", "\">B\"", ",", "tee_stream", ")", "header", "[", "\"header_iv_length\"", "]", "=", "_verified_iv_length", "(", "iv_length", ",", "header", "[", "\"algorithm\"", "]", ")", "(", "frame_length", ",", ")", "=", "unpack_values", "(", "\">I\"", ",", "tee_stream", ")", "header", "[", "\"frame_length\"", "]", "=", "_verified_frame_length", "(", "frame_length", ",", "header", "[", "\"content_type\"", "]", ")", "return", "MessageHeader", "(", "*", "*", "header", ")", ",", "tee", ".", "getvalue", "(", ")" ]
Deserializes the header from a source stream :param stream: Source data stream :type stream: io.BytesIO :returns: Deserialized MessageHeader object :rtype: :class:`aws_encryption_sdk.structures.MessageHeader` and bytes :raises NotSupportedError: if unsupported data types are found :raises UnknownIdentityError: if unknown data types are found :raises SerializationError: if IV length does not match algorithm
[ "Deserializes", "the", "header", "from", "a", "source", "stream" ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L229-L270
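For orientation, a sketch of the fixed-size prefix deserialize_header unpacks first (version, type, algorithm id, message id, serialized-context length); the field values below are only placeholders.
import struct

prefix = struct.pack(">BBH16sH", 1, 128, 0x0378, b"\x00" * 16, 0)
version, message_type, algorithm_id, message_id, ctx_len = struct.unpack(">BBH16sH", prefix)
print(version, message_type, hex(algorithm_id), ctx_len)  # 1 128 0x378 0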
15,511
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
deserialize_header_auth
def deserialize_header_auth(stream, algorithm, verifier=None): """Deserializes a MessageHeaderAuthentication object from a source stream. :param stream: Source data stream :type stream: io.BytesIO :param algorithm: The AlgorithmSuite object type contained in the header :type algorithm: aws_encryption_sdk.identifiers.AlgorithmSuite :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized MessageHeaderAuthentication object :rtype: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication """ _LOGGER.debug("Starting header auth deserialization") format_string = ">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len, tag_len=algorithm.tag_len) return MessageHeaderAuthentication(*unpack_values(format_string, stream, verifier))
python
def deserialize_header_auth(stream, algorithm, verifier=None): """Deserializes a MessageHeaderAuthentication object from a source stream. :param stream: Source data stream :type stream: io.BytesIO :param algorithm: The AlgorithmSuite object type contained in the header :type algorithm: aws_encryption_sdk.identifiers.AlgorithmSuite :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized MessageHeaderAuthentication object :rtype: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication """ _LOGGER.debug("Starting header auth deserialization") format_string = ">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len, tag_len=algorithm.tag_len) return MessageHeaderAuthentication(*unpack_values(format_string, stream, verifier))
[ "def", "deserialize_header_auth", "(", "stream", ",", "algorithm", ",", "verifier", "=", "None", ")", ":", "_LOGGER", ".", "debug", "(", "\"Starting header auth deserialization\"", ")", "format_string", "=", "\">{iv_len}s{tag_len}s\"", ".", "format", "(", "iv_len", "=", "algorithm", ".", "iv_len", ",", "tag_len", "=", "algorithm", ".", "tag_len", ")", "return", "MessageHeaderAuthentication", "(", "*", "unpack_values", "(", "format_string", ",", "stream", ",", "verifier", ")", ")" ]
Deserializes a MessageHeaderAuthentication object from a source stream. :param stream: Source data stream :type stream: io.BytesIO :param algorithm: The AlgorithmSuite object type contained in the header :type algorithm: aws_encryption_sdk.identifiers.AlgorithmSuite :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized MessageHeaderAuthentication object :rtype: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication
[ "Deserializes", "a", "MessageHeaderAuthentication", "object", "from", "a", "source", "stream", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L273-L287
15,512
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
deserialize_non_framed_values
def deserialize_non_framed_values(stream, header, verifier=None): """Deserializes the IV and body length from a non-framed stream. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: IV and Data Length values for body :rtype: tuple of bytes and int """ _LOGGER.debug("Starting non-framed body iv/tag deserialization") (data_iv, data_length) = unpack_values(">{}sQ".format(header.algorithm.iv_len), stream, verifier) return data_iv, data_length
python
def deserialize_non_framed_values(stream, header, verifier=None): """Deserializes the IV and body length from a non-framed stream. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: IV and Data Length values for body :rtype: tuple of bytes and int """ _LOGGER.debug("Starting non-framed body iv/tag deserialization") (data_iv, data_length) = unpack_values(">{}sQ".format(header.algorithm.iv_len), stream, verifier) return data_iv, data_length
[ "def", "deserialize_non_framed_values", "(", "stream", ",", "header", ",", "verifier", "=", "None", ")", ":", "_LOGGER", ".", "debug", "(", "\"Starting non-framed body iv/tag deserialization\"", ")", "(", "data_iv", ",", "data_length", ")", "=", "unpack_values", "(", "\">{}sQ\"", ".", "format", "(", "header", ".", "algorithm", ".", "iv_len", ")", ",", "stream", ",", "verifier", ")", "return", "data_iv", ",", "data_length" ]
Deserializes the IV and body length from a non-framed stream. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: IV and Data Length values for body :rtype: tuple of bytes and int
[ "Deserializes", "the", "IV", "and", "body", "length", "from", "a", "non", "-", "framed", "stream", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L290-L304
15,513
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
deserialize_tag
def deserialize_tag(stream, header, verifier=None): """Deserialize the Tag value from a non-framed stream. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Tag value for body :rtype: bytes """ (data_tag,) = unpack_values( format_string=">{auth_len}s".format(auth_len=header.algorithm.auth_len), stream=stream, verifier=verifier ) return data_tag
python
def deserialize_tag(stream, header, verifier=None): """Deserialize the Tag value from a non-framed stream. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Tag value for body :rtype: bytes """ (data_tag,) = unpack_values( format_string=">{auth_len}s".format(auth_len=header.algorithm.auth_len), stream=stream, verifier=verifier ) return data_tag
[ "def", "deserialize_tag", "(", "stream", ",", "header", ",", "verifier", "=", "None", ")", ":", "(", "data_tag", ",", ")", "=", "unpack_values", "(", "format_string", "=", "\">{auth_len}s\"", ".", "format", "(", "auth_len", "=", "header", ".", "algorithm", ".", "auth_len", ")", ",", "stream", "=", "stream", ",", "verifier", "=", "verifier", ")", "return", "data_tag" ]
Deserialize the Tag value from a non-framed stream. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Tag value for body :rtype: bytes
[ "Deserialize", "the", "Tag", "value", "from", "a", "non", "-", "framed", "stream", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L307-L322
15,514
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
deserialize_frame
def deserialize_frame(stream, header, verifier=None): """Deserializes a frame from a body. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized frame and a boolean stating if this is the final frame :rtype: :class:`aws_encryption_sdk.internal.structures.MessageFrameBody` and bool """ _LOGGER.debug("Starting frame deserialization") frame_data = {} final_frame = False (sequence_number,) = unpack_values(">I", stream, verifier) if sequence_number == SequenceIdentifier.SEQUENCE_NUMBER_END.value: _LOGGER.debug("Deserializing final frame") (sequence_number,) = unpack_values(">I", stream, verifier) final_frame = True else: _LOGGER.debug("Deserializing frame sequence number %d", int(sequence_number)) frame_data["final_frame"] = final_frame frame_data["sequence_number"] = sequence_number (frame_iv,) = unpack_values(">{iv_len}s".format(iv_len=header.algorithm.iv_len), stream, verifier) frame_data["iv"] = frame_iv if final_frame is True: (content_length,) = unpack_values(">I", stream, verifier) if content_length >= header.frame_length: raise SerializationError( "Invalid final frame length: {final} >= {normal}".format( final=content_length, normal=header.frame_length ) ) else: content_length = header.frame_length (frame_content, frame_tag) = unpack_values( ">{content_len}s{auth_len}s".format(content_len=content_length, auth_len=header.algorithm.auth_len), stream, verifier, ) frame_data["ciphertext"] = frame_content frame_data["tag"] = frame_tag return MessageFrameBody(**frame_data), final_frame
python
def deserialize_frame(stream, header, verifier=None): """Deserializes a frame from a body. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized frame and a boolean stating if this is the final frame :rtype: :class:`aws_encryption_sdk.internal.structures.MessageFrameBody` and bool """ _LOGGER.debug("Starting frame deserialization") frame_data = {} final_frame = False (sequence_number,) = unpack_values(">I", stream, verifier) if sequence_number == SequenceIdentifier.SEQUENCE_NUMBER_END.value: _LOGGER.debug("Deserializing final frame") (sequence_number,) = unpack_values(">I", stream, verifier) final_frame = True else: _LOGGER.debug("Deserializing frame sequence number %d", int(sequence_number)) frame_data["final_frame"] = final_frame frame_data["sequence_number"] = sequence_number (frame_iv,) = unpack_values(">{iv_len}s".format(iv_len=header.algorithm.iv_len), stream, verifier) frame_data["iv"] = frame_iv if final_frame is True: (content_length,) = unpack_values(">I", stream, verifier) if content_length >= header.frame_length: raise SerializationError( "Invalid final frame length: {final} >= {normal}".format( final=content_length, normal=header.frame_length ) ) else: content_length = header.frame_length (frame_content, frame_tag) = unpack_values( ">{content_len}s{auth_len}s".format(content_len=content_length, auth_len=header.algorithm.auth_len), stream, verifier, ) frame_data["ciphertext"] = frame_content frame_data["tag"] = frame_tag return MessageFrameBody(**frame_data), final_frame
[ "def", "deserialize_frame", "(", "stream", ",", "header", ",", "verifier", "=", "None", ")", ":", "_LOGGER", ".", "debug", "(", "\"Starting frame deserialization\"", ")", "frame_data", "=", "{", "}", "final_frame", "=", "False", "(", "sequence_number", ",", ")", "=", "unpack_values", "(", "\">I\"", ",", "stream", ",", "verifier", ")", "if", "sequence_number", "==", "SequenceIdentifier", ".", "SEQUENCE_NUMBER_END", ".", "value", ":", "_LOGGER", ".", "debug", "(", "\"Deserializing final frame\"", ")", "(", "sequence_number", ",", ")", "=", "unpack_values", "(", "\">I\"", ",", "stream", ",", "verifier", ")", "final_frame", "=", "True", "else", ":", "_LOGGER", ".", "debug", "(", "\"Deserializing frame sequence number %d\"", ",", "int", "(", "sequence_number", ")", ")", "frame_data", "[", "\"final_frame\"", "]", "=", "final_frame", "frame_data", "[", "\"sequence_number\"", "]", "=", "sequence_number", "(", "frame_iv", ",", ")", "=", "unpack_values", "(", "\">{iv_len}s\"", ".", "format", "(", "iv_len", "=", "header", ".", "algorithm", ".", "iv_len", ")", ",", "stream", ",", "verifier", ")", "frame_data", "[", "\"iv\"", "]", "=", "frame_iv", "if", "final_frame", "is", "True", ":", "(", "content_length", ",", ")", "=", "unpack_values", "(", "\">I\"", ",", "stream", ",", "verifier", ")", "if", "content_length", ">=", "header", ".", "frame_length", ":", "raise", "SerializationError", "(", "\"Invalid final frame length: {final} >= {normal}\"", ".", "format", "(", "final", "=", "content_length", ",", "normal", "=", "header", ".", "frame_length", ")", ")", "else", ":", "content_length", "=", "header", ".", "frame_length", "(", "frame_content", ",", "frame_tag", ")", "=", "unpack_values", "(", "\">{content_len}s{auth_len}s\"", ".", "format", "(", "content_len", "=", "content_length", ",", "auth_len", "=", "header", ".", "algorithm", ".", "auth_len", ")", ",", "stream", ",", "verifier", ",", ")", "frame_data", "[", "\"ciphertext\"", "]", "=", "frame_content", "frame_data", "[", "\"tag\"", "]", "=", "frame_tag", "return", "MessageFrameBody", "(", "*", "*", "frame_data", ")", ",", "final_frame" ]
Deserializes a frame from a body. :param stream: Source data stream :type stream: io.BytesIO :param header: Deserialized header :type header: aws_encryption_sdk.structures.MessageHeader :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized frame and a boolean stating if this is the final frame :rtype: :class:`aws_encryption_sdk.internal.structures.MessageFrameBody` and bool
[ "Deserializes", "a", "frame", "from", "a", "body", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L325-L368
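A small sketch of the final-frame branch above: the frame starts with a 4-byte sequence-number field, and a reserved end marker (assumed to be 0xFFFFFFFF here) signals that the real sequence number and an explicit content length follow.
import struct

SEQUENCE_NUMBER_END = 0xFFFFFFFF  # assumed value of the end-of-sequence marker

frame_prefix = struct.pack(">II", SEQUENCE_NUMBER_END, 3)  # marker, then real sequence number
(first,) = struct.unpack(">I", frame_prefix[:4])
final_frame = first == SEQUENCE_NUMBER_END
(sequence_number,) = struct.unpack(">I", frame_prefix[4:8])
print(final_frame, sequence_number)  # True 3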
15,515
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
deserialize_footer
def deserialize_footer(stream, verifier=None): """Deserializes a footer. :param stream: Source data stream :type stream: io.BytesIO :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized footer :rtype: aws_encryption_sdk.internal.structures.MessageFooter :raises SerializationError: if verifier supplied and no footer found """ _LOGGER.debug("Starting footer deserialization") signature = b"" if verifier is None: return MessageFooter(signature=signature) try: (sig_len,) = unpack_values(">H", stream) (signature,) = unpack_values(">{sig_len}s".format(sig_len=sig_len), stream) except SerializationError: raise SerializationError("No signature found in message") if verifier: verifier.verify(signature) return MessageFooter(signature=signature)
python
def deserialize_footer(stream, verifier=None): """Deserializes a footer. :param stream: Source data stream :type stream: io.BytesIO :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized footer :rtype: aws_encryption_sdk.internal.structures.MessageFooter :raises SerializationError: if verifier supplied and no footer found """ _LOGGER.debug("Starting footer deserialization") signature = b"" if verifier is None: return MessageFooter(signature=signature) try: (sig_len,) = unpack_values(">H", stream) (signature,) = unpack_values(">{sig_len}s".format(sig_len=sig_len), stream) except SerializationError: raise SerializationError("No signature found in message") if verifier: verifier.verify(signature) return MessageFooter(signature=signature)
[ "def", "deserialize_footer", "(", "stream", ",", "verifier", "=", "None", ")", ":", "_LOGGER", ".", "debug", "(", "\"Starting footer deserialization\"", ")", "signature", "=", "b\"\"", "if", "verifier", "is", "None", ":", "return", "MessageFooter", "(", "signature", "=", "signature", ")", "try", ":", "(", "sig_len", ",", ")", "=", "unpack_values", "(", "\">H\"", ",", "stream", ")", "(", "signature", ",", ")", "=", "unpack_values", "(", "\">{sig_len}s\"", ".", "format", "(", "sig_len", "=", "sig_len", ")", ",", "stream", ")", "except", "SerializationError", ":", "raise", "SerializationError", "(", "\"No signature found in message\"", ")", "if", "verifier", ":", "verifier", ".", "verify", "(", "signature", ")", "return", "MessageFooter", "(", "signature", "=", "signature", ")" ]
Deserializes a footer. :param stream: Source data stream :type stream: io.BytesIO :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized footer :rtype: aws_encryption_sdk.internal.structures.MessageFooter :raises SerializationError: if verifier supplied and no footer found
[ "Deserializes", "a", "footer", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L371-L393
15,516
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/formatting/deserialize.py
deserialize_wrapped_key
def deserialize_wrapped_key(wrapping_algorithm, wrapping_key_id, wrapped_encrypted_key): """Extracts and deserializes EncryptedData from a Wrapped EncryptedDataKey. :param wrapping_algorithm: Wrapping Algorithm with which to wrap plaintext_data_key :type wrapping_algorithm: aws_encryption_sdk.identifiers.WrappingAlgorithm :param bytes wrapping_key_id: Key ID of wrapping MasterKey :param wrapped_encrypted_key: Raw Wrapped EncryptedKey :type wrapped_encrypted_key: aws_encryption_sdk.structures.EncryptedDataKey :returns: EncryptedData of deserialized Wrapped EncryptedKey :rtype: aws_encryption_sdk.internal.structures.EncryptedData :raises SerializationError: if wrapping_key_id does not match deserialized wrapping key id :raises SerializationError: if wrapping_algorithm IV length does not match deserialized IV length """ if wrapping_key_id == wrapped_encrypted_key.key_provider.key_info: encrypted_wrapped_key = EncryptedData(iv=None, ciphertext=wrapped_encrypted_key.encrypted_data_key, tag=None) else: if not wrapped_encrypted_key.key_provider.key_info.startswith(wrapping_key_id): raise SerializationError("Master Key mismatch for wrapped data key") _key_info = wrapped_encrypted_key.key_provider.key_info[len(wrapping_key_id) :] try: tag_len, iv_len = struct.unpack(">II", _key_info[:8]) except struct.error: raise SerializationError("Malformed key info: key info missing data") tag_len //= 8 # Tag Length is stored in bits, not bytes if iv_len != wrapping_algorithm.algorithm.iv_len: raise SerializationError("Wrapping AlgorithmSuite mismatch for wrapped data key") iv = _key_info[8:] if len(iv) != iv_len: raise SerializationError("Malformed key info: incomplete iv") ciphertext = wrapped_encrypted_key.encrypted_data_key[: -1 * tag_len] tag = wrapped_encrypted_key.encrypted_data_key[-1 * tag_len :] if not ciphertext or len(tag) != tag_len: raise SerializationError("Malformed key info: incomplete ciphertext or tag") encrypted_wrapped_key = EncryptedData(iv=iv, ciphertext=ciphertext, tag=tag) return encrypted_wrapped_key
python
def deserialize_wrapped_key(wrapping_algorithm, wrapping_key_id, wrapped_encrypted_key): """Extracts and deserializes EncryptedData from a Wrapped EncryptedDataKey. :param wrapping_algorithm: Wrapping Algorithm with which to wrap plaintext_data_key :type wrapping_algorithm: aws_encryption_sdk.identifiers.WrappingAlgorithm :param bytes wrapping_key_id: Key ID of wrapping MasterKey :param wrapped_encrypted_key: Raw Wrapped EncryptedKey :type wrapped_encrypted_key: aws_encryption_sdk.structures.EncryptedDataKey :returns: EncryptedData of deserialized Wrapped EncryptedKey :rtype: aws_encryption_sdk.internal.structures.EncryptedData :raises SerializationError: if wrapping_key_id does not match deserialized wrapping key id :raises SerializationError: if wrapping_algorithm IV length does not match deserialized IV length """ if wrapping_key_id == wrapped_encrypted_key.key_provider.key_info: encrypted_wrapped_key = EncryptedData(iv=None, ciphertext=wrapped_encrypted_key.encrypted_data_key, tag=None) else: if not wrapped_encrypted_key.key_provider.key_info.startswith(wrapping_key_id): raise SerializationError("Master Key mismatch for wrapped data key") _key_info = wrapped_encrypted_key.key_provider.key_info[len(wrapping_key_id) :] try: tag_len, iv_len = struct.unpack(">II", _key_info[:8]) except struct.error: raise SerializationError("Malformed key info: key info missing data") tag_len //= 8 # Tag Length is stored in bits, not bytes if iv_len != wrapping_algorithm.algorithm.iv_len: raise SerializationError("Wrapping AlgorithmSuite mismatch for wrapped data key") iv = _key_info[8:] if len(iv) != iv_len: raise SerializationError("Malformed key info: incomplete iv") ciphertext = wrapped_encrypted_key.encrypted_data_key[: -1 * tag_len] tag = wrapped_encrypted_key.encrypted_data_key[-1 * tag_len :] if not ciphertext or len(tag) != tag_len: raise SerializationError("Malformed key info: incomplete ciphertext or tag") encrypted_wrapped_key = EncryptedData(iv=iv, ciphertext=ciphertext, tag=tag) return encrypted_wrapped_key
[ "def", "deserialize_wrapped_key", "(", "wrapping_algorithm", ",", "wrapping_key_id", ",", "wrapped_encrypted_key", ")", ":", "if", "wrapping_key_id", "==", "wrapped_encrypted_key", ".", "key_provider", ".", "key_info", ":", "encrypted_wrapped_key", "=", "EncryptedData", "(", "iv", "=", "None", ",", "ciphertext", "=", "wrapped_encrypted_key", ".", "encrypted_data_key", ",", "tag", "=", "None", ")", "else", ":", "if", "not", "wrapped_encrypted_key", ".", "key_provider", ".", "key_info", ".", "startswith", "(", "wrapping_key_id", ")", ":", "raise", "SerializationError", "(", "\"Master Key mismatch for wrapped data key\"", ")", "_key_info", "=", "wrapped_encrypted_key", ".", "key_provider", ".", "key_info", "[", "len", "(", "wrapping_key_id", ")", ":", "]", "try", ":", "tag_len", ",", "iv_len", "=", "struct", ".", "unpack", "(", "\">II\"", ",", "_key_info", "[", ":", "8", "]", ")", "except", "struct", ".", "error", ":", "raise", "SerializationError", "(", "\"Malformed key info: key info missing data\"", ")", "tag_len", "//=", "8", "# Tag Length is stored in bits, not bytes", "if", "iv_len", "!=", "wrapping_algorithm", ".", "algorithm", ".", "iv_len", ":", "raise", "SerializationError", "(", "\"Wrapping AlgorithmSuite mismatch for wrapped data key\"", ")", "iv", "=", "_key_info", "[", "8", ":", "]", "if", "len", "(", "iv", ")", "!=", "iv_len", ":", "raise", "SerializationError", "(", "\"Malformed key info: incomplete iv\"", ")", "ciphertext", "=", "wrapped_encrypted_key", ".", "encrypted_data_key", "[", ":", "-", "1", "*", "tag_len", "]", "tag", "=", "wrapped_encrypted_key", ".", "encrypted_data_key", "[", "-", "1", "*", "tag_len", ":", "]", "if", "not", "ciphertext", "or", "len", "(", "tag", ")", "!=", "tag_len", ":", "raise", "SerializationError", "(", "\"Malformed key info: incomplete ciphertext or tag\"", ")", "encrypted_wrapped_key", "=", "EncryptedData", "(", "iv", "=", "iv", ",", "ciphertext", "=", "ciphertext", ",", "tag", "=", "tag", ")", "return", "encrypted_wrapped_key" ]
Extracts and deserializes EncryptedData from a Wrapped EncryptedDataKey. :param wrapping_algorithm: Wrapping Algorithm with which to wrap plaintext_data_key :type wrapping_algorithm: aws_encryption_sdk.identifiers.WrappingAlgorithm :param bytes wrapping_key_id: Key ID of wrapping MasterKey :param wrapped_encrypted_key: Raw Wrapped EncryptedKey :type wrapped_encrypted_key: aws_encryption_sdk.structures.EncryptedDataKey :returns: EncryptedData of deserialized Wrapped EncryptedKey :rtype: aws_encryption_sdk.internal.structures.EncryptedData :raises SerializationError: if wrapping_key_id does not match deserialized wrapping key id :raises SerializationError: if wrapping_algorithm IV length does not match deserialized IV length
[ "Extracts", "and", "deserializes", "EncryptedData", "from", "a", "Wrapped", "EncryptedDataKey", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/deserialize.py#L417-L451
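A small sketch of the serialized key-info layout this deserializer walks: the wrapping key id, then a big-endian uint32 tag length (in bits), a big-endian uint32 IV length (in bytes), and finally the IV. The key id, IV size, and tag length below are made-up illustration values, not SDK constants.

import struct

# Build a key_info field in the shape deserialize_wrapped_key expects.
wrapping_key_id = b"example-wrapping-key"  # hypothetical key id
iv = b"\x00" * 12                          # 12-byte IV, as AES-GCM suites use
tag_len_bits = 128                         # 16-byte authentication tag, in bits

key_info = wrapping_key_id + struct.pack(">II", tag_len_bits, len(iv)) + iv

# Parse it back the same way the function does: strip the key id, unpack the
# two lengths, convert the tag length from bits to bytes, take the rest as IV.
suffix = key_info[len(wrapping_key_id):]
tag_bits, iv_len = struct.unpack(">II", suffix[:8])
assert tag_bits // 8 == 16 and suffix[8:8 + iv_len] == iv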
15,517
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/utils/__init__.py
validate_frame_length
def validate_frame_length(frame_length, algorithm): """Validates that frame length is within the defined limits and is compatible with the selected algorithm. :param int frame_length: Frame size in bytes :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :raises SerializationError: if frame size is negative or not a multiple of the algorithm block size :raises SerializationError: if frame size is larger than the maximum allowed frame size """ if frame_length < 0 or frame_length % algorithm.encryption_algorithm.block_size != 0: raise SerializationError( "Frame size must be a non-negative multiple of the block size of the crypto algorithm: {block_size}".format( block_size=algorithm.encryption_algorithm.block_size ) ) if frame_length > aws_encryption_sdk.internal.defaults.MAX_FRAME_SIZE: raise SerializationError( "Frame size too large: {frame} > {max}".format( frame=frame_length, max=aws_encryption_sdk.internal.defaults.MAX_FRAME_SIZE ) )
python
def validate_frame_length(frame_length, algorithm): """Validates that frame length is within the defined limits and is compatible with the selected algorithm. :param int frame_length: Frame size in bytes :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :raises SerializationError: if frame size is negative or not a multiple of the algorithm block size :raises SerializationError: if frame size is larger than the maximum allowed frame size """ if frame_length < 0 or frame_length % algorithm.encryption_algorithm.block_size != 0: raise SerializationError( "Frame size must be a non-negative multiple of the block size of the crypto algorithm: {block_size}".format( block_size=algorithm.encryption_algorithm.block_size ) ) if frame_length > aws_encryption_sdk.internal.defaults.MAX_FRAME_SIZE: raise SerializationError( "Frame size too large: {frame} > {max}".format( frame=frame_length, max=aws_encryption_sdk.internal.defaults.MAX_FRAME_SIZE ) )
[ "def", "validate_frame_length", "(", "frame_length", ",", "algorithm", ")", ":", "if", "frame_length", "<", "0", "or", "frame_length", "%", "algorithm", ".", "encryption_algorithm", ".", "block_size", "!=", "0", ":", "raise", "SerializationError", "(", "\"Frame size must be a non-negative multiple of the block size of the crypto algorithm: {block_size}\"", ".", "format", "(", "block_size", "=", "algorithm", ".", "encryption_algorithm", ".", "block_size", ")", ")", "if", "frame_length", ">", "aws_encryption_sdk", ".", "internal", ".", "defaults", ".", "MAX_FRAME_SIZE", ":", "raise", "SerializationError", "(", "\"Frame size too large: {frame} > {max}\"", ".", "format", "(", "frame", "=", "frame_length", ",", "max", "=", "aws_encryption_sdk", ".", "internal", ".", "defaults", ".", "MAX_FRAME_SIZE", ")", ")" ]
Validates that frame length is within the defined limits and is compatible with the selected algorithm. :param int frame_length: Frame size in bytes :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :raises SerializationError: if frame size is negative or not a multiple of the algorithm block size :raises SerializationError: if frame size is larger than the maximum allowed frame size
[ "Validates", "that", "frame", "length", "is", "within", "the", "defined", "limits", "and", "is", "compatible", "with", "the", "selected", "algorithm", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/utils/__init__.py#L44-L64
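The same two checks in isolation, with a plain block size and a hypothetical maximum frame size standing in for algorithm.encryption_algorithm.block_size and internal.defaults.MAX_FRAME_SIZE (whose real values are not shown in this record):

BLOCK_SIZE = 16               # stand-in for the algorithm's block size
MAX_FRAME_SIZE = 2 ** 31 - 1  # hypothetical maximum, for illustration only

def is_valid_frame_length(frame_length):
    # Mirrors validate_frame_length: a non-negative multiple of the block size
    # that does not exceed the configured maximum.
    if frame_length < 0 or frame_length % BLOCK_SIZE != 0:
        return False
    return frame_length <= MAX_FRAME_SIZE

assert is_valid_frame_length(4096)
assert not is_valid_frame_length(1000)   # not a multiple of the block size
assert not is_valid_frame_length(-16)    # negative sizes are rejected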
15,518
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/utils/__init__.py
get_aad_content_string
def get_aad_content_string(content_type, is_final_frame): """Prepares the appropriate Body AAD Value for a message body. :param content_type: Defines the type of content for which to prepare AAD String :type content_type: aws_encryption_sdk.identifiers.ContentType :param bool is_final_frame: Boolean stating whether this is the final frame in a body :returns: Appropriate AAD Content String :rtype: bytes :raises UnknownIdentityError: if unknown content type """ if content_type == ContentType.NO_FRAMING: aad_content_string = ContentAADString.NON_FRAMED_STRING_ID elif content_type == ContentType.FRAMED_DATA: if is_final_frame: aad_content_string = ContentAADString.FINAL_FRAME_STRING_ID else: aad_content_string = ContentAADString.FRAME_STRING_ID else: raise UnknownIdentityError("Unhandled content type") return aad_content_string
python
def get_aad_content_string(content_type, is_final_frame): """Prepares the appropriate Body AAD Value for a message body. :param content_type: Defines the type of content for which to prepare AAD String :type content_type: aws_encryption_sdk.identifiers.ContentType :param bool is_final_frame: Boolean stating whether this is the final frame in a body :returns: Appropriate AAD Content String :rtype: bytes :raises UnknownIdentityError: if unknown content type """ if content_type == ContentType.NO_FRAMING: aad_content_string = ContentAADString.NON_FRAMED_STRING_ID elif content_type == ContentType.FRAMED_DATA: if is_final_frame: aad_content_string = ContentAADString.FINAL_FRAME_STRING_ID else: aad_content_string = ContentAADString.FRAME_STRING_ID else: raise UnknownIdentityError("Unhandled content type") return aad_content_string
[ "def", "get_aad_content_string", "(", "content_type", ",", "is_final_frame", ")", ":", "if", "content_type", "==", "ContentType", ".", "NO_FRAMING", ":", "aad_content_string", "=", "ContentAADString", ".", "NON_FRAMED_STRING_ID", "elif", "content_type", "==", "ContentType", ".", "FRAMED_DATA", ":", "if", "is_final_frame", ":", "aad_content_string", "=", "ContentAADString", ".", "FINAL_FRAME_STRING_ID", "else", ":", "aad_content_string", "=", "ContentAADString", ".", "FRAME_STRING_ID", "else", ":", "raise", "UnknownIdentityError", "(", "\"Unhandled content type\"", ")", "return", "aad_content_string" ]
Prepares the appropriate Body AAD Value for a message body. :param content_type: Defines the type of content for which to prepare AAD String :type content_type: aws_encryption_sdk.identifiers.ContentType :param bool is_final_frame: Boolean stating whether this is the final frame in a body :returns: Appropriate AAD Content String :rtype: bytes :raises UnknownIdentityError: if unknown content type
[ "Prepares", "the", "appropriate", "Body", "AAD", "Value", "for", "a", "message", "body", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/utils/__init__.py#L76-L95
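A hedged usage sketch, assuming ContentType is importable from aws_encryption_sdk.identifiers as the docstring states and that get_aad_content_string is importable from aws_encryption_sdk.internal.utils (the path recorded above):

from aws_encryption_sdk.identifiers import ContentType
from aws_encryption_sdk.internal.utils import get_aad_content_string

# Framed bodies use one AAD string for ordinary frames and a different one for
# the final frame, so the two results should never be equal.
frame_aad = get_aad_content_string(ContentType.FRAMED_DATA, is_final_frame=False)
final_frame_aad = get_aad_content_string(ContentType.FRAMED_DATA, is_final_frame=True)
assert frame_aad != final_frame_aad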
15,519
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/utils/__init__.py
prepare_data_keys
def prepare_data_keys(primary_master_key, master_keys, algorithm, encryption_context): """Prepares a DataKey to be used for encrypting message and list of EncryptedDataKey objects to be serialized into header. :param primary_master_key: Master key with which to generate the encryption data key :type primary_master_key: aws_encryption_sdk.key_providers.base.MasterKey :param master_keys: All master keys with which to encrypt data keys :type master_keys: list of :class:`aws_encryption_sdk.key_providers.base.MasterKey` :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use when generating data key :rtype: tuple containing :class:`aws_encryption_sdk.structures.DataKey` and set of :class:`aws_encryption_sdk.structures.EncryptedDataKey` """ encrypted_data_keys = set() encrypted_data_encryption_key = None data_encryption_key = primary_master_key.generate_data_key(algorithm, encryption_context) _LOGGER.debug("encryption data generated with master key: %s", data_encryption_key.key_provider) for master_key in master_keys: # Don't re-encrypt the encryption data key; we already have the ciphertext if master_key is primary_master_key: encrypted_data_encryption_key = EncryptedDataKey( key_provider=data_encryption_key.key_provider, encrypted_data_key=data_encryption_key.encrypted_data_key ) encrypted_data_keys.add(encrypted_data_encryption_key) continue encrypted_key = master_key.encrypt_data_key( data_key=data_encryption_key, algorithm=algorithm, encryption_context=encryption_context ) encrypted_data_keys.add(encrypted_key) _LOGGER.debug("encryption key encrypted with master key: %s", master_key.key_provider) return data_encryption_key, encrypted_data_keys
python
def prepare_data_keys(primary_master_key, master_keys, algorithm, encryption_context): """Prepares a DataKey to be used for encrypting message and list of EncryptedDataKey objects to be serialized into header. :param primary_master_key: Master key with which to generate the encryption data key :type primary_master_key: aws_encryption_sdk.key_providers.base.MasterKey :param master_keys: All master keys with which to encrypt data keys :type master_keys: list of :class:`aws_encryption_sdk.key_providers.base.MasterKey` :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use when generating data key :rtype: tuple containing :class:`aws_encryption_sdk.structures.DataKey` and set of :class:`aws_encryption_sdk.structures.EncryptedDataKey` """ encrypted_data_keys = set() encrypted_data_encryption_key = None data_encryption_key = primary_master_key.generate_data_key(algorithm, encryption_context) _LOGGER.debug("encryption data generated with master key: %s", data_encryption_key.key_provider) for master_key in master_keys: # Don't re-encrypt the encryption data key; we already have the ciphertext if master_key is primary_master_key: encrypted_data_encryption_key = EncryptedDataKey( key_provider=data_encryption_key.key_provider, encrypted_data_key=data_encryption_key.encrypted_data_key ) encrypted_data_keys.add(encrypted_data_encryption_key) continue encrypted_key = master_key.encrypt_data_key( data_key=data_encryption_key, algorithm=algorithm, encryption_context=encryption_context ) encrypted_data_keys.add(encrypted_key) _LOGGER.debug("encryption key encrypted with master key: %s", master_key.key_provider) return data_encryption_key, encrypted_data_keys
[ "def", "prepare_data_keys", "(", "primary_master_key", ",", "master_keys", ",", "algorithm", ",", "encryption_context", ")", ":", "encrypted_data_keys", "=", "set", "(", ")", "encrypted_data_encryption_key", "=", "None", "data_encryption_key", "=", "primary_master_key", ".", "generate_data_key", "(", "algorithm", ",", "encryption_context", ")", "_LOGGER", ".", "debug", "(", "\"encryption data generated with master key: %s\"", ",", "data_encryption_key", ".", "key_provider", ")", "for", "master_key", "in", "master_keys", ":", "# Don't re-encrypt the encryption data key; we already have the ciphertext", "if", "master_key", "is", "primary_master_key", ":", "encrypted_data_encryption_key", "=", "EncryptedDataKey", "(", "key_provider", "=", "data_encryption_key", ".", "key_provider", ",", "encrypted_data_key", "=", "data_encryption_key", ".", "encrypted_data_key", ")", "encrypted_data_keys", ".", "add", "(", "encrypted_data_encryption_key", ")", "continue", "encrypted_key", "=", "master_key", ".", "encrypt_data_key", "(", "data_key", "=", "data_encryption_key", ",", "algorithm", "=", "algorithm", ",", "encryption_context", "=", "encryption_context", ")", "encrypted_data_keys", ".", "add", "(", "encrypted_key", ")", "_LOGGER", ".", "debug", "(", "\"encryption key encrypted with master key: %s\"", ",", "master_key", ".", "key_provider", ")", "return", "data_encryption_key", ",", "encrypted_data_keys" ]
Prepares a DataKey to be used for encrypting message and list of EncryptedDataKey objects to be serialized into header. :param primary_master_key: Master key with which to generate the encryption data key :type primary_master_key: aws_encryption_sdk.key_providers.base.MasterKey :param master_keys: All master keys with which to encrypt data keys :type master_keys: list of :class:`aws_encryption_sdk.key_providers.base.MasterKey` :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param dict encryption_context: Encryption context to use when generating data key :rtype: tuple containing :class:`aws_encryption_sdk.structures.DataKey` and set of :class:`aws_encryption_sdk.structures.EncryptedDataKey`
[ "Prepares", "a", "DataKey", "to", "be", "used", "for", "encrypting", "message", "and", "list", "of", "EncryptedDataKey", "objects", "to", "be", "serialized", "into", "header", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/utils/__init__.py#L98-L129
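A toy walkthrough of the control flow with stand-in objects (none of these classes are SDK types): the primary master key generates the data key exactly once, its existing ciphertext is reused, and every other master key only adds another wrapped copy.

class ToyMasterKey:
    # Minimal stand-in; a real MasterKey returns DataKey/EncryptedDataKey objects.
    def __init__(self, name):
        self.name = name

    def generate_data_key(self):
        return {"plaintext": b"\x00" * 32, "wrapped_by": [self.name]}

    def encrypt_data_key(self, data_key):
        data_key["wrapped_by"].append(self.name)

primary = ToyMasterKey("primary")
master_keys = [primary, ToyMasterKey("backup-1"), ToyMasterKey("backup-2")]

data_key = primary.generate_data_key()
for master_key in master_keys:
    if master_key is primary:
        continue  # the ciphertext from generation is reused, not re-encrypted
    master_key.encrypt_data_key(data_key)

assert data_key["wrapped_by"] == ["primary", "backup-1", "backup-2"]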
15,520
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/utils/__init__.py
prep_stream_data
def prep_stream_data(data): """Take an input and prepare it for use as a stream. :param data: Input data :returns: Prepared stream :rtype: InsistentReaderBytesIO """ if isinstance(data, (six.string_types, six.binary_type)): stream = io.BytesIO(to_bytes(data)) else: stream = data return InsistentReaderBytesIO(stream)
python
def prep_stream_data(data): """Take an input and prepare it for use as a stream. :param data: Input data :returns: Prepared stream :rtype: InsistentReaderBytesIO """ if isinstance(data, (six.string_types, six.binary_type)): stream = io.BytesIO(to_bytes(data)) else: stream = data return InsistentReaderBytesIO(stream)
[ "def", "prep_stream_data", "(", "data", ")", ":", "if", "isinstance", "(", "data", ",", "(", "six", ".", "string_types", ",", "six", ".", "binary_type", ")", ")", ":", "stream", "=", "io", ".", "BytesIO", "(", "to_bytes", "(", "data", ")", ")", "else", ":", "stream", "=", "data", "return", "InsistentReaderBytesIO", "(", "stream", ")" ]
Take an input and prepare it for use as a stream. :param data: Input data :returns: Prepared stream :rtype: InsistentReaderBytesIO
[ "Take", "an", "input", "and", "prepare", "it", "for", "use", "as", "a", "stream", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/utils/__init__.py#L132-L144
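A simplified sketch of the same branching using only the standard library; it drops the InsistentReaderBytesIO wrapper and the six/to_bytes helpers, so it illustrates the idea rather than reproducing the SDK helper:

import io

def prep(data):
    # Strings and bytes become an in-memory stream; file-like objects pass through.
    if isinstance(data, (str, bytes)):
        return io.BytesIO(data.encode("utf-8") if isinstance(data, str) else data)
    return data

assert prep(b"raw bytes").read() == b"raw bytes"
assert prep("text input").read() == b"text input"
existing = io.BytesIO(b"already a stream")
assert prep(existing) is existing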
15,521
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/utils/__init__.py
source_data_key_length_check
def source_data_key_length_check(source_data_key, algorithm): """Validates that the supplied source_data_key's data_key is the correct length for the supplied algorithm's kdf_input_len value. :param source_data_key: Source data key object received from MasterKey decrypt or generate data_key methods :type source_data_key: :class:`aws_encryption_sdk.structures.RawDataKey` or :class:`aws_encryption_sdk.structures.DataKey` :param algorithm: Algorithm object which directs how this data key will be used :type algorithm: aws_encryption_sdk.identifiers.Algorithm :raises InvalidDataKeyError: if data key length does not match required kdf input length """ if len(source_data_key.data_key) != algorithm.kdf_input_len: raise InvalidDataKeyError( "Invalid Source Data Key length {actual} for algorithm required: {required}".format( actual=len(source_data_key.data_key), required=algorithm.kdf_input_len ) )
python
def source_data_key_length_check(source_data_key, algorithm): """Validates that the supplied source_data_key's data_key is the correct length for the supplied algorithm's kdf_input_len value. :param source_data_key: Source data key object received from MasterKey decrypt or generate data_key methods :type source_data_key: :class:`aws_encryption_sdk.structures.RawDataKey` or :class:`aws_encryption_sdk.structures.DataKey` :param algorithm: Algorithm object which directs how this data key will be used :type algorithm: aws_encryption_sdk.identifiers.Algorithm :raises InvalidDataKeyError: if data key length does not match required kdf input length """ if len(source_data_key.data_key) != algorithm.kdf_input_len: raise InvalidDataKeyError( "Invalid Source Data Key length {actual} for algorithm required: {required}".format( actual=len(source_data_key.data_key), required=algorithm.kdf_input_len ) )
[ "def", "source_data_key_length_check", "(", "source_data_key", ",", "algorithm", ")", ":", "if", "len", "(", "source_data_key", ".", "data_key", ")", "!=", "algorithm", ".", "kdf_input_len", ":", "raise", "InvalidDataKeyError", "(", "\"Invalid Source Data Key length {actual} for algorithm required: {required}\"", ".", "format", "(", "actual", "=", "len", "(", "source_data_key", ".", "data_key", ")", ",", "required", "=", "algorithm", ".", "kdf_input_len", ")", ")" ]
Validates that the supplied source_data_key's data_key is the correct length for the supplied algorithm's kdf_input_len value. :param source_data_key: Source data key object received from MasterKey decrypt or generate data_key methods :type source_data_key: :class:`aws_encryption_sdk.structures.RawDataKey` or :class:`aws_encryption_sdk.structures.DataKey` :param algorithm: Algorithm object which directs how this data key will be used :type algorithm: aws_encryption_sdk.identifiers.Algorithm :raises InvalidDataKeyError: if data key length does not match required kdf input length
[ "Validates", "that", "the", "supplied", "source_data_key", "s", "data_key", "is", "the", "correct", "length", "for", "the", "supplied", "algorithm", "s", "kdf_input_len", "value", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/utils/__init__.py#L147-L163
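The check reduced to plain values; 32 bytes is used here as a stand-in for algorithm.kdf_input_len, not a constant read from the SDK:

KDF_INPUT_LEN = 32  # hypothetical required length

def check_source_key_length(data_key_bytes):
    if len(data_key_bytes) != KDF_INPUT_LEN:
        raise ValueError(
            "Invalid Source Data Key length {actual} for algorithm required: {required}".format(
                actual=len(data_key_bytes), required=KDF_INPUT_LEN
            )
        )

check_source_key_length(b"\x00" * 32)      # correct length: no error
try:
    check_source_key_length(b"\x00" * 16)  # too short: rejected
except ValueError as error:
    print(error)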
15,522
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/encryption.py
encrypt
def encrypt(algorithm, key, plaintext, associated_data, iv): """Encrypts a frame body. :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes key: Encryption key :param bytes plaintext: Body plaintext :param bytes associated_data: Body AAD Data :param bytes iv: IV to use when encrypting message :returns: Deserialized object containing encrypted body :rtype: aws_encryption_sdk.internal.structures.EncryptedData """ encryptor = Encryptor(algorithm, key, associated_data, iv) ciphertext = encryptor.update(plaintext) + encryptor.finalize() return EncryptedData(encryptor.iv, ciphertext, encryptor.tag)
python
def encrypt(algorithm, key, plaintext, associated_data, iv): """Encrypts a frame body. :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes key: Encryption key :param bytes plaintext: Body plaintext :param bytes associated_data: Body AAD Data :param bytes iv: IV to use when encrypting message :returns: Deserialized object containing encrypted body :rtype: aws_encryption_sdk.internal.structures.EncryptedData """ encryptor = Encryptor(algorithm, key, associated_data, iv) ciphertext = encryptor.update(plaintext) + encryptor.finalize() return EncryptedData(encryptor.iv, ciphertext, encryptor.tag)
[ "def", "encrypt", "(", "algorithm", ",", "key", ",", "plaintext", ",", "associated_data", ",", "iv", ")", ":", "encryptor", "=", "Encryptor", "(", "algorithm", ",", "key", ",", "associated_data", ",", "iv", ")", "ciphertext", "=", "encryptor", ".", "update", "(", "plaintext", ")", "+", "encryptor", ".", "finalize", "(", ")", "return", "EncryptedData", "(", "encryptor", ".", "iv", ",", "ciphertext", ",", "encryptor", ".", "tag", ")" ]
Encrypts a frame body. :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes key: Encryption key :param bytes plaintext: Body plaintext :param bytes associated_data: Body AAD Data :param bytes iv: IV to use when encrypting message :returns: Deserialized object containing encrypted body :rtype: aws_encryption_sdk.internal.structures.EncryptedData
[ "Encrypts", "a", "frame", "body", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/encryption.py#L76-L90
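Encryptor and EncryptedData are SDK-internal, but the underlying operation is ordinary AES-GCM; a rough equivalent using pyca/cryptography's AESGCM (which returns ciphertext and tag concatenated rather than as separate fields) looks like this:

import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
iv = os.urandom(12)                # 96-bit IV, standard for GCM
associated_data = b"body AAD"

ciphertext_and_tag = AESGCM(key).encrypt(iv, b"frame plaintext", associated_data)
# Split off the 16-byte tag to mirror the SDK's iv/ciphertext/tag triple.
ciphertext, tag = ciphertext_and_tag[:-16], ciphertext_and_tag[-16:]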
15,523
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/encryption.py
decrypt
def decrypt(algorithm, key, encrypted_data, associated_data): """Decrypts a frame body. :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes key: Plaintext data key :param encrypted_data: EncryptedData containing body data :type encrypted_data: :class:`aws_encryption_sdk.internal.structures.EncryptedData`, :class:`aws_encryption_sdk.internal.structures.FrameBody`, or :class:`aws_encryption_sdk.internal.structures.MessageNoFrameBody` :param bytes associated_data: AAD string generated for body :type associated_data: bytes :returns: Plaintext of body :rtype: bytes """ decryptor = Decryptor(algorithm, key, associated_data, encrypted_data.iv, encrypted_data.tag) return decryptor.update(encrypted_data.ciphertext) + decryptor.finalize()
python
def decrypt(algorithm, key, encrypted_data, associated_data): """Decrypts a frame body. :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes key: Plaintext data key :param encrypted_data: EncryptedData containing body data :type encrypted_data: :class:`aws_encryption_sdk.internal.structures.EncryptedData`, :class:`aws_encryption_sdk.internal.structures.FrameBody`, or :class:`aws_encryption_sdk.internal.structures.MessageNoFrameBody` :param bytes associated_data: AAD string generated for body :type associated_data: bytes :returns: Plaintext of body :rtype: bytes """ decryptor = Decryptor(algorithm, key, associated_data, encrypted_data.iv, encrypted_data.tag) return decryptor.update(encrypted_data.ciphertext) + decryptor.finalize()
[ "def", "decrypt", "(", "algorithm", ",", "key", ",", "encrypted_data", ",", "associated_data", ")", ":", "decryptor", "=", "Decryptor", "(", "algorithm", ",", "key", ",", "associated_data", ",", "encrypted_data", ".", "iv", ",", "encrypted_data", ".", "tag", ")", "return", "decryptor", ".", "update", "(", "encrypted_data", ".", "ciphertext", ")", "+", "decryptor", ".", "finalize", "(", ")" ]
Decrypts a frame body. :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes key: Plaintext data key :param encrypted_data: EncryptedData containing body data :type encrypted_data: :class:`aws_encryption_sdk.internal.structures.EncryptedData`, :class:`aws_encryption_sdk.internal.structures.FrameBody`, or :class:`aws_encryption_sdk.internal.structures.MessageNoFrameBody` :param bytes associated_data: AAD string generated for body :type associated_data: bytes :returns: Plaintext of body :rtype: bytes
[ "Decrypts", "a", "frame", "body", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/encryption.py#L135-L151
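The matching decrypt path, again sketched with AESGCM from pyca/cryptography rather than the SDK's internal Decryptor; decryption needs the same key, IV, and associated data, and a tampered ciphertext or AAD raises cryptography's InvalidTag:

import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
iv = os.urandom(12)
associated_data = b"body AAD"

ciphertext_and_tag = AESGCM(key).encrypt(iv, b"frame plaintext", associated_data)
assert AESGCM(key).decrypt(iv, ciphertext_and_tag, associated_data) == b"frame plaintext"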
15,524
aws/aws-encryption-sdk-python
decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/app.py
_master_key_provider
def _master_key_provider() -> KMSMasterKeyProvider: """Build the V0 master key provider.""" master_key_provider = KMSMasterKeyProvider() master_key_provider.add_master_key_provider(NullMasterKey()) master_key_provider.add_master_key_provider(CountingMasterKey()) return master_key_provider
python
def _master_key_provider() -> KMSMasterKeyProvider: """Build the V0 master key provider.""" master_key_provider = KMSMasterKeyProvider() master_key_provider.add_master_key_provider(NullMasterKey()) master_key_provider.add_master_key_provider(CountingMasterKey()) return master_key_provider
[ "def", "_master_key_provider", "(", ")", "->", "KMSMasterKeyProvider", ":", "master_key_provider", "=", "KMSMasterKeyProvider", "(", ")", "master_key_provider", ".", "add_master_key_provider", "(", "NullMasterKey", "(", ")", ")", "master_key_provider", ".", "add_master_key_provider", "(", "CountingMasterKey", "(", ")", ")", "return", "master_key_provider" ]
Build the V0 master key provider.
[ "Build", "the", "V0", "master", "key", "provider", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/app.py#L30-L35
15,525
aws/aws-encryption-sdk-python
decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/app.py
basic_decrypt
def basic_decrypt() -> Response: """Basic decrypt handler for decrypt oracle v0. **Request** * **Method**: POST * **Body**: Raw ciphertext bytes * **Headers**: * **Content-Type**: ``application/octet-stream`` * **Accept**: ``application/octet-stream`` **Response** * 200 response code with the raw plaintext bytes as the body * 400 response code with whatever error code was encountered as the body """ APP.log.debug("Request:") APP.log.debug(json.dumps(APP.current_request.to_dict())) APP.log.debug("Ciphertext:") APP.log.debug(APP.current_request.raw_body) try: ciphertext = APP.current_request.raw_body plaintext, _header = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=_master_key_provider()) APP.log.debug("Plaintext:") APP.log.debug(plaintext) response = Response(body=plaintext, headers={"Content-Type": "application/octet-stream"}, status_code=200) except Exception as error: # pylint: disable=broad-except response = Response(body=str(error), status_code=400) APP.log.debug("Response:") APP.log.debug(json.dumps(response.to_dict(binary_types=["application/octet-stream"]))) return response
python
def basic_decrypt() -> Response: """Basic decrypt handler for decrypt oracle v0. **Request** * **Method**: POST * **Body**: Raw ciphertext bytes * **Headers**: * **Content-Type**: ``application/octet-stream`` * **Accept**: ``application/octet-stream`` **Response** * 200 response code with the raw plaintext bytes as the body * 400 response code with whatever error code was encountered as the body """ APP.log.debug("Request:") APP.log.debug(json.dumps(APP.current_request.to_dict())) APP.log.debug("Ciphertext:") APP.log.debug(APP.current_request.raw_body) try: ciphertext = APP.current_request.raw_body plaintext, _header = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=_master_key_provider()) APP.log.debug("Plaintext:") APP.log.debug(plaintext) response = Response(body=plaintext, headers={"Content-Type": "application/octet-stream"}, status_code=200) except Exception as error: # pylint: disable=broad-except response = Response(body=str(error), status_code=400) APP.log.debug("Response:") APP.log.debug(json.dumps(response.to_dict(binary_types=["application/octet-stream"]))) return response
[ "def", "basic_decrypt", "(", ")", "->", "Response", ":", "APP", ".", "log", ".", "debug", "(", "\"Request:\"", ")", "APP", ".", "log", ".", "debug", "(", "json", ".", "dumps", "(", "APP", ".", "current_request", ".", "to_dict", "(", ")", ")", ")", "APP", ".", "log", ".", "debug", "(", "\"Ciphertext:\"", ")", "APP", ".", "log", ".", "debug", "(", "APP", ".", "current_request", ".", "raw_body", ")", "try", ":", "ciphertext", "=", "APP", ".", "current_request", ".", "raw_body", "plaintext", ",", "_header", "=", "aws_encryption_sdk", ".", "decrypt", "(", "source", "=", "ciphertext", ",", "key_provider", "=", "_master_key_provider", "(", ")", ")", "APP", ".", "log", ".", "debug", "(", "\"Plaintext:\"", ")", "APP", ".", "log", ".", "debug", "(", "plaintext", ")", "response", "=", "Response", "(", "body", "=", "plaintext", ",", "headers", "=", "{", "\"Content-Type\"", ":", "\"application/octet-stream\"", "}", ",", "status_code", "=", "200", ")", "except", "Exception", "as", "error", ":", "# pylint: disable=broad-except", "response", "=", "Response", "(", "body", "=", "str", "(", "error", ")", ",", "status_code", "=", "400", ")", "APP", ".", "log", ".", "debug", "(", "\"Response:\"", ")", "APP", ".", "log", ".", "debug", "(", "json", ".", "dumps", "(", "response", ".", "to_dict", "(", "binary_types", "=", "[", "\"application/octet-stream\"", "]", ")", ")", ")", "return", "response" ]
Basic decrypt handler for decrypt oracle v0. **Request** * **Method**: POST * **Body**: Raw ciphertext bytes * **Headers**: * **Content-Type**: ``application/octet-stream`` * **Accept**: ``application/octet-stream`` **Response** * 200 response code with the raw plaintext bytes as the body * 400 response code with whatever error code was encountered as the body
[ "Basic", "decrypt", "handler", "for", "decrypt", "oracle", "v0", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/app.py#L39-L72
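A hypothetical client for this handler; the endpoint URL is a placeholder (the oracle would be deployed behind API Gateway by Chalice), the ciphertext is assumed to be a message the oracle's master keys can decrypt, and requests is used only as a convenient HTTP client:

import requests

ORACLE_URL = "https://example.invalid/basic_decrypt"  # placeholder endpoint, not a real deployment
ciphertext = b"..."  # an AWS Encryption SDK message obtained elsewhere

response = requests.post(
    ORACLE_URL,
    data=ciphertext,
    headers={"Content-Type": "application/octet-stream", "Accept": "application/octet-stream"},
)
if response.status_code == 200:
    plaintext = response.content                 # raw plaintext bytes on success
else:
    print("decrypt failed:", response.text)      # error text on a 400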
15,526
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/utils/streams.py
TeeStream.read
def read(self, b=None): """Reads data from source, copying it into ``tee`` before returning. :param int b: number of bytes to read """ data = self.__wrapped__.read(b) self.__tee.write(data) return data
python
def read(self, b=None): """Reads data from source, copying it into ``tee`` before returning. :param int b: number of bytes to read """ data = self.__wrapped__.read(b) self.__tee.write(data) return data
[ "def", "read", "(", "self", ",", "b", "=", "None", ")", ":", "data", "=", "self", ".", "__wrapped__", ".", "read", "(", "b", ")", "self", ".", "__tee", ".", "write", "(", "data", ")", "return", "data" ]
Reads data from source, copying it into ``tee`` before returning. :param int b: number of bytes to read
[ "Reads", "data", "from", "source", "copying", "it", "into", "tee", "before", "returning", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/utils/streams.py#L54-L61
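A standalone tee with the same read-and-copy behavior, written without the SDK's ObjectProxy base so it can run on its own:

import io

class SimpleTee:
    # Every read from the source stream is also written into the tee buffer.
    def __init__(self, source, tee):
        self._source = source
        self._tee = tee

    def read(self, b=None):
        data = self._source.read() if b is None else self._source.read(b)
        self._tee.write(data)
        return data

tee_buffer = io.BytesIO()
reader = SimpleTee(io.BytesIO(b"header|body"), tee_buffer)
assert reader.read(7) == b"header|"
assert tee_buffer.getvalue() == b"header|"   # the tee saw exactly what was read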
15,527
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/utils/streams.py
InsistentReaderBytesIO.read
def read(self, b=-1): """Keep reading from source stream until either the source stream is done or the requested number of bytes have been obtained. :param int b: number of bytes to read :return: All bytes read from wrapped stream :rtype: bytes """ remaining_bytes = b data = io.BytesIO() while True: try: chunk = to_bytes(self.__wrapped__.read(remaining_bytes)) except ValueError: if self.__wrapped__.closed: break raise if not chunk: break data.write(chunk) remaining_bytes -= len(chunk) if remaining_bytes <= 0: break return data.getvalue()
python
def read(self, b=-1): """Keep reading from source stream until either the source stream is done or the requested number of bytes have been obtained. :param int b: number of bytes to read :return: All bytes read from wrapped stream :rtype: bytes """ remaining_bytes = b data = io.BytesIO() while True: try: chunk = to_bytes(self.__wrapped__.read(remaining_bytes)) except ValueError: if self.__wrapped__.closed: break raise if not chunk: break data.write(chunk) remaining_bytes -= len(chunk) if remaining_bytes <= 0: break return data.getvalue()
[ "def", "read", "(", "self", ",", "b", "=", "-", "1", ")", ":", "remaining_bytes", "=", "b", "data", "=", "io", ".", "BytesIO", "(", ")", "while", "True", ":", "try", ":", "chunk", "=", "to_bytes", "(", "self", ".", "__wrapped__", ".", "read", "(", "remaining_bytes", ")", ")", "except", "ValueError", ":", "if", "self", ".", "__wrapped__", ".", "closed", ":", "break", "raise", "if", "not", "chunk", ":", "break", "data", ".", "write", "(", "chunk", ")", "remaining_bytes", "-=", "len", "(", "chunk", ")", "if", "remaining_bytes", "<=", "0", ":", "break", "return", "data", ".", "getvalue", "(", ")" ]
Keep reading from source stream until either the source stream is done or the requested number of bytes have been obtained. :param int b: number of bytes to read :return: All bytes read from wrapped stream :rtype: bytes
[ "Keep", "reading", "from", "source", "stream", "until", "either", "the", "source", "stream", "is", "done", "or", "the", "requested", "number", "of", "bytes", "have", "been", "obtained", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/utils/streams.py#L73-L99
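A small demonstration of why the loop matters: the source below returns at most three bytes per call (as a non-blocking or network-backed stream might), so a single read() would come up short and the caller has to keep asking, as the method above does:

import io

class ChunkyStream:
    # Returns at most three bytes per read call, regardless of what was requested.
    def __init__(self, data):
        self._buffer = io.BytesIO(data)

    def read(self, size=-1):
        limit = 3 if size is None or size < 0 else min(size, 3)
        return self._buffer.read(limit)

source = ChunkyStream(b"0123456789")
collected = io.BytesIO()
remaining = 8
while remaining > 0:
    chunk = source.read(remaining)
    if not chunk:
        break
    collected.write(chunk)
    remaining -= len(chunk)
assert collected.getvalue() == b"01234567"   # all eight requested bytes arrive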
15,528
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/elliptic_curve.py
_ecc_static_length_signature
def _ecc_static_length_signature(key, algorithm, digest): """Calculates an elliptic curve signature with a static length using pre-calculated hash. :param key: Elliptic curve private key :type key: cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey :param algorithm: Master algorithm to use :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes digest: Pre-calculated hash digest :returns: Signature with required length :rtype: bytes """ pre_hashed_algorithm = ec.ECDSA(Prehashed(algorithm.signing_hash_type())) signature = b"" while len(signature) != algorithm.signature_len: _LOGGER.debug( "Signature length %d is not desired length %d. Recalculating.", len(signature), algorithm.signature_len ) signature = key.sign(digest, pre_hashed_algorithm) if len(signature) != algorithm.signature_len: # Most of the time, a signature of the wrong length can be fixed # by negating s in the signature relative to the group order. _LOGGER.debug( "Signature length %d is not desired length %d. Negating s.", len(signature), algorithm.signature_len ) r, s = decode_dss_signature(signature) s = _ECC_CURVE_PARAMETERS[algorithm.signing_algorithm_info.name].order - s signature = encode_dss_signature(r, s) return signature
python
def _ecc_static_length_signature(key, algorithm, digest): """Calculates an elliptic curve signature with a static length using pre-calculated hash. :param key: Elliptic curve private key :type key: cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey :param algorithm: Master algorithm to use :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes digest: Pre-calculated hash digest :returns: Signature with required length :rtype: bytes """ pre_hashed_algorithm = ec.ECDSA(Prehashed(algorithm.signing_hash_type())) signature = b"" while len(signature) != algorithm.signature_len: _LOGGER.debug( "Signature length %d is not desired length %d. Recalculating.", len(signature), algorithm.signature_len ) signature = key.sign(digest, pre_hashed_algorithm) if len(signature) != algorithm.signature_len: # Most of the time, a signature of the wrong length can be fixed # by negating s in the signature relative to the group order. _LOGGER.debug( "Signature length %d is not desired length %d. Negating s.", len(signature), algorithm.signature_len ) r, s = decode_dss_signature(signature) s = _ECC_CURVE_PARAMETERS[algorithm.signing_algorithm_info.name].order - s signature = encode_dss_signature(r, s) return signature
[ "def", "_ecc_static_length_signature", "(", "key", ",", "algorithm", ",", "digest", ")", ":", "pre_hashed_algorithm", "=", "ec", ".", "ECDSA", "(", "Prehashed", "(", "algorithm", ".", "signing_hash_type", "(", ")", ")", ")", "signature", "=", "b\"\"", "while", "len", "(", "signature", ")", "!=", "algorithm", ".", "signature_len", ":", "_LOGGER", ".", "debug", "(", "\"Signature length %d is not desired length %d. Recalculating.\"", ",", "len", "(", "signature", ")", ",", "algorithm", ".", "signature_len", ")", "signature", "=", "key", ".", "sign", "(", "digest", ",", "pre_hashed_algorithm", ")", "if", "len", "(", "signature", ")", "!=", "algorithm", ".", "signature_len", ":", "# Most of the time, a signature of the wrong length can be fixed", "# by negating s in the signature relative to the group order.", "_LOGGER", ".", "debug", "(", "\"Signature length %d is not desired length %d. Negating s.\"", ",", "len", "(", "signature", ")", ",", "algorithm", ".", "signature_len", ")", "r", ",", "s", "=", "decode_dss_signature", "(", "signature", ")", "s", "=", "_ECC_CURVE_PARAMETERS", "[", "algorithm", ".", "signing_algorithm_info", ".", "name", "]", ".", "order", "-", "s", "signature", "=", "encode_dss_signature", "(", "r", ",", "s", ")", "return", "signature" ]
Calculates an elliptic curve signature with a static length using pre-calculated hash. :param key: Elliptic curve private key :type key: cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey :param algorithm: Master algorithm to use :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes digest: Pre-calculated hash digest :returns: Signature with required length :rtype: bytes
[ "Calculates", "an", "elliptic", "curve", "signature", "with", "a", "static", "length", "using", "pre", "-", "calculated", "hash", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/elliptic_curve.py#L55-L82
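The length drift this loop guards against can be observed directly with pyca/cryptography: DER-encoded ECDSA signatures shrink or grow by a byte or two depending on leading zeros in r and s. The sketch below only signs repeatedly and records the lengths seen; it does not reproduce the s-negation trick itself.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils import (
    decode_dss_signature,
    encode_dss_signature,
)

key = ec.generate_private_key(ec.SECP384R1(), default_backend())
lengths = set()
for _ in range(20):
    signature = key.sign(b"message", ec.ECDSA(hashes.SHA384()))
    r, s = decode_dss_signature(signature)
    assert encode_dss_signature(r, s) == signature   # decode/encode round-trips
    lengths.add(len(signature))
print("observed DER signature lengths:", sorted(lengths))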
15,529
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/elliptic_curve.py
generate_ecc_signing_key
def generate_ecc_signing_key(algorithm): """Returns an ECC signing key. :param algorithm: Algorithm object which determines what signature to generate :type algorithm: aws_encryption_sdk.identifiers.Algorithm :returns: Generated signing key :raises NotSupportedError: if signing algorithm is not supported on this platform """ try: verify_interface(ec.EllipticCurve, algorithm.signing_algorithm_info) return ec.generate_private_key(curve=algorithm.signing_algorithm_info(), backend=default_backend()) except InterfaceNotImplemented: raise NotSupportedError("Unsupported signing algorithm info")
python
def generate_ecc_signing_key(algorithm): """Returns an ECC signing key. :param algorithm: Algorithm object which determines what signature to generate :type algorithm: aws_encryption_sdk.identifiers.Algorithm :returns: Generated signing key :raises NotSupportedError: if signing algorithm is not supported on this platform """ try: verify_interface(ec.EllipticCurve, algorithm.signing_algorithm_info) return ec.generate_private_key(curve=algorithm.signing_algorithm_info(), backend=default_backend()) except InterfaceNotImplemented: raise NotSupportedError("Unsupported signing algorithm info")
[ "def", "generate_ecc_signing_key", "(", "algorithm", ")", ":", "try", ":", "verify_interface", "(", "ec", ".", "EllipticCurve", ",", "algorithm", ".", "signing_algorithm_info", ")", "return", "ec", ".", "generate_private_key", "(", "curve", "=", "algorithm", ".", "signing_algorithm_info", "(", ")", ",", "backend", "=", "default_backend", "(", ")", ")", "except", "InterfaceNotImplemented", ":", "raise", "NotSupportedError", "(", "\"Unsupported signing algorithm info\"", ")" ]
Returns an ECC signing key. :param algorithm: Algorithm object which determines what signature to generate :type algorithm: aws_encryption_sdk.identifiers.Algorithm :returns: Generated signing key :raises NotSupportedError: if signing algorithm is not supported on this platform
[ "Returns", "an", "ECC", "signing", "key", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/elliptic_curve.py#L177-L189
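A rough equivalent without the SDK's Algorithm object, using a concrete curve class as a stand-in for algorithm.signing_algorithm_info; the interface check here is a plain issubclass test rather than cryptography's verify_interface helper:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec

signing_algorithm_info = ec.SECP256R1   # stand-in for algorithm.signing_algorithm_info

if not (isinstance(signing_algorithm_info, type) and issubclass(signing_algorithm_info, ec.EllipticCurve)):
    raise ValueError("Unsupported signing algorithm info")

signing_key = ec.generate_private_key(curve=signing_algorithm_info(), backend=default_backend())
print("generated signing key on curve:", signing_key.curve.name)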
15,530
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/internal/crypto/data_keys.py
derive_data_encryption_key
def derive_data_encryption_key(source_key, algorithm, message_id): """Derives the data encryption key using the defined algorithm. :param bytes source_key: Raw source key :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes message_id: Message ID :returns: Derived data encryption key :rtype: bytes """ key = source_key if algorithm.kdf_type is not None: key = algorithm.kdf_type( algorithm=algorithm.kdf_hash_type(), length=algorithm.data_key_len, salt=None, info=struct.pack(">H16s", algorithm.algorithm_id, message_id), backend=default_backend(), ).derive(source_key) return key
python
def derive_data_encryption_key(source_key, algorithm, message_id): """Derives the data encryption key using the defined algorithm. :param bytes source_key: Raw source key :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes message_id: Message ID :returns: Derived data encryption key :rtype: bytes """ key = source_key if algorithm.kdf_type is not None: key = algorithm.kdf_type( algorithm=algorithm.kdf_hash_type(), length=algorithm.data_key_len, salt=None, info=struct.pack(">H16s", algorithm.algorithm_id, message_id), backend=default_backend(), ).derive(source_key) return key
[ "def", "derive_data_encryption_key", "(", "source_key", ",", "algorithm", ",", "message_id", ")", ":", "key", "=", "source_key", "if", "algorithm", ".", "kdf_type", "is", "not", "None", ":", "key", "=", "algorithm", ".", "kdf_type", "(", "algorithm", "=", "algorithm", ".", "kdf_hash_type", "(", ")", ",", "length", "=", "algorithm", ".", "data_key_len", ",", "salt", "=", "None", ",", "info", "=", "struct", ".", "pack", "(", "\">H16s\"", ",", "algorithm", ".", "algorithm_id", ",", "message_id", ")", ",", "backend", "=", "default_backend", "(", ")", ",", ")", ".", "derive", "(", "source_key", ")", "return", "key" ]
Derives the data encryption key using the defined algorithm. :param bytes source_key: Raw source key :param algorithm: Algorithm used to encrypt this body :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param bytes message_id: Message ID :returns: Derived data encryption key :rtype: bytes
[ "Derives", "the", "data", "encryption", "key", "using", "the", "defined", "algorithm", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/data_keys.py#L22-L41
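The same derivation shape with pyca/cryptography's HKDF: the info field binds the derived key to the two-byte algorithm ID plus the 16-byte message ID. The algorithm ID below is a made-up placeholder, and SHA-256 with a 32-byte output is an assumed configuration rather than one read from a real suite.

import os
import struct

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

algorithm_id = 0x0001        # hypothetical suite ID, for illustration only
message_id = os.urandom(16)
source_key = os.urandom(32)

derived_key = HKDF(
    algorithm=hashes.SHA256(),
    length=32,
    salt=None,
    info=struct.pack(">H16s", algorithm_id, message_id),
    backend=default_backend(),
).derive(source_key)
assert len(derived_key) == 32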
15,531
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/__init__.py
encrypt
def encrypt(**kwargs): """Encrypts and serializes provided plaintext. .. note:: When using this function, the entire ciphertext message is encrypted into memory before returning any data. If streaming is desired, see :class:`aws_encryption_sdk.stream`. .. code:: python >>> import aws_encryption_sdk >>> kms_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(key_ids=[ ... 'arn:aws:kms:us-east-1:2222222222222:key/22222222-2222-2222-2222-222222222222', ... 'arn:aws:kms:us-east-1:3333333333333:key/33333333-3333-3333-3333-333333333333' ... ]) >>> my_ciphertext, encryptor_header = aws_encryption_sdk.encrypt( ... source=my_plaintext, ... key_provider=kms_key_provider ... ) :param config: Client configuration object (config or individual parameters required) :type config: aws_encryption_sdk.streaming_client.EncryptorConfig :param source: Source data to encrypt or decrypt :type source: str, bytes, io.IOBase, or file :param materials_manager: `CryptoMaterialsManager` from which to obtain cryptographic materials (either `materials_manager` or `key_provider` required) :type materials_manager: aws_encryption_sdk.materials_managers.base.CryptoMaterialsManager :param key_provider: `MasterKeyProvider` from which to obtain data keys for encryption (either `materials_manager` or `key_provider` required) :type key_provider: aws_encryption_sdk.key_providers.base.MasterKeyProvider :param int source_length: Length of source data (optional) .. note:: If source_length is not provided and unframed message is being written or read() is called, will attempt to seek() to the end of the stream and tell() to find the length of source data. .. note:: .. versionadded:: 1.3.0 If `source_length` and `materials_manager` are both provided, the total plaintext bytes encrypted will not be allowed to exceed `source_length`. To maintain backwards compatibility, this is not enforced if a `key_provider` is provided. :param dict encryption_context: Dictionary defining encryption context :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param int frame_length: Frame length in bytes :returns: Tuple containing the encrypted ciphertext and the message header object :rtype: tuple of bytes and :class:`aws_encryption_sdk.structures.MessageHeader` """ with StreamEncryptor(**kwargs) as encryptor: ciphertext = encryptor.read() return ciphertext, encryptor.header
python
def encrypt(**kwargs): """Encrypts and serializes provided plaintext. .. note:: When using this function, the entire ciphertext message is encrypted into memory before returning any data. If streaming is desired, see :class:`aws_encryption_sdk.stream`. .. code:: python >>> import aws_encryption_sdk >>> kms_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(key_ids=[ ... 'arn:aws:kms:us-east-1:2222222222222:key/22222222-2222-2222-2222-222222222222', ... 'arn:aws:kms:us-east-1:3333333333333:key/33333333-3333-3333-3333-333333333333' ... ]) >>> my_ciphertext, encryptor_header = aws_encryption_sdk.encrypt( ... source=my_plaintext, ... key_provider=kms_key_provider ... ) :param config: Client configuration object (config or individual parameters required) :type config: aws_encryption_sdk.streaming_client.EncryptorConfig :param source: Source data to encrypt or decrypt :type source: str, bytes, io.IOBase, or file :param materials_manager: `CryptoMaterialsManager` from which to obtain cryptographic materials (either `materials_manager` or `key_provider` required) :type materials_manager: aws_encryption_sdk.materials_managers.base.CryptoMaterialsManager :param key_provider: `MasterKeyProvider` from which to obtain data keys for encryption (either `materials_manager` or `key_provider` required) :type key_provider: aws_encryption_sdk.key_providers.base.MasterKeyProvider :param int source_length: Length of source data (optional) .. note:: If source_length is not provided and unframed message is being written or read() is called, will attempt to seek() to the end of the stream and tell() to find the length of source data. .. note:: .. versionadded:: 1.3.0 If `source_length` and `materials_manager` are both provided, the total plaintext bytes encrypted will not be allowed to exceed `source_length`. To maintain backwards compatibility, this is not enforced if a `key_provider` is provided. :param dict encryption_context: Dictionary defining encryption context :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param int frame_length: Frame length in bytes :returns: Tuple containing the encrypted ciphertext and the message header object :rtype: tuple of bytes and :class:`aws_encryption_sdk.structures.MessageHeader` """ with StreamEncryptor(**kwargs) as encryptor: ciphertext = encryptor.read() return ciphertext, encryptor.header
[ "def", "encrypt", "(", "*", "*", "kwargs", ")", ":", "with", "StreamEncryptor", "(", "*", "*", "kwargs", ")", "as", "encryptor", ":", "ciphertext", "=", "encryptor", ".", "read", "(", ")", "return", "ciphertext", ",", "encryptor", ".", "header" ]
Encrypts and serializes provided plaintext. .. note:: When using this function, the entire ciphertext message is encrypted into memory before returning any data. If streaming is desired, see :class:`aws_encryption_sdk.stream`. .. code:: python >>> import aws_encryption_sdk >>> kms_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(key_ids=[ ... 'arn:aws:kms:us-east-1:2222222222222:key/22222222-2222-2222-2222-222222222222', ... 'arn:aws:kms:us-east-1:3333333333333:key/33333333-3333-3333-3333-333333333333' ... ]) >>> my_ciphertext, encryptor_header = aws_encryption_sdk.encrypt( ... source=my_plaintext, ... key_provider=kms_key_provider ... ) :param config: Client configuration object (config or individual parameters required) :type config: aws_encryption_sdk.streaming_client.EncryptorConfig :param source: Source data to encrypt or decrypt :type source: str, bytes, io.IOBase, or file :param materials_manager: `CryptoMaterialsManager` from which to obtain cryptographic materials (either `materials_manager` or `key_provider` required) :type materials_manager: aws_encryption_sdk.materials_managers.base.CryptoMaterialsManager :param key_provider: `MasterKeyProvider` from which to obtain data keys for encryption (either `materials_manager` or `key_provider` required) :type key_provider: aws_encryption_sdk.key_providers.base.MasterKeyProvider :param int source_length: Length of source data (optional) .. note:: If source_length is not provided and unframed message is being written or read() is called, will attempt to seek() to the end of the stream and tell() to find the length of source data. .. note:: .. versionadded:: 1.3.0 If `source_length` and `materials_manager` are both provided, the total plaintext bytes encrypted will not be allowed to exceed `source_length`. To maintain backwards compatibility, this is not enforced if a `key_provider` is provided. :param dict encryption_context: Dictionary defining encryption context :param algorithm: Algorithm to use for encryption :type algorithm: aws_encryption_sdk.identifiers.Algorithm :param int frame_length: Frame length in bytes :returns: Tuple containing the encrypted ciphertext and the message header object :rtype: tuple of bytes and :class:`aws_encryption_sdk.structures.MessageHeader`
[ "Encrypts", "and", "serializes", "provided", "plaintext", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/__init__.py#L29-L80
15,532
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/__init__.py
decrypt
def decrypt(**kwargs): """Deserializes and decrypts provided ciphertext. .. note:: When using this function, the entire ciphertext message is decrypted into memory before returning any data. If streaming is desired, see :class:`aws_encryption_sdk.stream`. .. code:: python >>> import aws_encryption_sdk >>> kms_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(key_ids=[ ... 'arn:aws:kms:us-east-1:2222222222222:key/22222222-2222-2222-2222-222222222222', ... 'arn:aws:kms:us-east-1:3333333333333:key/33333333-3333-3333-3333-333333333333' ... ]) >>> my_ciphertext, encryptor_header = aws_encryption_sdk.decrypt( ... source=my_ciphertext, ... key_provider=kms_key_provider ... ) :param config: Client configuration object (config or individual parameters required) :type config: aws_encryption_sdk.streaming_client.DecryptorConfig :param source: Source data to encrypt or decrypt :type source: str, bytes, io.IOBase, or file :param materials_manager: `CryptoMaterialsManager` from which to obtain cryptographic materials (either `materials_manager` or `key_provider` required) :type materials_manager: aws_encryption_sdk.materials_managers.base.CryptoMaterialsManager :param key_provider: `MasterKeyProvider` from which to obtain data keys for decryption (either `materials_manager` or `key_provider` required) :type key_provider: aws_encryption_sdk.key_providers.base.MasterKeyProvider :param int source_length: Length of source data (optional) .. note:: If source_length is not provided and read() is called, will attempt to seek() to the end of the stream and tell() to find the length of source data. :param int max_body_length: Maximum frame size (or content length for non-framed messages) in bytes to read from ciphertext message. :returns: Tuple containing the decrypted plaintext and the message header object :rtype: tuple of bytes and :class:`aws_encryption_sdk.structures.MessageHeader` """ with StreamDecryptor(**kwargs) as decryptor: plaintext = decryptor.read() return plaintext, decryptor.header
python
def decrypt(**kwargs): """Deserializes and decrypts provided ciphertext. .. note:: When using this function, the entire ciphertext message is decrypted into memory before returning any data. If streaming is desired, see :class:`aws_encryption_sdk.stream`. .. code:: python >>> import aws_encryption_sdk >>> kms_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(key_ids=[ ... 'arn:aws:kms:us-east-1:2222222222222:key/22222222-2222-2222-2222-222222222222', ... 'arn:aws:kms:us-east-1:3333333333333:key/33333333-3333-3333-3333-333333333333' ... ]) >>> my_ciphertext, encryptor_header = aws_encryption_sdk.decrypt( ... source=my_ciphertext, ... key_provider=kms_key_provider ... ) :param config: Client configuration object (config or individual parameters required) :type config: aws_encryption_sdk.streaming_client.DecryptorConfig :param source: Source data to encrypt or decrypt :type source: str, bytes, io.IOBase, or file :param materials_manager: `CryptoMaterialsManager` from which to obtain cryptographic materials (either `materials_manager` or `key_provider` required) :type materials_manager: aws_encryption_sdk.materials_managers.base.CryptoMaterialsManager :param key_provider: `MasterKeyProvider` from which to obtain data keys for decryption (either `materials_manager` or `key_provider` required) :type key_provider: aws_encryption_sdk.key_providers.base.MasterKeyProvider :param int source_length: Length of source data (optional) .. note:: If source_length is not provided and read() is called, will attempt to seek() to the end of the stream and tell() to find the length of source data. :param int max_body_length: Maximum frame size (or content length for non-framed messages) in bytes to read from ciphertext message. :returns: Tuple containing the decrypted plaintext and the message header object :rtype: tuple of bytes and :class:`aws_encryption_sdk.structures.MessageHeader` """ with StreamDecryptor(**kwargs) as decryptor: plaintext = decryptor.read() return plaintext, decryptor.header
[ "def", "decrypt", "(", "*", "*", "kwargs", ")", ":", "with", "StreamDecryptor", "(", "*", "*", "kwargs", ")", "as", "decryptor", ":", "plaintext", "=", "decryptor", ".", "read", "(", ")", "return", "plaintext", ",", "decryptor", ".", "header" ]
Deserializes and decrypts provided ciphertext. .. note:: When using this function, the entire ciphertext message is decrypted into memory before returning any data. If streaming is desired, see :class:`aws_encryption_sdk.stream`. .. code:: python >>> import aws_encryption_sdk >>> kms_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(key_ids=[ ... 'arn:aws:kms:us-east-1:2222222222222:key/22222222-2222-2222-2222-222222222222', ... 'arn:aws:kms:us-east-1:3333333333333:key/33333333-3333-3333-3333-333333333333' ... ]) >>> my_ciphertext, encryptor_header = aws_encryption_sdk.decrypt( ... source=my_ciphertext, ... key_provider=kms_key_provider ... ) :param config: Client configuration object (config or individual parameters required) :type config: aws_encryption_sdk.streaming_client.DecryptorConfig :param source: Source data to encrypt or decrypt :type source: str, bytes, io.IOBase, or file :param materials_manager: `CryptoMaterialsManager` from which to obtain cryptographic materials (either `materials_manager` or `key_provider` required) :type materials_manager: aws_encryption_sdk.materials_managers.base.CryptoMaterialsManager :param key_provider: `MasterKeyProvider` from which to obtain data keys for decryption (either `materials_manager` or `key_provider` required) :type key_provider: aws_encryption_sdk.key_providers.base.MasterKeyProvider :param int source_length: Length of source data (optional) .. note:: If source_length is not provided and read() is called, will attempt to seek() to the end of the stream and tell() to find the length of source data. :param int max_body_length: Maximum frame size (or content length for non-framed messages) in bytes to read from ciphertext message. :returns: Tuple containing the decrypted plaintext and the message header object :rtype: tuple of bytes and :class:`aws_encryption_sdk.structures.MessageHeader`
[ "Deserializes", "and", "decrypts", "provided", "ciphertext", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/__init__.py#L83-L125
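A minimal round-trip sketch for the decrypt record above, assuming the same aws_encryption_sdk version as this commit, AWS credentials, and access to a KMS CMK; the ARN below is a placeholder, not a real key.

import aws_encryption_sdk

key_provider = aws_encryption_sdk.KMSMasterKeyProvider(
    key_ids=["arn:aws:kms:us-east-1:111122223333:key/11111111-1111-1111-1111-111111111111"]  # placeholder
)
ciphertext, encrypt_header = aws_encryption_sdk.encrypt(source=b"hello world", key_provider=key_provider)
plaintext, decrypt_header = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=key_provider)
assert plaintext == b"hello world"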
15,533
aws/aws-encryption-sdk-python
examples/src/basic_file_encryption_with_multiple_providers.py
cycle_file
def cycle_file(key_arn, source_plaintext_filename, botocore_session=None): """Encrypts and then decrypts a file using a KMS master key provider and a custom static master key provider. Both master key providers are used to encrypt the plaintext file, so either one alone can decrypt it. :param str key_arn: Amazon Resource Name (ARN) of the KMS Customer Master Key (CMK) (http://docs.aws.amazon.com/kms/latest/developerguide/viewing-keys.html) :param str source_plaintext_filename: Filename of file to encrypt :param botocore_session: existing botocore session instance :type botocore_session: botocore.session.Session """ # "Cycled" means encrypted and then decrypted ciphertext_filename = source_plaintext_filename + ".encrypted" cycled_kms_plaintext_filename = source_plaintext_filename + ".kms.decrypted" cycled_static_plaintext_filename = source_plaintext_filename + ".static.decrypted" # Create a KMS master key provider kms_kwargs = dict(key_ids=[key_arn]) if botocore_session is not None: kms_kwargs["botocore_session"] = botocore_session kms_master_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(**kms_kwargs) # Create a static master key provider and add a master key to it static_key_id = os.urandom(8) static_master_key_provider = StaticRandomMasterKeyProvider() static_master_key_provider.add_master_key(static_key_id) # Add the static master key provider to the KMS master key provider # The resulting master key provider uses KMS master keys to generate (and encrypt) # data keys and static master keys to create an additional encrypted copy of each data key. kms_master_key_provider.add_master_key_provider(static_master_key_provider) # Encrypt plaintext with both KMS and static master keys with open(source_plaintext_filename, "rb") as plaintext, open(ciphertext_filename, "wb") as ciphertext: with aws_encryption_sdk.stream(source=plaintext, mode="e", key_provider=kms_master_key_provider) as encryptor: for chunk in encryptor: ciphertext.write(chunk) # Decrypt the ciphertext with only the KMS master key with open(ciphertext_filename, "rb") as ciphertext, open(cycled_kms_plaintext_filename, "wb") as plaintext: with aws_encryption_sdk.stream( source=ciphertext, mode="d", key_provider=aws_encryption_sdk.KMSMasterKeyProvider(**kms_kwargs) ) as kms_decryptor: for chunk in kms_decryptor: plaintext.write(chunk) # Decrypt the ciphertext with only the static master key with open(ciphertext_filename, "rb") as ciphertext, open(cycled_static_plaintext_filename, "wb") as plaintext: with aws_encryption_sdk.stream( source=ciphertext, mode="d", key_provider=static_master_key_provider ) as static_decryptor: for chunk in static_decryptor: plaintext.write(chunk) # Verify that the "cycled" (encrypted, then decrypted) plaintext is identical to the source plaintext assert filecmp.cmp(source_plaintext_filename, cycled_kms_plaintext_filename) assert filecmp.cmp(source_plaintext_filename, cycled_static_plaintext_filename) # Verify that the encryption context in the decrypt operation includes all key pairs from the # encrypt operation. # # In production, always use a meaningful encryption context. In this sample, we omit the # encryption context (no key pairs). assert all( pair in kms_decryptor.header.encryption_context.items() for pair in encryptor.header.encryption_context.items() ) assert all( pair in static_decryptor.header.encryption_context.items() for pair in encryptor.header.encryption_context.items() ) return ciphertext_filename, cycled_kms_plaintext_filename, cycled_static_plaintext_filename
python
def cycle_file(key_arn, source_plaintext_filename, botocore_session=None): """Encrypts and then decrypts a file using a KMS master key provider and a custom static master key provider. Both master key providers are used to encrypt the plaintext file, so either one alone can decrypt it. :param str key_arn: Amazon Resource Name (ARN) of the KMS Customer Master Key (CMK) (http://docs.aws.amazon.com/kms/latest/developerguide/viewing-keys.html) :param str source_plaintext_filename: Filename of file to encrypt :param botocore_session: existing botocore session instance :type botocore_session: botocore.session.Session """ # "Cycled" means encrypted and then decrypted ciphertext_filename = source_plaintext_filename + ".encrypted" cycled_kms_plaintext_filename = source_plaintext_filename + ".kms.decrypted" cycled_static_plaintext_filename = source_plaintext_filename + ".static.decrypted" # Create a KMS master key provider kms_kwargs = dict(key_ids=[key_arn]) if botocore_session is not None: kms_kwargs["botocore_session"] = botocore_session kms_master_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(**kms_kwargs) # Create a static master key provider and add a master key to it static_key_id = os.urandom(8) static_master_key_provider = StaticRandomMasterKeyProvider() static_master_key_provider.add_master_key(static_key_id) # Add the static master key provider to the KMS master key provider # The resulting master key provider uses KMS master keys to generate (and encrypt) # data keys and static master keys to create an additional encrypted copy of each data key. kms_master_key_provider.add_master_key_provider(static_master_key_provider) # Encrypt plaintext with both KMS and static master keys with open(source_plaintext_filename, "rb") as plaintext, open(ciphertext_filename, "wb") as ciphertext: with aws_encryption_sdk.stream(source=plaintext, mode="e", key_provider=kms_master_key_provider) as encryptor: for chunk in encryptor: ciphertext.write(chunk) # Decrypt the ciphertext with only the KMS master key with open(ciphertext_filename, "rb") as ciphertext, open(cycled_kms_plaintext_filename, "wb") as plaintext: with aws_encryption_sdk.stream( source=ciphertext, mode="d", key_provider=aws_encryption_sdk.KMSMasterKeyProvider(**kms_kwargs) ) as kms_decryptor: for chunk in kms_decryptor: plaintext.write(chunk) # Decrypt the ciphertext with only the static master key with open(ciphertext_filename, "rb") as ciphertext, open(cycled_static_plaintext_filename, "wb") as plaintext: with aws_encryption_sdk.stream( source=ciphertext, mode="d", key_provider=static_master_key_provider ) as static_decryptor: for chunk in static_decryptor: plaintext.write(chunk) # Verify that the "cycled" (encrypted, then decrypted) plaintext is identical to the source plaintext assert filecmp.cmp(source_plaintext_filename, cycled_kms_plaintext_filename) assert filecmp.cmp(source_plaintext_filename, cycled_static_plaintext_filename) # Verify that the encryption context in the decrypt operation includes all key pairs from the # encrypt operation. # # In production, always use a meaningful encryption context. In this sample, we omit the # encryption context (no key pairs). assert all( pair in kms_decryptor.header.encryption_context.items() for pair in encryptor.header.encryption_context.items() ) assert all( pair in static_decryptor.header.encryption_context.items() for pair in encryptor.header.encryption_context.items() ) return ciphertext_filename, cycled_kms_plaintext_filename, cycled_static_plaintext_filename
[ "def", "cycle_file", "(", "key_arn", ",", "source_plaintext_filename", ",", "botocore_session", "=", "None", ")", ":", "# \"Cycled\" means encrypted and then decrypted", "ciphertext_filename", "=", "source_plaintext_filename", "+", "\".encrypted\"", "cycled_kms_plaintext_filename", "=", "source_plaintext_filename", "+", "\".kms.decrypted\"", "cycled_static_plaintext_filename", "=", "source_plaintext_filename", "+", "\".static.decrypted\"", "# Create a KMS master key provider", "kms_kwargs", "=", "dict", "(", "key_ids", "=", "[", "key_arn", "]", ")", "if", "botocore_session", "is", "not", "None", ":", "kms_kwargs", "[", "\"botocore_session\"", "]", "=", "botocore_session", "kms_master_key_provider", "=", "aws_encryption_sdk", ".", "KMSMasterKeyProvider", "(", "*", "*", "kms_kwargs", ")", "# Create a static master key provider and add a master key to it", "static_key_id", "=", "os", ".", "urandom", "(", "8", ")", "static_master_key_provider", "=", "StaticRandomMasterKeyProvider", "(", ")", "static_master_key_provider", ".", "add_master_key", "(", "static_key_id", ")", "# Add the static master key provider to the KMS master key provider", "# The resulting master key provider uses KMS master keys to generate (and encrypt)", "# data keys and static master keys to create an additional encrypted copy of each data key.", "kms_master_key_provider", ".", "add_master_key_provider", "(", "static_master_key_provider", ")", "# Encrypt plaintext with both KMS and static master keys", "with", "open", "(", "source_plaintext_filename", ",", "\"rb\"", ")", "as", "plaintext", ",", "open", "(", "ciphertext_filename", ",", "\"wb\"", ")", "as", "ciphertext", ":", "with", "aws_encryption_sdk", ".", "stream", "(", "source", "=", "plaintext", ",", "mode", "=", "\"e\"", ",", "key_provider", "=", "kms_master_key_provider", ")", "as", "encryptor", ":", "for", "chunk", "in", "encryptor", ":", "ciphertext", ".", "write", "(", "chunk", ")", "# Decrypt the ciphertext with only the KMS master key", "with", "open", "(", "ciphertext_filename", ",", "\"rb\"", ")", "as", "ciphertext", ",", "open", "(", "cycled_kms_plaintext_filename", ",", "\"wb\"", ")", "as", "plaintext", ":", "with", "aws_encryption_sdk", ".", "stream", "(", "source", "=", "ciphertext", ",", "mode", "=", "\"d\"", ",", "key_provider", "=", "aws_encryption_sdk", ".", "KMSMasterKeyProvider", "(", "*", "*", "kms_kwargs", ")", ")", "as", "kms_decryptor", ":", "for", "chunk", "in", "kms_decryptor", ":", "plaintext", ".", "write", "(", "chunk", ")", "# Decrypt the ciphertext with only the static master key", "with", "open", "(", "ciphertext_filename", ",", "\"rb\"", ")", "as", "ciphertext", ",", "open", "(", "cycled_static_plaintext_filename", ",", "\"wb\"", ")", "as", "plaintext", ":", "with", "aws_encryption_sdk", ".", "stream", "(", "source", "=", "ciphertext", ",", "mode", "=", "\"d\"", ",", "key_provider", "=", "static_master_key_provider", ")", "as", "static_decryptor", ":", "for", "chunk", "in", "static_decryptor", ":", "plaintext", ".", "write", "(", "chunk", ")", "# Verify that the \"cycled\" (encrypted, then decrypted) plaintext is identical to the source plaintext", "assert", "filecmp", ".", "cmp", "(", "source_plaintext_filename", ",", "cycled_kms_plaintext_filename", ")", "assert", "filecmp", ".", "cmp", "(", "source_plaintext_filename", ",", "cycled_static_plaintext_filename", ")", "# Verify that the encryption context in the decrypt operation includes all key pairs from the", "# encrypt operation.", "#", "# In production, always use a meaningful 
encryption context. In this sample, we omit the", "# encryption context (no key pairs).", "assert", "all", "(", "pair", "in", "kms_decryptor", ".", "header", ".", "encryption_context", ".", "items", "(", ")", "for", "pair", "in", "encryptor", ".", "header", ".", "encryption_context", ".", "items", "(", ")", ")", "assert", "all", "(", "pair", "in", "static_decryptor", ".", "header", ".", "encryption_context", ".", "items", "(", ")", "for", "pair", "in", "encryptor", ".", "header", ".", "encryption_context", ".", "items", "(", ")", ")", "return", "ciphertext_filename", ",", "cycled_kms_plaintext_filename", ",", "cycled_static_plaintext_filename" ]
Encrypts and then decrypts a file using a KMS master key provider and a custom static master key provider. Both master key providers are used to encrypt the plaintext file, so either one alone can decrypt it. :param str key_arn: Amazon Resource Name (ARN) of the KMS Customer Master Key (CMK) (http://docs.aws.amazon.com/kms/latest/developerguide/viewing-keys.html) :param str source_plaintext_filename: Filename of file to encrypt :param botocore_session: existing botocore session instance :type botocore_session: botocore.session.Session
[ "Encrypts", "and", "then", "decrypts", "a", "file", "using", "a", "KMS", "master", "key", "provider", "and", "a", "custom", "static", "master", "key", "provider", ".", "Both", "master", "key", "providers", "are", "used", "to", "encrypt", "the", "plaintext", "file", "so", "either", "one", "alone", "can", "decrypt", "it", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/examples/src/basic_file_encryption_with_multiple_providers.py#L63-L133
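A hypothetical invocation of the cycle_file example above; it assumes AWS credentials and an existing CMK, and both the ARN and the filename are placeholders.

cycle_file(
    key_arn="arn:aws:kms:us-east-1:111122223333:key/11111111-1111-1111-1111-111111111111",
    source_plaintext_filename="README.md",
)
# Produces README.md.encrypted, README.md.kms.decrypted and README.md.static.decrypted.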
15,534
aws/aws-encryption-sdk-python
examples/src/basic_file_encryption_with_multiple_providers.py
StaticRandomMasterKeyProvider._get_raw_key
def _get_raw_key(self, key_id): """Retrieves a static, randomly generated, RSA key for the specified key id. :param str key_id: User-defined ID for the static key :returns: Wrapping key that contains the specified static key :rtype: :class:`aws_encryption_sdk.internal.crypto.WrappingKey` """ try: static_key = self._static_keys[key_id] except KeyError: private_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) static_key = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption(), ) self._static_keys[key_id] = static_key return WrappingKey( wrapping_algorithm=WrappingAlgorithm.RSA_OAEP_SHA1_MGF1, wrapping_key=static_key, wrapping_key_type=EncryptionKeyType.PRIVATE, )
python
def _get_raw_key(self, key_id): """Retrieves a static, randomly generated, RSA key for the specified key id. :param str key_id: User-defined ID for the static key :returns: Wrapping key that contains the specified static key :rtype: :class:`aws_encryption_sdk.internal.crypto.WrappingKey` """ try: static_key = self._static_keys[key_id] except KeyError: private_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) static_key = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption(), ) self._static_keys[key_id] = static_key return WrappingKey( wrapping_algorithm=WrappingAlgorithm.RSA_OAEP_SHA1_MGF1, wrapping_key=static_key, wrapping_key_type=EncryptionKeyType.PRIVATE, )
[ "def", "_get_raw_key", "(", "self", ",", "key_id", ")", ":", "try", ":", "static_key", "=", "self", ".", "_static_keys", "[", "key_id", "]", "except", "KeyError", ":", "private_key", "=", "rsa", ".", "generate_private_key", "(", "public_exponent", "=", "65537", ",", "key_size", "=", "4096", ",", "backend", "=", "default_backend", "(", ")", ")", "static_key", "=", "private_key", ".", "private_bytes", "(", "encoding", "=", "serialization", ".", "Encoding", ".", "PEM", ",", "format", "=", "serialization", ".", "PrivateFormat", ".", "PKCS8", ",", "encryption_algorithm", "=", "serialization", ".", "NoEncryption", "(", ")", ",", ")", "self", ".", "_static_keys", "[", "key_id", "]", "=", "static_key", "return", "WrappingKey", "(", "wrapping_algorithm", "=", "WrappingAlgorithm", ".", "RSA_OAEP_SHA1_MGF1", ",", "wrapping_key", "=", "static_key", ",", "wrapping_key_type", "=", "EncryptionKeyType", ".", "PRIVATE", ",", ")" ]
Retrieves a static, randomly generated, RSA key for the specified key id. :param str key_id: User-defined ID for the static key :returns: Wrapping key that contains the specified static key :rtype: :class:`aws_encryption_sdk.internal.crypto.WrappingKey`
[ "Retrieves", "a", "static", "randomly", "generated", "RSA", "key", "for", "the", "specified", "key", "id", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/examples/src/basic_file_encryption_with_multiple_providers.py#L39-L60
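The enclosing class is not part of this record; the skeleton below is a plausible reconstruction showing where _get_raw_key fits, based on the RawMasterKeyProvider interface it overrides. The provider_id value is an assumption for illustration.

from aws_encryption_sdk.key_providers.raw import RawMasterKeyProvider

class StaticRandomMasterKeyProvider(RawMasterKeyProvider):
    provider_id = "static-random"   # assumed identifier for this example provider

    def __init__(self, **kwargs):
        self._static_keys = {}      # key_id -> PEM-encoded private key bytes

    def _get_raw_key(self, key_id):
        ...  # body as shown in the record above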
15,535
dakrauth/django-swingtime
swingtime/utils.py
month_boundaries
def month_boundaries(dt=None): ''' Return a 2-tuple containing the datetime instances for the first and last dates of the current month or using ``dt`` as a reference. ''' dt = dt or date.today() wkday, ndays = calendar.monthrange(dt.year, dt.month) start = datetime(dt.year, dt.month, 1) return (start, start + timedelta(ndays - 1))
python
def month_boundaries(dt=None): ''' Return a 2-tuple containing the datetime instances for the first and last dates of the current month or using ``dt`` as a reference. ''' dt = dt or date.today() wkday, ndays = calendar.monthrange(dt.year, dt.month) start = datetime(dt.year, dt.month, 1) return (start, start + timedelta(ndays - 1))
[ "def", "month_boundaries", "(", "dt", "=", "None", ")", ":", "dt", "=", "dt", "or", "date", ".", "today", "(", ")", "wkday", ",", "ndays", "=", "calendar", ".", "monthrange", "(", "dt", ".", "year", ",", "dt", ".", "month", ")", "start", "=", "datetime", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "1", ")", "return", "(", "start", ",", "start", "+", "timedelta", "(", "ndays", "-", "1", ")", ")" ]
Return a 2-tuple containing the datetime instances for the first and last dates of the current month or using ``dt`` as a reference.
[ "Return", "a", "2", "-", "tuple", "containing", "the", "datetime", "instances", "for", "the", "first", "and", "last", "dates", "of", "the", "current", "month", "or", "using", "dt", "as", "a", "reference", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/utils.py#L27-L36
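A quick check of month_boundaries with a fixed reference date, assuming the function and its own imports (calendar, plus date/datetime/timedelta from datetime) are in scope.

from datetime import date

start, end = month_boundaries(date(2019, 2, 14))
# start == datetime(2019, 2, 1, 0, 0); end == datetime(2019, 2, 28, 0, 0)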
15,536
dakrauth/django-swingtime
swingtime/utils.py
css_class_cycler
def css_class_cycler(): ''' Return a dictionary keyed by ``EventType`` abbreviations, whose values are an iterable or cycle of CSS class names. ''' FMT = 'evt-{0}-{1}'.format return defaultdict(default_css_class_cycler, ( (e.abbr, itertools.cycle((FMT(e.abbr, 'even'), FMT(e.abbr, 'odd')))) for e in EventType.objects.all() ))
python
def css_class_cycler(): ''' Return a dictionary keyed by ``EventType`` abbreviations, whose values are an iterable or cycle of CSS class names. ''' FMT = 'evt-{0}-{1}'.format return defaultdict(default_css_class_cycler, ( (e.abbr, itertools.cycle((FMT(e.abbr, 'even'), FMT(e.abbr, 'odd')))) for e in EventType.objects.all() ))
[ "def", "css_class_cycler", "(", ")", ":", "FMT", "=", "'evt-{0}-{1}'", ".", "format", "return", "defaultdict", "(", "default_css_class_cycler", ",", "(", "(", "e", ".", "abbr", ",", "itertools", ".", "cycle", "(", "(", "FMT", "(", "e", ".", "abbr", ",", "'even'", ")", ",", "FMT", "(", "e", ".", "abbr", ",", "'odd'", ")", ")", ")", ")", "for", "e", "in", "EventType", ".", "objects", ".", "all", "(", ")", ")", ")" ]
Return a dictionary keyed by ``EventType`` abbreviations, whose values are an iterable or cycle of CSS class names.
[ "Return", "a", "dictionary", "keyed", "by", "EventType", "abbreviations", "whose", "values", "are", "an", "iterable", "or", "cycle", "of", "CSS", "class", "names", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/utils.py#L43-L53
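A Django-free sketch of the defaultdict-plus-cycle pattern used above; the abbreviation list and the fallback factory are stand-ins for EventType.objects.all() and default_css_class_cycler.

import itertools
from collections import defaultdict

def fallback_cycler():
    # Stand-in for default_css_class_cycler: generic even/odd classes.
    return itertools.cycle(("evt-even", "evt-odd"))

FMT = "evt-{0}-{1}".format
abbrs = ["mtg", "bday"]  # stand-in for EventType abbreviations from the database
cyclers = defaultdict(
    fallback_cycler,
    ((abbr, itertools.cycle((FMT(abbr, "even"), FMT(abbr, "odd")))) for abbr in abbrs),
)
next(cyclers["mtg"])      # 'evt-mtg-even'
next(cyclers["mtg"])      # 'evt-mtg-odd'
next(cyclers["unknown"])  # 'evt-even' (falls back to the default factory)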
15,537
dakrauth/django-swingtime
swingtime/models.py
create_event
def create_event( title, event_type, description='', start_time=None, end_time=None, note=None, **rrule_params ): ''' Convenience function to create an ``Event``, optionally create an ``EventType``, and associated ``Occurrence``s. ``Occurrence`` creation rules match those for ``Event.add_occurrences``. Returns the newly created ``Event`` instance. Parameters ``event_type`` can be either an ``EventType`` object or 2-tuple of ``(abbreviation,label)``, from which an ``EventType`` is either created or retrieved. ``start_time`` will default to the current hour if ``None`` ``end_time`` will default to ``start_time`` plus swingtime_settings.DEFAULT_OCCURRENCE_DURATION hour if ``None`` ``freq``, ``count``, ``rrule_params`` follow the ``dateutils`` API (see http://labix.org/python-dateutil) ''' if isinstance(event_type, tuple): event_type, created = EventType.objects.get_or_create( abbr=event_type[0], label=event_type[1] ) event = Event.objects.create( title=title, description=description, event_type=event_type ) if note is not None: event.notes.create(note=note) start_time = start_time or datetime.now().replace( minute=0, second=0, microsecond=0 ) end_time = end_time or (start_time + swingtime_settings.DEFAULT_OCCURRENCE_DURATION) event.add_occurrences(start_time, end_time, **rrule_params) return event
python
def create_event( title, event_type, description='', start_time=None, end_time=None, note=None, **rrule_params ): ''' Convenience function to create an ``Event``, optionally create an ``EventType``, and associated ``Occurrence``s. ``Occurrence`` creation rules match those for ``Event.add_occurrences``. Returns the newly created ``Event`` instance. Parameters ``event_type`` can be either an ``EventType`` object or 2-tuple of ``(abbreviation,label)``, from which an ``EventType`` is either created or retrieved. ``start_time`` will default to the current hour if ``None`` ``end_time`` will default to ``start_time`` plus swingtime_settings.DEFAULT_OCCURRENCE_DURATION hour if ``None`` ``freq``, ``count``, ``rrule_params`` follow the ``dateutils`` API (see http://labix.org/python-dateutil) ''' if isinstance(event_type, tuple): event_type, created = EventType.objects.get_or_create( abbr=event_type[0], label=event_type[1] ) event = Event.objects.create( title=title, description=description, event_type=event_type ) if note is not None: event.notes.create(note=note) start_time = start_time or datetime.now().replace( minute=0, second=0, microsecond=0 ) end_time = end_time or (start_time + swingtime_settings.DEFAULT_OCCURRENCE_DURATION) event.add_occurrences(start_time, end_time, **rrule_params) return event
[ "def", "create_event", "(", "title", ",", "event_type", ",", "description", "=", "''", ",", "start_time", "=", "None", ",", "end_time", "=", "None", ",", "note", "=", "None", ",", "*", "*", "rrule_params", ")", ":", "if", "isinstance", "(", "event_type", ",", "tuple", ")", ":", "event_type", ",", "created", "=", "EventType", ".", "objects", ".", "get_or_create", "(", "abbr", "=", "event_type", "[", "0", "]", ",", "label", "=", "event_type", "[", "1", "]", ")", "event", "=", "Event", ".", "objects", ".", "create", "(", "title", "=", "title", ",", "description", "=", "description", ",", "event_type", "=", "event_type", ")", "if", "note", "is", "not", "None", ":", "event", ".", "notes", ".", "create", "(", "note", "=", "note", ")", "start_time", "=", "start_time", "or", "datetime", ".", "now", "(", ")", ".", "replace", "(", "minute", "=", "0", ",", "second", "=", "0", ",", "microsecond", "=", "0", ")", "end_time", "=", "end_time", "or", "(", "start_time", "+", "swingtime_settings", ".", "DEFAULT_OCCURRENCE_DURATION", ")", "event", ".", "add_occurrences", "(", "start_time", ",", "end_time", ",", "*", "*", "rrule_params", ")", "return", "event" ]
Convenience function to create an ``Event``, optionally create an ``EventType``, and associated ``Occurrence``s. ``Occurrence`` creation rules match those for ``Event.add_occurrences``. Returns the newly created ``Event`` instance. Parameters ``event_type`` can be either an ``EventType`` object or 2-tuple of ``(abbreviation,label)``, from which an ``EventType`` is either created or retrieved. ``start_time`` will default to the current hour if ``None`` ``end_time`` will default to ``start_time`` plus swingtime_settings.DEFAULT_OCCURRENCE_DURATION hour if ``None`` ``freq``, ``count``, ``rrule_params`` follow the ``dateutils`` API (see http://labix.org/python-dateutil)
[ "Convenience", "function", "to", "create", "an", "Event", "optionally", "create", "an", "EventType", "and", "associated", "Occurrence", "s", ".", "Occurrence", "creation", "rules", "match", "those", "for", "Event", ".", "add_occurrences", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/models.py#L208-L265
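A usage sketch for create_event; it assumes a configured Django project with swingtime's models migrated, and the event type tuple and title are illustrative.

from dateutil import rrule

event = create_event(
    "Team standup",
    ("mtg", "Meeting"),      # 2-tuple -> the EventType is created or fetched
    description="Daily sync",
    freq=rrule.WEEKLY,
    count=4,                 # four weekly occurrences, starting at the current hour
)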
15,538
dakrauth/django-swingtime
swingtime/models.py
Event.add_occurrences
def add_occurrences(self, start_time, end_time, **rrule_params): ''' Add one or more occurences to the event using a comparable API to ``dateutil.rrule``. If ``rrule_params`` does not contain a ``freq``, one will be defaulted to ``rrule.DAILY``. Because ``rrule.rrule`` returns an iterator that can essentially be unbounded, we need to slightly alter the expected behavior here in order to enforce a finite number of occurrence creation. If both ``count`` and ``until`` entries are missing from ``rrule_params``, only a single ``Occurrence`` instance will be created using the exact ``start_time`` and ``end_time`` values. ''' count = rrule_params.get('count') until = rrule_params.get('until') if not (count or until): self.occurrence_set.create(start_time=start_time, end_time=end_time) else: rrule_params.setdefault('freq', rrule.DAILY) delta = end_time - start_time occurrences = [] for ev in rrule.rrule(dtstart=start_time, **rrule_params): occurrences.append(Occurrence(start_time=ev, end_time=ev + delta, event=self)) self.occurrence_set.bulk_create(occurrences)
python
def add_occurrences(self, start_time, end_time, **rrule_params): ''' Add one or more occurences to the event using a comparable API to ``dateutil.rrule``. If ``rrule_params`` does not contain a ``freq``, one will be defaulted to ``rrule.DAILY``. Because ``rrule.rrule`` returns an iterator that can essentially be unbounded, we need to slightly alter the expected behavior here in order to enforce a finite number of occurrence creation. If both ``count`` and ``until`` entries are missing from ``rrule_params``, only a single ``Occurrence`` instance will be created using the exact ``start_time`` and ``end_time`` values. ''' count = rrule_params.get('count') until = rrule_params.get('until') if not (count or until): self.occurrence_set.create(start_time=start_time, end_time=end_time) else: rrule_params.setdefault('freq', rrule.DAILY) delta = end_time - start_time occurrences = [] for ev in rrule.rrule(dtstart=start_time, **rrule_params): occurrences.append(Occurrence(start_time=ev, end_time=ev + delta, event=self)) self.occurrence_set.bulk_create(occurrences)
[ "def", "add_occurrences", "(", "self", ",", "start_time", ",", "end_time", ",", "*", "*", "rrule_params", ")", ":", "count", "=", "rrule_params", ".", "get", "(", "'count'", ")", "until", "=", "rrule_params", ".", "get", "(", "'until'", ")", "if", "not", "(", "count", "or", "until", ")", ":", "self", ".", "occurrence_set", ".", "create", "(", "start_time", "=", "start_time", ",", "end_time", "=", "end_time", ")", "else", ":", "rrule_params", ".", "setdefault", "(", "'freq'", ",", "rrule", ".", "DAILY", ")", "delta", "=", "end_time", "-", "start_time", "occurrences", "=", "[", "]", "for", "ev", "in", "rrule", ".", "rrule", "(", "dtstart", "=", "start_time", ",", "*", "*", "rrule_params", ")", ":", "occurrences", ".", "append", "(", "Occurrence", "(", "start_time", "=", "ev", ",", "end_time", "=", "ev", "+", "delta", ",", "event", "=", "self", ")", ")", "self", ".", "occurrence_set", ".", "bulk_create", "(", "occurrences", ")" ]
Add one or more occurences to the event using a comparable API to ``dateutil.rrule``. If ``rrule_params`` does not contain a ``freq``, one will be defaulted to ``rrule.DAILY``. Because ``rrule.rrule`` returns an iterator that can essentially be unbounded, we need to slightly alter the expected behavior here in order to enforce a finite number of occurrence creation. If both ``count`` and ``until`` entries are missing from ``rrule_params``, only a single ``Occurrence`` instance will be created using the exact ``start_time`` and ``end_time`` values.
[ "Add", "one", "or", "more", "occurences", "to", "the", "event", "using", "a", "comparable", "API", "to", "dateutil", ".", "rrule", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/models.py#L84-L110
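A sketch of the two branches of add_occurrences, assuming a saved Event instance named event in a configured Django project.

from datetime import datetime, timedelta
from dateutil import rrule

start = datetime(2019, 1, 7, 9, 0)
end = start + timedelta(hours=1)

event.add_occurrences(start, end)                              # no count/until: one exact occurrence
event.add_occurrences(start, end, freq=rrule.DAILY, count=5)   # five daily one-hour occurrences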
15,539
dakrauth/django-swingtime
swingtime/models.py
Event.daily_occurrences
def daily_occurrences(self, dt=None): ''' Convenience method wrapping ``Occurrence.objects.daily_occurrences``. ''' return Occurrence.objects.daily_occurrences(dt=dt, event=self)
python
def daily_occurrences(self, dt=None): ''' Convenience method wrapping ``Occurrence.objects.daily_occurrences``. ''' return Occurrence.objects.daily_occurrences(dt=dt, event=self)
[ "def", "daily_occurrences", "(", "self", ",", "dt", "=", "None", ")", ":", "return", "Occurrence", ".", "objects", ".", "daily_occurrences", "(", "dt", "=", "dt", ",", "event", "=", "self", ")" ]
Convenience method wrapping ``Occurrence.objects.daily_occurrences``.
[ "Convenience", "method", "wrapping", "Occurrence", ".", "objects", ".", "daily_occurrences", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/models.py#L127-L131
15,540
dakrauth/django-swingtime
swingtime/models.py
OccurrenceManager.daily_occurrences
def daily_occurrences(self, dt=None, event=None): ''' Returns a queryset of for instances that have any overlap with a particular day. * ``dt`` may be either a datetime.datetime, datetime.date object, or ``None``. If ``None``, default to the current day. * ``event`` can be an ``Event`` instance for further filtering. ''' dt = dt or datetime.now() start = datetime(dt.year, dt.month, dt.day) end = start.replace(hour=23, minute=59, second=59) qs = self.filter( models.Q( start_time__gte=start, start_time__lte=end, ) | models.Q( end_time__gte=start, end_time__lte=end, ) | models.Q( start_time__lt=start, end_time__gt=end ) ) return qs.filter(event=event) if event else qs
python
def daily_occurrences(self, dt=None, event=None): ''' Returns a queryset of for instances that have any overlap with a particular day. * ``dt`` may be either a datetime.datetime, datetime.date object, or ``None``. If ``None``, default to the current day. * ``event`` can be an ``Event`` instance for further filtering. ''' dt = dt or datetime.now() start = datetime(dt.year, dt.month, dt.day) end = start.replace(hour=23, minute=59, second=59) qs = self.filter( models.Q( start_time__gte=start, start_time__lte=end, ) | models.Q( end_time__gte=start, end_time__lte=end, ) | models.Q( start_time__lt=start, end_time__gt=end ) ) return qs.filter(event=event) if event else qs
[ "def", "daily_occurrences", "(", "self", ",", "dt", "=", "None", ",", "event", "=", "None", ")", ":", "dt", "=", "dt", "or", "datetime", ".", "now", "(", ")", "start", "=", "datetime", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "dt", ".", "day", ")", "end", "=", "start", ".", "replace", "(", "hour", "=", "23", ",", "minute", "=", "59", ",", "second", "=", "59", ")", "qs", "=", "self", ".", "filter", "(", "models", ".", "Q", "(", "start_time__gte", "=", "start", ",", "start_time__lte", "=", "end", ",", ")", "|", "models", ".", "Q", "(", "end_time__gte", "=", "start", ",", "end_time__lte", "=", "end", ",", ")", "|", "models", ".", "Q", "(", "start_time__lt", "=", "start", ",", "end_time__gt", "=", "end", ")", ")", "return", "qs", ".", "filter", "(", "event", "=", "event", ")", "if", "event", "else", "qs" ]
Returns a queryset of for instances that have any overlap with a particular day. * ``dt`` may be either a datetime.datetime, datetime.date object, or ``None``. If ``None``, default to the current day. * ``event`` can be an ``Event`` instance for further filtering.
[ "Returns", "a", "queryset", "of", "for", "instances", "that", "have", "any", "overlap", "with", "a", "particular", "day", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/models.py#L136-L164
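The three Q branches above cover occurrences that start on the day, end on the day, or span it entirely; a plain-Python restatement of that overlap test for checking the logic.

from datetime import datetime

day_start = datetime(2019, 3, 10, 0, 0, 0)
day_end = day_start.replace(hour=23, minute=59, second=59)

def overlaps(o_start, o_end):
    return (day_start <= o_start <= day_end                    # starts during the day
            or day_start <= o_end <= day_end                   # ends during the day
            or (o_start < day_start and o_end > day_end))      # spans the whole day

assert overlaps(datetime(2019, 3, 10, 9), datetime(2019, 3, 10, 10))
assert overlaps(datetime(2019, 3, 9, 23), datetime(2019, 3, 10, 1))
assert overlaps(datetime(2019, 3, 9, 12), datetime(2019, 3, 11, 12))
assert not overlaps(datetime(2019, 3, 11, 9), datetime(2019, 3, 11, 10))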
15,541
dakrauth/django-swingtime
swingtime/views.py
event_listing
def event_listing( request, template='swingtime/event_list.html', events=None, **extra_context ): ''' View all ``events``. If ``events`` is a queryset, clone it. If ``None`` default to all ``Event``s. Context parameters: ``events`` an iterable of ``Event`` objects ... plus all values passed in via **extra_context ''' events = events or Event.objects.all() extra_context['events'] = events return render(request, template, extra_context)
python
def event_listing( request, template='swingtime/event_list.html', events=None, **extra_context ): ''' View all ``events``. If ``events`` is a queryset, clone it. If ``None`` default to all ``Event``s. Context parameters: ``events`` an iterable of ``Event`` objects ... plus all values passed in via **extra_context ''' events = events or Event.objects.all() extra_context['events'] = events return render(request, template, extra_context)
[ "def", "event_listing", "(", "request", ",", "template", "=", "'swingtime/event_list.html'", ",", "events", "=", "None", ",", "*", "*", "extra_context", ")", ":", "events", "=", "events", "or", "Event", ".", "objects", ".", "all", "(", ")", "extra_context", "[", "'events'", "]", "=", "events", "return", "render", "(", "request", ",", "template", ",", "extra_context", ")" ]
View all ``events``. If ``events`` is a queryset, clone it. If ``None`` default to all ``Event``s. Context parameters: ``events`` an iterable of ``Event`` objects ... plus all values passed in via **extra_context
[ "View", "all", "events", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/views.py#L20-L40
15,542
dakrauth/django-swingtime
swingtime/views.py
event_view
def event_view( request, pk, template='swingtime/event_detail.html', event_form_class=forms.EventForm, recurrence_form_class=forms.MultipleOccurrenceForm ): ''' View an ``Event`` instance and optionally update either the event or its occurrences. Context parameters: ``event`` the event keyed by ``pk`` ``event_form`` a form object for updating the event ``recurrence_form`` a form object for adding occurrences ''' event = get_object_or_404(Event, pk=pk) event_form = recurrence_form = None if request.method == 'POST': if '_update' in request.POST: event_form = event_form_class(request.POST, instance=event) if event_form.is_valid(): event_form.save(event) return http.HttpResponseRedirect(request.path) elif '_add' in request.POST: recurrence_form = recurrence_form_class(request.POST) if recurrence_form.is_valid(): recurrence_form.save(event) return http.HttpResponseRedirect(request.path) else: return http.HttpResponseBadRequest('Bad Request') data = { 'event': event, 'event_form': event_form or event_form_class(instance=event), 'recurrence_form': recurrence_form or recurrence_form_class( initial={'dtstart': datetime.now()} ) } return render(request, template, data)
python
def event_view( request, pk, template='swingtime/event_detail.html', event_form_class=forms.EventForm, recurrence_form_class=forms.MultipleOccurrenceForm ): ''' View an ``Event`` instance and optionally update either the event or its occurrences. Context parameters: ``event`` the event keyed by ``pk`` ``event_form`` a form object for updating the event ``recurrence_form`` a form object for adding occurrences ''' event = get_object_or_404(Event, pk=pk) event_form = recurrence_form = None if request.method == 'POST': if '_update' in request.POST: event_form = event_form_class(request.POST, instance=event) if event_form.is_valid(): event_form.save(event) return http.HttpResponseRedirect(request.path) elif '_add' in request.POST: recurrence_form = recurrence_form_class(request.POST) if recurrence_form.is_valid(): recurrence_form.save(event) return http.HttpResponseRedirect(request.path) else: return http.HttpResponseBadRequest('Bad Request') data = { 'event': event, 'event_form': event_form or event_form_class(instance=event), 'recurrence_form': recurrence_form or recurrence_form_class( initial={'dtstart': datetime.now()} ) } return render(request, template, data)
[ "def", "event_view", "(", "request", ",", "pk", ",", "template", "=", "'swingtime/event_detail.html'", ",", "event_form_class", "=", "forms", ".", "EventForm", ",", "recurrence_form_class", "=", "forms", ".", "MultipleOccurrenceForm", ")", ":", "event", "=", "get_object_or_404", "(", "Event", ",", "pk", "=", "pk", ")", "event_form", "=", "recurrence_form", "=", "None", "if", "request", ".", "method", "==", "'POST'", ":", "if", "'_update'", "in", "request", ".", "POST", ":", "event_form", "=", "event_form_class", "(", "request", ".", "POST", ",", "instance", "=", "event", ")", "if", "event_form", ".", "is_valid", "(", ")", ":", "event_form", ".", "save", "(", "event", ")", "return", "http", ".", "HttpResponseRedirect", "(", "request", ".", "path", ")", "elif", "'_add'", "in", "request", ".", "POST", ":", "recurrence_form", "=", "recurrence_form_class", "(", "request", ".", "POST", ")", "if", "recurrence_form", ".", "is_valid", "(", ")", ":", "recurrence_form", ".", "save", "(", "event", ")", "return", "http", ".", "HttpResponseRedirect", "(", "request", ".", "path", ")", "else", ":", "return", "http", ".", "HttpResponseBadRequest", "(", "'Bad Request'", ")", "data", "=", "{", "'event'", ":", "event", ",", "'event_form'", ":", "event_form", "or", "event_form_class", "(", "instance", "=", "event", ")", ",", "'recurrence_form'", ":", "recurrence_form", "or", "recurrence_form_class", "(", "initial", "=", "{", "'dtstart'", ":", "datetime", ".", "now", "(", ")", "}", ")", "}", "return", "render", "(", "request", ",", "template", ",", "data", ")" ]
View an ``Event`` instance and optionally update either the event or its occurrences. Context parameters: ``event`` the event keyed by ``pk`` ``event_form`` a form object for updating the event ``recurrence_form`` a form object for adding occurrences
[ "View", "an", "Event", "instance", "and", "optionally", "update", "either", "the", "event", "or", "its", "occurrences", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/views.py#L43-L88
15,543
dakrauth/django-swingtime
swingtime/views.py
occurrence_view
def occurrence_view( request, event_pk, pk, template='swingtime/occurrence_detail.html', form_class=forms.SingleOccurrenceForm ): ''' View a specific occurrence and optionally handle any updates. Context parameters: ``occurrence`` the occurrence object keyed by ``pk`` ``form`` a form object for updating the occurrence ''' occurrence = get_object_or_404(Occurrence, pk=pk, event__pk=event_pk) if request.method == 'POST': form = form_class(request.POST, instance=occurrence) if form.is_valid(): form.save() return http.HttpResponseRedirect(request.path) else: form = form_class(instance=occurrence) return render(request, template, {'occurrence': occurrence, 'form': form})
python
def occurrence_view( request, event_pk, pk, template='swingtime/occurrence_detail.html', form_class=forms.SingleOccurrenceForm ): ''' View a specific occurrence and optionally handle any updates. Context parameters: ``occurrence`` the occurrence object keyed by ``pk`` ``form`` a form object for updating the occurrence ''' occurrence = get_object_or_404(Occurrence, pk=pk, event__pk=event_pk) if request.method == 'POST': form = form_class(request.POST, instance=occurrence) if form.is_valid(): form.save() return http.HttpResponseRedirect(request.path) else: form = form_class(instance=occurrence) return render(request, template, {'occurrence': occurrence, 'form': form})
[ "def", "occurrence_view", "(", "request", ",", "event_pk", ",", "pk", ",", "template", "=", "'swingtime/occurrence_detail.html'", ",", "form_class", "=", "forms", ".", "SingleOccurrenceForm", ")", ":", "occurrence", "=", "get_object_or_404", "(", "Occurrence", ",", "pk", "=", "pk", ",", "event__pk", "=", "event_pk", ")", "if", "request", ".", "method", "==", "'POST'", ":", "form", "=", "form_class", "(", "request", ".", "POST", ",", "instance", "=", "occurrence", ")", "if", "form", ".", "is_valid", "(", ")", ":", "form", ".", "save", "(", ")", "return", "http", ".", "HttpResponseRedirect", "(", "request", ".", "path", ")", "else", ":", "form", "=", "form_class", "(", "instance", "=", "occurrence", ")", "return", "render", "(", "request", ",", "template", ",", "{", "'occurrence'", ":", "occurrence", ",", "'form'", ":", "form", "}", ")" ]
View a specific occurrence and optionally handle any updates. Context parameters: ``occurrence`` the occurrence object keyed by ``pk`` ``form`` a form object for updating the occurrence
[ "View", "a", "specific", "occurrence", "and", "optionally", "handle", "any", "updates", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/views.py#L91-L118
15,544
dakrauth/django-swingtime
swingtime/views.py
add_event
def add_event( request, template='swingtime/add_event.html', event_form_class=forms.EventForm, recurrence_form_class=forms.MultipleOccurrenceForm ): ''' Add a new ``Event`` instance and 1 or more associated ``Occurrence``s. Context parameters: ``dtstart`` a datetime.datetime object representing the GET request value if present, otherwise None ``event_form`` a form object for updating the event ``recurrence_form`` a form object for adding occurrences ''' dtstart = None if request.method == 'POST': event_form = event_form_class(request.POST) recurrence_form = recurrence_form_class(request.POST) if event_form.is_valid() and recurrence_form.is_valid(): event = event_form.save() recurrence_form.save(event) return http.HttpResponseRedirect(event.get_absolute_url()) else: if 'dtstart' in request.GET: try: dtstart = parser.parse(request.GET['dtstart']) except(TypeError, ValueError) as exc: # TODO: A badly formatted date is passed to add_event logging.warning(exc) dtstart = dtstart or datetime.now() event_form = event_form_class() recurrence_form = recurrence_form_class(initial={'dtstart': dtstart}) return render( request, template, {'dtstart': dtstart, 'event_form': event_form, 'recurrence_form': recurrence_form} )
python
def add_event( request, template='swingtime/add_event.html', event_form_class=forms.EventForm, recurrence_form_class=forms.MultipleOccurrenceForm ): ''' Add a new ``Event`` instance and 1 or more associated ``Occurrence``s. Context parameters: ``dtstart`` a datetime.datetime object representing the GET request value if present, otherwise None ``event_form`` a form object for updating the event ``recurrence_form`` a form object for adding occurrences ''' dtstart = None if request.method == 'POST': event_form = event_form_class(request.POST) recurrence_form = recurrence_form_class(request.POST) if event_form.is_valid() and recurrence_form.is_valid(): event = event_form.save() recurrence_form.save(event) return http.HttpResponseRedirect(event.get_absolute_url()) else: if 'dtstart' in request.GET: try: dtstart = parser.parse(request.GET['dtstart']) except(TypeError, ValueError) as exc: # TODO: A badly formatted date is passed to add_event logging.warning(exc) dtstart = dtstart or datetime.now() event_form = event_form_class() recurrence_form = recurrence_form_class(initial={'dtstart': dtstart}) return render( request, template, {'dtstart': dtstart, 'event_form': event_form, 'recurrence_form': recurrence_form} )
[ "def", "add_event", "(", "request", ",", "template", "=", "'swingtime/add_event.html'", ",", "event_form_class", "=", "forms", ".", "EventForm", ",", "recurrence_form_class", "=", "forms", ".", "MultipleOccurrenceForm", ")", ":", "dtstart", "=", "None", "if", "request", ".", "method", "==", "'POST'", ":", "event_form", "=", "event_form_class", "(", "request", ".", "POST", ")", "recurrence_form", "=", "recurrence_form_class", "(", "request", ".", "POST", ")", "if", "event_form", ".", "is_valid", "(", ")", "and", "recurrence_form", ".", "is_valid", "(", ")", ":", "event", "=", "event_form", ".", "save", "(", ")", "recurrence_form", ".", "save", "(", "event", ")", "return", "http", ".", "HttpResponseRedirect", "(", "event", ".", "get_absolute_url", "(", ")", ")", "else", ":", "if", "'dtstart'", "in", "request", ".", "GET", ":", "try", ":", "dtstart", "=", "parser", ".", "parse", "(", "request", ".", "GET", "[", "'dtstart'", "]", ")", "except", "(", "TypeError", ",", "ValueError", ")", "as", "exc", ":", "# TODO: A badly formatted date is passed to add_event", "logging", ".", "warning", "(", "exc", ")", "dtstart", "=", "dtstart", "or", "datetime", ".", "now", "(", ")", "event_form", "=", "event_form_class", "(", ")", "recurrence_form", "=", "recurrence_form_class", "(", "initial", "=", "{", "'dtstart'", ":", "dtstart", "}", ")", "return", "render", "(", "request", ",", "template", ",", "{", "'dtstart'", ":", "dtstart", ",", "'event_form'", ":", "event_form", ",", "'recurrence_form'", ":", "recurrence_form", "}", ")" ]
Add a new ``Event`` instance and 1 or more associated ``Occurrence``s. Context parameters: ``dtstart`` a datetime.datetime object representing the GET request value if present, otherwise None ``event_form`` a form object for updating the event ``recurrence_form`` a form object for adding occurrences
[ "Add", "a", "new", "Event", "instance", "and", "1", "or", "more", "associated", "Occurrence", "s", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/views.py#L121-L168
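The GET branch above tolerates malformed dtstart values; a standalone illustration of that fallback using dateutil, with the helper name chosen for this sketch only.

import logging
from datetime import datetime
from dateutil import parser

def parse_dtstart(raw):
    dtstart = None
    try:
        dtstart = parser.parse(raw)
    except (TypeError, ValueError) as exc:
        logging.warning(exc)          # fall back to "now", mirroring the view
    return dtstart or datetime.now()

parse_dtstart("2019-03-10 09:00")  # -> datetime(2019, 3, 10, 9, 0)
parse_dtstart("not-a-date")        # -> current time, with a warning logged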
15,545
dakrauth/django-swingtime
swingtime/views.py
_datetime_view
def _datetime_view( request, template, dt, timeslot_factory=None, items=None, params=None ): ''' Build a time slot grid representation for the given datetime ``dt``. See utils.create_timeslot_table documentation for items and params. Context parameters: ``day`` the specified datetime value (dt) ``next_day`` day + 1 day ``prev_day`` day - 1 day ``timeslots`` time slot grid of (time, cells) rows ''' timeslot_factory = timeslot_factory or utils.create_timeslot_table params = params or {} return render(request, template, { 'day': dt, 'next_day': dt + timedelta(days=+1), 'prev_day': dt + timedelta(days=-1), 'timeslots': timeslot_factory(dt, items, **params) })
python
def _datetime_view( request, template, dt, timeslot_factory=None, items=None, params=None ): ''' Build a time slot grid representation for the given datetime ``dt``. See utils.create_timeslot_table documentation for items and params. Context parameters: ``day`` the specified datetime value (dt) ``next_day`` day + 1 day ``prev_day`` day - 1 day ``timeslots`` time slot grid of (time, cells) rows ''' timeslot_factory = timeslot_factory or utils.create_timeslot_table params = params or {} return render(request, template, { 'day': dt, 'next_day': dt + timedelta(days=+1), 'prev_day': dt + timedelta(days=-1), 'timeslots': timeslot_factory(dt, items, **params) })
[ "def", "_datetime_view", "(", "request", ",", "template", ",", "dt", ",", "timeslot_factory", "=", "None", ",", "items", "=", "None", ",", "params", "=", "None", ")", ":", "timeslot_factory", "=", "timeslot_factory", "or", "utils", ".", "create_timeslot_table", "params", "=", "params", "or", "{", "}", "return", "render", "(", "request", ",", "template", ",", "{", "'day'", ":", "dt", ",", "'next_day'", ":", "dt", "+", "timedelta", "(", "days", "=", "+", "1", ")", ",", "'prev_day'", ":", "dt", "+", "timedelta", "(", "days", "=", "-", "1", ")", ",", "'timeslots'", ":", "timeslot_factory", "(", "dt", ",", "items", ",", "*", "*", "params", ")", "}", ")" ]
Build a time slot grid representation for the given datetime ``dt``. See utils.create_timeslot_table documentation for items and params. Context parameters: ``day`` the specified datetime value (dt) ``next_day`` day + 1 day ``prev_day`` day - 1 day ``timeslots`` time slot grid of (time, cells) rows
[ "Build", "a", "time", "slot", "grid", "representation", "for", "the", "given", "datetime", "dt", ".", "See", "utils", ".", "create_timeslot_table", "documentation", "for", "items", "and", "params", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/views.py#L171-L206
15,546
dakrauth/django-swingtime
swingtime/views.py
month_view
def month_view( request, year, month, template='swingtime/monthly_view.html', queryset=None ): ''' Render a tradional calendar grid view with temporal navigation variables. Context parameters: ``today`` the current datetime.datetime value ``calendar`` a list of rows containing (day, items) cells, where day is the day of the month integer and items is a (potentially empty) list of occurrence for the day ``this_month`` a datetime.datetime representing the first day of the month ``next_month`` this_month + 1 month ``last_month`` this_month - 1 month ''' year, month = int(year), int(month) cal = calendar.monthcalendar(year, month) dtstart = datetime(year, month, 1) last_day = max(cal[-1]) dtend = datetime(year, month, last_day) # TODO Whether to include those occurrences that started in the previous # month but end in this month? queryset = queryset._clone() if queryset is not None else Occurrence.objects.select_related() occurrences = queryset.filter(start_time__year=year, start_time__month=month) def start_day(o): return o.start_time.day by_day = dict([(dt, list(o)) for dt, o in itertools.groupby(occurrences, start_day)]) data = { 'today': datetime.now(), 'calendar': [[(d, by_day.get(d, [])) for d in row] for row in cal], 'this_month': dtstart, 'next_month': dtstart + timedelta(days=+last_day), 'last_month': dtstart + timedelta(days=-1), } return render(request, template, data)
python
def month_view( request, year, month, template='swingtime/monthly_view.html', queryset=None ): ''' Render a tradional calendar grid view with temporal navigation variables. Context parameters: ``today`` the current datetime.datetime value ``calendar`` a list of rows containing (day, items) cells, where day is the day of the month integer and items is a (potentially empty) list of occurrence for the day ``this_month`` a datetime.datetime representing the first day of the month ``next_month`` this_month + 1 month ``last_month`` this_month - 1 month ''' year, month = int(year), int(month) cal = calendar.monthcalendar(year, month) dtstart = datetime(year, month, 1) last_day = max(cal[-1]) dtend = datetime(year, month, last_day) # TODO Whether to include those occurrences that started in the previous # month but end in this month? queryset = queryset._clone() if queryset is not None else Occurrence.objects.select_related() occurrences = queryset.filter(start_time__year=year, start_time__month=month) def start_day(o): return o.start_time.day by_day = dict([(dt, list(o)) for dt, o in itertools.groupby(occurrences, start_day)]) data = { 'today': datetime.now(), 'calendar': [[(d, by_day.get(d, [])) for d in row] for row in cal], 'this_month': dtstart, 'next_month': dtstart + timedelta(days=+last_day), 'last_month': dtstart + timedelta(days=-1), } return render(request, template, data)
[ "def", "month_view", "(", "request", ",", "year", ",", "month", ",", "template", "=", "'swingtime/monthly_view.html'", ",", "queryset", "=", "None", ")", ":", "year", ",", "month", "=", "int", "(", "year", ")", ",", "int", "(", "month", ")", "cal", "=", "calendar", ".", "monthcalendar", "(", "year", ",", "month", ")", "dtstart", "=", "datetime", "(", "year", ",", "month", ",", "1", ")", "last_day", "=", "max", "(", "cal", "[", "-", "1", "]", ")", "dtend", "=", "datetime", "(", "year", ",", "month", ",", "last_day", ")", "# TODO Whether to include those occurrences that started in the previous", "# month but end in this month?", "queryset", "=", "queryset", ".", "_clone", "(", ")", "if", "queryset", "is", "not", "None", "else", "Occurrence", ".", "objects", ".", "select_related", "(", ")", "occurrences", "=", "queryset", ".", "filter", "(", "start_time__year", "=", "year", ",", "start_time__month", "=", "month", ")", "def", "start_day", "(", "o", ")", ":", "return", "o", ".", "start_time", ".", "day", "by_day", "=", "dict", "(", "[", "(", "dt", ",", "list", "(", "o", ")", ")", "for", "dt", ",", "o", "in", "itertools", ".", "groupby", "(", "occurrences", ",", "start_day", ")", "]", ")", "data", "=", "{", "'today'", ":", "datetime", ".", "now", "(", ")", ",", "'calendar'", ":", "[", "[", "(", "d", ",", "by_day", ".", "get", "(", "d", ",", "[", "]", ")", ")", "for", "d", "in", "row", "]", "for", "row", "in", "cal", "]", ",", "'this_month'", ":", "dtstart", ",", "'next_month'", ":", "dtstart", "+", "timedelta", "(", "days", "=", "+", "last_day", ")", ",", "'last_month'", ":", "dtstart", "+", "timedelta", "(", "days", "=", "-", "1", ")", ",", "}", "return", "render", "(", "request", ",", "template", ",", "data", ")" ]
Render a tradional calendar grid view with temporal navigation variables. Context parameters: ``today`` the current datetime.datetime value ``calendar`` a list of rows containing (day, items) cells, where day is the day of the month integer and items is a (potentially empty) list of occurrence for the day ``this_month`` a datetime.datetime representing the first day of the month ``next_month`` this_month + 1 month ``last_month`` this_month - 1 month
[ "Render", "a", "tradional", "calendar", "grid", "view", "with", "temporal", "navigation", "variables", "." ]
d1cdd449bd5c6895c3ff182fd890c4d3452943fe
https://github.com/dakrauth/django-swingtime/blob/d1cdd449bd5c6895c3ff182fd890c4d3452943fe/swingtime/views.py#L270-L323
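A Django-free sketch of the grid construction in month_view; the occurrence list stands in for the queryset and must already be sorted by day, as itertools.groupby assumes.

import calendar
import itertools

year, month = 2019, 3
cal = calendar.monthcalendar(year, month)       # weeks of day numbers; 0 marks padding cells

occurrences = [(1, "kickoff"), (1, "review"), (15, "retro")]   # (day, label), sorted by day
by_day = {day: [label for _, label in group]
          for day, group in itertools.groupby(occurrences, key=lambda o: o[0])}

grid = [[(d, by_day.get(d, [])) for d in week] for week in cal]
# grid[0] is the first calendar week; each cell is (day_number, [labels]).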
15,547
p1c2u/openapi-core
openapi_core/schema/schemas/models.py
Schema.cast
def cast(self, value, custom_formatters=None, strict=True): """Cast value to schema type""" if value is None: if not self.nullable: raise InvalidSchemaValue("Null value for non-nullable schema", value, self.type) return self.default cast_mapping = self.get_cast_mapping( custom_formatters=custom_formatters, strict=strict) if self.type is not SchemaType.STRING and value == '': return None cast_callable = cast_mapping[self.type] try: return cast_callable(value) except ValueError: raise InvalidSchemaValue( "Failed to cast value {value} to type {type}", value, self.type)
python
def cast(self, value, custom_formatters=None, strict=True): """Cast value to schema type""" if value is None: if not self.nullable: raise InvalidSchemaValue("Null value for non-nullable schema", value, self.type) return self.default cast_mapping = self.get_cast_mapping( custom_formatters=custom_formatters, strict=strict) if self.type is not SchemaType.STRING and value == '': return None cast_callable = cast_mapping[self.type] try: return cast_callable(value) except ValueError: raise InvalidSchemaValue( "Failed to cast value {value} to type {type}", value, self.type)
[ "def", "cast", "(", "self", ",", "value", ",", "custom_formatters", "=", "None", ",", "strict", "=", "True", ")", ":", "if", "value", "is", "None", ":", "if", "not", "self", ".", "nullable", ":", "raise", "InvalidSchemaValue", "(", "\"Null value for non-nullable schema\"", ",", "value", ",", "self", ".", "type", ")", "return", "self", ".", "default", "cast_mapping", "=", "self", ".", "get_cast_mapping", "(", "custom_formatters", "=", "custom_formatters", ",", "strict", "=", "strict", ")", "if", "self", ".", "type", "is", "not", "SchemaType", ".", "STRING", "and", "value", "==", "''", ":", "return", "None", "cast_callable", "=", "cast_mapping", "[", "self", ".", "type", "]", "try", ":", "return", "cast_callable", "(", "value", ")", "except", "ValueError", ":", "raise", "InvalidSchemaValue", "(", "\"Failed to cast value {value} to type {type}\"", ",", "value", ",", "self", ".", "type", ")" ]
Cast value to schema type
[ "Cast", "value", "to", "schema", "type" ]
f274836c4dd45729b1634aff8758c63323173947
https://github.com/p1c2u/openapi-core/blob/f274836c4dd45729b1634aff8758c63323173947/openapi_core/schema/schemas/models.py#L171-L189
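Schema.cast dispatches on the schema type through a mapping of cast callables. A simplified standalone sketch of that dispatch, with a hand-rolled mapping standing in for openapi-core's get_cast_mapping (the names and table below are illustrative, not the library's real ones):

from enum import Enum

class SchemaType(Enum):
    INTEGER = "integer"
    NUMBER = "number"
    BOOLEAN = "boolean"
    STRING = "string"

class InvalidSchemaValue(ValueError):
    pass

# Illustrative cast table; the real library builds this per schema instance.
CAST_MAPPING = {
    SchemaType.INTEGER: int,
    SchemaType.NUMBER: float,
    SchemaType.BOOLEAN: lambda v: str(v).lower() in ("1", "true"),
    SchemaType.STRING: str,
}

def cast(value, schema_type, nullable=False, default=None):
    if value is None:
        if not nullable:
            raise InvalidSchemaValue("Null value for non-nullable schema")
        return default
    if schema_type is not SchemaType.STRING and value == "":
        return None
    try:
        return CAST_MAPPING[schema_type](value)
    except ValueError:
        raise InvalidSchemaValue("Failed to cast value %r to type %s" % (value, schema_type))

print(cast("42", SchemaType.INTEGER))   # 42
print(cast("", SchemaType.NUMBER))      # None (empty string on a non-string type)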
15,548
p1c2u/openapi-core
openapi_core/schema/schemas/models.py
Schema.unmarshal
def unmarshal(self, value, custom_formatters=None, strict=True): """Unmarshal parameter from the value.""" if self.deprecated: warnings.warn("The schema is deprecated", DeprecationWarning) casted = self.cast(value, custom_formatters=custom_formatters, strict=strict) if casted is None and not self.required: return None if self.enum and casted not in self.enum: raise InvalidSchemaValue( "Value {value} not in enum choices: {type}", value, self.enum) return casted
python
def unmarshal(self, value, custom_formatters=None, strict=True): """Unmarshal parameter from the value.""" if self.deprecated: warnings.warn("The schema is deprecated", DeprecationWarning) casted = self.cast(value, custom_formatters=custom_formatters, strict=strict) if casted is None and not self.required: return None if self.enum and casted not in self.enum: raise InvalidSchemaValue( "Value {value} not in enum choices: {type}", value, self.enum) return casted
[ "def", "unmarshal", "(", "self", ",", "value", ",", "custom_formatters", "=", "None", ",", "strict", "=", "True", ")", ":", "if", "self", ".", "deprecated", ":", "warnings", ".", "warn", "(", "\"The schema is deprecated\"", ",", "DeprecationWarning", ")", "casted", "=", "self", ".", "cast", "(", "value", ",", "custom_formatters", "=", "custom_formatters", ",", "strict", "=", "strict", ")", "if", "casted", "is", "None", "and", "not", "self", ".", "required", ":", "return", "None", "if", "self", ".", "enum", "and", "casted", "not", "in", "self", ".", "enum", ":", "raise", "InvalidSchemaValue", "(", "\"Value {value} not in enum choices: {type}\"", ",", "value", ",", "self", ".", "enum", ")", "return", "casted" ]
Unmarshal parameter from the value.
[ "Unmarshal", "parameter", "from", "the", "value", "." ]
f274836c4dd45729b1634aff8758c63323173947
https://github.com/p1c2u/openapi-core/blob/f274836c4dd45729b1634aff8758c63323173947/openapi_core/schema/schemas/models.py#L191-L205
15,549
p1c2u/openapi-core
openapi_core/validation/util.py
get_operation_pattern
def get_operation_pattern(server_url, request_url_pattern): """Return an updated request URL pattern with the server URL removed.""" if server_url[-1] == "/": # operations have to start with a slash, so do not remove it server_url = server_url[:-1] if is_absolute(server_url): return request_url_pattern.replace(server_url, "", 1) return path_qs(request_url_pattern).replace(server_url, "", 1)
python
def get_operation_pattern(server_url, request_url_pattern): """Return an updated request URL pattern with the server URL removed.""" if server_url[-1] == "/": # operations have to start with a slash, so do not remove it server_url = server_url[:-1] if is_absolute(server_url): return request_url_pattern.replace(server_url, "", 1) return path_qs(request_url_pattern).replace(server_url, "", 1)
[ "def", "get_operation_pattern", "(", "server_url", ",", "request_url_pattern", ")", ":", "if", "server_url", "[", "-", "1", "]", "==", "\"/\"", ":", "# operations have to start with a slash, so do not remove it", "server_url", "=", "server_url", "[", ":", "-", "1", "]", "if", "is_absolute", "(", "server_url", ")", ":", "return", "request_url_pattern", ".", "replace", "(", "server_url", ",", "\"\"", ",", "1", ")", "return", "path_qs", "(", "request_url_pattern", ")", ".", "replace", "(", "server_url", ",", "\"\"", ",", "1", ")" ]
Return an updated request URL pattern with the server URL removed.
[ "Return", "an", "updated", "request", "URL", "pattern", "with", "the", "server", "URL", "removed", "." ]
f274836c4dd45729b1634aff8758c63323173947
https://github.com/p1c2u/openapi-core/blob/f274836c4dd45729b1634aff8758c63323173947/openapi_core/validation/util.py#L17-L24
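get_operation_pattern strips the configured server URL from the request URL so the remainder can be matched against the spec's path patterns. A rough standalone illustration, with urllib.parse-based stand-ins for the library's is_absolute and path_qs helpers (their behavior here is an assumption for the sketch):

from urllib.parse import urlparse

def is_absolute(url):
    # Treat a URL that carries an http/https scheme as absolute.
    return urlparse(url).scheme in ("http", "https")

def path_qs(url):
    parsed = urlparse(url)
    return parsed.path + ("?" + parsed.query if parsed.query else "")

def get_operation_pattern(server_url, request_url_pattern):
    if server_url.endswith("/"):
        server_url = server_url[:-1]   # operations have to start with a slash
    if is_absolute(server_url):
        return request_url_pattern.replace(server_url, "", 1)
    return path_qs(request_url_pattern).replace(server_url, "", 1)

print(get_operation_pattern("https://api.example.com/v1",
                            "https://api.example.com/v1/pets/{petId}"))   # /pets/{petId}
print(get_operation_pattern("/v1",
                            "https://api.example.com/v1/pets/{petId}"))   # /pets/{petId}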
15,550
nesaro/pydsl
pydsl/check.py
check
def check(definition, data, *args, **kwargs): """Checks if the input follows the definition""" checker = checker_factory(definition) return checker(data, *args, **kwargs)
python
def check(definition, data, *args, **kwargs): """Checks if the input follows the definition""" checker = checker_factory(definition) return checker(data, *args, **kwargs)
[ "def", "check", "(", "definition", ",", "data", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "checker", "=", "checker_factory", "(", "definition", ")", "return", "checker", "(", "data", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Checks if the input follows the definition
[ "Checks", "if", "the", "input", "follows", "the", "definition" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/check.py#L29-L32
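check is a thin wrapper: checker_factory maps a grammar definition to a checker callable, which is then applied to the data. A minimal sketch of that factory-dispatch pattern with made-up definition classes (not pydsl's real ones):

import re

class RegularExpression:
    def __init__(self, pattern):
        self.regexp = re.compile(pattern)

class StringDefinition:
    def __init__(self, string):
        self.string = string

def checker_factory(definition):
    # Pick a checker callable based on the definition type.
    if isinstance(definition, RegularExpression):
        return lambda data: bool(definition.regexp.match("".join(str(x) for x in data)))
    if isinstance(definition, StringDefinition):
        return lambda data: "".join(str(x) for x in data) == definition.string
    raise TypeError(definition)

def check(definition, data):
    return checker_factory(definition)(data)

print(check(RegularExpression(r"[0-9]+$"), "2024"))     # True
print(check(StringDefinition("true"), list("true")))    # True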
15,551
nesaro/pydsl
pydsl/check.py
RegularExpressionChecker.check
def check(self, data): """returns True if any match any regexp""" if isinstance(data, Iterable): data = "".join(str(x) for x in data) try: data = str(data) except UnicodeDecodeError: return False return bool(data and self.__regexp.match(data))
python
def check(self, data): """returns True if any match any regexp""" if isinstance(data, Iterable): data = "".join(str(x) for x in data) try: data = str(data) except UnicodeDecodeError: return False return bool(data and self.__regexp.match(data))
[ "def", "check", "(", "self", ",", "data", ")", ":", "if", "isinstance", "(", "data", ",", "Iterable", ")", ":", "data", "=", "\"\"", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "data", ")", "try", ":", "data", "=", "str", "(", "data", ")", "except", "UnicodeDecodeError", ":", "return", "False", "return", "bool", "(", "data", "and", "self", ".", "__regexp", ".", "match", "(", "data", ")", ")" ]
returns True if the data matches the regexp
[ "returns", "True", "if", "the", "data", "matches", "the", "regexp" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/check.py#L93-L101
15,552
nesaro/pydsl
pydsl/parser/LR0.py
_build_item_closure
def _build_item_closure(itemset, productionset): """Build input itemset closure """ #For every item inside current itemset, if we have the following rule: # xxx <cursor><nonterminalSymbol> xxx append every rule from self._productionruleset that begins with that NonTerminalSymbol if not isinstance(itemset, LR0ItemSet): raise TypeError import copy resultset = copy.copy(itemset) changed = True while changed: changed = False for currentitem in resultset.itemlist: nextsymbol = currentitem.next_symbol() if nextsymbol is None: break for rule in productionset.productions: newitem = LR0Item(rule) if rule.leftside[0] == nextsymbol and newitem not in resultset.itemlist: resultset.append_item(newitem) changed = True return resultset
python
def _build_item_closure(itemset, productionset): """Build input itemset closure """ #For every item inside current itemset, if we have the following rule: # xxx <cursor><nonterminalSymbol> xxx append every rule from self._productionruleset that begins with that NonTerminalSymbol if not isinstance(itemset, LR0ItemSet): raise TypeError import copy resultset = copy.copy(itemset) changed = True while changed: changed = False for currentitem in resultset.itemlist: nextsymbol = currentitem.next_symbol() if nextsymbol is None: break for rule in productionset.productions: newitem = LR0Item(rule) if rule.leftside[0] == nextsymbol and newitem not in resultset.itemlist: resultset.append_item(newitem) changed = True return resultset
[ "def", "_build_item_closure", "(", "itemset", ",", "productionset", ")", ":", "#For every item inside current itemset, if we have the following rule:", "# xxx <cursor><nonterminalSymbol> xxx append every rule from self._productionruleset that begins with that NonTerminalSymbol", "if", "not", "isinstance", "(", "itemset", ",", "LR0ItemSet", ")", ":", "raise", "TypeError", "import", "copy", "resultset", "=", "copy", ".", "copy", "(", "itemset", ")", "changed", "=", "True", "while", "changed", ":", "changed", "=", "False", "for", "currentitem", "in", "resultset", ".", "itemlist", ":", "nextsymbol", "=", "currentitem", ".", "next_symbol", "(", ")", "if", "nextsymbol", "is", "None", ":", "break", "for", "rule", "in", "productionset", ".", "productions", ":", "newitem", "=", "LR0Item", "(", "rule", ")", "if", "rule", ".", "leftside", "[", "0", "]", "==", "nextsymbol", "and", "newitem", "not", "in", "resultset", ".", "itemlist", ":", "resultset", ".", "append_item", "(", "newitem", ")", "changed", "=", "True", "return", "resultset" ]
Build input itemset closure
[ "Build", "input", "itemset", "closure" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/parser/LR0.py#L34-L54
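_build_item_closure is a classic fixpoint computation: keep adding items for non-terminals that appear just after the cursor until nothing changes. A standalone sketch on a toy grammar, using plain tuples instead of pydsl's LR0Item/LR0ItemSet:

# An item is (lhs, rhs_tuple, dot_position); the grammar maps non-terminals to alternatives.
GRAMMAR = {
    "S": [("E",)],
    "E": [("E", "+", "n"), ("n",)],
}

def next_symbol(item):
    lhs, rhs, dot = item
    return rhs[dot] if dot < len(rhs) else None

def closure(items):
    result = set(items)
    changed = True
    while changed:
        changed = False
        for item in list(result):
            symbol = next_symbol(item)
            # Only non-terminals right after the dot contribute new items.
            for rhs in GRAMMAR.get(symbol, []):
                new_item = (symbol, rhs, 0)
                if new_item not in result:
                    result.add(new_item)
                    changed = True
    return result

for item in sorted(closure({("S", ("E",), 0)})):
    print(item)
# ('E', ('E', '+', 'n'), 0), ('E', ('n',), 0), ('S', ('E',), 0)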
15,553
nesaro/pydsl
pydsl/parser/LR0.py
item_set_goto
def item_set_goto(itemset, inputsymbol, productionset): """returns an itemset locate inside itemset every element with inputsymbol following cursor for every located item, append its itemclosure""" resultset = LR0ItemSet() for item in itemset.itemlist: if item.next_symbol() == inputsymbol: newitem = LR0Item(item.rule, item.position + 1) resultset.append_item(newitem) return _build_item_closure(resultset, productionset)
python
def item_set_goto(itemset, inputsymbol, productionset): """returns an itemset locate inside itemset every element with inputsymbol following cursor for every located item, append its itemclosure""" resultset = LR0ItemSet() for item in itemset.itemlist: if item.next_symbol() == inputsymbol: newitem = LR0Item(item.rule, item.position + 1) resultset.append_item(newitem) return _build_item_closure(resultset, productionset)
[ "def", "item_set_goto", "(", "itemset", ",", "inputsymbol", ",", "productionset", ")", ":", "resultset", "=", "LR0ItemSet", "(", ")", "for", "item", "in", "itemset", ".", "itemlist", ":", "if", "item", ".", "next_symbol", "(", ")", "==", "inputsymbol", ":", "newitem", "=", "LR0Item", "(", "item", ".", "rule", ",", "item", ".", "position", "+", "1", ")", "resultset", ".", "append_item", "(", "newitem", ")", "return", "_build_item_closure", "(", "resultset", ",", "productionset", ")" ]
returns an itemset: locates inside the itemset every element with inputsymbol following the cursor; for every located item, appends its item closure
[ "returns", "an", "itemset", "locates", "inside", "the", "itemset", "every", "element", "with", "inputsymbol", "following", "the", "cursor", "for", "every", "located", "item", "appends", "its", "item", "closure" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/parser/LR0.py#L56-L65
15,554
nesaro/pydsl
pydsl/parser/LR0.py
_slr_build_parser_table
def _slr_build_parser_table(productionset): """SLR method to build parser table""" result = ParserTable() statesset = build_states_sets(productionset) for itemindex, itemset in enumerate(statesset): LOG.debug("_slr_build_parser_table: Evaluating itemset:" + str(itemset)) for symbol in productionset.getSymbols() + [EndSymbol()]: numberoptions = 0 for lritem in itemset.itemlist: #if cursor is before a terminal, and there is a transition to another itemset with the following terminal, append shift rule if isinstance(symbol, TerminalSymbol) and lritem.next_symbol() == symbol and itemset.has_transition(symbol): destinationstate = statesset.index(itemset.get_transition(symbol)) result.append(itemindex, symbol, "Shift", destinationstate) numberoptions += 1 if isinstance(symbol, NonTerminalSymbol) and lritem.next_symbol() == symbol and itemset.has_transition(symbol): destinationstate = statesset.index(itemset.get_transition(symbol)) result.append_goto(itemindex, symbol, destinationstate) #if cursor is at the end of the rule, then append reduce rule and go transition if lritem.previous_symbol() == symbol and lritem.is_last_position() and symbol != Extended_S: for x in productionset.next_lookup(symbol): if isinstance(x, Grammar): result.append(itemindex, TerminalSymbol(x), "Reduce", None, lritem.rule) elif isinstance(x, Symbol): result.append(itemindex, x, "Reduce", None, lritem.rule) else: raise TypeError(x) numberoptions += 1 #if cursor is at the end of main rule, and current symbol is end, then append accept rule if symbol == EndSymbol() and lritem.previous_symbol() == productionset.initialsymbol and lritem.next_symbol() == EndSymbol(): result.append(itemindex, symbol, "Accept", None) numberoptions += 1 if not numberoptions: LOG.info("No rule found to generate a new parsertable entry ") LOG.debug("symbol: " + str(symbol)) LOG.debug("itemset: " + str(itemset)) elif numberoptions > 1: #FIXME can it count duplicated entries? raise Exception("LR Conflict %s" % symbol) return result
python
def _slr_build_parser_table(productionset): """SLR method to build parser table""" result = ParserTable() statesset = build_states_sets(productionset) for itemindex, itemset in enumerate(statesset): LOG.debug("_slr_build_parser_table: Evaluating itemset:" + str(itemset)) for symbol in productionset.getSymbols() + [EndSymbol()]: numberoptions = 0 for lritem in itemset.itemlist: #if cursor is before a terminal, and there is a transition to another itemset with the following terminal, append shift rule if isinstance(symbol, TerminalSymbol) and lritem.next_symbol() == symbol and itemset.has_transition(symbol): destinationstate = statesset.index(itemset.get_transition(symbol)) result.append(itemindex, symbol, "Shift", destinationstate) numberoptions += 1 if isinstance(symbol, NonTerminalSymbol) and lritem.next_symbol() == symbol and itemset.has_transition(symbol): destinationstate = statesset.index(itemset.get_transition(symbol)) result.append_goto(itemindex, symbol, destinationstate) #if cursor is at the end of the rule, then append reduce rule and go transition if lritem.previous_symbol() == symbol and lritem.is_last_position() and symbol != Extended_S: for x in productionset.next_lookup(symbol): if isinstance(x, Grammar): result.append(itemindex, TerminalSymbol(x), "Reduce", None, lritem.rule) elif isinstance(x, Symbol): result.append(itemindex, x, "Reduce", None, lritem.rule) else: raise TypeError(x) numberoptions += 1 #if cursor is at the end of main rule, and current symbol is end, then append accept rule if symbol == EndSymbol() and lritem.previous_symbol() == productionset.initialsymbol and lritem.next_symbol() == EndSymbol(): result.append(itemindex, symbol, "Accept", None) numberoptions += 1 if not numberoptions: LOG.info("No rule found to generate a new parsertable entry ") LOG.debug("symbol: " + str(symbol)) LOG.debug("itemset: " + str(itemset)) elif numberoptions > 1: #FIXME can it count duplicated entries? raise Exception("LR Conflict %s" % symbol) return result
[ "def", "_slr_build_parser_table", "(", "productionset", ")", ":", "result", "=", "ParserTable", "(", ")", "statesset", "=", "build_states_sets", "(", "productionset", ")", "for", "itemindex", ",", "itemset", "in", "enumerate", "(", "statesset", ")", ":", "LOG", ".", "debug", "(", "\"_slr_build_parser_table: Evaluating itemset:\"", "+", "str", "(", "itemset", ")", ")", "for", "symbol", "in", "productionset", ".", "getSymbols", "(", ")", "+", "[", "EndSymbol", "(", ")", "]", ":", "numberoptions", "=", "0", "for", "lritem", "in", "itemset", ".", "itemlist", ":", "#if cursor is before a terminal, and there is a transition to another itemset with the following terminal, append shift rule", "if", "isinstance", "(", "symbol", ",", "TerminalSymbol", ")", "and", "lritem", ".", "next_symbol", "(", ")", "==", "symbol", "and", "itemset", ".", "has_transition", "(", "symbol", ")", ":", "destinationstate", "=", "statesset", ".", "index", "(", "itemset", ".", "get_transition", "(", "symbol", ")", ")", "result", ".", "append", "(", "itemindex", ",", "symbol", ",", "\"Shift\"", ",", "destinationstate", ")", "numberoptions", "+=", "1", "if", "isinstance", "(", "symbol", ",", "NonTerminalSymbol", ")", "and", "lritem", ".", "next_symbol", "(", ")", "==", "symbol", "and", "itemset", ".", "has_transition", "(", "symbol", ")", ":", "destinationstate", "=", "statesset", ".", "index", "(", "itemset", ".", "get_transition", "(", "symbol", ")", ")", "result", ".", "append_goto", "(", "itemindex", ",", "symbol", ",", "destinationstate", ")", "#if cursor is at the end of the rule, then append reduce rule and go transition", "if", "lritem", ".", "previous_symbol", "(", ")", "==", "symbol", "and", "lritem", ".", "is_last_position", "(", ")", "and", "symbol", "!=", "Extended_S", ":", "for", "x", "in", "productionset", ".", "next_lookup", "(", "symbol", ")", ":", "if", "isinstance", "(", "x", ",", "Grammar", ")", ":", "result", ".", "append", "(", "itemindex", ",", "TerminalSymbol", "(", "x", ")", ",", "\"Reduce\"", ",", "None", ",", "lritem", ".", "rule", ")", "elif", "isinstance", "(", "x", ",", "Symbol", ")", ":", "result", ".", "append", "(", "itemindex", ",", "x", ",", "\"Reduce\"", ",", "None", ",", "lritem", ".", "rule", ")", "else", ":", "raise", "TypeError", "(", "x", ")", "numberoptions", "+=", "1", "#if cursor is at the end of main rule, and current symbol is end, then append accept rule", "if", "symbol", "==", "EndSymbol", "(", ")", "and", "lritem", ".", "previous_symbol", "(", ")", "==", "productionset", ".", "initialsymbol", "and", "lritem", ".", "next_symbol", "(", ")", "==", "EndSymbol", "(", ")", ":", "result", ".", "append", "(", "itemindex", ",", "symbol", ",", "\"Accept\"", ",", "None", ")", "numberoptions", "+=", "1", "if", "not", "numberoptions", ":", "LOG", ".", "info", "(", "\"No rule found to generate a new parsertable entry \"", ")", "LOG", ".", "debug", "(", "\"symbol: \"", "+", "str", "(", "symbol", ")", ")", "LOG", ".", "debug", "(", "\"itemset: \"", "+", "str", "(", "itemset", ")", ")", "elif", "numberoptions", ">", "1", ":", "#FIXME can it count duplicated entries?", "raise", "Exception", "(", "\"LR Conflict %s\"", "%", "symbol", ")", "return", "result" ]
SLR method to build parser table
[ "SLR", "method", "to", "build", "parser", "table" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/parser/LR0.py#L97-L134
15,555
nesaro/pydsl
pydsl/parser/LR0.py
ParserTable.append
def append(self, state, symbol, action, destinationstate, production = None): """Appends a new rule""" if action not in (None, "Accept", "Shift", "Reduce"): raise TypeError rule = {"action":action, "dest":destinationstate} if action == "Reduce": if rule is None: raise TypeError("Expected production parameter") rule["rule"] = production while isinstance(symbol, TerminalSymbol) and isinstance(symbol.gd, Iterable) and len(symbol.gd) == 1 and isinstance(list(symbol.gd)[0], Grammar): symbol = TerminalSymbol(list(symbol.gd)[0]) #Reduces symbol if its gd is a Sequence/Choice of 1 element if not isinstance(symbol, Symbol): raise TypeError("Expected symbol, got %s" % symbol) self[state][symbol] = rule
python
def append(self, state, symbol, action, destinationstate, production = None): """Appends a new rule""" if action not in (None, "Accept", "Shift", "Reduce"): raise TypeError rule = {"action":action, "dest":destinationstate} if action == "Reduce": if rule is None: raise TypeError("Expected production parameter") rule["rule"] = production while isinstance(symbol, TerminalSymbol) and isinstance(symbol.gd, Iterable) and len(symbol.gd) == 1 and isinstance(list(symbol.gd)[0], Grammar): symbol = TerminalSymbol(list(symbol.gd)[0]) #Reduces symbol if its gd is a Sequence/Choice of 1 element if not isinstance(symbol, Symbol): raise TypeError("Expected symbol, got %s" % symbol) self[state][symbol] = rule
[ "def", "append", "(", "self", ",", "state", ",", "symbol", ",", "action", ",", "destinationstate", ",", "production", "=", "None", ")", ":", "if", "action", "not", "in", "(", "None", ",", "\"Accept\"", ",", "\"Shift\"", ",", "\"Reduce\"", ")", ":", "raise", "TypeError", "rule", "=", "{", "\"action\"", ":", "action", ",", "\"dest\"", ":", "destinationstate", "}", "if", "action", "==", "\"Reduce\"", ":", "if", "rule", "is", "None", ":", "raise", "TypeError", "(", "\"Expected production parameter\"", ")", "rule", "[", "\"rule\"", "]", "=", "production", "while", "isinstance", "(", "symbol", ",", "TerminalSymbol", ")", "and", "isinstance", "(", "symbol", ".", "gd", ",", "Iterable", ")", "and", "len", "(", "symbol", ".", "gd", ")", "==", "1", "and", "isinstance", "(", "list", "(", "symbol", ".", "gd", ")", "[", "0", "]", ",", "Grammar", ")", ":", "symbol", "=", "TerminalSymbol", "(", "list", "(", "symbol", ".", "gd", ")", "[", "0", "]", ")", "#Reduces symbol if its gd is a Sequence/Choice of 1 element", "if", "not", "isinstance", "(", "symbol", ",", "Symbol", ")", ":", "raise", "TypeError", "(", "\"Expected symbol, got %s\"", "%", "symbol", ")", "self", "[", "state", "]", "[", "symbol", "]", "=", "rule" ]
Appends a new rule
[ "Appends", "a", "new", "rule" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/parser/LR0.py#L141-L154
15,556
nesaro/pydsl
pydsl/parser/LR0.py
ParserTable.insert
def insert(self, state, token): """change internal state, return action""" if token == EndSymbol(): return self[state][EndSymbol()] from pydsl.check import check symbol_list = [x for x in self[state] if isinstance(x, TerminalSymbol) and check(x.gd, [token])] if not symbol_list: return {"action":"Fail"} if len(symbol_list) > 1: raise Exception("Multiple symbols matches input") symbol = symbol_list[0] return self[state][symbol]
python
def insert(self, state, token): """change internal state, return action""" if token == EndSymbol(): return self[state][EndSymbol()] from pydsl.check import check symbol_list = [x for x in self[state] if isinstance(x, TerminalSymbol) and check(x.gd, [token])] if not symbol_list: return {"action":"Fail"} if len(symbol_list) > 1: raise Exception("Multiple symbols matches input") symbol = symbol_list[0] return self[state][symbol]
[ "def", "insert", "(", "self", ",", "state", ",", "token", ")", ":", "if", "token", "==", "EndSymbol", "(", ")", ":", "return", "self", "[", "state", "]", "[", "EndSymbol", "(", ")", "]", "from", "pydsl", ".", "check", "import", "check", "symbol_list", "=", "[", "x", "for", "x", "in", "self", "[", "state", "]", "if", "isinstance", "(", "x", ",", "TerminalSymbol", ")", "and", "check", "(", "x", ".", "gd", ",", "[", "token", "]", ")", "]", "if", "not", "symbol_list", ":", "return", "{", "\"action\"", ":", "\"Fail\"", "}", "if", "len", "(", "symbol_list", ")", ">", "1", ":", "raise", "Exception", "(", "\"Multiple symbols matches input\"", ")", "symbol", "=", "symbol_list", "[", "0", "]", "return", "self", "[", "state", "]", "[", "symbol", "]" ]
change internal state, return action
[ "change", "internal", "state", "return", "action" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/parser/LR0.py#L164-L175
15,557
nesaro/pydsl
pydsl/parser/LR0.py
LR0ItemSet.append_item
def append_item(self, item): """Append new item to set""" if not isinstance(item, LR0Item): raise TypeError self.itemlist.append(item)
python
def append_item(self, item): """Append new item to set""" if not isinstance(item, LR0Item): raise TypeError self.itemlist.append(item)
[ "def", "append_item", "(", "self", ",", "item", ")", ":", "if", "not", "isinstance", "(", "item", ",", "LR0Item", ")", ":", "raise", "TypeError", "self", ".", "itemlist", ".", "append", "(", "item", ")" ]
Append new item to set
[ "Append", "new", "item", "to", "set" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/parser/LR0.py#L248-L252
15,558
nesaro/pydsl
pydsl/parser/LR0.py
LR0ItemSet.append_transition
def append_transition(self, symbol, targetset): """Appends a transition""" if symbol in self.transitions: return self.transitions[symbol] = targetset
python
def append_transition(self, symbol, targetset): """Appends a transition""" if symbol in self.transitions: return self.transitions[symbol] = targetset
[ "def", "append_transition", "(", "self", ",", "symbol", ",", "targetset", ")", ":", "if", "symbol", "in", "self", ".", "transitions", ":", "return", "self", ".", "transitions", "[", "symbol", "]", "=", "targetset" ]
Appends a transition
[ "Appends", "a", "transition" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/parser/LR0.py#L254-L258
15,559
nesaro/pydsl
pydsl/parser/LR0.py
LR0Parser.__parse
def __parse(self, tokenlist): """see parent docstring""" #empty stack #iterate over symbollist tokenlist = [x for x in tokenlist] if not isinstance(tokenlist, list): raise TypeError("Expected list, got %s" % tokenlist.__class__.__name__) LOG.debug("get_trees: checking list: " + str(tokenlist)) stack = [(0, Extended_S)] while True: state = stack[-1][0] if len(tokenlist):#FIXME: tokenlist with one element is reported as false token = tokenlist[0] else: token = EndSymbol() newdic = self.__parsertable.insert(state, token) action = newdic["action"] if action == "Fail": return False elif action == "Accept": return True if action == "Reduce": reductionrule = newdic["rule"] #TODO extract len(right side) of the rule and insert left side for rsymbol in reversed(reductionrule.rightside): state, symbol = stack.pop() # TODO: check state = stack[-1][0] state = self.__parsertable.goto(state,reductionrule.leftside[0]) stack.append((state, reductionrule.leftside[0])) elif action == "Shift": stack.append((newdic['dest'], tokenlist.pop(0))) else: raise ValueError("Unknown action") return False
python
def __parse(self, tokenlist): """see parent docstring""" #empty stack #iterate over symbollist tokenlist = [x for x in tokenlist] if not isinstance(tokenlist, list): raise TypeError("Expected list, got %s" % tokenlist.__class__.__name__) LOG.debug("get_trees: checking list: " + str(tokenlist)) stack = [(0, Extended_S)] while True: state = stack[-1][0] if len(tokenlist):#FIXME: tokenlist with one element is reported as false token = tokenlist[0] else: token = EndSymbol() newdic = self.__parsertable.insert(state, token) action = newdic["action"] if action == "Fail": return False elif action == "Accept": return True if action == "Reduce": reductionrule = newdic["rule"] #TODO extract len(right side) of the rule and insert left side for rsymbol in reversed(reductionrule.rightside): state, symbol = stack.pop() # TODO: check state = stack[-1][0] state = self.__parsertable.goto(state,reductionrule.leftside[0]) stack.append((state, reductionrule.leftside[0])) elif action == "Shift": stack.append((newdic['dest'], tokenlist.pop(0))) else: raise ValueError("Unknown action") return False
[ "def", "__parse", "(", "self", ",", "tokenlist", ")", ":", "#empty stack", "#iterate over symbollist", "tokenlist", "=", "[", "x", "for", "x", "in", "tokenlist", "]", "if", "not", "isinstance", "(", "tokenlist", ",", "list", ")", ":", "raise", "TypeError", "(", "\"Expected list, got %s\"", "%", "tokenlist", ".", "__class__", ".", "__name__", ")", "LOG", ".", "debug", "(", "\"get_trees: checking list: \"", "+", "str", "(", "tokenlist", ")", ")", "stack", "=", "[", "(", "0", ",", "Extended_S", ")", "]", "while", "True", ":", "state", "=", "stack", "[", "-", "1", "]", "[", "0", "]", "if", "len", "(", "tokenlist", ")", ":", "#FIXME: tokenlist with one element is reported as false", "token", "=", "tokenlist", "[", "0", "]", "else", ":", "token", "=", "EndSymbol", "(", ")", "newdic", "=", "self", ".", "__parsertable", ".", "insert", "(", "state", ",", "token", ")", "action", "=", "newdic", "[", "\"action\"", "]", "if", "action", "==", "\"Fail\"", ":", "return", "False", "elif", "action", "==", "\"Accept\"", ":", "return", "True", "if", "action", "==", "\"Reduce\"", ":", "reductionrule", "=", "newdic", "[", "\"rule\"", "]", "#TODO extract len(right side) of the rule and insert left side", "for", "rsymbol", "in", "reversed", "(", "reductionrule", ".", "rightside", ")", ":", "state", ",", "symbol", "=", "stack", ".", "pop", "(", ")", "# TODO: check", "state", "=", "stack", "[", "-", "1", "]", "[", "0", "]", "state", "=", "self", ".", "__parsertable", ".", "goto", "(", "state", ",", "reductionrule", ".", "leftside", "[", "0", "]", ")", "stack", ".", "append", "(", "(", "state", ",", "reductionrule", ".", "leftside", "[", "0", "]", ")", ")", "elif", "action", "==", "\"Shift\"", ":", "stack", ".", "append", "(", "(", "newdic", "[", "'dest'", "]", ",", "tokenlist", ".", "pop", "(", "0", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "\"Unknown action\"", ")", "return", "False" ]
see parent docstring
[ "see", "parent", "docstring" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/parser/LR0.py#L279-L312
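LR0Parser.__parse is a table-driven shift/reduce loop: look up (state, lookahead), then shift, reduce via GOTO, accept, or fail. The same loop with a hand-built SLR(1) table for the toy grammar S -> 'a' S | 'b' (states and table entries below were worked out by hand for the sketch, not generated by pydsl):

# ACTION maps (state, terminal) to an action; GOTO maps (state, non-terminal) to a state.
ACTION = {
    (0, "a"): ("shift", 2), (0, "b"): ("shift", 3),
    (1, "$"): ("accept", None),
    (2, "a"): ("shift", 2), (2, "b"): ("shift", 3),
    (3, "$"): ("reduce", ("S", 1)),   # S -> b
    (4, "$"): ("reduce", ("S", 2)),   # S -> a S
}
GOTO = {(0, "S"): 1, (2, "S"): 4}

def parse(tokens):
    tokens = list(tokens) + ["$"]
    stack = [0]                                   # stack of states
    while True:
        action, arg = ACTION.get((stack[-1], tokens[0]), ("fail", None))
        if action == "fail":
            return False
        if action == "accept":
            return True
        if action == "shift":
            stack.append(arg)
            tokens.pop(0)
        else:                                     # reduce: pop |rhs| states, push GOTO state
            lhs, rhs_len = arg
            del stack[len(stack) - rhs_len:]
            stack.append(GOTO[(stack[-1], lhs)])

print(parse("aab"))   # True
print(parse("aba"))   # False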
15,560
nesaro/pydsl
pydsl/lex.py
graph_from_alphabet
def graph_from_alphabet(alphabet, base): """Creates a graph that connects the base with the target through alphabets If every target is connected to any inputs, create the independent paths""" if not isinstance(alphabet, Choice): raise TypeError(alphabet.__class__.__name__) if not isinstance(base, Choice): raise TypeError(base.__class__.__name__) import networkx result = networkx.DiGraph() current_alphabet = alphabet pending_stack = set(current_alphabet) while pending_stack: current_alphabet = pending_stack.pop() if current_alphabet == base: continue if current_alphabet in base: result.add_edge(current_alphabet, base) elif isinstance(current_alphabet, Choice): for element in current_alphabet: if element in base: result.add_edge(current_alphabet, base) else: result.add_edge(current_alphabet, element) pending_stack.add(element) elif current_alphabet.alphabet: result.add_edge(current_alphabet, current_alphabet.alphabet) pending_stack.add(current_alphabet.alphabet) return result
python
def graph_from_alphabet(alphabet, base): """Creates a graph that connects the base with the target through alphabets If every target is connected to any inputs, create the independent paths""" if not isinstance(alphabet, Choice): raise TypeError(alphabet.__class__.__name__) if not isinstance(base, Choice): raise TypeError(base.__class__.__name__) import networkx result = networkx.DiGraph() current_alphabet = alphabet pending_stack = set(current_alphabet) while pending_stack: current_alphabet = pending_stack.pop() if current_alphabet == base: continue if current_alphabet in base: result.add_edge(current_alphabet, base) elif isinstance(current_alphabet, Choice): for element in current_alphabet: if element in base: result.add_edge(current_alphabet, base) else: result.add_edge(current_alphabet, element) pending_stack.add(element) elif current_alphabet.alphabet: result.add_edge(current_alphabet, current_alphabet.alphabet) pending_stack.add(current_alphabet.alphabet) return result
[ "def", "graph_from_alphabet", "(", "alphabet", ",", "base", ")", ":", "if", "not", "isinstance", "(", "alphabet", ",", "Choice", ")", ":", "raise", "TypeError", "(", "alphabet", ".", "__class__", ".", "__name__", ")", "if", "not", "isinstance", "(", "base", ",", "Choice", ")", ":", "raise", "TypeError", "(", "base", ".", "__class__", ".", "__name__", ")", "import", "networkx", "result", "=", "networkx", ".", "DiGraph", "(", ")", "current_alphabet", "=", "alphabet", "pending_stack", "=", "set", "(", "current_alphabet", ")", "while", "pending_stack", ":", "current_alphabet", "=", "pending_stack", ".", "pop", "(", ")", "if", "current_alphabet", "==", "base", ":", "continue", "if", "current_alphabet", "in", "base", ":", "result", ".", "add_edge", "(", "current_alphabet", ",", "base", ")", "elif", "isinstance", "(", "current_alphabet", ",", "Choice", ")", ":", "for", "element", "in", "current_alphabet", ":", "if", "element", "in", "base", ":", "result", ".", "add_edge", "(", "current_alphabet", ",", "base", ")", "else", ":", "result", ".", "add_edge", "(", "current_alphabet", ",", "element", ")", "pending_stack", ".", "add", "(", "element", ")", "elif", "current_alphabet", ".", "alphabet", ":", "result", ".", "add_edge", "(", "current_alphabet", ",", "current_alphabet", ".", "alphabet", ")", "pending_stack", ".", "add", "(", "current_alphabet", ".", "alphabet", ")", "return", "result" ]
Creates a graph that connects the base with the target through alphabets. If every target is connected to any inputs, create the independent paths
[ "Creates", "a", "graph", "that", "connects", "the", "base", "with", "the", "target", "through", "alphabets", "If", "every", "target", "is", "connected", "to", "any", "inputs", "create", "the", "independent", "paths" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/lex.py#L56-L84
15,561
nesaro/pydsl
pydsl/lex.py
is_subset
def is_subset(a, b): """Excluding same size""" return b.left <= a.left and b.right > a.right or b.left < a.left and b.right >= a.right
python
def is_subset(a, b): """Excluding same size""" return b.left <= a.left and b.right > a.right or b.left < a.left and b.right >= a.right
[ "def", "is_subset", "(", "a", ",", "b", ")", ":", "return", "b", ".", "left", "<=", "a", ".", "left", "and", "b", ".", "right", ">", "a", ".", "right", "or", "b", ".", "left", "<", "a", ".", "left", "and", "b", ".", "right", ">=", "a", ".", "right" ]
Excluding same size
[ "Excluding", "same", "size" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/lex.py#L141-L143
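is_subset treats the two PositionTokens as index spans and reports strict containment of a inside b (equal-sized spans excluded). A quick standalone check with a namedtuple stand-in for PositionToken:

from collections import namedtuple

Span = namedtuple("Span", "left right")

def is_subset(a, b):
    # True when span a sits strictly inside span b (same-size spans excluded).
    return (b.left <= a.left and b.right > a.right) or \
           (b.left < a.left and b.right >= a.right)

print(is_subset(Span(2, 4), Span(1, 5)))   # True  - a lies strictly inside b
print(is_subset(Span(1, 5), Span(1, 5)))   # False - equal spans are excluded
print(is_subset(Span(1, 5), Span(2, 4)))   # False - a is the larger span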
15,562
nesaro/pydsl
pydsl/lex.py
digraph_walker_backwards
def digraph_walker_backwards(graph, element, call_back): """Visits every element guaranteeing that the previous elements have been visited before""" call_back(graph, element) for predecessor in graph.predecessors(element): call_back(graph, predecessor) for predecessor in graph.predecessors(element): digraph_walker_backwards(graph, predecessor, call_back)
python
def digraph_walker_backwards(graph, element, call_back): """Visits every element guaranteeing that the previous elements have been visited before""" call_back(graph, element) for predecessor in graph.predecessors(element): call_back(graph, predecessor) for predecessor in graph.predecessors(element): digraph_walker_backwards(graph, predecessor, call_back)
[ "def", "digraph_walker_backwards", "(", "graph", ",", "element", ",", "call_back", ")", ":", "call_back", "(", "graph", ",", "element", ")", "for", "predecessor", "in", "graph", ".", "predecessors", "(", "element", ")", ":", "call_back", "(", "graph", ",", "predecessor", ")", "for", "predecessor", "in", "graph", ".", "predecessors", "(", "element", ")", ":", "digraph_walker_backwards", "(", "graph", ",", "predecessor", ",", "call_back", ")" ]
Visits every element guaranteeing that the previous elements have been visited before
[ "Visits", "every", "element", "guaranteeing", "that", "the", "previous", "elements", "have", "been", "visited", "before" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/lex.py#L185-L191
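digraph_walker_backwards follows predecessors of a networkx DiGraph recursively. A small demo of the same idea, simplified to a single callback per visit (the record's version also invokes the callback on each predecessor before recursing into it):

import networkx

def walk_backwards(graph, element, visit):
    visit(element)
    for predecessor in graph.predecessors(element):
        walk_backwards(graph, predecessor, visit)

g = networkx.DiGraph()
g.add_edges_from([("tokens", "words"), ("words", "sentence")])

order = []
walk_backwards(g, "sentence", order.append)
print(order)   # ['sentence', 'words', 'tokens']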
15,563
nesaro/pydsl
pydsl/grammar/BNF.py
BNFGrammar.first_lookup
def first_lookup(self, symbol, size=1): """ Returns a Grammar Definition with the first n terminal symbols produced by the input symbol """ if isinstance(symbol, (TerminalSymbol, NullSymbol)): return [symbol.gd] result = [] for production in self.productions: if production.leftside[0] != symbol: continue for right_symbol in production.rightside: if right_symbol == symbol: #Avoids infinite recursion break current_symbol_first = self.first_lookup(right_symbol, size) import collections from pydsl.grammar.definition import String if isinstance(current_symbol_first, collections.Iterable) and not isinstance(current_symbol_first, String): result += current_symbol_first else: result.append(current_symbol_first) if isinstance(current_symbol_first, String) or \ not isinstance(current_symbol_first, collections.Iterable) or \ (NullSymbol not in current_symbol_first): break # This element doesn't have Null in its first set so there is no need to continue if not result: raise KeyError("Symbol doesn't exist in this grammar") return Choice(result)
python
def first_lookup(self, symbol, size=1): """ Returns a Grammar Definition with the first n terminal symbols produced by the input symbol """ if isinstance(symbol, (TerminalSymbol, NullSymbol)): return [symbol.gd] result = [] for production in self.productions: if production.leftside[0] != symbol: continue for right_symbol in production.rightside: if right_symbol == symbol: #Avoids infinite recursion break current_symbol_first = self.first_lookup(right_symbol, size) import collections from pydsl.grammar.definition import String if isinstance(current_symbol_first, collections.Iterable) and not isinstance(current_symbol_first, String): result += current_symbol_first else: result.append(current_symbol_first) if isinstance(current_symbol_first, String) or \ not isinstance(current_symbol_first, collections.Iterable) or \ (NullSymbol not in current_symbol_first): break # This element doesn't have Null in its first set so there is no need to continue if not result: raise KeyError("Symbol doesn't exist in this grammar") return Choice(result)
[ "def", "first_lookup", "(", "self", ",", "symbol", ",", "size", "=", "1", ")", ":", "if", "isinstance", "(", "symbol", ",", "(", "TerminalSymbol", ",", "NullSymbol", ")", ")", ":", "return", "[", "symbol", ".", "gd", "]", "result", "=", "[", "]", "for", "production", "in", "self", ".", "productions", ":", "if", "production", ".", "leftside", "[", "0", "]", "!=", "symbol", ":", "continue", "for", "right_symbol", "in", "production", ".", "rightside", ":", "if", "right_symbol", "==", "symbol", ":", "#Avoids infinite recursion", "break", "current_symbol_first", "=", "self", ".", "first_lookup", "(", "right_symbol", ",", "size", ")", "import", "collections", "from", "pydsl", ".", "grammar", ".", "definition", "import", "String", "if", "isinstance", "(", "current_symbol_first", ",", "collections", ".", "Iterable", ")", "and", "not", "isinstance", "(", "current_symbol_first", ",", "String", ")", ":", "result", "+=", "current_symbol_first", "else", ":", "result", ".", "append", "(", "current_symbol_first", ")", "if", "isinstance", "(", "current_symbol_first", ",", "String", ")", "or", "not", "isinstance", "(", "current_symbol_first", ",", "collections", ".", "Iterable", ")", "or", "(", "NullSymbol", "not", "in", "current_symbol_first", ")", ":", "break", "# This element doesn't have Null in its first set so there is no need to continue", "if", "not", "result", ":", "raise", "KeyError", "(", "\"Symbol doesn't exist in this grammar\"", ")", "return", "Choice", "(", "result", ")" ]
Returns a Grammar Definition with the first n terminal symbols produced by the input symbol
[ "Returns", "a", "Grammar", "Definition", "with", "the", "first", "n", "terminal", "symbols", "produced", "by", "the", "input", "symbol" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/grammar/BNF.py#L96-L123
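first_lookup is essentially a FIRST-set computation: walk each alternative left to right, collecting first symbols until one is reached that cannot derive the empty string. A standalone sketch on a toy grammar, with "" standing in for pydsl's NullSymbol:

# Non-terminals map to alternatives; anything not in GRAMMAR is a terminal.
GRAMMAR = {
    "S": [("A", "b")],
    "A": [("a",), ("",)],          # A -> a | <empty>
}

def first(symbol, seen=frozenset()):
    if symbol not in GRAMMAR:              # terminal (or the empty string)
        return {symbol}
    if symbol in seen:                     # guard against infinite recursion
        return set()
    seen = seen | {symbol}
    result = set()
    for rhs in GRAMMAR[symbol]:
        for rhs_symbol in rhs:
            symbol_first = first(rhs_symbol, seen)
            result |= symbol_first - {""}
            if "" not in symbol_first:
                break                      # this symbol cannot be empty: stop here
        else:
            result.add("")                 # every symbol in the alternative can be empty
    return result

print(first("A"))   # {'a', ''}  (set order may vary)
print(first("S"))   # {'a', 'b'}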
15,564
nesaro/pydsl
pydsl/grammar/BNF.py
BNFGrammar.next_lookup
def next_lookup(self, symbol): """Returns the next TerminalSymbols produced by the input symbol within this grammar definition""" result = [] if symbol == self.initialsymbol: result.append(EndSymbol()) for production in self.productions: if symbol in production.rightside: nextindex = production.rightside.index(symbol) + 1 while nextindex < len(production.rightside): nextsymbol = production.rightside[nextindex] firstlist = self.first_lookup(nextsymbol) cleanfirstlist = Choice([x for x in firstlist if x != NullSymbol()]) result.append(cleanfirstlist) if NullSymbol() not in firstlist: break else: result += self.next_lookup(production.leftside[0]) #reached the end of the rightside return result
python
def next_lookup(self, symbol): """Returns the next TerminalSymbols produced by the input symbol within this grammar definition""" result = [] if symbol == self.initialsymbol: result.append(EndSymbol()) for production in self.productions: if symbol in production.rightside: nextindex = production.rightside.index(symbol) + 1 while nextindex < len(production.rightside): nextsymbol = production.rightside[nextindex] firstlist = self.first_lookup(nextsymbol) cleanfirstlist = Choice([x for x in firstlist if x != NullSymbol()]) result.append(cleanfirstlist) if NullSymbol() not in firstlist: break else: result += self.next_lookup(production.leftside[0]) #reached the end of the rightside return result
[ "def", "next_lookup", "(", "self", ",", "symbol", ")", ":", "result", "=", "[", "]", "if", "symbol", "==", "self", ".", "initialsymbol", ":", "result", ".", "append", "(", "EndSymbol", "(", ")", ")", "for", "production", "in", "self", ".", "productions", ":", "if", "symbol", "in", "production", ".", "rightside", ":", "nextindex", "=", "production", ".", "rightside", ".", "index", "(", "symbol", ")", "+", "1", "while", "nextindex", "<", "len", "(", "production", ".", "rightside", ")", ":", "nextsymbol", "=", "production", ".", "rightside", "[", "nextindex", "]", "firstlist", "=", "self", ".", "first_lookup", "(", "nextsymbol", ")", "cleanfirstlist", "=", "Choice", "(", "[", "x", "for", "x", "in", "firstlist", "if", "x", "!=", "NullSymbol", "(", ")", "]", ")", "result", ".", "append", "(", "cleanfirstlist", ")", "if", "NullSymbol", "(", ")", "not", "in", "firstlist", ":", "break", "else", ":", "result", "+=", "self", ".", "next_lookup", "(", "production", ".", "leftside", "[", "0", "]", ")", "#reached the end of the rightside", "return", "result" ]
Returns the next TerminalSymbols produced by the input symbol within this grammar definition
[ "Returns", "the", "next", "TerminalSymbols", "produced", "by", "the", "input", "symbol", "within", "this", "grammar", "definition" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/grammar/BNF.py#L125-L142
15,565
nesaro/pydsl
pydsl/grammar/BNF.py
BNFGrammar.main_production
def main_production(self): """Returns main rule""" for rule in self.productions: if rule.leftside[0] == self._initialsymbol: return rule raise IndexError
python
def main_production(self): """Returns main rule""" for rule in self.productions: if rule.leftside[0] == self._initialsymbol: return rule raise IndexError
[ "def", "main_production", "(", "self", ")", ":", "for", "rule", "in", "self", ".", "productions", ":", "if", "rule", ".", "leftside", "[", "0", "]", "==", "self", ".", "_initialsymbol", ":", "return", "rule", "raise", "IndexError" ]
Returns main rule
[ "Returns", "main", "rule" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/grammar/BNF.py#L155-L160
15,566
nesaro/pydsl
pydsl/grammar/BNF.py
BNFGrammar.getSymbols
def getSymbols(self): """Returns every symbol""" symbollist = [] for rule in self.productions: for symbol in rule.leftside + rule.rightside: if symbol not in symbollist: symbollist.append(symbol) symbollist += self.terminal_symbols return symbollist
python
def getSymbols(self): """Returns every symbol""" symbollist = [] for rule in self.productions: for symbol in rule.leftside + rule.rightside: if symbol not in symbollist: symbollist.append(symbol) symbollist += self.terminal_symbols return symbollist
[ "def", "getSymbols", "(", "self", ")", ":", "symbollist", "=", "[", "]", "for", "rule", "in", "self", ".", "productions", ":", "for", "symbol", "in", "rule", ".", "leftside", "+", "rule", ".", "rightside", ":", "if", "symbol", "not", "in", "symbollist", ":", "symbollist", ".", "append", "(", "symbol", ")", "symbollist", "+=", "self", ".", "terminal_symbols", "return", "symbollist" ]
Returns every symbol
[ "Returns", "every", "symbol" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/grammar/BNF.py#L168-L176
15,567
nesaro/pydsl
pydsl/extract.py
extract_alphabet
def extract_alphabet(alphabet, inputdata, fixed_start = False): """ Receives a sequence and an alphabet, returns a list of PositionTokens with all of the parts of the sequence that are a subset of the alphabet """ if not inputdata: return [] base_alphabet = alphabet.alphabet lexer = lexer_factory(alphabet, base_alphabet) totallen = len(inputdata) maxl = totallen minl = 1 if fixed_start: max_start = 1 else: max_start = totallen result = [] for i in range(max_start): for j in range(i+minl, min(i+maxl, totallen) + 1): try: lexed = lexer(inputdata[i:j]) if lexed and len(lexed) == 1: result.append((i,j, inputdata[i:j], lexed[0].gd)) elif lexed: raise Exception except: continue result = filter_subsets(result) return [PositionToken(content, gd, left, right) for (left, right, content, gd) in result]
python
def extract_alphabet(alphabet, inputdata, fixed_start = False): """ Receives a sequence and an alphabet, returns a list of PositionTokens with all of the parts of the sequence that are a subset of the alphabet """ if not inputdata: return [] base_alphabet = alphabet.alphabet lexer = lexer_factory(alphabet, base_alphabet) totallen = len(inputdata) maxl = totallen minl = 1 if fixed_start: max_start = 1 else: max_start = totallen result = [] for i in range(max_start): for j in range(i+minl, min(i+maxl, totallen) + 1): try: lexed = lexer(inputdata[i:j]) if lexed and len(lexed) == 1: result.append((i,j, inputdata[i:j], lexed[0].gd)) elif lexed: raise Exception except: continue result = filter_subsets(result) return [PositionToken(content, gd, left, right) for (left, right, content, gd) in result]
[ "def", "extract_alphabet", "(", "alphabet", ",", "inputdata", ",", "fixed_start", "=", "False", ")", ":", "if", "not", "inputdata", ":", "return", "[", "]", "base_alphabet", "=", "alphabet", ".", "alphabet", "lexer", "=", "lexer_factory", "(", "alphabet", ",", "base_alphabet", ")", "totallen", "=", "len", "(", "inputdata", ")", "maxl", "=", "totallen", "minl", "=", "1", "if", "fixed_start", ":", "max_start", "=", "1", "else", ":", "max_start", "=", "totallen", "result", "=", "[", "]", "for", "i", "in", "range", "(", "max_start", ")", ":", "for", "j", "in", "range", "(", "i", "+", "minl", ",", "min", "(", "i", "+", "maxl", ",", "totallen", ")", "+", "1", ")", ":", "try", ":", "lexed", "=", "lexer", "(", "inputdata", "[", "i", ":", "j", "]", ")", "if", "lexed", "and", "len", "(", "lexed", ")", "==", "1", ":", "result", ".", "append", "(", "(", "i", ",", "j", ",", "inputdata", "[", "i", ":", "j", "]", ",", "lexed", "[", "0", "]", ".", "gd", ")", ")", "elif", "lexed", ":", "raise", "Exception", "except", ":", "continue", "result", "=", "filter_subsets", "(", "result", ")", "return", "[", "PositionToken", "(", "content", ",", "gd", ",", "left", ",", "right", ")", "for", "(", "left", ",", "right", ",", "content", ",", "gd", ")", "in", "result", "]" ]
Receives a sequence and an alphabet, returns a list of PositionTokens with all of the parts of the sequence that are a subset of the alphabet
[ "Receives", "a", "sequence", "and", "an", "alphabet", "returns", "a", "list", "of", "PositionTokens", "with", "all", "of", "the", "parts", "of", "the", "sequence", "that", "are", "a", "subset", "of", "the", "alphabet" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/extract.py#L44-L74
15,568
nesaro/pydsl
pydsl/extract.py
extract
def extract(grammar, inputdata, fixed_start = False, return_first=False): """ Receives a sequence and a grammar, returns a list of PositionTokens with all of the parts of the sequence that are recognized by the grammar """ if not inputdata: return [] checker = checker_factory(grammar) totallen = len(inputdata) from pydsl.grammar.PEG import Choice try: maxl = grammar.maxsize or totallen except NotImplementedError: maxl = totallen try: #minl = grammar.minsize #FIXME: It won't work with incompatible alphabets minl = 1 except NotImplementedError: minl = 1 if fixed_start: max_start = 1 else: max_start = totallen result = [] for i in range(max_start): for j in range(i+minl, min(i+maxl, totallen) + 1): slice = inputdata[i:j] check = checker.check(slice) if check: this_pt = PositionToken(slice, grammar, i, j) if return_first: return this_pt result.append(this_pt) return result
python
def extract(grammar, inputdata, fixed_start = False, return_first=False): """ Receives a sequence and a grammar, returns a list of PositionTokens with all of the parts of the sequence that are recognized by the grammar """ if not inputdata: return [] checker = checker_factory(grammar) totallen = len(inputdata) from pydsl.grammar.PEG import Choice try: maxl = grammar.maxsize or totallen except NotImplementedError: maxl = totallen try: #minl = grammar.minsize #FIXME: It won't work with incompatible alphabets minl = 1 except NotImplementedError: minl = 1 if fixed_start: max_start = 1 else: max_start = totallen result = [] for i in range(max_start): for j in range(i+minl, min(i+maxl, totallen) + 1): slice = inputdata[i:j] check = checker.check(slice) if check: this_pt = PositionToken(slice, grammar, i, j) if return_first: return this_pt result.append(this_pt) return result
[ "def", "extract", "(", "grammar", ",", "inputdata", ",", "fixed_start", "=", "False", ",", "return_first", "=", "False", ")", ":", "if", "not", "inputdata", ":", "return", "[", "]", "checker", "=", "checker_factory", "(", "grammar", ")", "totallen", "=", "len", "(", "inputdata", ")", "from", "pydsl", ".", "grammar", ".", "PEG", "import", "Choice", "try", ":", "maxl", "=", "grammar", ".", "maxsize", "or", "totallen", "except", "NotImplementedError", ":", "maxl", "=", "totallen", "try", ":", "#minl = grammar.minsize #FIXME: It won't work with incompatible alphabets", "minl", "=", "1", "except", "NotImplementedError", ":", "minl", "=", "1", "if", "fixed_start", ":", "max_start", "=", "1", "else", ":", "max_start", "=", "totallen", "result", "=", "[", "]", "for", "i", "in", "range", "(", "max_start", ")", ":", "for", "j", "in", "range", "(", "i", "+", "minl", ",", "min", "(", "i", "+", "maxl", ",", "totallen", ")", "+", "1", ")", ":", "slice", "=", "inputdata", "[", "i", ":", "j", "]", "check", "=", "checker", ".", "check", "(", "slice", ")", "if", "check", ":", "this_pt", "=", "PositionToken", "(", "slice", ",", "grammar", ",", "i", ",", "j", ")", "if", "return_first", ":", "return", "this_pt", "result", ".", "append", "(", "this_pt", ")", "return", "result" ]
Receives a sequence and a grammar, returns a list of PositionTokens with all of the parts of the sequence that are recognized by the grammar
[ "Receives", "a", "sequence", "and", "a", "grammar", "returns", "a", "list", "of", "PositionTokens", "with", "all", "of", "the", "parts", "of", "the", "sequence", "that", "are", "recognized", "by", "the", "grammar" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/extract.py#L76-L111
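extract brute-forces every slice of the input and keeps the slices a checker accepts. The same sliding-window idea with a plain predicate in place of pydsl's checker_factory:

import re

def extract_spans(predicate, data, fixed_start=False):
    """Return (left, right, slice) for every slice accepted by the predicate."""
    result = []
    max_start = 1 if fixed_start else len(data)
    for i in range(max_start):
        for j in range(i + 1, len(data) + 1):
            if predicate(data[i:j]):
                result.append((i, j, data[i:j]))
    return result

def is_number(s):
    return re.fullmatch(r"[0-9]+", s) is not None

print(extract_spans(is_number, "ab1234c"))
# [(2, 3, '1'), (2, 4, '12'), (2, 5, '123'), (2, 6, '1234'), (3, 4, '2'), ...]

This quadratic scan is what makes fixed_start useful: anchoring the left edge reduces the search to a single pass over possible right edges.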
15,569
nesaro/pydsl
pydsl/token.py
append_position_to_token_list
def append_position_to_token_list(token_list): """Converts a list of Token into a list of Token, asuming size == 1""" return [PositionToken(value.content, value.gd, index, index+1) for (index, value) in enumerate(token_list)]
python
def append_position_to_token_list(token_list): """Converts a list of Token into a list of Token, asuming size == 1""" return [PositionToken(value.content, value.gd, index, index+1) for (index, value) in enumerate(token_list)]
[ "def", "append_position_to_token_list", "(", "token_list", ")", ":", "return", "[", "PositionToken", "(", "value", ".", "content", ",", "value", ".", "gd", ",", "index", ",", "index", "+", "1", ")", "for", "(", "index", ",", "value", ")", "in", "enumerate", "(", "token_list", ")", "]" ]
Converts a list of Token into a list of PositionToken, assuming size == 1
[ "Converts", "a", "list", "of", "Token", "into", "a", "list", "of", "PositionToken", "assuming", "size", "==", "1" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/token.py#L62-L64
15,570
nesaro/pydsl
pydsl/file/python.py
load_python_file
def load_python_file(moduleobject): """ Try to create an indexable instance from a module""" if isinstance(moduleobject, str): moduleobject = load_module(moduleobject) if not hasattr(moduleobject, "iclass"): raise KeyError("Element" + str(moduleobject)) iclass = getattr(moduleobject, "iclass") mylist = getattr(moduleobject, "__all__", None) or list(filter(lambda x:x[:1] != "_", (dir(moduleobject)))) mylist.remove('iclass') resultdic = {} for x in mylist: resultdic[x] = getattr(moduleobject, x) if iclass == "SymbolGrammar": from pydsl.grammar.BNF import BNFGrammar return BNFGrammar(**resultdic) elif iclass == "PLY": from pydsl.grammar.definition import PLYGrammar return PLYGrammar(moduleobject) elif iclass in ["PythonGrammar"]: from pydsl.grammar.definition import PythonGrammar return PythonGrammar(resultdic) elif iclass == "PythonTranslator": return resultdic elif iclass == "parsley": from pydsl.grammar.parsley import ParsleyGrammar return ParsleyGrammar(**resultdic) elif iclass == "pyparsing": return resultdic['root_symbol'] else: raise ValueError(str(moduleobject))
python
def load_python_file(moduleobject): """ Try to create an indexable instance from a module""" if isinstance(moduleobject, str): moduleobject = load_module(moduleobject) if not hasattr(moduleobject, "iclass"): raise KeyError("Element" + str(moduleobject)) iclass = getattr(moduleobject, "iclass") mylist = getattr(moduleobject, "__all__", None) or list(filter(lambda x:x[:1] != "_", (dir(moduleobject)))) mylist.remove('iclass') resultdic = {} for x in mylist: resultdic[x] = getattr(moduleobject, x) if iclass == "SymbolGrammar": from pydsl.grammar.BNF import BNFGrammar return BNFGrammar(**resultdic) elif iclass == "PLY": from pydsl.grammar.definition import PLYGrammar return PLYGrammar(moduleobject) elif iclass in ["PythonGrammar"]: from pydsl.grammar.definition import PythonGrammar return PythonGrammar(resultdic) elif iclass == "PythonTranslator": return resultdic elif iclass == "parsley": from pydsl.grammar.parsley import ParsleyGrammar return ParsleyGrammar(**resultdic) elif iclass == "pyparsing": return resultdic['root_symbol'] else: raise ValueError(str(moduleobject))
[ "def", "load_python_file", "(", "moduleobject", ")", ":", "if", "isinstance", "(", "moduleobject", ",", "str", ")", ":", "moduleobject", "=", "load_module", "(", "moduleobject", ")", "if", "not", "hasattr", "(", "moduleobject", ",", "\"iclass\"", ")", ":", "raise", "KeyError", "(", "\"Element\"", "+", "str", "(", "moduleobject", ")", ")", "iclass", "=", "getattr", "(", "moduleobject", ",", "\"iclass\"", ")", "mylist", "=", "getattr", "(", "moduleobject", ",", "\"__all__\"", ",", "None", ")", "or", "list", "(", "filter", "(", "lambda", "x", ":", "x", "[", ":", "1", "]", "!=", "\"_\"", ",", "(", "dir", "(", "moduleobject", ")", ")", ")", ")", "mylist", ".", "remove", "(", "'iclass'", ")", "resultdic", "=", "{", "}", "for", "x", "in", "mylist", ":", "resultdic", "[", "x", "]", "=", "getattr", "(", "moduleobject", ",", "x", ")", "if", "iclass", "==", "\"SymbolGrammar\"", ":", "from", "pydsl", ".", "grammar", ".", "BNF", "import", "BNFGrammar", "return", "BNFGrammar", "(", "*", "*", "resultdic", ")", "elif", "iclass", "==", "\"PLY\"", ":", "from", "pydsl", ".", "grammar", ".", "definition", "import", "PLYGrammar", "return", "PLYGrammar", "(", "moduleobject", ")", "elif", "iclass", "in", "[", "\"PythonGrammar\"", "]", ":", "from", "pydsl", ".", "grammar", ".", "definition", "import", "PythonGrammar", "return", "PythonGrammar", "(", "resultdic", ")", "elif", "iclass", "==", "\"PythonTranslator\"", ":", "return", "resultdic", "elif", "iclass", "==", "\"parsley\"", ":", "from", "pydsl", ".", "grammar", ".", "parsley", "import", "ParsleyGrammar", "return", "ParsleyGrammar", "(", "*", "*", "resultdic", ")", "elif", "iclass", "==", "\"pyparsing\"", ":", "return", "resultdic", "[", "'root_symbol'", "]", "else", ":", "raise", "ValueError", "(", "str", "(", "moduleobject", ")", ")" ]
Try to create an indexable instance from a module
[ "Try", "to", "create", "an", "indexable", "instance", "from", "a", "module" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/file/python.py#L34-L63
15,571
nesaro/pydsl
pydsl/file/BNF.py
load_bnf_file
def load_bnf_file(filepath, repository = None): """Converts a bnf file into a BNFGrammar instance""" linelist = [] with open(filepath,'r') as mlfile: for line in mlfile: linelist.append(line) return strlist_to_production_set(linelist, repository)
python
def load_bnf_file(filepath, repository = None):
    """Converts a bnf file into a BNFGrammar instance"""
    linelist = []
    with open(filepath,'r') as mlfile:
        for line in mlfile:
            linelist.append(line)
    return strlist_to_production_set(linelist, repository)
[ "def", "load_bnf_file", "(", "filepath", ",", "repository", "=", "None", ")", ":", "linelist", "=", "[", "]", "with", "open", "(", "filepath", ",", "'r'", ")", "as", "mlfile", ":", "for", "line", "in", "mlfile", ":", "linelist", ".", "append", "(", "line", ")", "return", "strlist_to_production_set", "(", "linelist", ",", "repository", ")" ]
Converts a bnf file into a BNFGrammar instance
[ "Converts", "a", "bnf", "file", "into", "a", "BNFGrammar", "instance" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/file/BNF.py#L141-L147
15,572
nesaro/pydsl
pydsl/file/regexp.py
load_re_from_file
def load_re_from_file(filepath): """Converts a re file to Regular Grammar instance""" regexp = None with open(filepath,'r') as mlfile: flagstr = "" for line in mlfile: cleanline = re.sub("//.*$", "", line) if re.search("^\s*$", cleanline): continue if re.search ("^#.*$", cleanline): flagstr = cleanline[1:] continue if regexp is not None: raise Exception("Regular expression file format error") else: regexp = cleanline.rstrip('\n') flags = 0 if "i" in flagstr: flags |= re.I from pydsl.grammar.definition import RegularExpression return RegularExpression(regexp, flags)
python
def load_re_from_file(filepath):
    """Converts a re file to Regular Grammar instance"""
    regexp = None
    with open(filepath,'r') as mlfile:
        flagstr = ""
        for line in mlfile:
            cleanline = re.sub("//.*$", "", line)
            if re.search("^\s*$", cleanline):
                continue
            if re.search ("^#.*$", cleanline):
                flagstr = cleanline[1:]
                continue
            if regexp is not None:
                raise Exception("Regular expression file format error")
            else:
                regexp = cleanline.rstrip('\n')
    flags = 0
    if "i" in flagstr:
        flags |= re.I
    from pydsl.grammar.definition import RegularExpression
    return RegularExpression(regexp, flags)
[ "def", "load_re_from_file", "(", "filepath", ")", ":", "regexp", "=", "None", "with", "open", "(", "filepath", ",", "'r'", ")", "as", "mlfile", ":", "flagstr", "=", "\"\"", "for", "line", "in", "mlfile", ":", "cleanline", "=", "re", ".", "sub", "(", "\"//.*$\"", ",", "\"\"", ",", "line", ")", "if", "re", ".", "search", "(", "\"^\\s*$\"", ",", "cleanline", ")", ":", "continue", "if", "re", ".", "search", "(", "\"^#.*$\"", ",", "cleanline", ")", ":", "flagstr", "=", "cleanline", "[", "1", ":", "]", "continue", "if", "regexp", "is", "not", "None", ":", "raise", "Exception", "(", "\"Regular expression file format error\"", ")", "else", ":", "regexp", "=", "cleanline", ".", "rstrip", "(", "'\\n'", ")", "flags", "=", "0", "if", "\"i\"", "in", "flagstr", ":", "flags", "|=", "re", ".", "I", "from", "pydsl", ".", "grammar", ".", "definition", "import", "RegularExpression", "return", "RegularExpression", "(", "regexp", ",", "flags", ")" ]
Converts a re file to Regular Grammar instance
[ "Converts", "a", "re", "file", "to", "Regular", "Grammar", "instance" ]
00b4fffd72036b80335e1a44a888fac57917ab41
https://github.com/nesaro/pydsl/blob/00b4fffd72036b80335e1a44a888fac57917ab41/pydsl/file/regexp.py#L28-L48
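A note on the load_re_from_file record above: it implicitly documents pydsl's .re file format, where "//" starts a comment, a line beginning with "#" carries single-letter flags ("i" maps to re.I), and the single remaining non-blank line holds the pattern. A hedged usage sketch follows; the file name and pattern are invented for illustration and assume pydsl is installed.

# Usage sketch for load_re_from_file; "hexnumber.re" and its pattern are
# illustrative, not files shipped with pydsl.
from pydsl.file.regexp import load_re_from_file

with open("hexnumber.re", "w") as f:
    f.write("// matches hexadecimal literals\n")   # comment line, stripped by the loader
    f.write("#i\n")                                # flag line: 'i' turns on re.I
    f.write("0x[0-9a-f]+\n")                       # the single pattern line

grammar = load_re_from_file("hexnumber.re")        # -> RegularExpression("0x[0-9a-f]+", re.I)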
15,573
aio-libs/aiohttp-jinja2
aiohttp_jinja2/helpers.py
url_for
def url_for(context, __route_name, **parts): """Filter for generating urls. Usage: {{ url('the-view-name') }} might become "/path/to/view" or {{ url('item-details', id=123, query={'active': 'true'}) }} might become "/items/1?active=true". """ app = context['app'] query = None if 'query_' in parts: query = parts.pop('query_') for key in parts: val = parts[key] if isinstance(val, str): # if type is inherited from str expilict cast to str makes sense # if type is exactly str the operation is very fast val = str(val) elif type(val) is int: # int inherited classes like bool are forbidden val = str(val) else: raise TypeError("argument value should be str or int, " "got {} -> [{}] {!r}".format(key, type(val), val)) parts[key] = val url = app.router[__route_name].url_for(**parts) if query: url = url.with_query(query) return url
python
def url_for(context, __route_name, **parts):
    """Filter for generating urls.

    Usage: {{ url('the-view-name') }} might become "/path/to/view" or
    {{ url('item-details', id=123, query={'active': 'true'}) }}
    might become "/items/1?active=true".
    """
    app = context['app']
    query = None
    if 'query_' in parts:
        query = parts.pop('query_')
    for key in parts:
        val = parts[key]
        if isinstance(val, str):
            # if type is inherited from str expilict cast to str makes sense
            # if type is exactly str the operation is very fast
            val = str(val)
        elif type(val) is int:
            # int inherited classes like bool are forbidden
            val = str(val)
        else:
            raise TypeError("argument value should be str or int, "
                            "got {} -> [{}] {!r}".format(key, type(val), val))
        parts[key] = val
    url = app.router[__route_name].url_for(**parts)
    if query:
        url = url.with_query(query)
    return url
[ "def", "url_for", "(", "context", ",", "__route_name", ",", "*", "*", "parts", ")", ":", "app", "=", "context", "[", "'app'", "]", "query", "=", "None", "if", "'query_'", "in", "parts", ":", "query", "=", "parts", ".", "pop", "(", "'query_'", ")", "for", "key", "in", "parts", ":", "val", "=", "parts", "[", "key", "]", "if", "isinstance", "(", "val", ",", "str", ")", ":", "# if type is inherited from str expilict cast to str makes sense", "# if type is exactly str the operation is very fast", "val", "=", "str", "(", "val", ")", "elif", "type", "(", "val", ")", "is", "int", ":", "# int inherited classes like bool are forbidden", "val", "=", "str", "(", "val", ")", "else", ":", "raise", "TypeError", "(", "\"argument value should be str or int, \"", "\"got {} -> [{}] {!r}\"", ".", "format", "(", "key", ",", "type", "(", "val", ")", ",", "val", ")", ")", "parts", "[", "key", "]", "=", "val", "url", "=", "app", ".", "router", "[", "__route_name", "]", ".", "url_for", "(", "*", "*", "parts", ")", "if", "query", ":", "url", "=", "url", ".", "with_query", "(", "query", ")", "return", "url" ]
Filter for generating urls. Usage: {{ url('the-view-name') }} might become "/path/to/view" or {{ url('item-details', id=123, query={'active': 'true'}) }} might become "/items/1?active=true".
[ "Filter", "for", "generating", "urls", "." ]
474a852c8b29dc69106bf4aba28a9c99cf63dac9
https://github.com/aio-libs/aiohttp-jinja2/blob/474a852c8b29dc69106bf4aba28a9c99cf63dac9/aiohttp_jinja2/helpers.py#L9-L39
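The url_for record above is aiohttp-jinja2's template helper for resolving named routes; note that the code pops the reserved keyword query_ (with a trailing underscore) to build the query string, even though the docstring writes it as query. A hedged wiring sketch follows; the route name, handler, and template text are invented, while aiohttp_jinja2.setup and the url() template global are the documented integration points.

import jinja2
import aiohttp_jinja2
from aiohttp import web

@aiohttp_jinja2.template("item.html")
async def item_details(request):
    return {}

app = web.Application()
aiohttp_jinja2.setup(app, loader=jinja2.DictLoader({
    # {{ url(...) }} invokes the url_for filter documented above
    "item.html":
        "<a href=\"{{ url('item-details', id=123, query_={'active': 'true'}) }}\">item</a>",
}))
app.router.add_get("/items/{id}", item_details, name="item-details")
# Rendering item.html now yields a link to /items/123?active=true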
15,574
aio-libs/aiohttp-jinja2
aiohttp_jinja2/helpers.py
static_url
def static_url(context, static_file_path): """Filter for generating urls for static files. NOTE: you'll need to set app['static_root_url'] to be used as the root for the urls returned. Usage: {{ static('styles.css') }} might become "/static/styles.css" or "http://mycdn.example.com/styles.css" """ app = context['app'] try: static_url = app['static_root_url'] except KeyError: raise RuntimeError( "app does not define a static root url " "'static_root_url', you need to set the url root " "with app['static_root_url'] = '<static root>'.") from None return '{}/{}'.format(static_url.rstrip('/'), static_file_path.lstrip('/'))
python
def static_url(context, static_file_path):
    """Filter for generating urls for static files.

    NOTE: you'll need to set app['static_root_url'] to be used as the root
    for the urls returned.

    Usage: {{ static('styles.css') }} might become
    "/static/styles.css" or "http://mycdn.example.com/styles.css"
    """
    app = context['app']
    try:
        static_url = app['static_root_url']
    except KeyError:
        raise RuntimeError(
            "app does not define a static root url "
            "'static_root_url', you need to set the url root "
            "with app['static_root_url'] = '<static root>'.") from None
    return '{}/{}'.format(static_url.rstrip('/'), static_file_path.lstrip('/'))
[ "def", "static_url", "(", "context", ",", "static_file_path", ")", ":", "app", "=", "context", "[", "'app'", "]", "try", ":", "static_url", "=", "app", "[", "'static_root_url'", "]", "except", "KeyError", ":", "raise", "RuntimeError", "(", "\"app does not define a static root url \"", "\"'static_root_url', you need to set the url root \"", "\"with app['static_root_url'] = '<static root>'.\"", ")", "from", "None", "return", "'{}/{}'", ".", "format", "(", "static_url", ".", "rstrip", "(", "'/'", ")", ",", "static_file_path", ".", "lstrip", "(", "'/'", ")", ")" ]
Filter for generating urls for static files. NOTE: you'll need to set app['static_root_url'] to be used as the root for the urls returned. Usage: {{ static('styles.css') }} might become "/static/styles.css" or "http://mycdn.example.com/styles.css"
[ "Filter", "for", "generating", "urls", "for", "static", "files", "." ]
474a852c8b29dc69106bf4aba28a9c99cf63dac9
https://github.com/aio-libs/aiohttp-jinja2/blob/474a852c8b29dc69106bf4aba28a9c99cf63dac9/aiohttp_jinja2/helpers.py#L43-L60
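The static_url record above only joins app['static_root_url'] with the requested path (raising RuntimeError when the root is unset); serving the files themselves is a separate concern. A hedged setup sketch follows; the paths and template text are invented for illustration.

import jinja2
import aiohttp_jinja2
from aiohttp import web

app = web.Application()
aiohttp_jinja2.setup(app, loader=jinja2.DictLoader({
    # {{ static(...) }} invokes the static_url filter documented above
    "page.html": "<link rel=\"stylesheet\" href=\"{{ static('styles.css') }}\">",
}))
app.router.add_static("/static/", "./static")   # actually serves the files (path is illustrative)
app["static_root_url"] = "/static"              # root consumed by the static() helper
# {{ static('styles.css') }} now renders as /static/styles.css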
15,575
cmbruns/pyopenvr
src/openvr/gl_renderer.py
OpenVrGlRenderer.init_gl
def init_gl(self): "allocate OpenGL resources" self.vr_system = openvr.init(openvr.VRApplication_Scene) w, h = self.vr_system.getRecommendedRenderTargetSize() self.left_fb = OpenVrFramebuffer(w, h, multisample=self.multisample) self.right_fb = OpenVrFramebuffer(w, h, multisample=self.multisample) self.compositor = openvr.VRCompositor() if self.compositor is None: raise Exception("Unable to create compositor") self.left_fb.init_gl() self.right_fb.init_gl() # Compute projection matrix zNear = 0.2 zFar = 500.0 self.projection_left = numpy.asarray(matrixForOpenVrMatrix(self.vr_system.getProjectionMatrix( openvr.Eye_Left, zNear, zFar))) self.projection_right = numpy.asarray(matrixForOpenVrMatrix(self.vr_system.getProjectionMatrix( openvr.Eye_Right, zNear, zFar))) self.view_left = matrixForOpenVrMatrix( self.vr_system.getEyeToHeadTransform(openvr.Eye_Left)).I # head_X_eye in Kane notation self.view_right = matrixForOpenVrMatrix( self.vr_system.getEyeToHeadTransform(openvr.Eye_Right)).I # head_X_eye in Kane notation for actor in self: actor.init_gl()
python
def init_gl(self):
    "allocate OpenGL resources"
    self.vr_system = openvr.init(openvr.VRApplication_Scene)
    w, h = self.vr_system.getRecommendedRenderTargetSize()
    self.left_fb = OpenVrFramebuffer(w, h, multisample=self.multisample)
    self.right_fb = OpenVrFramebuffer(w, h, multisample=self.multisample)
    self.compositor = openvr.VRCompositor()
    if self.compositor is None:
        raise Exception("Unable to create compositor")
    self.left_fb.init_gl()
    self.right_fb.init_gl()
    # Compute projection matrix
    zNear = 0.2
    zFar = 500.0
    self.projection_left = numpy.asarray(matrixForOpenVrMatrix(self.vr_system.getProjectionMatrix(
        openvr.Eye_Left, zNear, zFar)))
    self.projection_right = numpy.asarray(matrixForOpenVrMatrix(self.vr_system.getProjectionMatrix(
        openvr.Eye_Right, zNear, zFar)))
    self.view_left = matrixForOpenVrMatrix(
        self.vr_system.getEyeToHeadTransform(openvr.Eye_Left)).I  # head_X_eye in Kane notation
    self.view_right = matrixForOpenVrMatrix(
        self.vr_system.getEyeToHeadTransform(openvr.Eye_Right)).I  # head_X_eye in Kane notation
    for actor in self:
        actor.init_gl()
[ "def", "init_gl", "(", "self", ")", ":", "self", ".", "vr_system", "=", "openvr", ".", "init", "(", "openvr", ".", "VRApplication_Scene", ")", "w", ",", "h", "=", "self", ".", "vr_system", ".", "getRecommendedRenderTargetSize", "(", ")", "self", ".", "left_fb", "=", "OpenVrFramebuffer", "(", "w", ",", "h", ",", "multisample", "=", "self", ".", "multisample", ")", "self", ".", "right_fb", "=", "OpenVrFramebuffer", "(", "w", ",", "h", ",", "multisample", "=", "self", ".", "multisample", ")", "self", ".", "compositor", "=", "openvr", ".", "VRCompositor", "(", ")", "if", "self", ".", "compositor", "is", "None", ":", "raise", "Exception", "(", "\"Unable to create compositor\"", ")", "self", ".", "left_fb", ".", "init_gl", "(", ")", "self", ".", "right_fb", ".", "init_gl", "(", ")", "# Compute projection matrix", "zNear", "=", "0.2", "zFar", "=", "500.0", "self", ".", "projection_left", "=", "numpy", ".", "asarray", "(", "matrixForOpenVrMatrix", "(", "self", ".", "vr_system", ".", "getProjectionMatrix", "(", "openvr", ".", "Eye_Left", ",", "zNear", ",", "zFar", ")", ")", ")", "self", ".", "projection_right", "=", "numpy", ".", "asarray", "(", "matrixForOpenVrMatrix", "(", "self", ".", "vr_system", ".", "getProjectionMatrix", "(", "openvr", ".", "Eye_Right", ",", "zNear", ",", "zFar", ")", ")", ")", "self", ".", "view_left", "=", "matrixForOpenVrMatrix", "(", "self", ".", "vr_system", ".", "getEyeToHeadTransform", "(", "openvr", ".", "Eye_Left", ")", ")", ".", "I", "# head_X_eye in Kane notation", "self", ".", "view_right", "=", "matrixForOpenVrMatrix", "(", "self", ".", "vr_system", ".", "getEyeToHeadTransform", "(", "openvr", ".", "Eye_Right", ")", ")", ".", "I", "# head_X_eye in Kane notation", "for", "actor", "in", "self", ":", "actor", ".", "init_gl", "(", ")" ]
allocate OpenGL resources
[ "allocate", "OpenGL", "resources" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/gl_renderer.py#L144-L169
15,576
cmbruns/pyopenvr
src/samples/glut/pink_world.py
PinkWorld.display
def display(self): "Renders the scene once every refresh" self.compositor.waitGetPoses(self.poses, openvr.k_unMaxTrackedDeviceCount, None, 0) hmd_pose0 = self.poses[openvr.k_unTrackedDeviceIndex_Hmd] if not hmd_pose0.bPoseIsValid: return # hmd_pose = hmd_pose0.mDeviceToAbsoluteTracking # 1) On-screen render: if True: glClearColor(0.8, 0.4, 0.4, 0) # Pink background glClear(GL_COLOR_BUFFER_BIT) # glutSwapBuffers() glFlush() # Single buffer # 2) VR render # TODO: render different things to each eye glBindFramebuffer(GL_FRAMEBUFFER, self.fb) glClearColor(0.8, 0.4, 0.4, 0) # Pink background glClear(GL_COLOR_BUFFER_BIT) glBindFramebuffer(GL_FRAMEBUFFER, 0) # # TODO: use different textures for each eye self.compositor.submit(openvr.Eye_Left, self.texture) self.compositor.submit(openvr.Eye_Right, self.texture) glBindFramebuffer(GL_FRAMEBUFFER, 0)
python
def display(self):
    "Renders the scene once every refresh"
    self.compositor.waitGetPoses(self.poses, openvr.k_unMaxTrackedDeviceCount, None, 0)
    hmd_pose0 = self.poses[openvr.k_unTrackedDeviceIndex_Hmd]
    if not hmd_pose0.bPoseIsValid:
        return
    # hmd_pose = hmd_pose0.mDeviceToAbsoluteTracking
    # 1) On-screen render:
    if True:
        glClearColor(0.8, 0.4, 0.4, 0)  # Pink background
        glClear(GL_COLOR_BUFFER_BIT)
        # glutSwapBuffers()
        glFlush()  # Single buffer
    # 2) VR render
    # TODO: render different things to each eye
    glBindFramebuffer(GL_FRAMEBUFFER, self.fb)
    glClearColor(0.8, 0.4, 0.4, 0)  # Pink background
    glClear(GL_COLOR_BUFFER_BIT)
    glBindFramebuffer(GL_FRAMEBUFFER, 0)
    #
    # TODO: use different textures for each eye
    self.compositor.submit(openvr.Eye_Left, self.texture)
    self.compositor.submit(openvr.Eye_Right, self.texture)
    glBindFramebuffer(GL_FRAMEBUFFER, 0)
[ "def", "display", "(", "self", ")", ":", "self", ".", "compositor", ".", "waitGetPoses", "(", "self", ".", "poses", ",", "openvr", ".", "k_unMaxTrackedDeviceCount", ",", "None", ",", "0", ")", "hmd_pose0", "=", "self", ".", "poses", "[", "openvr", ".", "k_unTrackedDeviceIndex_Hmd", "]", "if", "not", "hmd_pose0", ".", "bPoseIsValid", ":", "return", "# hmd_pose = hmd_pose0.mDeviceToAbsoluteTracking\r", "# 1) On-screen render:\r", "if", "True", ":", "glClearColor", "(", "0.8", ",", "0.4", ",", "0.4", ",", "0", ")", "# Pink background\r", "glClear", "(", "GL_COLOR_BUFFER_BIT", ")", "# glutSwapBuffers()\r", "glFlush", "(", ")", "# Single buffer\r", "# 2) VR render\r", "# TODO: render different things to each eye\r", "glBindFramebuffer", "(", "GL_FRAMEBUFFER", ",", "self", ".", "fb", ")", "glClearColor", "(", "0.8", ",", "0.4", ",", "0.4", ",", "0", ")", "# Pink background\r", "glClear", "(", "GL_COLOR_BUFFER_BIT", ")", "glBindFramebuffer", "(", "GL_FRAMEBUFFER", ",", "0", ")", "#\r", "# TODO: use different textures for each eye\r", "self", ".", "compositor", ".", "submit", "(", "openvr", ".", "Eye_Left", ",", "self", ".", "texture", ")", "self", ".", "compositor", ".", "submit", "(", "openvr", ".", "Eye_Right", ",", "self", ".", "texture", ")", "glBindFramebuffer", "(", "GL_FRAMEBUFFER", ",", "0", ")" ]
Renders the scene once every refresh
[ "Renders", "the", "scene", "once", "every", "refresh" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/samples/glut/pink_world.py#L93-L116
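The display record above shows the core OpenVR frame pattern: block on waitGetPoses, draw, then submit a texture for each eye. A stripped-down, hedged sketch of that loop outside GLUT follows; texture creation and scene drawing are left abstract (the caller is assumed to own a GL context and two filled openvr.Texture_t structs), and the openvr calls mirror the record above.

import openvr

def vr_frame_loop(compositor, left_texture, right_texture, render_scene, should_quit):
    """Minimal compositor-driven frame loop (sketch, not part of pyopenvr)."""
    poses = (openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount)()
    while not should_quit():
        # Block until the compositor wants the next frame, receiving fresh poses
        compositor.waitGetPoses(poses, openvr.k_unMaxTrackedDeviceCount, None, 0)
        hmd_pose = poses[openvr.k_unTrackedDeviceIndex_Hmd]
        if not hmd_pose.bPoseIsValid:
            continue
        render_scene(hmd_pose)                       # draw into the per-eye framebuffers
        compositor.submit(openvr.Eye_Left, left_texture)
        compositor.submit(openvr.Eye_Right, right_texture)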
15,577
cmbruns/pyopenvr
src/samples/glut/pink_world.py
PinkWorld.key_press
def key_press(self, key, x, y): "Close the application when the player presses ESCAPE" if ord(key) == 27: # print "Escape!" if bool(glutLeaveMainLoop): glutLeaveMainLoop() else: raise Exception("Application quit")
python
def key_press(self, key, x, y):
    "Close the application when the player presses ESCAPE"
    if ord(key) == 27:
        # print "Escape!"
        if bool(glutLeaveMainLoop):
            glutLeaveMainLoop()
        else:
            raise Exception("Application quit")
[ "def", "key_press", "(", "self", ",", "key", ",", "x", ",", "y", ")", ":", "if", "ord", "(", "key", ")", "==", "27", ":", "# print \"Escape!\"\r", "if", "bool", "(", "glutLeaveMainLoop", ")", ":", "glutLeaveMainLoop", "(", ")", "else", ":", "raise", "Exception", "(", "\"Application quit\"", ")" ]
Close the application when the player presses ESCAPE
[ "Close", "the", "application", "when", "the", "player", "presses", "ESCAPE" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/samples/glut/pink_world.py#L118-L125
15,578
cmbruns/pyopenvr
src/openvr/glframework/cyglfw3_app.py
CyGLFW3App.key_callback
def key_callback(self, window, key, scancode, action, mods): """press ESCAPE to quite the application""" if key == glfw.KEY_ESCAPE and action == glfw.PRESS: glfw.SetWindowShouldClose(self.window, True)
python
def key_callback(self, window, key, scancode, action, mods):
    """press ESCAPE to quite the application"""
    if key == glfw.KEY_ESCAPE and action == glfw.PRESS:
        glfw.SetWindowShouldClose(self.window, True)
[ "def", "key_callback", "(", "self", ",", "window", ",", "key", ",", "scancode", ",", "action", ",", "mods", ")", ":", "if", "key", "==", "glfw", ".", "KEY_ESCAPE", "and", "action", "==", "glfw", ".", "PRESS", ":", "glfw", ".", "SetWindowShouldClose", "(", "self", ".", "window", ",", "True", ")" ]
press ESCAPE to quite the application
[ "press", "ESCAPE", "to", "quite", "the", "application" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/glframework/cyglfw3_app.py#L66-L69
15,579
cmbruns/pyopenvr
src/openvr/glframework/sdl_app.py
SdlApp.run_loop
def run_loop(self): "keep rendering until the user says quit" self.running = True event = SDL_Event() try: while self.running: while SDL_PollEvent(ctypes.byref(event)) != 0: f = self._sdl_event_handlers.get(event.type) if f is not None: f ( event ) self.render_scene() except SdlAppQuit as e: pass
python
def run_loop(self):
    "keep rendering until the user says quit"
    self.running = True
    event = SDL_Event()
    try:
        while self.running:
            while SDL_PollEvent(ctypes.byref(event)) != 0:
                f = self._sdl_event_handlers.get(event.type)
                if f is not None:
                    f ( event )
            self.render_scene()
    except SdlAppQuit as e:
        pass
[ "def", "run_loop", "(", "self", ")", ":", "self", ".", "running", "=", "True", "event", "=", "SDL_Event", "(", ")", "try", ":", "while", "self", ".", "running", ":", "while", "SDL_PollEvent", "(", "ctypes", ".", "byref", "(", "event", ")", ")", "!=", "0", ":", "f", "=", "self", ".", "_sdl_event_handlers", ".", "get", "(", "event", ".", "type", ")", "if", "f", "is", "not", "None", ":", "f", "(", "event", ")", "self", ".", "render_scene", "(", ")", "except", "SdlAppQuit", "as", "e", ":", "pass" ]
keep rendering until the user says quit
[ "keep", "rendering", "until", "the", "user", "says", "quit" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/glframework/sdl_app.py#L112-L124
15,580
cmbruns/pyopenvr
src/samples/sdl/NOTWORKING_hellovr_opengl_sdl.py
Matrix4.scale
def scale(self, x, y=None, z=None): "Uniform scale, if only sx argument is specified" if y is None: y = x if z is None: z = x m = self for col in range(4): # Only the top three rows m[0,col] *= x m[1,col] *= y m[2,col] *= z return self
python
def scale(self, x, y=None, z=None):
    "Uniform scale, if only sx argument is specified"
    if y is None:
        y = x
    if z is None:
        z = x
    m = self
    for col in range(4):  # Only the top three rows
        m[0,col] *= x
        m[1,col] *= y
        m[2,col] *= z
    return self
[ "def", "scale", "(", "self", ",", "x", ",", "y", "=", "None", ",", "z", "=", "None", ")", ":", "if", "y", "is", "None", ":", "y", "=", "x", "if", "z", "is", "None", ":", "z", "=", "x", "m", "=", "self", "for", "col", "in", "range", "(", "4", ")", ":", "# Only the top three rows\r", "m", "[", "0", ",", "col", "]", "*=", "x", "m", "[", "1", ",", "col", "]", "*=", "y", "m", "[", "2", ",", "col", "]", "*=", "z", "return", "self" ]
Uniform scale, if only sx argument is specified
[ "Uniform", "scale", "if", "only", "sx", "argument", "is", "specified" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/samples/sdl/NOTWORKING_hellovr_opengl_sdl.py#L160-L172
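The scale record above multiplies only the top three rows of the 4x4 matrix (leaving the homogeneous row alone) and falls back to uniform scaling when only x is given. A small self-contained numpy illustration of the same convention follows; it builds a fresh scale matrix rather than mutating the sample's Matrix4 class.

import numpy

def scale_matrix(x, y=None, z=None):
    """Build a 4x4 scale matrix; uniform when only x is given."""
    y = x if y is None else y
    z = x if z is None else z
    m = numpy.identity(4)
    m[0, 0], m[1, 1], m[2, 2] = x, y, z   # bottom row stays (0, 0, 0, 1)
    return m

print(scale_matrix(2.0))            # uniform scale by 2
print(scale_matrix(1.0, 2.0, 3.0))  # per-axis scale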
15,581
cmbruns/pyopenvr
src/openvr/tracked_devices_actor.py
TrackedDevicesActor._check_devices
def _check_devices(self): "Enumerate OpenVR tracked devices and check whether any need to be initialized" for i in range(1, len(self.poses)): pose = self.poses[i] if not pose.bDeviceIsConnected: continue if not pose.bPoseIsValid: continue if self.show_controllers_only: device_class = openvr.VRSystem().getTrackedDeviceClass(i) if not device_class == openvr.TrackedDeviceClass_Controller: continue model_name = openvr.VRSystem().getStringTrackedDeviceProperty(i, openvr.Prop_RenderModelName_String) # Create a new mesh object, if necessary if model_name not in self.meshes: self.meshes[model_name] = TrackedDeviceMesh(model_name)
python
def _check_devices(self):
    "Enumerate OpenVR tracked devices and check whether any need to be initialized"
    for i in range(1, len(self.poses)):
        pose = self.poses[i]
        if not pose.bDeviceIsConnected:
            continue
        if not pose.bPoseIsValid:
            continue
        if self.show_controllers_only:
            device_class = openvr.VRSystem().getTrackedDeviceClass(i)
            if not device_class == openvr.TrackedDeviceClass_Controller:
                continue
        model_name = openvr.VRSystem().getStringTrackedDeviceProperty(i, openvr.Prop_RenderModelName_String)
        # Create a new mesh object, if necessary
        if model_name not in self.meshes:
            self.meshes[model_name] = TrackedDeviceMesh(model_name)
[ "def", "_check_devices", "(", "self", ")", ":", "for", "i", "in", "range", "(", "1", ",", "len", "(", "self", ".", "poses", ")", ")", ":", "pose", "=", "self", ".", "poses", "[", "i", "]", "if", "not", "pose", ".", "bDeviceIsConnected", ":", "continue", "if", "not", "pose", ".", "bPoseIsValid", ":", "continue", "if", "self", ".", "show_controllers_only", ":", "device_class", "=", "openvr", ".", "VRSystem", "(", ")", ".", "getTrackedDeviceClass", "(", "i", ")", "if", "not", "device_class", "==", "openvr", ".", "TrackedDeviceClass_Controller", ":", "continue", "model_name", "=", "openvr", ".", "VRSystem", "(", ")", ".", "getStringTrackedDeviceProperty", "(", "i", ",", "openvr", ".", "Prop_RenderModelName_String", ")", "# Create a new mesh object, if necessary", "if", "model_name", "not", "in", "self", ".", "meshes", ":", "self", ".", "meshes", "[", "model_name", "]", "=", "TrackedDeviceMesh", "(", "model_name", ")" ]
Enumerate OpenVR tracked devices and check whether any need to be initialized
[ "Enumerate", "OpenVR", "tracked", "devices", "and", "check", "whether", "any", "need", "to", "be", "initialized" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/tracked_devices_actor.py#L144-L159
15,582
cmbruns/pyopenvr
src/openvr/__init__.py
getGenericInterface
def getGenericInterface(interfaceVersion): """ Returns the interface of the specified version. This method must be called after VR_Init. The pointer returned is valid until VR_Shutdown is called. """ error = EVRInitError() result = _openvr.VR_GetGenericInterface(interfaceVersion, byref(error)) _checkInitError(error.value) return result
python
def getGenericInterface(interfaceVersion):
    """
    Returns the interface of the specified version. This method must be called after VR_Init. The
    pointer returned is valid until VR_Shutdown is called.
    """
    error = EVRInitError()
    result = _openvr.VR_GetGenericInterface(interfaceVersion, byref(error))
    _checkInitError(error.value)
    return result
[ "def", "getGenericInterface", "(", "interfaceVersion", ")", ":", "error", "=", "EVRInitError", "(", ")", "result", "=", "_openvr", ".", "VR_GetGenericInterface", "(", "interfaceVersion", ",", "byref", "(", "error", ")", ")", "_checkInitError", "(", "error", ".", "value", ")", "return", "result" ]
Returns the interface of the specified version. This method must be called after VR_Init. The pointer returned is valid until VR_Shutdown is called.
[ "Returns", "the", "interface", "of", "the", "specified", "version", ".", "This", "method", "must", "be", "called", "after", "VR_Init", ".", "The", "pointer", "returned", "is", "valid", "until", "VR_Shutdown", "is", "called", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L6414-L6422
15,583
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getRecommendedRenderTargetSize
def getRecommendedRenderTargetSize(self): """Suggested size for the intermediate render target that the distortion pulls from.""" fn = self.function_table.getRecommendedRenderTargetSize pnWidth = c_uint32() pnHeight = c_uint32() fn(byref(pnWidth), byref(pnHeight)) return pnWidth.value, pnHeight.value
python
def getRecommendedRenderTargetSize(self):
    """Suggested size for the intermediate render target that the distortion pulls from."""
    fn = self.function_table.getRecommendedRenderTargetSize
    pnWidth = c_uint32()
    pnHeight = c_uint32()
    fn(byref(pnWidth), byref(pnHeight))
    return pnWidth.value, pnHeight.value
[ "def", "getRecommendedRenderTargetSize", "(", "self", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getRecommendedRenderTargetSize", "pnWidth", "=", "c_uint32", "(", ")", "pnHeight", "=", "c_uint32", "(", ")", "fn", "(", "byref", "(", "pnWidth", ")", ",", "byref", "(", "pnHeight", ")", ")", "return", "pnWidth", ".", "value", ",", "pnHeight", ".", "value" ]
Suggested size for the intermediate render target that the distortion pulls from.
[ "Suggested", "size", "for", "the", "intermediate", "render", "target", "that", "the", "distortion", "pulls", "from", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2628-L2635
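getRecommendedRenderTargetSize is the call the OpenVrGlRenderer.init_gl record above uses to size its per-eye framebuffers. A minimal hedged sketch of querying it directly:

import openvr

vr_system = openvr.init(openvr.VRApplication_Scene)
try:
    # One recommended size applies to each eye's intermediate render target
    width, height = vr_system.getRecommendedRenderTargetSize()
    print("per-eye render target: %d x %d" % (width, height))
finally:
    openvr.shutdown()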
15,584
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getProjectionMatrix
def getProjectionMatrix(self, eEye, fNearZ, fFarZ): """The projection matrix for the specified eye""" fn = self.function_table.getProjectionMatrix result = fn(eEye, fNearZ, fFarZ) return result
python
def getProjectionMatrix(self, eEye, fNearZ, fFarZ):
    """The projection matrix for the specified eye"""
    fn = self.function_table.getProjectionMatrix
    result = fn(eEye, fNearZ, fFarZ)
    return result
[ "def", "getProjectionMatrix", "(", "self", ",", "eEye", ",", "fNearZ", ",", "fFarZ", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getProjectionMatrix", "result", "=", "fn", "(", "eEye", ",", "fNearZ", ",", "fFarZ", ")", "return", "result" ]
The projection matrix for the specified eye
[ "The", "projection", "matrix", "for", "the", "specified", "eye" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2637-L2642
15,585
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getProjectionRaw
def getProjectionRaw(self, eEye): """ The components necessary to build your own projection matrix in case your application is doing something fancy like infinite Z """ fn = self.function_table.getProjectionRaw pfLeft = c_float() pfRight = c_float() pfTop = c_float() pfBottom = c_float() fn(eEye, byref(pfLeft), byref(pfRight), byref(pfTop), byref(pfBottom)) return pfLeft.value, pfRight.value, pfTop.value, pfBottom.value
python
def getProjectionRaw(self, eEye):
    """
    The components necessary to build your own projection matrix in case your
    application is doing something fancy like infinite Z
    """
    fn = self.function_table.getProjectionRaw
    pfLeft = c_float()
    pfRight = c_float()
    pfTop = c_float()
    pfBottom = c_float()
    fn(eEye, byref(pfLeft), byref(pfRight), byref(pfTop), byref(pfBottom))
    return pfLeft.value, pfRight.value, pfTop.value, pfBottom.value
[ "def", "getProjectionRaw", "(", "self", ",", "eEye", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getProjectionRaw", "pfLeft", "=", "c_float", "(", ")", "pfRight", "=", "c_float", "(", ")", "pfTop", "=", "c_float", "(", ")", "pfBottom", "=", "c_float", "(", ")", "fn", "(", "eEye", ",", "byref", "(", "pfLeft", ")", ",", "byref", "(", "pfRight", ")", ",", "byref", "(", "pfTop", ")", ",", "byref", "(", "pfBottom", ")", ")", "return", "pfLeft", ".", "value", ",", "pfRight", ".", "value", ",", "pfTop", ".", "value", ",", "pfBottom", ".", "value" ]
The components necessary to build your own projection matrix in case your application is doing something fancy like infinite Z
[ "The", "components", "necessary", "to", "build", "your", "own", "projection", "matrix", "in", "case", "your", "application", "is", "doing", "something", "fancy", "like", "infinite", "Z" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2644-L2656
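The four raw values returned above are the tangents of the eye frustum's half-angles (left, right, top, bottom), which is exactly what a custom projection matrix needs. A hedged sketch of the usual composition from those values follows, adapted from the standard OpenVR "compose projection" recipe; it is not a pyopenvr helper.

import numpy
import openvr

def projection_from_raw(vr_system, eye, z_near, z_far):
    """Right-handed OpenGL-style projection built from getProjectionRaw (sketch)."""
    left, right, top, bottom = vr_system.getProjectionRaw(eye)
    idx = 1.0 / (right - left)
    idy = 1.0 / (bottom - top)
    idz = 1.0 / (z_far - z_near)
    sx = right + left
    sy = bottom + top
    return numpy.array([
        [2.0 * idx, 0.0,       sx * idx,     0.0],
        [0.0,       2.0 * idy, sy * idy,     0.0],
        [0.0,       0.0,       -z_far * idz, -z_far * z_near * idz],
        [0.0,       0.0,       -1.0,         0.0],
    ], dtype=numpy.float32)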
15,586
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.computeDistortion
def computeDistortion(self, eEye, fU, fV): """ Gets the result of the distortion function for the specified eye and input UVs. UVs go from 0,0 in the upper left of that eye's viewport and 1,1 in the lower right of that eye's viewport. Returns true for success. Otherwise, returns false, and distortion coordinates are not suitable. """ fn = self.function_table.computeDistortion pDistortionCoordinates = DistortionCoordinates_t() result = fn(eEye, fU, fV, byref(pDistortionCoordinates)) return result, pDistortionCoordinates
python
def computeDistortion(self, eEye, fU, fV):
    """
    Gets the result of the distortion function for the specified eye and input UVs. UVs go from 0,0 in
    the upper left of that eye's viewport and 1,1 in the lower right of that eye's viewport.
    Returns true for success. Otherwise, returns false, and distortion coordinates are not suitable.
    """
    fn = self.function_table.computeDistortion
    pDistortionCoordinates = DistortionCoordinates_t()
    result = fn(eEye, fU, fV, byref(pDistortionCoordinates))
    return result, pDistortionCoordinates
[ "def", "computeDistortion", "(", "self", ",", "eEye", ",", "fU", ",", "fV", ")", ":", "fn", "=", "self", ".", "function_table", ".", "computeDistortion", "pDistortionCoordinates", "=", "DistortionCoordinates_t", "(", ")", "result", "=", "fn", "(", "eEye", ",", "fU", ",", "fV", ",", "byref", "(", "pDistortionCoordinates", ")", ")", "return", "result", ",", "pDistortionCoordinates" ]
Gets the result of the distortion function for the specified eye and input UVs. UVs go from 0,0 in the upper left of that eye's viewport and 1,1 in the lower right of that eye's viewport. Returns true for success. Otherwise, returns false, and distortion coordinates are not suitable.
[ "Gets", "the", "result", "of", "the", "distortion", "function", "for", "the", "specified", "eye", "and", "input", "UVs", ".", "UVs", "go", "from", "0", "0", "in", "the", "upper", "left", "of", "that", "eye", "s", "viewport", "and", "1", "1", "in", "the", "lower", "right", "of", "that", "eye", "s", "viewport", ".", "Returns", "true", "for", "success", ".", "Otherwise", "returns", "false", "and", "distortion", "coordinates", "are", "not", "suitable", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2658-L2668
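computeDistortion is usually evaluated over a grid of UVs to build a distortion mesh, one corrected UV pair per colour channel. A hedged sketch follows; it assumes the returned DistortionCoordinates_t exposes rfRed, rfGreen and rfBlue two-element fields as in the C API.

import openvr

def build_distortion_grid(vr_system, eye, resolution=16):
    """Sample the distortion function on a (resolution+1)^2 UV grid (sketch).
    Field names rfRed/rfGreen/rfBlue follow the C struct and are assumed here."""
    grid = []
    for j in range(resolution + 1):
        for i in range(resolution + 1):
            u = i / float(resolution)
            v = j / float(resolution)
            ok, coords = vr_system.computeDistortion(eye, u, v)
            if not ok:
                continue   # distortion coordinates not suitable at this UV
            grid.append((u, v,
                         (coords.rfRed[0], coords.rfRed[1]),
                         (coords.rfGreen[0], coords.rfGreen[1]),
                         (coords.rfBlue[0], coords.rfBlue[1])))
    return grid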
15,587
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getTimeSinceLastVsync
def getTimeSinceLastVsync(self): """ Returns the number of elapsed seconds since the last recorded vsync event. This will come from a vsync timer event in the timer if possible or from the application-reported time if that is not available. If no vsync times are available the function will return zero for vsync time and frame counter and return false from the method. """ fn = self.function_table.getTimeSinceLastVsync pfSecondsSinceLastVsync = c_float() pulFrameCounter = c_uint64() result = fn(byref(pfSecondsSinceLastVsync), byref(pulFrameCounter)) return result, pfSecondsSinceLastVsync.value, pulFrameCounter.value
python
def getTimeSinceLastVsync(self):
    """
    Returns the number of elapsed seconds since the last recorded vsync event. This
    will come from a vsync timer event in the timer if possible or from the application-reported
    time if that is not available. If no vsync times are available the function
    will return zero for vsync time and frame counter and return false from the method.
    """
    fn = self.function_table.getTimeSinceLastVsync
    pfSecondsSinceLastVsync = c_float()
    pulFrameCounter = c_uint64()
    result = fn(byref(pfSecondsSinceLastVsync), byref(pulFrameCounter))
    return result, pfSecondsSinceLastVsync.value, pulFrameCounter.value
[ "def", "getTimeSinceLastVsync", "(", "self", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getTimeSinceLastVsync", "pfSecondsSinceLastVsync", "=", "c_float", "(", ")", "pulFrameCounter", "=", "c_uint64", "(", ")", "result", "=", "fn", "(", "byref", "(", "pfSecondsSinceLastVsync", ")", ",", "byref", "(", "pulFrameCounter", ")", ")", "return", "result", ",", "pfSecondsSinceLastVsync", ".", "value", ",", "pulFrameCounter", ".", "value" ]
Returns the number of elapsed seconds since the last recorded vsync event. This will come from a vsync timer event in the timer if possible or from the application-reported time if that is not available. If no vsync times are available the function will return zero for vsync time and frame counter and return false from the method.
[ "Returns", "the", "number", "of", "elapsed", "seconds", "since", "the", "last", "recorded", "vsync", "event", ".", "This", "will", "come", "from", "a", "vsync", "timer", "event", "in", "the", "timer", "if", "possible", "or", "from", "the", "application", "-", "reported", "time", "if", "that", "is", "not", "available", ".", "If", "no", "vsync", "times", "are", "available", "the", "function", "will", "return", "zero", "for", "vsync", "time", "and", "frame", "counter", "and", "return", "false", "from", "the", "method", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2681-L2693
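A common use of getTimeSinceLastVsync is predicting "seconds to photons" when requesting device poses: the time left in the current frame plus the display's vsync-to-photon latency. A hedged sketch of that arithmetic follows; the display frequency and vsync-to-photon figures would normally come from the HMD's float properties (Prop_DisplayFrequency_Float, Prop_SecondsFromVsyncToPhotons_Float), and the defaults below are purely illustrative.

import openvr

def predicted_seconds_to_photons(vr_system, display_hz=90.0, vsync_to_photons=0.011):
    """Classic OpenVR pose-prediction arithmetic (sketch, not a pyopenvr API)."""
    ok, seconds_since_vsync, _frame_counter = vr_system.getTimeSinceLastVsync()
    if not ok:
        return 0.0   # no vsync data available yet
    frame_duration = 1.0 / display_hz
    return frame_duration - seconds_since_vsync + vsync_to_photons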
15,588
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getTrackedDeviceActivityLevel
def getTrackedDeviceActivityLevel(self, unDeviceId): """Returns the level of activity on the device.""" fn = self.function_table.getTrackedDeviceActivityLevel result = fn(unDeviceId) return result
python
def getTrackedDeviceActivityLevel(self, unDeviceId):
    """Returns the level of activity on the device."""
    fn = self.function_table.getTrackedDeviceActivityLevel
    result = fn(unDeviceId)
    return result
[ "def", "getTrackedDeviceActivityLevel", "(", "self", ",", "unDeviceId", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getTrackedDeviceActivityLevel", "result", "=", "fn", "(", "unDeviceId", ")", "return", "result" ]
Returns the level of activity on the device.
[ "Returns", "the", "level", "of", "activity", "on", "the", "device", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2835-L2840
15,589
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.applyTransform
def applyTransform(self): """ Convenience utility to apply the specified transform to the specified pose. This properly transforms all pose components, including velocity and angular velocity """ fn = self.function_table.applyTransform pOutputPose = TrackedDevicePose_t() pTrackedDevicePose = TrackedDevicePose_t() pTransform = HmdMatrix34_t() fn(byref(pOutputPose), byref(pTrackedDevicePose), byref(pTransform)) return pOutputPose, pTrackedDevicePose, pTransform
python
def applyTransform(self):
    """
    Convenience utility to apply the specified transform to the specified pose.
    This properly transforms all pose components, including velocity and angular velocity
    """
    fn = self.function_table.applyTransform
    pOutputPose = TrackedDevicePose_t()
    pTrackedDevicePose = TrackedDevicePose_t()
    pTransform = HmdMatrix34_t()
    fn(byref(pOutputPose), byref(pTrackedDevicePose), byref(pTransform))
    return pOutputPose, pTrackedDevicePose, pTransform
[ "def", "applyTransform", "(", "self", ")", ":", "fn", "=", "self", ".", "function_table", ".", "applyTransform", "pOutputPose", "=", "TrackedDevicePose_t", "(", ")", "pTrackedDevicePose", "=", "TrackedDevicePose_t", "(", ")", "pTransform", "=", "HmdMatrix34_t", "(", ")", "fn", "(", "byref", "(", "pOutputPose", ")", ",", "byref", "(", "pTrackedDevicePose", ")", ",", "byref", "(", "pTransform", ")", ")", "return", "pOutputPose", ",", "pTrackedDevicePose", ",", "pTransform" ]
Convenience utility to apply the specified transform to the specified pose. This properly transforms all pose components, including velocity and angular velocity
[ "Convenience", "utility", "to", "apply", "the", "specified", "transform", "to", "the", "specified", "pose", ".", "This", "properly", "transforms", "all", "pose", "components", "including", "velocity", "and", "angular", "velocity" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2842-L2853
15,590
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getTrackedDeviceIndexForControllerRole
def getTrackedDeviceIndexForControllerRole(self, unDeviceType): """Returns the device index associated with a specific role, for example the left hand or the right hand. This function is deprecated in favor of the new IVRInput system.""" fn = self.function_table.getTrackedDeviceIndexForControllerRole result = fn(unDeviceType) return result
python
def getTrackedDeviceIndexForControllerRole(self, unDeviceType):
    """Returns the device index associated with a specific role, for example the left hand or the right hand. This function is deprecated in favor of the new IVRInput system."""
    fn = self.function_table.getTrackedDeviceIndexForControllerRole
    result = fn(unDeviceType)
    return result
[ "def", "getTrackedDeviceIndexForControllerRole", "(", "self", ",", "unDeviceType", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getTrackedDeviceIndexForControllerRole", "result", "=", "fn", "(", "unDeviceType", ")", "return", "result" ]
Returns the device index associated with a specific role, for example the left hand or the right hand. This function is deprecated in favor of the new IVRInput system.
[ "Returns", "the", "device", "index", "associated", "with", "a", "specific", "role", "for", "example", "the", "left", "hand", "or", "the", "right", "hand", ".", "This", "function", "is", "deprecated", "in", "favor", "of", "the", "new", "IVRInput", "system", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2855-L2860
15,591
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getControllerRoleForTrackedDeviceIndex
def getControllerRoleForTrackedDeviceIndex(self, unDeviceIndex): """Returns the controller type associated with a device index. This function is deprecated in favor of the new IVRInput system.""" fn = self.function_table.getControllerRoleForTrackedDeviceIndex result = fn(unDeviceIndex) return result
python
def getControllerRoleForTrackedDeviceIndex(self, unDeviceIndex):
    """Returns the controller type associated with a device index. This function is deprecated in favor of the new IVRInput system."""
    fn = self.function_table.getControllerRoleForTrackedDeviceIndex
    result = fn(unDeviceIndex)
    return result
[ "def", "getControllerRoleForTrackedDeviceIndex", "(", "self", ",", "unDeviceIndex", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getControllerRoleForTrackedDeviceIndex", "result", "=", "fn", "(", "unDeviceIndex", ")", "return", "result" ]
Returns the controller type associated with a device index. This function is deprecated in favor of the new IVRInput system.
[ "Returns", "the", "controller", "type", "associated", "with", "a", "device", "index", ".", "This", "function", "is", "deprecated", "in", "favor", "of", "the", "new", "IVRInput", "system", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2862-L2867
15,592
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.isTrackedDeviceConnected
def isTrackedDeviceConnected(self, unDeviceIndex): """Returns true if there is a device connected in this slot.""" fn = self.function_table.isTrackedDeviceConnected result = fn(unDeviceIndex) return result
python
def isTrackedDeviceConnected(self, unDeviceIndex):
    """Returns true if there is a device connected in this slot."""
    fn = self.function_table.isTrackedDeviceConnected
    result = fn(unDeviceIndex)
    return result
[ "def", "isTrackedDeviceConnected", "(", "self", ",", "unDeviceIndex", ")", ":", "fn", "=", "self", ".", "function_table", ".", "isTrackedDeviceConnected", "result", "=", "fn", "(", "unDeviceIndex", ")", "return", "result" ]
Returns true if there is a device connected in this slot.
[ "Returns", "true", "if", "there", "is", "a", "device", "connected", "in", "this", "slot", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2883-L2888
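isTrackedDeviceConnected is typically used to scan every device slot at startup. A short hedged sketch combining it with getTrackedDeviceClass (also wrapped by this module):

import openvr

vr_system = openvr.init(openvr.VRApplication_Other)
try:
    for index in range(openvr.k_unMaxTrackedDeviceCount):
        if not vr_system.isTrackedDeviceConnected(index):
            continue
        device_class = vr_system.getTrackedDeviceClass(index)
        print("slot %d is connected (device class %r)" % (index, device_class))
finally:
    openvr.shutdown()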
15,593
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getBoolTrackedDeviceProperty
def getBoolTrackedDeviceProperty(self, unDeviceIndex, prop): """Returns a bool property. If the device index is not valid or the property is not a bool type this function will return false.""" fn = self.function_table.getBoolTrackedDeviceProperty pError = ETrackedPropertyError() result = fn(unDeviceIndex, prop, byref(pError)) return result, pError
python
def getBoolTrackedDeviceProperty(self, unDeviceIndex, prop):
    """Returns a bool property. If the device index is not valid or the property is not a bool type this function will return false."""
    fn = self.function_table.getBoolTrackedDeviceProperty
    pError = ETrackedPropertyError()
    result = fn(unDeviceIndex, prop, byref(pError))
    return result, pError
[ "def", "getBoolTrackedDeviceProperty", "(", "self", ",", "unDeviceIndex", ",", "prop", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getBoolTrackedDeviceProperty", "pError", "=", "ETrackedPropertyError", "(", ")", "result", "=", "fn", "(", "unDeviceIndex", ",", "prop", ",", "byref", "(", "pError", ")", ")", "return", "result", ",", "pError" ]
Returns a bool property. If the device index is not valid or the property is not a bool type this function will return false.
[ "Returns", "a", "bool", "property", ".", "If", "the", "device", "index", "is", "not", "valid", "or", "the", "property", "is", "not", "a", "bool", "type", "this", "function", "will", "return", "false", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2890-L2896
15,594
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getArrayTrackedDeviceProperty
def getArrayTrackedDeviceProperty(self, unDeviceIndex, prop, propType, pBuffer, unBufferSize): """ Returns an array of one type of property. If the device index is not valid or the property is not a single value or an array of the specified type, this function will return 0. Otherwise it returns the number of bytes necessary to hold the array of properties. If unBufferSize is greater than the returned size and pBuffer is non-NULL, pBuffer is filled with the contents of array of properties. """ fn = self.function_table.getArrayTrackedDeviceProperty pError = ETrackedPropertyError() result = fn(unDeviceIndex, prop, propType, pBuffer, unBufferSize, byref(pError)) return result, pError
python
def getArrayTrackedDeviceProperty(self, unDeviceIndex, prop, propType, pBuffer, unBufferSize):
    """
    Returns an array of one type of property. If the device index is not valid or the property is not a single value
    or an array of the specified type, this function will return 0. Otherwise it returns the number of bytes necessary
    to hold the array of properties. If unBufferSize is greater than the returned size and pBuffer is non-NULL,
    pBuffer is filled with the contents of array of properties.
    """
    fn = self.function_table.getArrayTrackedDeviceProperty
    pError = ETrackedPropertyError()
    result = fn(unDeviceIndex, prop, propType, pBuffer, unBufferSize, byref(pError))
    return result, pError
[ "def", "getArrayTrackedDeviceProperty", "(", "self", ",", "unDeviceIndex", ",", "prop", ",", "propType", ",", "pBuffer", ",", "unBufferSize", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getArrayTrackedDeviceProperty", "pError", "=", "ETrackedPropertyError", "(", ")", "result", "=", "fn", "(", "unDeviceIndex", ",", "prop", ",", "propType", ",", "pBuffer", ",", "unBufferSize", ",", "byref", "(", "pError", ")", ")", "return", "result", ",", "pError" ]
Returns an array of one type of property. If the device index is not valid or the property is not a single value or an array of the specified type, this function will return 0. Otherwise it returns the number of bytes necessary to hold the array of properties. If unBufferSize is greater than the returned size and pBuffer is non-NULL, pBuffer is filled with the contents of array of properties.
[ "Returns", "an", "array", "of", "one", "type", "of", "property", ".", "If", "the", "device", "index", "is", "not", "valid", "or", "the", "property", "is", "not", "a", "single", "value", "or", "an", "array", "of", "the", "specified", "type", "this", "function", "will", "return", "0", ".", "Otherwise", "it", "returns", "the", "number", "of", "bytes", "necessary", "to", "hold", "the", "array", "of", "properties", ".", "If", "unBufferSize", "is", "greater", "than", "the", "returned", "size", "and", "pBuffer", "is", "non", "-", "NULL", "pBuffer", "is", "filled", "with", "the", "contents", "of", "array", "of", "properties", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2930-L2940
15,595
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getStringTrackedDeviceProperty
def getStringTrackedDeviceProperty(self, unDeviceIndex, prop): """ Returns a string property. If the device index is not valid or the property is not a string type this function will return 0. Otherwise it returns the length of the number of bytes necessary to hold this string including the trailing null. Strings will always fit in buffers of k_unMaxPropertyStringSize characters. """ fn = self.function_table.getStringTrackedDeviceProperty pError = ETrackedPropertyError() # TODO: automate this string argument manipulation **** unRequiredBufferLen = fn( unDeviceIndex, prop, None, 0, byref(pError) ) if unRequiredBufferLen == 0: return b"" pchBuffer = ctypes.create_string_buffer(unRequiredBufferLen) fn( unDeviceIndex, prop, pchBuffer, unRequiredBufferLen, byref(pError) ) if pError.value != TrackedProp_Success: raise OpenVRError(str(pError)) sResult = bytes(pchBuffer.value) return sResult
python
def getStringTrackedDeviceProperty(self, unDeviceIndex, prop):
    """
    Returns a string property. If the device index is not valid or the property is not a string type this function will
    return 0. Otherwise it returns the length of the number of bytes necessary to hold this string including the trailing
    null. Strings will always fit in buffers of k_unMaxPropertyStringSize characters.
    """
    fn = self.function_table.getStringTrackedDeviceProperty
    pError = ETrackedPropertyError()
    # TODO: automate this string argument manipulation ****
    unRequiredBufferLen = fn( unDeviceIndex, prop, None, 0, byref(pError) )
    if unRequiredBufferLen == 0:
        return b""
    pchBuffer = ctypes.create_string_buffer(unRequiredBufferLen)
    fn( unDeviceIndex, prop, pchBuffer, unRequiredBufferLen, byref(pError) )
    if pError.value != TrackedProp_Success:
        raise OpenVRError(str(pError))
    sResult = bytes(pchBuffer.value)
    return sResult
[ "def", "getStringTrackedDeviceProperty", "(", "self", ",", "unDeviceIndex", ",", "prop", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getStringTrackedDeviceProperty", "pError", "=", "ETrackedPropertyError", "(", ")", "# TODO: automate this string argument manipulation ****", "unRequiredBufferLen", "=", "fn", "(", "unDeviceIndex", ",", "prop", ",", "None", ",", "0", ",", "byref", "(", "pError", ")", ")", "if", "unRequiredBufferLen", "==", "0", ":", "return", "b\"\"", "pchBuffer", "=", "ctypes", ".", "create_string_buffer", "(", "unRequiredBufferLen", ")", "fn", "(", "unDeviceIndex", ",", "prop", ",", "pchBuffer", ",", "unRequiredBufferLen", ",", "byref", "(", "pError", ")", ")", "if", "pError", ".", "value", "!=", "TrackedProp_Success", ":", "raise", "OpenVRError", "(", "str", "(", "pError", ")", ")", "sResult", "=", "bytes", "(", "pchBuffer", ".", "value", ")", "return", "sResult" ]
Returns a string property. If the device index is not valid or the property is not a string type this function will return 0. Otherwise it returns the length of the number of bytes necessary to hold this string including the trailing null. Strings will always fit in buffers of k_unMaxPropertyStringSize characters.
[ "Returns", "a", "string", "property", ".", "If", "the", "device", "index", "is", "not", "valid", "or", "the", "property", "is", "not", "a", "string", "type", "this", "function", "will", "return", "0", ".", "Otherwise", "it", "returns", "the", "length", "of", "the", "number", "of", "bytes", "necessary", "to", "hold", "this", "string", "including", "the", "trailing", "null", ".", "Strings", "will", "always", "fit", "in", "buffers", "of", "k_unMaxPropertyStringSize", "characters", "." ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2942-L2960
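getStringTrackedDeviceProperty is the call the TrackedDevicesActor record above uses to fetch Prop_RenderModelName_String. A hedged sketch that reads two common string properties for every connected device; note that in this version the method returns bytes, hence the decode.

import openvr

vr_system = openvr.init(openvr.VRApplication_Other)
try:
    for index in range(openvr.k_unMaxTrackedDeviceCount):
        if not vr_system.isTrackedDeviceConnected(index):
            continue
        model = vr_system.getStringTrackedDeviceProperty(
            index, openvr.Prop_RenderModelName_String)
        serial = vr_system.getStringTrackedDeviceProperty(
            index, openvr.Prop_SerialNumber_String)
        print(index, model.decode("utf-8"), serial.decode("utf-8"))
finally:
    openvr.shutdown()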
15,596
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getPropErrorNameFromEnum
def getPropErrorNameFromEnum(self, error): """ returns a string that corresponds with the specified property error. The string will be the name of the error enum value for all valid error codes """ fn = self.function_table.getPropErrorNameFromEnum result = fn(error) return result
python
def getPropErrorNameFromEnum(self, error):
    """
    returns a string that corresponds with the specified property error. The string will be the name
    of the error enum value for all valid error codes
    """
    fn = self.function_table.getPropErrorNameFromEnum
    result = fn(error)
    return result
[ "def", "getPropErrorNameFromEnum", "(", "self", ",", "error", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getPropErrorNameFromEnum", "result", "=", "fn", "(", "error", ")", "return", "result" ]
returns a string that corresponds with the specified property error. The string will be the name of the error enum value for all valid error codes
[ "returns", "a", "string", "that", "corresponds", "with", "the", "specified", "property", "error", ".", "The", "string", "will", "be", "the", "name", "of", "the", "error", "enum", "value", "for", "all", "valid", "error", "codes" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2962-L2970
15,597
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.pollNextEvent
def pollNextEvent(self, pEvent): """ Returns true and fills the event with the next event on the queue if there is one. If there are no events this method returns false. uncbVREvent should be the size in bytes of the VREvent_t struct """ fn = self.function_table.pollNextEvent result = fn(byref(pEvent), sizeof(VREvent_t)) return result != 0
python
def pollNextEvent(self, pEvent):
    """
    Returns true and fills the event with the next event on the queue if there is one. If there are no events
    this method returns false. uncbVREvent should be the size in bytes of the VREvent_t struct
    """
    fn = self.function_table.pollNextEvent
    result = fn(byref(pEvent), sizeof(VREvent_t))
    return result != 0
[ "def", "pollNextEvent", "(", "self", ",", "pEvent", ")", ":", "fn", "=", "self", ".", "function_table", ".", "pollNextEvent", "result", "=", "fn", "(", "byref", "(", "pEvent", ")", ",", "sizeof", "(", "VREvent_t", ")", ")", "return", "result", "!=", "0" ]
Returns true and fills the event with the next event on the queue if there is one. If there are no events this method returns false. uncbVREvent should be the size in bytes of the VREvent_t struct
[ "Returns", "true", "and", "fills", "the", "event", "with", "the", "next", "event", "on", "the", "queue", "if", "there", "is", "one", ".", "If", "there", "are", "no", "events", "this", "method", "returns", "false", ".", "uncbVREvent", "should", "be", "the", "size", "in", "bytes", "of", "the", "VREvent_t", "struct" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2972-L2980
15,598
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.pollNextEventWithPose
def pollNextEventWithPose(self, eOrigin, uncbVREvent): """ Returns true and fills the event with the next event on the queue if there is one. If there are no events this method returns false. Fills in the pose of the associated tracked device in the provided pose struct. This pose will always be older than the call to this function and should not be used to render the device. uncbVREvent should be the size in bytes of the VREvent_t struct """ fn = self.function_table.pollNextEventWithPose pEvent = VREvent_t() pTrackedDevicePose = TrackedDevicePose_t() result = fn(eOrigin, byref(pEvent), uncbVREvent, byref(pTrackedDevicePose)) return result, pEvent, pTrackedDevicePose
python
def pollNextEventWithPose(self, eOrigin, uncbVREvent):
    """
    Returns true and fills the event with the next event on the queue if there is one. If there are no events
    this method returns false. Fills in the pose of the associated tracked device in the provided pose struct.
    This pose will always be older than the call to this function and should not be used to render the device.
    uncbVREvent should be the size in bytes of the VREvent_t struct
    """
    fn = self.function_table.pollNextEventWithPose
    pEvent = VREvent_t()
    pTrackedDevicePose = TrackedDevicePose_t()
    result = fn(eOrigin, byref(pEvent), uncbVREvent, byref(pTrackedDevicePose))
    return result, pEvent, pTrackedDevicePose
[ "def", "pollNextEventWithPose", "(", "self", ",", "eOrigin", ",", "uncbVREvent", ")", ":", "fn", "=", "self", ".", "function_table", ".", "pollNextEventWithPose", "pEvent", "=", "VREvent_t", "(", ")", "pTrackedDevicePose", "=", "TrackedDevicePose_t", "(", ")", "result", "=", "fn", "(", "eOrigin", ",", "byref", "(", "pEvent", ")", ",", "uncbVREvent", ",", "byref", "(", "pTrackedDevicePose", ")", ")", "return", "result", ",", "pEvent", ",", "pTrackedDevicePose" ]
Returns true and fills the event with the next event on the queue if there is one. If there are no events this method returns false. Fills in the pose of the associated tracked device in the provided pose struct. This pose will always be older than the call to this function and should not be used to render the device. uncbVREvent should be the size in bytes of the VREvent_t struct
[ "Returns", "true", "and", "fills", "the", "event", "with", "the", "next", "event", "on", "the", "queue", "if", "there", "is", "one", ".", "If", "there", "are", "no", "events", "this", "method", "returns", "false", ".", "Fills", "in", "the", "pose", "of", "the", "associated", "tracked", "device", "in", "the", "provided", "pose", "struct", ".", "This", "pose", "will", "always", "be", "older", "than", "the", "call", "to", "this", "function", "and", "should", "not", "be", "used", "to", "render", "the", "device", ".", "uncbVREvent", "should", "be", "the", "size", "in", "bytes", "of", "the", "VREvent_t", "struct" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2982-L2994
15,599
cmbruns/pyopenvr
src/openvr/__init__.py
IVRSystem.getEventTypeNameFromEnum
def getEventTypeNameFromEnum(self, eType): """returns the name of an EVREvent enum value""" fn = self.function_table.getEventTypeNameFromEnum result = fn(eType) return result
python
def getEventTypeNameFromEnum(self, eType):
    """returns the name of an EVREvent enum value"""
    fn = self.function_table.getEventTypeNameFromEnum
    result = fn(eType)
    return result
[ "def", "getEventTypeNameFromEnum", "(", "self", ",", "eType", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getEventTypeNameFromEnum", "result", "=", "fn", "(", "eType", ")", "return", "result" ]
returns the name of an EVREvent enum value
[ "returns", "the", "name", "of", "an", "EVREvent", "enum", "value" ]
68395d26bb3df6ab1f0f059c38d441f962938be6
https://github.com/cmbruns/pyopenvr/blob/68395d26bb3df6ab1f0f059c38d441f962938be6/src/openvr/__init__.py#L2996-L3001
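pollNextEvent (documented a few records above) is meant to be drained once per frame until it returns False, and getEventTypeNameFromEnum above turns the numeric event type into a readable name. A hedged sketch of that event pump follows; the event field names (eventType, trackedDeviceIndex) follow the VREvent_t struct.

import openvr

def pump_vr_events(vr_system):
    """Drain the OpenVR event queue once per frame (sketch)."""
    event = openvr.VREvent_t()
    while vr_system.pollNextEvent(event):
        name = vr_system.getEventTypeNameFromEnum(event.eventType)
        if event.eventType == openvr.VREvent_TrackedDeviceActivated:
            print("device %d activated" % event.trackedDeviceIndex)
        else:
            print("event:", name)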