| id (int32, 0–252k) | repo (string, 7–55 chars) | path (string, 4–127 chars) | func_name (string, 1–88 chars) | original_string (string, 75–19.8k chars) | language (1 class) | code (string, 75–19.8k chars) | code_tokens (list) | docstring (string, 3–17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87–242 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|
15,400
|
postlund/pyatv
|
pyatv/dmap/__init__.py
|
DmapRemoteControl.set_position
|
def set_position(self, pos):
"""Seek in the current playing media."""
time_in_ms = int(pos)*1000
return self.apple_tv.set_property('dacp.playingtime', time_in_ms)
|
python
|
def set_position(self, pos):
"""Seek in the current playing media."""
time_in_ms = int(pos)*1000
return self.apple_tv.set_property('dacp.playingtime', time_in_ms)
|
[
"def",
"set_position",
"(",
"self",
",",
"pos",
")",
":",
"time_in_ms",
"=",
"int",
"(",
"pos",
")",
"*",
"1000",
"return",
"self",
".",
"apple_tv",
".",
"set_property",
"(",
"'dacp.playingtime'",
",",
"time_in_ms",
")"
] |
Seek in the current playing media.
|
[
"Seek",
"in",
"the",
"current",
"playing",
"media",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L185-L188
|
15,401
|
postlund/pyatv
|
examples/device_auth.py
|
authenticate_with_device
|
async def authenticate_with_device(atv):
"""Perform device authentication and print credentials."""
credentials = await atv.airplay.generate_credentials()
await atv.airplay.load_credentials(credentials)
try:
await atv.airplay.start_authentication()
pin = input('PIN Code: ')
await atv.airplay.finish_authentication(pin)
print('Credentials: {0}'.format(credentials))
except exceptions.DeviceAuthenticationError:
print('Failed to authenticate', file=sys.stderr)
|
python
|
async def authenticate_with_device(atv):
"""Perform device authentication and print credentials."""
credentials = await atv.airplay.generate_credentials()
await atv.airplay.load_credentials(credentials)
try:
await atv.airplay.start_authentication()
pin = input('PIN Code: ')
await atv.airplay.finish_authentication(pin)
print('Credentials: {0}'.format(credentials))
except exceptions.DeviceAuthenticationError:
print('Failed to authenticate', file=sys.stderr)
|
[
"async",
"def",
"authenticate_with_device",
"(",
"atv",
")",
":",
"credentials",
"=",
"await",
"atv",
".",
"airplay",
".",
"generate_credentials",
"(",
")",
"await",
"atv",
".",
"airplay",
".",
"load_credentials",
"(",
"credentials",
")",
"try",
":",
"await",
"atv",
".",
"airplay",
".",
"start_authentication",
"(",
")",
"pin",
"=",
"input",
"(",
"'PIN Code: '",
")",
"await",
"atv",
".",
"airplay",
".",
"finish_authentication",
"(",
"pin",
")",
"print",
"(",
"'Credentials: {0}'",
".",
"format",
"(",
"credentials",
")",
")",
"except",
"exceptions",
".",
"DeviceAuthenticationError",
":",
"print",
"(",
"'Failed to authenticate'",
",",
"file",
"=",
"sys",
".",
"stderr",
")"
] |
Perform device authentication and print credentials.
|
[
"Perform",
"device",
"authentication",
"and",
"print",
"credentials",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/examples/device_auth.py#L7-L19
|
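The `authenticate_with_device` coroutine in the row above only performs the AirPlay pairing handshake; it still needs a connected `atv` object. Below is a minimal, hypothetical driver for it. It assumes the pyatv API at this commit, reuses `pyatv.scan_for_apple_tvs`, `pyatv.connect_to_apple_tv` and `atv.logout()` exactly as they appear in the `auto_connect` row further down, and expects `authenticate_with_device` from the row above to be defined in the same module; the 5-second timeout is an arbitrary choice.

```python
import asyncio
import sys

import pyatv


async def main(loop):
    # Scan the network, connect to the first device found, authenticate,
    # and always log out again, mirroring what helpers.auto_connect does.
    atvs = await pyatv.scan_for_apple_tvs(loop, timeout=5)
    if not atvs:
        print('No Apple TV found on the network', file=sys.stderr)
        return
    atv = pyatv.connect_to_apple_tv(atvs[0], loop)
    try:
        await authenticate_with_device(atv)  # coroutine from the row above
    finally:
        await atv.logout()


loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop))
```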
15,402
|
postlund/pyatv
|
pyatv/mrp/chacha20.py
|
Chacha20Cipher.encrypt
|
def encrypt(self, data, nounce=None):
"""Encrypt data with counter or specified nounce."""
if nounce is None:
nounce = self._out_counter.to_bytes(length=8, byteorder='little')
self._out_counter += 1
return self._enc_out.seal(b'\x00\x00\x00\x00' + nounce, data, bytes())
|
python
|
def encrypt(self, data, nounce=None):
"""Encrypt data with counter or specified nounce."""
if nounce is None:
nounce = self._out_counter.to_bytes(length=8, byteorder='little')
self._out_counter += 1
return self._enc_out.seal(b'\x00\x00\x00\x00' + nounce, data, bytes())
|
[
"def",
"encrypt",
"(",
"self",
",",
"data",
",",
"nounce",
"=",
"None",
")",
":",
"if",
"nounce",
"is",
"None",
":",
"nounce",
"=",
"self",
".",
"_out_counter",
".",
"to_bytes",
"(",
"length",
"=",
"8",
",",
"byteorder",
"=",
"'little'",
")",
"self",
".",
"_out_counter",
"+=",
"1",
"return",
"self",
".",
"_enc_out",
".",
"seal",
"(",
"b'\\x00\\x00\\x00\\x00'",
"+",
"nounce",
",",
"data",
",",
"bytes",
"(",
")",
")"
] |
Encrypt data with counter or specified nounce.
|
[
"Encrypt",
"data",
"with",
"counter",
"or",
"specified",
"nounce",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/chacha20.py#L15-L21
|
15,403
|
postlund/pyatv
|
pyatv/mrp/chacha20.py
|
Chacha20Cipher.decrypt
|
def decrypt(self, data, nounce=None):
"""Decrypt data with counter or specified nounce."""
if nounce is None:
nounce = self._in_counter.to_bytes(length=8, byteorder='little')
self._in_counter += 1
decrypted = self._enc_in.open(
b'\x00\x00\x00\x00' + nounce, data, bytes())
if not decrypted:
raise Exception('data decrypt failed') # TODO: new exception
return bytes(decrypted)
|
python
|
def decrypt(self, data, nounce=None):
"""Decrypt data with counter or specified nounce."""
if nounce is None:
nounce = self._in_counter.to_bytes(length=8, byteorder='little')
self._in_counter += 1
decrypted = self._enc_in.open(
b'\x00\x00\x00\x00' + nounce, data, bytes())
if not decrypted:
raise Exception('data decrypt failed') # TODO: new exception
return bytes(decrypted)
|
[
"def",
"decrypt",
"(",
"self",
",",
"data",
",",
"nounce",
"=",
"None",
")",
":",
"if",
"nounce",
"is",
"None",
":",
"nounce",
"=",
"self",
".",
"_in_counter",
".",
"to_bytes",
"(",
"length",
"=",
"8",
",",
"byteorder",
"=",
"'little'",
")",
"self",
".",
"_in_counter",
"+=",
"1",
"decrypted",
"=",
"self",
".",
"_enc_in",
".",
"open",
"(",
"b'\\x00\\x00\\x00\\x00'",
"+",
"nounce",
",",
"data",
",",
"bytes",
"(",
")",
")",
"if",
"not",
"decrypted",
":",
"raise",
"Exception",
"(",
"'data decrypt failed'",
")",
"# TODO: new exception",
"return",
"bytes",
"(",
"decrypted",
")"
] |
Decrypt data with counter or specified nounce.
|
[
"Decrypt",
"data",
"with",
"counter",
"or",
"specified",
"nounce",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/chacha20.py#L23-L35
|
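The two `Chacha20Cipher` rows above share one detail worth spelling out: the 12-byte ChaCha20-Poly1305 nonce is built from a constant 4-byte zero prefix plus an 8-byte little-endian message counter, with a separate counter per direction. The sketch below reproduces that nonce construction with the `cryptography` package's `ChaCha20Poly1305` AEAD instead of the `seal`/`open` bindings used in pyatv; the class, the key handling and the library choice are assumptions made purely to illustrate the scheme, not pyatv's actual implementation.

```python
from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305


class CounterNonceCipher:
    """Toy cipher that derives each nonce from a per-direction counter."""

    def __init__(self, out_key: bytes, in_key: bytes):
        self._enc_out = ChaCha20Poly1305(out_key)
        self._enc_in = ChaCha20Poly1305(in_key)
        self._out_counter = 0
        self._in_counter = 0

    def encrypt(self, data: bytes) -> bytes:
        # 4 zero bytes + 8-byte little-endian counter = 12-byte nonce
        nonce = b'\x00' * 4 + self._out_counter.to_bytes(8, 'little')
        self._out_counter += 1
        return self._enc_out.encrypt(nonce, data, None)

    def decrypt(self, data: bytes) -> bytes:
        nonce = b'\x00' * 4 + self._in_counter.to_bytes(8, 'little')
        self._in_counter += 1
        return self._enc_in.decrypt(nonce, data, None)


# Round trip with the same key for both directions (illustration only;
# a real session would use distinct output and input keys).
key = ChaCha20Poly1305.generate_key()
sender = CounterNonceCipher(key, key)
receiver = CounterNonceCipher(key, key)
assert receiver.decrypt(sender.encrypt(b'hello')) == b'hello'
```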
15,404
|
postlund/pyatv
|
pyatv/helpers.py
|
auto_connect
|
def auto_connect(handler, timeout=5, not_found=None, event_loop=None):
"""Short method for connecting to a device.
This is a convenience method that create an event loop, auto discovers
devices, picks the first device found, connects to it and passes it to a
user provided handler. An optional error handler can be provided that is
called when no device was found. Very inflexible in many cases, but can be
handys sometimes when trying things.
Note 1: both handler and not_found must be coroutines
Note 2: An optional loop can be passed if needed (mainly for testing)
"""
# A coroutine is used so we can connect to the device while being inside
# the event loop
async def _handle(loop):
atvs = await pyatv.scan_for_apple_tvs(
loop, timeout=timeout, abort_on_found=True)
# Take the first device found
if atvs:
atv = pyatv.connect_to_apple_tv(atvs[0], loop)
try:
await handler(atv)
finally:
await atv.logout()
else:
if not_found is not None:
await not_found()
loop = event_loop if event_loop else asyncio.get_event_loop()
loop.run_until_complete(_handle(loop))
|
python
|
def auto_connect(handler, timeout=5, not_found=None, event_loop=None):
"""Short method for connecting to a device.
This is a convenience method that create an event loop, auto discovers
devices, picks the first device found, connects to it and passes it to a
user provided handler. An optional error handler can be provided that is
called when no device was found. Very inflexible in many cases, but can be
handys sometimes when trying things.
Note 1: both handler and not_found must be coroutines
Note 2: An optional loop can be passed if needed (mainly for testing)
"""
# A coroutine is used so we can connect to the device while being inside
# the event loop
async def _handle(loop):
atvs = await pyatv.scan_for_apple_tvs(
loop, timeout=timeout, abort_on_found=True)
# Take the first device found
if atvs:
atv = pyatv.connect_to_apple_tv(atvs[0], loop)
try:
await handler(atv)
finally:
await atv.logout()
else:
if not_found is not None:
await not_found()
loop = event_loop if event_loop else asyncio.get_event_loop()
loop.run_until_complete(_handle(loop))
|
[
"def",
"auto_connect",
"(",
"handler",
",",
"timeout",
"=",
"5",
",",
"not_found",
"=",
"None",
",",
"event_loop",
"=",
"None",
")",
":",
"# A coroutine is used so we can connect to the device while being inside",
"# the event loop",
"async",
"def",
"_handle",
"(",
"loop",
")",
":",
"atvs",
"=",
"await",
"pyatv",
".",
"scan_for_apple_tvs",
"(",
"loop",
",",
"timeout",
"=",
"timeout",
",",
"abort_on_found",
"=",
"True",
")",
"# Take the first device found",
"if",
"atvs",
":",
"atv",
"=",
"pyatv",
".",
"connect_to_apple_tv",
"(",
"atvs",
"[",
"0",
"]",
",",
"loop",
")",
"try",
":",
"await",
"handler",
"(",
"atv",
")",
"finally",
":",
"await",
"atv",
".",
"logout",
"(",
")",
"else",
":",
"if",
"not_found",
"is",
"not",
"None",
":",
"await",
"not_found",
"(",
")",
"loop",
"=",
"event_loop",
"if",
"event_loop",
"else",
"asyncio",
".",
"get_event_loop",
"(",
")",
"loop",
".",
"run_until_complete",
"(",
"_handle",
"(",
"loop",
")",
")"
] |
Short method for connecting to a device.
This is a convenience method that create an event loop, auto discovers
devices, picks the first device found, connects to it and passes it to a
user provided handler. An optional error handler can be provided that is
called when no device was found. Very inflexible in many cases, but can be
handys sometimes when trying things.
Note 1: both handler and not_found must be coroutines
Note 2: An optional loop can be passed if needed (mainly for testing)
|
[
"Short",
"method",
"for",
"connecting",
"to",
"a",
"device",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/helpers.py#L7-L37
|
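A typical use of `auto_connect` is to wrap a one-shot coroutine, as in the sketch below. The handler name, the `not_found` callback and the assumption that the connected object exposes a `metadata.playing()` coroutine (suggested by the `MrpMetadata.playing` row later in this table) are illustrative; only `helpers.auto_connect` itself is taken from the row above.

```python
import sys

from pyatv import helpers


async def print_what_is_playing(atv):
    # Hypothetical handler: fetch and print the current playback state.
    playing = await atv.metadata.playing()
    print('Currently playing:', playing)


async def no_device_found():
    print('No Apple TV found on the network', file=sys.stderr)


# auto_connect creates the event loop, scans, connects to the first device,
# runs the handler and logs out afterwards.
helpers.auto_connect(print_what_is_playing, not_found=no_device_found)
```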
15,405
|
postlund/pyatv
|
pyatv/dmap/daap.py
|
DaapRequester.login
|
async def login(self):
"""Login to Apple TV using specified login id."""
# Do not use session.get_data(...) in login as that would end up in
# an infinte loop.
def _login_request():
return self.http.get_data(
self._mkurl('login?[AUTH]&hasFP=1',
session=False, login_id=True),
headers=_DMAP_HEADERS)
resp = await self._do(_login_request, is_login=True)
self._session_id = parser.first(resp, 'mlog', 'mlid')
_LOGGER.info('Logged in and got session id %s', self._session_id)
return self._session_id
|
python
|
async def login(self):
"""Login to Apple TV using specified login id."""
# Do not use session.get_data(...) in login as that would end up in
# an infinte loop.
def _login_request():
return self.http.get_data(
self._mkurl('login?[AUTH]&hasFP=1',
session=False, login_id=True),
headers=_DMAP_HEADERS)
resp = await self._do(_login_request, is_login=True)
self._session_id = parser.first(resp, 'mlog', 'mlid')
_LOGGER.info('Logged in and got session id %s', self._session_id)
return self._session_id
|
[
"async",
"def",
"login",
"(",
"self",
")",
":",
"# Do not use session.get_data(...) in login as that would end up in",
"# an infinte loop.",
"def",
"_login_request",
"(",
")",
":",
"return",
"self",
".",
"http",
".",
"get_data",
"(",
"self",
".",
"_mkurl",
"(",
"'login?[AUTH]&hasFP=1'",
",",
"session",
"=",
"False",
",",
"login_id",
"=",
"True",
")",
",",
"headers",
"=",
"_DMAP_HEADERS",
")",
"resp",
"=",
"await",
"self",
".",
"_do",
"(",
"_login_request",
",",
"is_login",
"=",
"True",
")",
"self",
".",
"_session_id",
"=",
"parser",
".",
"first",
"(",
"resp",
",",
"'mlog'",
",",
"'mlid'",
")",
"_LOGGER",
".",
"info",
"(",
"'Logged in and got session id %s'",
",",
"self",
".",
"_session_id",
")",
"return",
"self",
".",
"_session_id"
] |
Login to Apple TV using specified login id.
|
[
"Login",
"to",
"Apple",
"TV",
"using",
"specified",
"login",
"id",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/daap.py#L41-L55
|
15,406
|
postlund/pyatv
|
pyatv/dmap/daap.py
|
DaapRequester.get
|
async def get(self, cmd, daap_data=True, timeout=None, **args):
"""Perform a DAAP GET command."""
def _get_request():
return self.http.get_data(
self._mkurl(cmd, *args),
headers=_DMAP_HEADERS,
timeout=timeout)
await self._assure_logged_in()
return await self._do(_get_request, is_daap=daap_data)
|
python
|
async def get(self, cmd, daap_data=True, timeout=None, **args):
"""Perform a DAAP GET command."""
def _get_request():
return self.http.get_data(
self._mkurl(cmd, *args),
headers=_DMAP_HEADERS,
timeout=timeout)
await self._assure_logged_in()
return await self._do(_get_request, is_daap=daap_data)
|
[
"async",
"def",
"get",
"(",
"self",
",",
"cmd",
",",
"daap_data",
"=",
"True",
",",
"timeout",
"=",
"None",
",",
"*",
"*",
"args",
")",
":",
"def",
"_get_request",
"(",
")",
":",
"return",
"self",
".",
"http",
".",
"get_data",
"(",
"self",
".",
"_mkurl",
"(",
"cmd",
",",
"*",
"args",
")",
",",
"headers",
"=",
"_DMAP_HEADERS",
",",
"timeout",
"=",
"timeout",
")",
"await",
"self",
".",
"_assure_logged_in",
"(",
")",
"return",
"await",
"self",
".",
"_do",
"(",
"_get_request",
",",
"is_daap",
"=",
"daap_data",
")"
] |
Perform a DAAP GET command.
|
[
"Perform",
"a",
"DAAP",
"GET",
"command",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/daap.py#L57-L66
|
15,407
|
postlund/pyatv
|
pyatv/dmap/daap.py
|
DaapRequester.get_url
|
def get_url(self, cmd, **args):
"""Expand the request URL for a request."""
return self.http.base_url + self._mkurl(cmd, *args)
|
python
|
def get_url(self, cmd, **args):
"""Expand the request URL for a request."""
return self.http.base_url + self._mkurl(cmd, *args)
|
[
"def",
"get_url",
"(",
"self",
",",
"cmd",
",",
"*",
"*",
"args",
")",
":",
"return",
"self",
".",
"http",
".",
"base_url",
"+",
"self",
".",
"_mkurl",
"(",
"cmd",
",",
"*",
"args",
")"
] |
Expand the request URL for a request.
|
[
"Expand",
"the",
"request",
"URL",
"for",
"a",
"request",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/daap.py#L68-L70
|
15,408
|
postlund/pyatv
|
pyatv/dmap/daap.py
|
DaapRequester.post
|
async def post(self, cmd, data=None, timeout=None, **args):
"""Perform DAAP POST command with optional data."""
def _post_request():
headers = copy(_DMAP_HEADERS)
headers['Content-Type'] = 'application/x-www-form-urlencoded'
return self.http.post_data(
self._mkurl(cmd, *args),
data=data,
headers=headers,
timeout=timeout)
await self._assure_logged_in()
return await self._do(_post_request)
|
python
|
async def post(self, cmd, data=None, timeout=None, **args):
"""Perform DAAP POST command with optional data."""
def _post_request():
headers = copy(_DMAP_HEADERS)
headers['Content-Type'] = 'application/x-www-form-urlencoded'
return self.http.post_data(
self._mkurl(cmd, *args),
data=data,
headers=headers,
timeout=timeout)
await self._assure_logged_in()
return await self._do(_post_request)
|
[
"async",
"def",
"post",
"(",
"self",
",",
"cmd",
",",
"data",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"*",
"*",
"args",
")",
":",
"def",
"_post_request",
"(",
")",
":",
"headers",
"=",
"copy",
"(",
"_DMAP_HEADERS",
")",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"'application/x-www-form-urlencoded'",
"return",
"self",
".",
"http",
".",
"post_data",
"(",
"self",
".",
"_mkurl",
"(",
"cmd",
",",
"*",
"args",
")",
",",
"data",
"=",
"data",
",",
"headers",
"=",
"headers",
",",
"timeout",
"=",
"timeout",
")",
"await",
"self",
".",
"_assure_logged_in",
"(",
")",
"return",
"await",
"self",
".",
"_do",
"(",
"_post_request",
")"
] |
Perform DAAP POST command with optional data.
|
[
"Perform",
"DAAP",
"POST",
"command",
"with",
"optional",
"data",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/daap.py#L72-L84
|
15,409
|
postlund/pyatv
|
pyatv/mrp/__init__.py
|
MrpRemoteControl.set_repeat
|
def set_repeat(self, repeat_mode):
"""Change repeat mode."""
# TODO: extract to convert module
if int(repeat_mode) == const.REPEAT_STATE_OFF:
state = 1
elif int(repeat_mode) == const.REPEAT_STATE_ALL:
state = 2
elif int(repeat_mode) == const.REPEAT_STATE_TRACK:
state = 3
else:
raise ValueError('Invalid repeat mode: ' + str(repeat_mode))
return self.protocol.send(messages.repeat(state))
|
python
|
def set_repeat(self, repeat_mode):
"""Change repeat mode."""
# TODO: extract to convert module
if int(repeat_mode) == const.REPEAT_STATE_OFF:
state = 1
elif int(repeat_mode) == const.REPEAT_STATE_ALL:
state = 2
elif int(repeat_mode) == const.REPEAT_STATE_TRACK:
state = 3
else:
raise ValueError('Invalid repeat mode: ' + str(repeat_mode))
return self.protocol.send(messages.repeat(state))
|
[
"def",
"set_repeat",
"(",
"self",
",",
"repeat_mode",
")",
":",
"# TODO: extract to convert module",
"if",
"int",
"(",
"repeat_mode",
")",
"==",
"const",
".",
"REPEAT_STATE_OFF",
":",
"state",
"=",
"1",
"elif",
"int",
"(",
"repeat_mode",
")",
"==",
"const",
".",
"REPEAT_STATE_ALL",
":",
"state",
"=",
"2",
"elif",
"int",
"(",
"repeat_mode",
")",
"==",
"const",
".",
"REPEAT_STATE_TRACK",
":",
"state",
"=",
"3",
"else",
":",
"raise",
"ValueError",
"(",
"'Invalid repeat mode: '",
"+",
"str",
"(",
"repeat_mode",
")",
")",
"return",
"self",
".",
"protocol",
".",
"send",
"(",
"messages",
".",
"repeat",
"(",
"state",
")",
")"
] |
Change repeat mode.
|
[
"Change",
"repeat",
"mode",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L116-L128
|
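The chained `if/elif` in the row above is a straight mapping from pyatv's repeat constants to the protocol's wire values (1, 2, 3); the TODO comment in the row suggests factoring this conversion out. A hypothetical way to express the same mapping with a dict lookup is sketched here; this is an illustration of the table, not pyatv code.

```python
from pyatv import const

# Wire values used by the MRP repeat command, keyed by pyatv constants.
_REPEAT_WIRE_VALUES = {
    const.REPEAT_STATE_OFF: 1,
    const.REPEAT_STATE_ALL: 2,
    const.REPEAT_STATE_TRACK: 3,
}


def repeat_wire_value(repeat_mode):
    """Translate a pyatv repeat constant to its MRP wire value."""
    try:
        return _REPEAT_WIRE_VALUES[int(repeat_mode)]
    except KeyError:
        raise ValueError('Invalid repeat mode: ' + str(repeat_mode))
```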
15,410
|
postlund/pyatv
|
pyatv/mrp/__init__.py
|
MrpPlaying.genre
|
def genre(self):
"""Genre of the currently playing song."""
if self._metadata:
from pyatv.mrp.protobuf import ContentItem_pb2
transaction = ContentItem_pb2.ContentItem()
transaction.ParseFromString(self._metadata)
|
python
|
def genre(self):
"""Genre of the currently playing song."""
if self._metadata:
from pyatv.mrp.protobuf import ContentItem_pb2
transaction = ContentItem_pb2.ContentItem()
transaction.ParseFromString(self._metadata)
|
[
"def",
"genre",
"(",
"self",
")",
":",
"if",
"self",
".",
"_metadata",
":",
"from",
"pyatv",
".",
"mrp",
".",
"protobuf",
"import",
"ContentItem_pb2",
"transaction",
"=",
"ContentItem_pb2",
".",
"ContentItem",
"(",
")",
"transaction",
".",
"ParseFromString",
"(",
"self",
".",
"_metadata",
")"
] |
Genre of the currently playing song.
|
[
"Genre",
"of",
"the",
"currently",
"playing",
"song",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L172-L177
|
15,411
|
postlund/pyatv
|
pyatv/mrp/__init__.py
|
MrpPlaying.total_time
|
def total_time(self):
"""Total play time in seconds."""
now_playing = self._setstate.nowPlayingInfo
if now_playing.HasField('duration'):
return int(now_playing.duration)
return None
|
python
|
def total_time(self):
"""Total play time in seconds."""
now_playing = self._setstate.nowPlayingInfo
if now_playing.HasField('duration'):
return int(now_playing.duration)
return None
|
[
"def",
"total_time",
"(",
"self",
")",
":",
"now_playing",
"=",
"self",
".",
"_setstate",
".",
"nowPlayingInfo",
"if",
"now_playing",
".",
"HasField",
"(",
"'duration'",
")",
":",
"return",
"int",
"(",
"now_playing",
".",
"duration",
")",
"return",
"None"
] |
Total play time in seconds.
|
[
"Total",
"play",
"time",
"in",
"seconds",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L181-L187
|
15,412
|
postlund/pyatv
|
pyatv/mrp/__init__.py
|
MrpPlaying.shuffle
|
def shuffle(self):
"""If shuffle is enabled or not."""
info = self._get_command_info(CommandInfo_pb2.ChangeShuffleMode)
return None if info is None else info.shuffleMode
|
python
|
def shuffle(self):
"""If shuffle is enabled or not."""
info = self._get_command_info(CommandInfo_pb2.ChangeShuffleMode)
return None if info is None else info.shuffleMode
|
[
"def",
"shuffle",
"(",
"self",
")",
":",
"info",
"=",
"self",
".",
"_get_command_info",
"(",
"CommandInfo_pb2",
".",
"ChangeShuffleMode",
")",
"return",
"None",
"if",
"info",
"is",
"None",
"else",
"info",
".",
"shuffleMode"
] |
If shuffle is enabled or not.
|
[
"If",
"shuffle",
"is",
"enabled",
"or",
"not",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L205-L208
|
15,413
|
postlund/pyatv
|
pyatv/mrp/__init__.py
|
MrpPlaying.repeat
|
def repeat(self):
"""Repeat mode."""
info = self._get_command_info(CommandInfo_pb2.ChangeRepeatMode)
return None if info is None else info.repeatMode
|
python
|
def repeat(self):
"""Repeat mode."""
info = self._get_command_info(CommandInfo_pb2.ChangeRepeatMode)
return None if info is None else info.repeatMode
|
[
"def",
"repeat",
"(",
"self",
")",
":",
"info",
"=",
"self",
".",
"_get_command_info",
"(",
"CommandInfo_pb2",
".",
"ChangeRepeatMode",
")",
"return",
"None",
"if",
"info",
"is",
"None",
"else",
"info",
".",
"repeatMode"
] |
Repeat mode.
|
[
"Repeat",
"mode",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L211-L214
|
15,414
|
postlund/pyatv
|
pyatv/mrp/__init__.py
|
MrpMetadata.playing
|
async def playing(self):
"""Return what is currently playing."""
# TODO: This is hack-ish
if self._setstate is None:
await self.protocol.start()
# No SET_STATE_MESSAGE received yet, use default
if self._setstate is None:
return MrpPlaying(protobuf.SetStateMessage(), None)
return MrpPlaying(self._setstate, self._nowplaying)
|
python
|
async def playing(self):
"""Return what is currently playing."""
# TODO: This is hack-ish
if self._setstate is None:
await self.protocol.start()
# No SET_STATE_MESSAGE received yet, use default
if self._setstate is None:
return MrpPlaying(protobuf.SetStateMessage(), None)
return MrpPlaying(self._setstate, self._nowplaying)
|
[
"async",
"def",
"playing",
"(",
"self",
")",
":",
"# TODO: This is hack-ish",
"if",
"self",
".",
"_setstate",
"is",
"None",
":",
"await",
"self",
".",
"protocol",
".",
"start",
"(",
")",
"# No SET_STATE_MESSAGE received yet, use default",
"if",
"self",
".",
"_setstate",
"is",
"None",
":",
"return",
"MrpPlaying",
"(",
"protobuf",
".",
"SetStateMessage",
"(",
")",
",",
"None",
")",
"return",
"MrpPlaying",
"(",
"self",
".",
"_setstate",
",",
"self",
".",
"_nowplaying",
")"
] |
Return what is currently playing.
|
[
"Return",
"what",
"is",
"currently",
"playing",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L250-L260
|
15,415
|
postlund/pyatv
|
pyatv/mrp/__init__.py
|
MrpPairingHandler.stop
|
async def stop(self, **kwargs):
"""Stop pairing process."""
if not self._pin_code:
raise Exception('no pin given') # TODO: new exception
self.service.device_credentials = \
await self.pairing_procedure.finish_pairing(self._pin_code)
|
python
|
async def stop(self, **kwargs):
"""Stop pairing process."""
if not self._pin_code:
raise Exception('no pin given') # TODO: new exception
self.service.device_credentials = \
await self.pairing_procedure.finish_pairing(self._pin_code)
|
[
"async",
"def",
"stop",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"_pin_code",
":",
"raise",
"Exception",
"(",
"'no pin given'",
")",
"# TODO: new exception",
"self",
".",
"service",
".",
"device_credentials",
"=",
"await",
"self",
".",
"pairing_procedure",
".",
"finish_pairing",
"(",
"self",
".",
"_pin_code",
")"
] |
Stop pairing process.
|
[
"Stop",
"pairing",
"process",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L324-L330
|
15,416
|
postlund/pyatv
|
pyatv/mrp/tlv8.py
|
read_tlv
|
def read_tlv(data):
"""Parse TLV8 bytes into a dict.
If value is larger than 255 bytes, it is split up in multiple chunks. So
the same tag might occurr several times.
"""
def _parse(data, pos, size, result=None):
if result is None:
result = {}
if pos >= size:
return result
tag = str(data[pos])
length = data[pos+1]
value = data[pos+2:pos+2+length]
if tag in result:
result[tag] += value # value > 255 is split up
else:
result[tag] = value
return _parse(data, pos+2+length, size, result)
return _parse(data, 0, len(data))
|
python
|
def read_tlv(data):
"""Parse TLV8 bytes into a dict.
If value is larger than 255 bytes, it is split up in multiple chunks. So
the same tag might occurr several times.
"""
def _parse(data, pos, size, result=None):
if result is None:
result = {}
if pos >= size:
return result
tag = str(data[pos])
length = data[pos+1]
value = data[pos+2:pos+2+length]
if tag in result:
result[tag] += value # value > 255 is split up
else:
result[tag] = value
return _parse(data, pos+2+length, size, result)
return _parse(data, 0, len(data))
|
[
"def",
"read_tlv",
"(",
"data",
")",
":",
"def",
"_parse",
"(",
"data",
",",
"pos",
",",
"size",
",",
"result",
"=",
"None",
")",
":",
"if",
"result",
"is",
"None",
":",
"result",
"=",
"{",
"}",
"if",
"pos",
">=",
"size",
":",
"return",
"result",
"tag",
"=",
"str",
"(",
"data",
"[",
"pos",
"]",
")",
"length",
"=",
"data",
"[",
"pos",
"+",
"1",
"]",
"value",
"=",
"data",
"[",
"pos",
"+",
"2",
":",
"pos",
"+",
"2",
"+",
"length",
"]",
"if",
"tag",
"in",
"result",
":",
"result",
"[",
"tag",
"]",
"+=",
"value",
"# value > 255 is split up",
"else",
":",
"result",
"[",
"tag",
"]",
"=",
"value",
"return",
"_parse",
"(",
"data",
",",
"pos",
"+",
"2",
"+",
"length",
",",
"size",
",",
"result",
")",
"return",
"_parse",
"(",
"data",
",",
"0",
",",
"len",
"(",
"data",
")",
")"
] |
Parse TLV8 bytes into a dict.
If value is larger than 255 bytes, it is split up in multiple chunks. So
the same tag might occurr several times.
|
[
"Parse",
"TLV8",
"bytes",
"into",
"a",
"dict",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/tlv8.py#L19-L41
|
15,417
|
postlund/pyatv
|
pyatv/mrp/tlv8.py
|
write_tlv
|
def write_tlv(data):
"""Convert a dict to TLV8 bytes."""
tlv = b''
for key, value in data.items():
tag = bytes([int(key)])
length = len(value)
pos = 0
# A tag with length > 255 is added multiple times and concatenated into
# one buffer when reading the TLV again.
while pos < len(value):
size = min(length, 255)
tlv += tag
tlv += bytes([size])
tlv += value[pos:pos+size]
pos += size
length -= size
return tlv
|
python
|
def write_tlv(data):
"""Convert a dict to TLV8 bytes."""
tlv = b''
for key, value in data.items():
tag = bytes([int(key)])
length = len(value)
pos = 0
# A tag with length > 255 is added multiple times and concatenated into
# one buffer when reading the TLV again.
while pos < len(value):
size = min(length, 255)
tlv += tag
tlv += bytes([size])
tlv += value[pos:pos+size]
pos += size
length -= size
return tlv
|
[
"def",
"write_tlv",
"(",
"data",
")",
":",
"tlv",
"=",
"b''",
"for",
"key",
",",
"value",
"in",
"data",
".",
"items",
"(",
")",
":",
"tag",
"=",
"bytes",
"(",
"[",
"int",
"(",
"key",
")",
"]",
")",
"length",
"=",
"len",
"(",
"value",
")",
"pos",
"=",
"0",
"# A tag with length > 255 is added multiple times and concatenated into",
"# one buffer when reading the TLV again.",
"while",
"pos",
"<",
"len",
"(",
"value",
")",
":",
"size",
"=",
"min",
"(",
"length",
",",
"255",
")",
"tlv",
"+=",
"tag",
"tlv",
"+=",
"bytes",
"(",
"[",
"size",
"]",
")",
"tlv",
"+=",
"value",
"[",
"pos",
":",
"pos",
"+",
"size",
"]",
"pos",
"+=",
"size",
"length",
"-=",
"size",
"return",
"tlv"
] |
Convert a dict to TLV8 bytes.
|
[
"Convert",
"a",
"dict",
"to",
"TLV8",
"bytes",
"."
] |
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
|
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/tlv8.py#L44-L61
|
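The `read_tlv`/`write_tlv` pair above is easiest to understand through a round trip: a value longer than 255 bytes is written as several chunks carrying the same tag, and the reader concatenates those chunks back into a single entry. Assuming the package at this commit is importable, a quick check might look like this; the tag values and payload sizes are arbitrary.

```python
from pyatv.mrp import tlv8

# 300-byte value: written as a 255-byte chunk plus a 45-byte chunk under
# the same tag, then merged back into one value by read_tlv.
data = {'1': b'\xab' * 300, '6': b'\x03'}

encoded = tlv8.write_tlv(data)
decoded = tlv8.read_tlv(encoded)

assert decoded == data
# Encoded size: 301 bytes of payload plus 2 bytes of tag/length overhead
# for each of the three chunks.
assert len(encoded) == 301 + 3 * 2
```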
15,418
|
tommikaikkonen/prettyprinter
|
prettyprinter/prettyprinter.py
|
comment
|
def comment(value, comment_text):
"""Annotates a value or a Doc with a comment.
When printed by prettyprinter, the comment will be
rendered next to the value or Doc.
"""
if isinstance(value, Doc):
return comment_doc(value, comment_text)
return comment_value(value, comment_text)
|
python
|
def comment(value, comment_text):
"""Annotates a value or a Doc with a comment.
When printed by prettyprinter, the comment will be
rendered next to the value or Doc.
"""
if isinstance(value, Doc):
return comment_doc(value, comment_text)
return comment_value(value, comment_text)
|
[
"def",
"comment",
"(",
"value",
",",
"comment_text",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"Doc",
")",
":",
"return",
"comment_doc",
"(",
"value",
",",
"comment_text",
")",
"return",
"comment_value",
"(",
"value",
",",
"comment_text",
")"
] |
Annotates a value or a Doc with a comment.
When printed by prettyprinter, the comment will be
rendered next to the value or Doc.
|
[
"Annotates",
"a",
"value",
"or",
"a",
"Doc",
"with",
"a",
"comment",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L156-L164
|
15,419
|
tommikaikkonen/prettyprinter
|
prettyprinter/prettyprinter.py
|
register_pretty
|
def register_pretty(type=None, predicate=None):
"""Returns a decorator that registers the decorated function
as the pretty printer for instances of ``type``.
:param type: the type to register the pretty printer for, or a ``str``
to indicate the module and name, e.g.: ``'collections.Counter'``.
:param predicate: a predicate function that takes one argument
and returns a boolean indicating if the value
should be handled by the registered pretty printer.
Only one of ``type`` and ``predicate`` may be supplied. That means
that ``predicate`` will be run on unregistered types only.
The decorated function must accept exactly two positional arguments:
- ``value`` to pretty print, and
- ``ctx``, a context value.
Here's an example of the pretty printer for OrderedDict:
.. code:: python
from collections import OrderedDict
from prettyprinter import register_pretty, pretty_call
@register_pretty(OrderedDict)
def pretty_orderreddict(value, ctx):
return pretty_call(ctx, OrderedDict, list(value.items()))
"""
if type is None and predicate is None:
raise ValueError(
"You must provide either the 'type' or 'predicate' argument."
)
if type is not None and predicate is not None:
raise ValueError(
"You must provide either the 'type' or 'predicate' argument,"
"but not both"
)
if predicate is not None:
if not callable(predicate):
raise ValueError(
"Expected a callable for 'predicate', got {}".format(
repr(predicate)
)
)
def decorator(fn):
sig = inspect.signature(fn)
value = None
ctx = None
try:
sig.bind(value, ctx)
except TypeError:
fnname = '{}.{}'.format(
fn.__module__,
fn.__qualname__
)
raise ValueError(
"Functions decorated with register_pretty must accept "
"exactly two positional parameters: 'value' and 'ctx'. "
"The function signature for {} was not compatible.".format(
fnname
)
)
if type:
if isinstance(type, str):
# We don't wrap this with _run_pretty,
# so that when we register this printer with an actual
# class, we can call register_pretty(cls)(fn)
_DEFERRED_DISPATCH_BY_NAME[type] = fn
else:
pretty_dispatch.register(type, partial(_run_pretty, fn))
else:
assert callable(predicate)
_PREDICATE_REGISTRY.append((predicate, fn))
return fn
return decorator
|
python
|
def register_pretty(type=None, predicate=None):
"""Returns a decorator that registers the decorated function
as the pretty printer for instances of ``type``.
:param type: the type to register the pretty printer for, or a ``str``
to indicate the module and name, e.g.: ``'collections.Counter'``.
:param predicate: a predicate function that takes one argument
and returns a boolean indicating if the value
should be handled by the registered pretty printer.
Only one of ``type`` and ``predicate`` may be supplied. That means
that ``predicate`` will be run on unregistered types only.
The decorated function must accept exactly two positional arguments:
- ``value`` to pretty print, and
- ``ctx``, a context value.
Here's an example of the pretty printer for OrderedDict:
.. code:: python
from collections import OrderedDict
from prettyprinter import register_pretty, pretty_call
@register_pretty(OrderedDict)
def pretty_orderreddict(value, ctx):
return pretty_call(ctx, OrderedDict, list(value.items()))
"""
if type is None and predicate is None:
raise ValueError(
"You must provide either the 'type' or 'predicate' argument."
)
if type is not None and predicate is not None:
raise ValueError(
"You must provide either the 'type' or 'predicate' argument,"
"but not both"
)
if predicate is not None:
if not callable(predicate):
raise ValueError(
"Expected a callable for 'predicate', got {}".format(
repr(predicate)
)
)
def decorator(fn):
sig = inspect.signature(fn)
value = None
ctx = None
try:
sig.bind(value, ctx)
except TypeError:
fnname = '{}.{}'.format(
fn.__module__,
fn.__qualname__
)
raise ValueError(
"Functions decorated with register_pretty must accept "
"exactly two positional parameters: 'value' and 'ctx'. "
"The function signature for {} was not compatible.".format(
fnname
)
)
if type:
if isinstance(type, str):
# We don't wrap this with _run_pretty,
# so that when we register this printer with an actual
# class, we can call register_pretty(cls)(fn)
_DEFERRED_DISPATCH_BY_NAME[type] = fn
else:
pretty_dispatch.register(type, partial(_run_pretty, fn))
else:
assert callable(predicate)
_PREDICATE_REGISTRY.append((predicate, fn))
return fn
return decorator
|
[
"def",
"register_pretty",
"(",
"type",
"=",
"None",
",",
"predicate",
"=",
"None",
")",
":",
"if",
"type",
"is",
"None",
"and",
"predicate",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"You must provide either the 'type' or 'predicate' argument.\"",
")",
"if",
"type",
"is",
"not",
"None",
"and",
"predicate",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"\"You must provide either the 'type' or 'predicate' argument,\"",
"\"but not both\"",
")",
"if",
"predicate",
"is",
"not",
"None",
":",
"if",
"not",
"callable",
"(",
"predicate",
")",
":",
"raise",
"ValueError",
"(",
"\"Expected a callable for 'predicate', got {}\"",
".",
"format",
"(",
"repr",
"(",
"predicate",
")",
")",
")",
"def",
"decorator",
"(",
"fn",
")",
":",
"sig",
"=",
"inspect",
".",
"signature",
"(",
"fn",
")",
"value",
"=",
"None",
"ctx",
"=",
"None",
"try",
":",
"sig",
".",
"bind",
"(",
"value",
",",
"ctx",
")",
"except",
"TypeError",
":",
"fnname",
"=",
"'{}.{}'",
".",
"format",
"(",
"fn",
".",
"__module__",
",",
"fn",
".",
"__qualname__",
")",
"raise",
"ValueError",
"(",
"\"Functions decorated with register_pretty must accept \"",
"\"exactly two positional parameters: 'value' and 'ctx'. \"",
"\"The function signature for {} was not compatible.\"",
".",
"format",
"(",
"fnname",
")",
")",
"if",
"type",
":",
"if",
"isinstance",
"(",
"type",
",",
"str",
")",
":",
"# We don't wrap this with _run_pretty,",
"# so that when we register this printer with an actual",
"# class, we can call register_pretty(cls)(fn)",
"_DEFERRED_DISPATCH_BY_NAME",
"[",
"type",
"]",
"=",
"fn",
"else",
":",
"pretty_dispatch",
".",
"register",
"(",
"type",
",",
"partial",
"(",
"_run_pretty",
",",
"fn",
")",
")",
"else",
":",
"assert",
"callable",
"(",
"predicate",
")",
"_PREDICATE_REGISTRY",
".",
"append",
"(",
"(",
"predicate",
",",
"fn",
")",
")",
"return",
"fn",
"return",
"decorator"
] |
Returns a decorator that registers the decorated function
as the pretty printer for instances of ``type``.
:param type: the type to register the pretty printer for, or a ``str``
to indicate the module and name, e.g.: ``'collections.Counter'``.
:param predicate: a predicate function that takes one argument
and returns a boolean indicating if the value
should be handled by the registered pretty printer.
Only one of ``type`` and ``predicate`` may be supplied. That means
that ``predicate`` will be run on unregistered types only.
The decorated function must accept exactly two positional arguments:
- ``value`` to pretty print, and
- ``ctx``, a context value.
Here's an example of the pretty printer for OrderedDict:
.. code:: python
from collections import OrderedDict
from prettyprinter import register_pretty, pretty_call
@register_pretty(OrderedDict)
def pretty_orderreddict(value, ctx):
return pretty_call(ctx, OrderedDict, list(value.items()))
|
[
"Returns",
"a",
"decorator",
"that",
"registers",
"the",
"decorated",
"function",
"as",
"the",
"pretty",
"printer",
"for",
"instances",
"of",
"type",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L462-L544
|
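Beyond the `OrderedDict` example in its own docstring, `register_pretty` is typically paired with `pretty_call` to render instances of user-defined classes as constructor calls. A minimal sketch follows, assuming the public `prettyprinter` API at this commit (`register_pretty`, `pretty_call`, `pprint`); the `Point` class and its printer are hypothetical.

```python
from prettyprinter import pprint, pretty_call, register_pretty


class Point:
    def __init__(self, x, y):
        self.x = x
        self.y = y


@register_pretty(Point)
def pretty_point(value, ctx):
    # Render the instance as a Point(x=..., y=...) call.
    return pretty_call(ctx, Point, x=value.x, y=value.y)


pprint(Point(1, 2))  # prints: Point(x=1, y=2)
```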
15,420
|
tommikaikkonen/prettyprinter
|
prettyprinter/prettyprinter.py
|
commentdoc
|
def commentdoc(text):
"""Returns a Doc representing a comment `text`. `text` is
treated as words, and any whitespace may be used to break
the comment to multiple lines."""
if not text:
raise ValueError(
'Expected non-empty comment str, got {}'.format(repr(text))
)
commentlines = []
for line in text.splitlines():
alternating_words_ws = list(filter(None, WHITESPACE_PATTERN_TEXT.split(line)))
starts_with_whitespace = bool(
WHITESPACE_PATTERN_TEXT.match(alternating_words_ws[0])
)
if starts_with_whitespace:
prefix = alternating_words_ws[0]
alternating_words_ws = alternating_words_ws[1:]
else:
prefix = NIL
if len(alternating_words_ws) % 2 == 0:
# The last part must be whitespace.
alternating_words_ws = alternating_words_ws[:-1]
for idx, tup in enumerate(zip(alternating_words_ws, cycle([False, True]))):
part, is_ws = tup
if is_ws:
alternating_words_ws[idx] = flat_choice(
when_flat=part,
when_broken=always_break(
concat([
HARDLINE,
'# ',
])
)
)
commentlines.append(
concat([
'# ',
prefix,
fill(alternating_words_ws)
])
)
outer = identity
if len(commentlines) > 1:
outer = always_break
return annotate(
Token.COMMENT_SINGLE,
outer(concat(intersperse(HARDLINE, commentlines)))
)
|
python
|
def commentdoc(text):
"""Returns a Doc representing a comment `text`. `text` is
treated as words, and any whitespace may be used to break
the comment to multiple lines."""
if not text:
raise ValueError(
'Expected non-empty comment str, got {}'.format(repr(text))
)
commentlines = []
for line in text.splitlines():
alternating_words_ws = list(filter(None, WHITESPACE_PATTERN_TEXT.split(line)))
starts_with_whitespace = bool(
WHITESPACE_PATTERN_TEXT.match(alternating_words_ws[0])
)
if starts_with_whitespace:
prefix = alternating_words_ws[0]
alternating_words_ws = alternating_words_ws[1:]
else:
prefix = NIL
if len(alternating_words_ws) % 2 == 0:
# The last part must be whitespace.
alternating_words_ws = alternating_words_ws[:-1]
for idx, tup in enumerate(zip(alternating_words_ws, cycle([False, True]))):
part, is_ws = tup
if is_ws:
alternating_words_ws[idx] = flat_choice(
when_flat=part,
when_broken=always_break(
concat([
HARDLINE,
'# ',
])
)
)
commentlines.append(
concat([
'# ',
prefix,
fill(alternating_words_ws)
])
)
outer = identity
if len(commentlines) > 1:
outer = always_break
return annotate(
Token.COMMENT_SINGLE,
outer(concat(intersperse(HARDLINE, commentlines)))
)
|
[
"def",
"commentdoc",
"(",
"text",
")",
":",
"if",
"not",
"text",
":",
"raise",
"ValueError",
"(",
"'Expected non-empty comment str, got {}'",
".",
"format",
"(",
"repr",
"(",
"text",
")",
")",
")",
"commentlines",
"=",
"[",
"]",
"for",
"line",
"in",
"text",
".",
"splitlines",
"(",
")",
":",
"alternating_words_ws",
"=",
"list",
"(",
"filter",
"(",
"None",
",",
"WHITESPACE_PATTERN_TEXT",
".",
"split",
"(",
"line",
")",
")",
")",
"starts_with_whitespace",
"=",
"bool",
"(",
"WHITESPACE_PATTERN_TEXT",
".",
"match",
"(",
"alternating_words_ws",
"[",
"0",
"]",
")",
")",
"if",
"starts_with_whitespace",
":",
"prefix",
"=",
"alternating_words_ws",
"[",
"0",
"]",
"alternating_words_ws",
"=",
"alternating_words_ws",
"[",
"1",
":",
"]",
"else",
":",
"prefix",
"=",
"NIL",
"if",
"len",
"(",
"alternating_words_ws",
")",
"%",
"2",
"==",
"0",
":",
"# The last part must be whitespace.",
"alternating_words_ws",
"=",
"alternating_words_ws",
"[",
":",
"-",
"1",
"]",
"for",
"idx",
",",
"tup",
"in",
"enumerate",
"(",
"zip",
"(",
"alternating_words_ws",
",",
"cycle",
"(",
"[",
"False",
",",
"True",
"]",
")",
")",
")",
":",
"part",
",",
"is_ws",
"=",
"tup",
"if",
"is_ws",
":",
"alternating_words_ws",
"[",
"idx",
"]",
"=",
"flat_choice",
"(",
"when_flat",
"=",
"part",
",",
"when_broken",
"=",
"always_break",
"(",
"concat",
"(",
"[",
"HARDLINE",
",",
"'# '",
",",
"]",
")",
")",
")",
"commentlines",
".",
"append",
"(",
"concat",
"(",
"[",
"'# '",
",",
"prefix",
",",
"fill",
"(",
"alternating_words_ws",
")",
"]",
")",
")",
"outer",
"=",
"identity",
"if",
"len",
"(",
"commentlines",
")",
">",
"1",
":",
"outer",
"=",
"always_break",
"return",
"annotate",
"(",
"Token",
".",
"COMMENT_SINGLE",
",",
"outer",
"(",
"concat",
"(",
"intersperse",
"(",
"HARDLINE",
",",
"commentlines",
")",
")",
")",
")"
] |
Returns a Doc representing a comment `text`. `text` is
treated as words, and any whitespace may be used to break
the comment to multiple lines.
|
[
"Returns",
"a",
"Doc",
"representing",
"a",
"comment",
"text",
".",
"text",
"is",
"treated",
"as",
"words",
"and",
"any",
"whitespace",
"may",
"be",
"used",
"to",
"break",
"the",
"comment",
"to",
"multiple",
"lines",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L599-L654
|
15,421
|
tommikaikkonen/prettyprinter
|
prettyprinter/prettyprinter.py
|
build_fncall
|
def build_fncall(
ctx,
fndoc,
argdocs=(),
kwargdocs=(),
hug_sole_arg=False,
trailing_comment=None,
):
"""Builds a doc that looks like a function call,
from docs that represent the function, arguments
and keyword arguments.
If ``hug_sole_arg`` is True, and the represented
functional call is done with a single non-keyword
argument, the function call parentheses will hug
the sole argument doc without newlines and indentation
in break mode. This makes a difference in calls
like this::
> hug_sole_arg = False
frozenset(
[
1,
2,
3,
4,
5
]
)
> hug_sole_arg = True
frozenset([
1,
2,
3,
4,
5,
])
If ``trailing_comment`` is provided, the text is
rendered as a comment after the last argument and
before the closing parenthesis. This will force
the function call to be broken to multiple lines.
"""
if callable(fndoc):
fndoc = general_identifier(fndoc)
has_comment = bool(trailing_comment)
argdocs = list(argdocs)
kwargdocs = list(kwargdocs)
kwargdocs = [
# Propagate any comments to the kwarg doc.
(
comment_doc(
concat([
keyword_arg(binding),
ASSIGN_OP,
doc.doc
]),
doc.annotation.value
)
if is_commented(doc)
else concat([
keyword_arg(binding),
ASSIGN_OP,
doc
])
)
for binding, doc in kwargdocs
]
if not (argdocs or kwargdocs):
return concat([
fndoc,
LPAREN,
RPAREN,
])
if (
hug_sole_arg and
not kwargdocs and
len(argdocs) == 1 and
not is_commented(argdocs[0])
):
return group(
concat([
fndoc,
LPAREN,
argdocs[0],
RPAREN
])
)
allarg_docs = [*argdocs, *kwargdocs]
if trailing_comment:
allarg_docs.append(commentdoc(trailing_comment))
parts = []
for idx, doc in enumerate(allarg_docs):
last = idx == len(allarg_docs) - 1
if is_commented(doc):
has_comment = True
comment_str = doc.annotation.value
doc = doc.doc
else:
comment_str = None
part = concat([doc, NIL if last else COMMA])
if comment_str:
part = group(
flat_choice(
when_flat=concat([
part,
' ',
commentdoc(comment_str)
]),
when_broken=concat([
commentdoc(comment_str),
HARDLINE,
part,
]),
)
)
if not last:
part = concat([part, HARDLINE if has_comment else LINE])
parts.append(part)
outer = (
always_break
if has_comment
else group
)
return outer(
concat([
fndoc,
LPAREN,
nest(
ctx.indent,
concat([
SOFTLINE,
concat(parts),
])
),
SOFTLINE,
RPAREN
])
)
|
python
|
def build_fncall(
ctx,
fndoc,
argdocs=(),
kwargdocs=(),
hug_sole_arg=False,
trailing_comment=None,
):
"""Builds a doc that looks like a function call,
from docs that represent the function, arguments
and keyword arguments.
If ``hug_sole_arg`` is True, and the represented
functional call is done with a single non-keyword
argument, the function call parentheses will hug
the sole argument doc without newlines and indentation
in break mode. This makes a difference in calls
like this::
> hug_sole_arg = False
frozenset(
[
1,
2,
3,
4,
5
]
)
> hug_sole_arg = True
frozenset([
1,
2,
3,
4,
5,
])
If ``trailing_comment`` is provided, the text is
rendered as a comment after the last argument and
before the closing parenthesis. This will force
the function call to be broken to multiple lines.
"""
if callable(fndoc):
fndoc = general_identifier(fndoc)
has_comment = bool(trailing_comment)
argdocs = list(argdocs)
kwargdocs = list(kwargdocs)
kwargdocs = [
# Propagate any comments to the kwarg doc.
(
comment_doc(
concat([
keyword_arg(binding),
ASSIGN_OP,
doc.doc
]),
doc.annotation.value
)
if is_commented(doc)
else concat([
keyword_arg(binding),
ASSIGN_OP,
doc
])
)
for binding, doc in kwargdocs
]
if not (argdocs or kwargdocs):
return concat([
fndoc,
LPAREN,
RPAREN,
])
if (
hug_sole_arg and
not kwargdocs and
len(argdocs) == 1 and
not is_commented(argdocs[0])
):
return group(
concat([
fndoc,
LPAREN,
argdocs[0],
RPAREN
])
)
allarg_docs = [*argdocs, *kwargdocs]
if trailing_comment:
allarg_docs.append(commentdoc(trailing_comment))
parts = []
for idx, doc in enumerate(allarg_docs):
last = idx == len(allarg_docs) - 1
if is_commented(doc):
has_comment = True
comment_str = doc.annotation.value
doc = doc.doc
else:
comment_str = None
part = concat([doc, NIL if last else COMMA])
if comment_str:
part = group(
flat_choice(
when_flat=concat([
part,
' ',
commentdoc(comment_str)
]),
when_broken=concat([
commentdoc(comment_str),
HARDLINE,
part,
]),
)
)
if not last:
part = concat([part, HARDLINE if has_comment else LINE])
parts.append(part)
outer = (
always_break
if has_comment
else group
)
return outer(
concat([
fndoc,
LPAREN,
nest(
ctx.indent,
concat([
SOFTLINE,
concat(parts),
])
),
SOFTLINE,
RPAREN
])
)
|
[
"def",
"build_fncall",
"(",
"ctx",
",",
"fndoc",
",",
"argdocs",
"=",
"(",
")",
",",
"kwargdocs",
"=",
"(",
")",
",",
"hug_sole_arg",
"=",
"False",
",",
"trailing_comment",
"=",
"None",
",",
")",
":",
"if",
"callable",
"(",
"fndoc",
")",
":",
"fndoc",
"=",
"general_identifier",
"(",
"fndoc",
")",
"has_comment",
"=",
"bool",
"(",
"trailing_comment",
")",
"argdocs",
"=",
"list",
"(",
"argdocs",
")",
"kwargdocs",
"=",
"list",
"(",
"kwargdocs",
")",
"kwargdocs",
"=",
"[",
"# Propagate any comments to the kwarg doc.",
"(",
"comment_doc",
"(",
"concat",
"(",
"[",
"keyword_arg",
"(",
"binding",
")",
",",
"ASSIGN_OP",
",",
"doc",
".",
"doc",
"]",
")",
",",
"doc",
".",
"annotation",
".",
"value",
")",
"if",
"is_commented",
"(",
"doc",
")",
"else",
"concat",
"(",
"[",
"keyword_arg",
"(",
"binding",
")",
",",
"ASSIGN_OP",
",",
"doc",
"]",
")",
")",
"for",
"binding",
",",
"doc",
"in",
"kwargdocs",
"]",
"if",
"not",
"(",
"argdocs",
"or",
"kwargdocs",
")",
":",
"return",
"concat",
"(",
"[",
"fndoc",
",",
"LPAREN",
",",
"RPAREN",
",",
"]",
")",
"if",
"(",
"hug_sole_arg",
"and",
"not",
"kwargdocs",
"and",
"len",
"(",
"argdocs",
")",
"==",
"1",
"and",
"not",
"is_commented",
"(",
"argdocs",
"[",
"0",
"]",
")",
")",
":",
"return",
"group",
"(",
"concat",
"(",
"[",
"fndoc",
",",
"LPAREN",
",",
"argdocs",
"[",
"0",
"]",
",",
"RPAREN",
"]",
")",
")",
"allarg_docs",
"=",
"[",
"*",
"argdocs",
",",
"*",
"kwargdocs",
"]",
"if",
"trailing_comment",
":",
"allarg_docs",
".",
"append",
"(",
"commentdoc",
"(",
"trailing_comment",
")",
")",
"parts",
"=",
"[",
"]",
"for",
"idx",
",",
"doc",
"in",
"enumerate",
"(",
"allarg_docs",
")",
":",
"last",
"=",
"idx",
"==",
"len",
"(",
"allarg_docs",
")",
"-",
"1",
"if",
"is_commented",
"(",
"doc",
")",
":",
"has_comment",
"=",
"True",
"comment_str",
"=",
"doc",
".",
"annotation",
".",
"value",
"doc",
"=",
"doc",
".",
"doc",
"else",
":",
"comment_str",
"=",
"None",
"part",
"=",
"concat",
"(",
"[",
"doc",
",",
"NIL",
"if",
"last",
"else",
"COMMA",
"]",
")",
"if",
"comment_str",
":",
"part",
"=",
"group",
"(",
"flat_choice",
"(",
"when_flat",
"=",
"concat",
"(",
"[",
"part",
",",
"' '",
",",
"commentdoc",
"(",
"comment_str",
")",
"]",
")",
",",
"when_broken",
"=",
"concat",
"(",
"[",
"commentdoc",
"(",
"comment_str",
")",
",",
"HARDLINE",
",",
"part",
",",
"]",
")",
",",
")",
")",
"if",
"not",
"last",
":",
"part",
"=",
"concat",
"(",
"[",
"part",
",",
"HARDLINE",
"if",
"has_comment",
"else",
"LINE",
"]",
")",
"parts",
".",
"append",
"(",
"part",
")",
"outer",
"=",
"(",
"always_break",
"if",
"has_comment",
"else",
"group",
")",
"return",
"outer",
"(",
"concat",
"(",
"[",
"fndoc",
",",
"LPAREN",
",",
"nest",
"(",
"ctx",
".",
"indent",
",",
"concat",
"(",
"[",
"SOFTLINE",
",",
"concat",
"(",
"parts",
")",
",",
"]",
")",
")",
",",
"SOFTLINE",
",",
"RPAREN",
"]",
")",
")"
] |
Builds a doc that looks like a function call,
from docs that represent the function, arguments
and keyword arguments.
If ``hug_sole_arg`` is True, and the represented
functional call is done with a single non-keyword
argument, the function call parentheses will hug
the sole argument doc without newlines and indentation
in break mode. This makes a difference in calls
like this::
> hug_sole_arg = False
frozenset(
[
1,
2,
3,
4,
5
]
)
> hug_sole_arg = True
frozenset([
1,
2,
3,
4,
5,
])
If ``trailing_comment`` is provided, the text is
rendered as a comment after the last argument and
before the closing parenthesis. This will force
the function call to be broken to multiple lines.
|
[
"Builds",
"a",
"doc",
"that",
"looks",
"like",
"a",
"function",
"call",
"from",
"docs",
"that",
"represent",
"the",
"function",
"arguments",
"and",
"keyword",
"arguments",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L849-L1003
|
15,422
|
tommikaikkonen/prettyprinter
|
prettyprinter/prettyprinter.py
|
PrettyContext.assoc
|
def assoc(self, key, value):
"""
Return a modified PrettyContext with ``key`` set to ``value``
"""
return self._replace(user_ctx={
**self.user_ctx,
key: value,
})
|
python
|
def assoc(self, key, value):
"""
Return a modified PrettyContext with ``key`` set to ``value``
"""
return self._replace(user_ctx={
**self.user_ctx,
key: value,
})
|
[
"def",
"assoc",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"return",
"self",
".",
"_replace",
"(",
"user_ctx",
"=",
"{",
"*",
"*",
"self",
".",
"user_ctx",
",",
"key",
":",
"value",
",",
"}",
")"
] |
Return a modified PrettyContext with ``key`` set to ``value``
|
[
"Return",
"a",
"modified",
"PrettyContext",
"with",
"key",
"set",
"to",
"value"
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L297-L304
|
15,423
|
tommikaikkonen/prettyprinter
|
prettyprinter/doc.py
|
align
|
def align(doc):
"""Aligns each new line in ``doc`` with the first new line.
"""
validate_doc(doc)
def evaluator(indent, column, page_width, ribbon_width):
return Nest(column - indent, doc)
return contextual(evaluator)
|
python
|
def align(doc):
"""Aligns each new line in ``doc`` with the first new line.
"""
validate_doc(doc)
def evaluator(indent, column, page_width, ribbon_width):
return Nest(column - indent, doc)
return contextual(evaluator)
|
[
"def",
"align",
"(",
"doc",
")",
":",
"validate_doc",
"(",
"doc",
")",
"def",
"evaluator",
"(",
"indent",
",",
"column",
",",
"page_width",
",",
"ribbon_width",
")",
":",
"return",
"Nest",
"(",
"column",
"-",
"indent",
",",
"doc",
")",
"return",
"contextual",
"(",
"evaluator",
")"
] |
Aligns each new line in ``doc`` with the first new line.
|
[
"Aligns",
"each",
"new",
"line",
"in",
"doc",
"with",
"the",
"first",
"new",
"line",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/doc.py#L57-L64
|
15,424
|
tommikaikkonen/prettyprinter
|
prettyprinter/layout.py
|
smart_fitting_predicate
|
def smart_fitting_predicate(
page_width,
ribbon_frac,
min_nesting_level,
max_width,
triplestack
):
"""
Lookahead until the last doc at the current indentation level.
Pretty, but not as fast.
"""
chars_left = max_width
while chars_left >= 0:
if not triplestack:
return True
indent, mode, doc = triplestack.pop()
if doc is NIL:
continue
elif isinstance(doc, str):
chars_left -= len(doc)
elif isinstance(doc, Concat):
# Recursive call in Strictly Pretty: docs within Concat
# are processed in order, with keeping the current
# indentation and mode.
# We want the leftmost element at the top of the stack,
# so we append the concatenated documents in reverse order.
triplestack.extend(
(indent, mode, doc)
for doc in reversed(doc.docs)
)
elif isinstance(doc, Annotated):
triplestack.append((indent, mode, doc.doc))
elif isinstance(doc, Fill):
# Same as the Concat case.
triplestack.extend(
(indent, mode, doc)
for doc in reversed(doc.docs)
)
elif isinstance(doc, Nest):
# Nest is a combination of an indent and a doc.
# Increase indentation, then add the doc for processing.
triplestack.append((indent + doc.indent, mode, doc.doc))
elif isinstance(doc, AlwaysBreak):
return False
elif doc is HARDLINE:
# In the fast algorithm, when we see a line,
# we return True. Here, as long as the minimum indentation
# level is satisfied, we continue processing the next line.
# This causes the longer runtime.
if indent > min_nesting_level:
chars_left = page_width - indent
else:
return True
elif isinstance(doc, FlatChoice):
if mode is FLAT_MODE:
triplestack.append((indent, mode, doc.when_flat))
elif mode is BREAK_MODE:
triplestack.append((indent, mode, doc.when_broken))
else:
raise ValueError
elif isinstance(doc, Group):
# Group just changes the mode.
triplestack.append((indent, FLAT_MODE, doc.doc))
elif isinstance(doc, Contextual):
ribbon_width = max(0, min(page_width, round(ribbon_frac * page_width)))
evaluated_doc = doc.fn(
indent=indent,
column=max_width - chars_left,
page_width=page_width,
ribbon_width=ribbon_width,
)
normalized = normalize_doc(evaluated_doc)
triplestack.append((indent, mode, normalized))
elif isinstance(doc, SAnnotationPush):
continue
elif isinstance(doc, SAnnotationPop):
continue
else:
raise ValueError((indent, mode, doc))
return False
|
python
|
def smart_fitting_predicate(
page_width,
ribbon_frac,
min_nesting_level,
max_width,
triplestack
):
"""
Lookahead until the last doc at the current indentation level.
Pretty, but not as fast.
"""
chars_left = max_width
while chars_left >= 0:
if not triplestack:
return True
indent, mode, doc = triplestack.pop()
if doc is NIL:
continue
elif isinstance(doc, str):
chars_left -= len(doc)
elif isinstance(doc, Concat):
# Recursive call in Strictly Pretty: docs within Concat
# are processed in order, with keeping the current
# indentation and mode.
# We want the leftmost element at the top of the stack,
# so we append the concatenated documents in reverse order.
triplestack.extend(
(indent, mode, doc)
for doc in reversed(doc.docs)
)
elif isinstance(doc, Annotated):
triplestack.append((indent, mode, doc.doc))
elif isinstance(doc, Fill):
# Same as the Concat case.
triplestack.extend(
(indent, mode, doc)
for doc in reversed(doc.docs)
)
elif isinstance(doc, Nest):
# Nest is a combination of an indent and a doc.
# Increase indentation, then add the doc for processing.
triplestack.append((indent + doc.indent, mode, doc.doc))
elif isinstance(doc, AlwaysBreak):
return False
elif doc is HARDLINE:
# In the fast algorithm, when we see a line,
# we return True. Here, as long as the minimum indentation
# level is satisfied, we continue processing the next line.
# This causes the longer runtime.
if indent > min_nesting_level:
chars_left = page_width - indent
else:
return True
elif isinstance(doc, FlatChoice):
if mode is FLAT_MODE:
triplestack.append((indent, mode, doc.when_flat))
elif mode is BREAK_MODE:
triplestack.append((indent, mode, doc.when_broken))
else:
raise ValueError
elif isinstance(doc, Group):
# Group just changes the mode.
triplestack.append((indent, FLAT_MODE, doc.doc))
elif isinstance(doc, Contextual):
ribbon_width = max(0, min(page_width, round(ribbon_frac * page_width)))
evaluated_doc = doc.fn(
indent=indent,
column=max_width - chars_left,
page_width=page_width,
ribbon_width=ribbon_width,
)
normalized = normalize_doc(evaluated_doc)
triplestack.append((indent, mode, normalized))
elif isinstance(doc, SAnnotationPush):
continue
elif isinstance(doc, SAnnotationPop):
continue
else:
raise ValueError((indent, mode, doc))
return False
|
[
"def",
"smart_fitting_predicate",
"(",
"page_width",
",",
"ribbon_frac",
",",
"min_nesting_level",
",",
"max_width",
",",
"triplestack",
")",
":",
"chars_left",
"=",
"max_width",
"while",
"chars_left",
">=",
"0",
":",
"if",
"not",
"triplestack",
":",
"return",
"True",
"indent",
",",
"mode",
",",
"doc",
"=",
"triplestack",
".",
"pop",
"(",
")",
"if",
"doc",
"is",
"NIL",
":",
"continue",
"elif",
"isinstance",
"(",
"doc",
",",
"str",
")",
":",
"chars_left",
"-=",
"len",
"(",
"doc",
")",
"elif",
"isinstance",
"(",
"doc",
",",
"Concat",
")",
":",
"# Recursive call in Strictly Pretty: docs within Concat",
"# are processed in order, with keeping the current",
"# indentation and mode.",
"# We want the leftmost element at the top of the stack,",
"# so we append the concatenated documents in reverse order.",
"triplestack",
".",
"extend",
"(",
"(",
"indent",
",",
"mode",
",",
"doc",
")",
"for",
"doc",
"in",
"reversed",
"(",
"doc",
".",
"docs",
")",
")",
"elif",
"isinstance",
"(",
"doc",
",",
"Annotated",
")",
":",
"triplestack",
".",
"append",
"(",
"(",
"indent",
",",
"mode",
",",
"doc",
".",
"doc",
")",
")",
"elif",
"isinstance",
"(",
"doc",
",",
"Fill",
")",
":",
"# Same as the Concat case.",
"triplestack",
".",
"extend",
"(",
"(",
"indent",
",",
"mode",
",",
"doc",
")",
"for",
"doc",
"in",
"reversed",
"(",
"doc",
".",
"docs",
")",
")",
"elif",
"isinstance",
"(",
"doc",
",",
"Nest",
")",
":",
"# Nest is a combination of an indent and a doc.",
"# Increase indentation, then add the doc for processing.",
"triplestack",
".",
"append",
"(",
"(",
"indent",
"+",
"doc",
".",
"indent",
",",
"mode",
",",
"doc",
".",
"doc",
")",
")",
"elif",
"isinstance",
"(",
"doc",
",",
"AlwaysBreak",
")",
":",
"return",
"False",
"elif",
"doc",
"is",
"HARDLINE",
":",
"# In the fast algorithm, when we see a line,",
"# we return True. Here, as long as the minimum indentation",
"# level is satisfied, we continue processing the next line.",
"# This causes the longer runtime.",
"if",
"indent",
">",
"min_nesting_level",
":",
"chars_left",
"=",
"page_width",
"-",
"indent",
"else",
":",
"return",
"True",
"elif",
"isinstance",
"(",
"doc",
",",
"FlatChoice",
")",
":",
"if",
"mode",
"is",
"FLAT_MODE",
":",
"triplestack",
".",
"append",
"(",
"(",
"indent",
",",
"mode",
",",
"doc",
".",
"when_flat",
")",
")",
"elif",
"mode",
"is",
"BREAK_MODE",
":",
"triplestack",
".",
"append",
"(",
"(",
"indent",
",",
"mode",
",",
"doc",
".",
"when_broken",
")",
")",
"else",
":",
"raise",
"ValueError",
"elif",
"isinstance",
"(",
"doc",
",",
"Group",
")",
":",
"# Group just changes the mode.",
"triplestack",
".",
"append",
"(",
"(",
"indent",
",",
"FLAT_MODE",
",",
"doc",
".",
"doc",
")",
")",
"elif",
"isinstance",
"(",
"doc",
",",
"Contextual",
")",
":",
"ribbon_width",
"=",
"max",
"(",
"0",
",",
"min",
"(",
"page_width",
",",
"round",
"(",
"ribbon_frac",
"*",
"page_width",
")",
")",
")",
"evaluated_doc",
"=",
"doc",
".",
"fn",
"(",
"indent",
"=",
"indent",
",",
"column",
"=",
"max_width",
"-",
"chars_left",
",",
"page_width",
"=",
"page_width",
",",
"ribbon_width",
"=",
"ribbon_width",
",",
")",
"normalized",
"=",
"normalize_doc",
"(",
"evaluated_doc",
")",
"triplestack",
".",
"append",
"(",
"(",
"indent",
",",
"mode",
",",
"normalized",
")",
")",
"elif",
"isinstance",
"(",
"doc",
",",
"SAnnotationPush",
")",
":",
"continue",
"elif",
"isinstance",
"(",
"doc",
",",
"SAnnotationPop",
")",
":",
"continue",
"else",
":",
"raise",
"ValueError",
"(",
"(",
"indent",
",",
"mode",
",",
"doc",
")",
")",
"return",
"False"
] |
Lookahead until the last doc at the current indentation level.
Pretty, but not as fast.
|
[
"Lookahead",
"until",
"the",
"last",
"doc",
"at",
"the",
"current",
"indentation",
"level",
".",
"Pretty",
"but",
"not",
"as",
"fast",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/layout.py#L124-L208
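A minimal, self-contained sketch of what such a fitting predicate decides, using hypothetical names rather than the library's internal Doc types: scan the flat pieces of a candidate layout and report whether they fit in the remaining width before a hard line break.

def fits_flat(parts, max_width):
    """Return True if the flat pieces fit within max_width columns."""
    remaining = max_width
    for part in parts:
        if part == '\n':            # a hard line break ends the lookahead
            return True
        remaining -= len(part)      # plain strings consume horizontal budget
        if remaining < 0:
            return False
    return True

assert fits_flat(['hello', ' ', 'world'], 15) is True
assert fits_flat(['hello', ' ', 'world'], 8) is False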
|
15,425
|
tommikaikkonen/prettyprinter
|
prettyprinter/color.py
|
set_default_style
|
def set_default_style(style):
"""Sets default global style to be used by ``prettyprinter.cpprint``.
:param style: the style to set, either subclass of
``pygments.styles.Style`` or one of ``'dark'``, ``'light'``
"""
global default_style
if style == 'dark':
style = default_dark_style
elif style == 'light':
style = default_light_style
if not issubclass(style, Style):
raise TypeError(
"style must be a subclass of pygments.styles.Style or "
"one of 'dark', 'light'. Got {}".format(repr(style))
)
default_style = style
|
python
|
def set_default_style(style):
"""Sets default global style to be used by ``prettyprinter.cpprint``.
:param style: the style to set, either subclass of
``pygments.styles.Style`` or one of ``'dark'``, ``'light'``
"""
global default_style
if style == 'dark':
style = default_dark_style
elif style == 'light':
style = default_light_style
if not issubclass(style, Style):
raise TypeError(
"style must be a subclass of pygments.styles.Style or "
"one of 'dark', 'light'. Got {}".format(repr(style))
)
default_style = style
|
[
"def",
"set_default_style",
"(",
"style",
")",
":",
"global",
"default_style",
"if",
"style",
"==",
"'dark'",
":",
"style",
"=",
"default_dark_style",
"elif",
"style",
"==",
"'light'",
":",
"style",
"=",
"default_light_style",
"if",
"not",
"issubclass",
"(",
"style",
",",
"Style",
")",
":",
"raise",
"TypeError",
"(",
"\"style must be a subclass of pygments.styles.Style or \"",
"\"one of 'dark', 'light'. Got {}\"",
".",
"format",
"(",
"repr",
"(",
"style",
")",
")",
")",
"default_style",
"=",
"style"
] |
Sets default global style to be used by ``prettyprinter.cpprint``.
:param style: the style to set, either subclass of
``pygments.styles.Style`` or one of ``'dark'``, ``'light'``
|
[
"Sets",
"default",
"global",
"style",
"to",
"be",
"used",
"by",
"prettyprinter",
".",
"cpprint",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/color.py#L134-L151
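A short usage sketch; the cpprint docstring in this repository refers to the function as prettyprinter.set_default_style, so it is imported from the top-level package here.

from prettyprinter import cpprint, set_default_style

# Use the bundled light theme for all subsequent cpprint calls.
set_default_style('light')
cpprint({'theme': 'light'})

# Anything other than 'dark', 'light' or a pygments Style subclass raises TypeError.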
|
15,426
|
tommikaikkonen/prettyprinter
|
prettyprinter/utils.py
|
intersperse
|
def intersperse(x, ys):
"""
Returns an iterable where ``x`` is inserted between
each element of ``ys``
:type ys: Iterable
"""
it = iter(ys)
try:
y = next(it)
except StopIteration:
return
yield y
for y in it:
yield x
yield y
|
python
|
def intersperse(x, ys):
"""
Returns an iterable where ``x`` is inserted between
each element of ``ys``
:type ys: Iterable
"""
it = iter(ys)
try:
y = next(it)
except StopIteration:
return
yield y
for y in it:
yield x
yield y
|
[
"def",
"intersperse",
"(",
"x",
",",
"ys",
")",
":",
"it",
"=",
"iter",
"(",
"ys",
")",
"try",
":",
"y",
"=",
"next",
"(",
"it",
")",
"except",
"StopIteration",
":",
"return",
"yield",
"y",
"for",
"y",
"in",
"it",
":",
"yield",
"x",
"yield",
"y"
] |
Returns an iterable where ``x`` is inserted between
each element of ``ys``
:type ys: Iterable
|
[
"Returns",
"an",
"iterable",
"where",
"x",
"is",
"inserted",
"between",
"each",
"element",
"of",
"ys"
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/utils.py#L5-L23
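A small usage example of the generator, imported from the module path given in this record:

from prettyprinter.utils import intersperse

# The separator goes between elements only; nothing is appended after the last one.
assert list(intersperse(', ', ['a', 'b', 'c'])) == ['a', ', ', 'b', ', ', 'c']
assert list(intersperse(0, [])) == []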
|
15,427
|
tommikaikkonen/prettyprinter
|
prettyprinter/__init__.py
|
pprint
|
def pprint(
object,
stream=_UNSET_SENTINEL,
indent=_UNSET_SENTINEL,
width=_UNSET_SENTINEL,
depth=_UNSET_SENTINEL,
*,
compact=False,
ribbon_width=_UNSET_SENTINEL,
max_seq_len=_UNSET_SENTINEL,
sort_dict_keys=_UNSET_SENTINEL,
end='\n'
):
"""Pretty print a Python value ``object`` to ``stream``,
which defaults to ``sys.stdout``. The output will not be colored.
:param indent: number of spaces to add for each level of nesting.
:param stream: the output stream, defaults to ``sys.stdout``
:param width: a soft maximum allowed number of columns in the output,
which the layout algorithm attempts to stay under.
:param depth: maximum depth to print nested structures
:param ribbon_width: a soft maximum allowed number of columns in the output,
after indenting the line
:param max_seq_len: a maximum sequence length that applies to subclasses of
lists, sets, frozensets, tuples and dicts. A trailing
comment that indicates the number of truncated elements.
Setting max_seq_len to ``None`` disables truncation.
:param sort_dict_keys: a ``bool`` value indicating if dict keys should be
sorted in the output. Defaults to ``False``, in
which case the default order is used, which is the
insertion order in CPython 3.6+.
"""
sdocs = python_to_sdocs(
object,
**_merge_defaults(
indent=indent,
width=width,
depth=depth,
ribbon_width=ribbon_width,
max_seq_len=max_seq_len,
sort_dict_keys=sort_dict_keys,
)
)
stream = (
# This is not in _default_config in case
# sys.stdout changes.
sys.stdout
if stream is _UNSET_SENTINEL
else stream
)
default_render_to_stream(stream, sdocs)
if end:
stream.write(end)
|
python
|
def pprint(
object,
stream=_UNSET_SENTINEL,
indent=_UNSET_SENTINEL,
width=_UNSET_SENTINEL,
depth=_UNSET_SENTINEL,
*,
compact=False,
ribbon_width=_UNSET_SENTINEL,
max_seq_len=_UNSET_SENTINEL,
sort_dict_keys=_UNSET_SENTINEL,
end='\n'
):
"""Pretty print a Python value ``object`` to ``stream``,
which defaults to ``sys.stdout``. The output will not be colored.
:param indent: number of spaces to add for each level of nesting.
:param stream: the output stream, defaults to ``sys.stdout``
:param width: a soft maximum allowed number of columns in the output,
which the layout algorithm attempts to stay under.
:param depth: maximum depth to print nested structures
:param ribbon_width: a soft maximum allowed number of columns in the output,
after indenting the line
:param max_seq_len: a maximum sequence length that applies to subclasses of
lists, sets, frozensets, tuples and dicts. A trailing
comment that indicates the number of truncated elements.
Setting max_seq_len to ``None`` disables truncation.
:param sort_dict_keys: a ``bool`` value indicating if dict keys should be
sorted in the output. Defaults to ``False``, in
which case the default order is used, which is the
insertion order in CPython 3.6+.
"""
sdocs = python_to_sdocs(
object,
**_merge_defaults(
indent=indent,
width=width,
depth=depth,
ribbon_width=ribbon_width,
max_seq_len=max_seq_len,
sort_dict_keys=sort_dict_keys,
)
)
stream = (
# This is not in _default_config in case
# sys.stdout changes.
sys.stdout
if stream is _UNSET_SENTINEL
else stream
)
default_render_to_stream(stream, sdocs)
if end:
stream.write(end)
|
[
"def",
"pprint",
"(",
"object",
",",
"stream",
"=",
"_UNSET_SENTINEL",
",",
"indent",
"=",
"_UNSET_SENTINEL",
",",
"width",
"=",
"_UNSET_SENTINEL",
",",
"depth",
"=",
"_UNSET_SENTINEL",
",",
"*",
",",
"compact",
"=",
"False",
",",
"ribbon_width",
"=",
"_UNSET_SENTINEL",
",",
"max_seq_len",
"=",
"_UNSET_SENTINEL",
",",
"sort_dict_keys",
"=",
"_UNSET_SENTINEL",
",",
"end",
"=",
"'\\n'",
")",
":",
"sdocs",
"=",
"python_to_sdocs",
"(",
"object",
",",
"*",
"*",
"_merge_defaults",
"(",
"indent",
"=",
"indent",
",",
"width",
"=",
"width",
",",
"depth",
"=",
"depth",
",",
"ribbon_width",
"=",
"ribbon_width",
",",
"max_seq_len",
"=",
"max_seq_len",
",",
"sort_dict_keys",
"=",
"sort_dict_keys",
",",
")",
")",
"stream",
"=",
"(",
"# This is not in _default_config in case",
"# sys.stdout changes.",
"sys",
".",
"stdout",
"if",
"stream",
"is",
"_UNSET_SENTINEL",
"else",
"stream",
")",
"default_render_to_stream",
"(",
"stream",
",",
"sdocs",
")",
"if",
"end",
":",
"stream",
".",
"write",
"(",
"end",
")"
] |
Pretty print a Python value ``object`` to ``stream``,
which defaults to ``sys.stdout``. The output will not be colored.
:param indent: number of spaces to add for each level of nesting.
:param stream: the output stream, defaults to ``sys.stdout``
:param width: a soft maximum allowed number of columns in the output,
which the layout algorithm attempts to stay under.
:param depth: maximum depth to print nested structures
:param ribbon_width: a soft maximum allowed number of columns in the output,
after indenting the line
:param max_seq_len: a maximum sequence length that applies to subclasses of
lists, sets, frozensets, tuples and dicts. A trailing
comment that indicates the number of truncated elements.
Setting max_seq_len to ``None`` disables truncation.
:param sort_dict_keys: a ``bool`` value indicating if dict keys should be
sorted in the output. Defaults to ``False``, in
which case the default order is used, which is the
insertion order in CPython 3.6+.
|
[
"Pretty",
"print",
"a",
"Python",
"value",
"object",
"to",
"stream",
"which",
"defaults",
"to",
"sys",
".",
"stdout",
".",
"The",
"output",
"will",
"not",
"be",
"colored",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/__init__.py#L142-L195
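A brief usage sketch exercising the documented parameters:

from prettyprinter import pprint

data = {'user': 'ada', 'scores': list(range(10))}

# Uncolored output to sys.stdout, wrapped to roughly 40 columns.
pprint(data, width=40)

# Truncate long sequences and sort dict keys for stable output.
pprint(data, max_seq_len=3, sort_dict_keys=True)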
|
15,428
|
tommikaikkonen/prettyprinter
|
prettyprinter/__init__.py
|
cpprint
|
def cpprint(
object,
stream=_UNSET_SENTINEL,
indent=_UNSET_SENTINEL,
width=_UNSET_SENTINEL,
depth=_UNSET_SENTINEL,
*,
compact=False,
ribbon_width=_UNSET_SENTINEL,
max_seq_len=_UNSET_SENTINEL,
sort_dict_keys=_UNSET_SENTINEL,
style=None,
end='\n'
):
"""Pretty print a Python value ``object`` to ``stream``,
which defaults to sys.stdout. The output will be colored and
syntax highlighted.
:param indent: number of spaces to add for each level of nesting.
:param stream: the output stream, defaults to sys.stdout
:param width: a soft maximum allowed number of columns in the output,
which the layout algorithm attempts to stay under.
:param depth: maximum depth to print nested structures
:param ribbon_width: a soft maximum allowed number of columns in the output,
after indenting the line
:param max_seq_len: a maximum sequence length that applies to subclasses of
lists, sets, frozensets, tuples and dicts. A trailing
comment that indicates the number of truncated elements.
Setting max_seq_len to ``None`` disables truncation.
:param sort_dict_keys: a ``bool`` value indicating if dict keys should be
sorted in the output. Defaults to ``False``, in
which case the default order is used, which is the
insertion order in CPython 3.6+.
:param style: one of ``'light'``, ``'dark'`` or a subclass
of ``pygments.styles.Style``. If omitted,
will use the default style. If the default style
is not changed by the user with :func:`~prettyprinter.set_default_style`,
the default is ``'dark'``.
"""
sdocs = python_to_sdocs(
object,
**_merge_defaults(
indent=indent,
width=width,
depth=depth,
ribbon_width=ribbon_width,
max_seq_len=max_seq_len,
sort_dict_keys=sort_dict_keys,
)
)
stream = (
# This is not in _default_config in case
# sys.stdout changes.
sys.stdout
if stream is _UNSET_SENTINEL
else stream
)
colored_render_to_stream(stream, sdocs, style=style)
if end:
stream.write(end)
|
python
|
def cpprint(
object,
stream=_UNSET_SENTINEL,
indent=_UNSET_SENTINEL,
width=_UNSET_SENTINEL,
depth=_UNSET_SENTINEL,
*,
compact=False,
ribbon_width=_UNSET_SENTINEL,
max_seq_len=_UNSET_SENTINEL,
sort_dict_keys=_UNSET_SENTINEL,
style=None,
end='\n'
):
"""Pretty print a Python value ``object`` to ``stream``,
which defaults to sys.stdout. The output will be colored and
syntax highlighted.
:param indent: number of spaces to add for each level of nesting.
:param stream: the output stream, defaults to sys.stdout
:param width: a soft maximum allowed number of columns in the output,
which the layout algorithm attempts to stay under.
:param depth: maximum depth to print nested structures
:param ribbon_width: a soft maximum allowed number of columns in the output,
after indenting the line
:param max_seq_len: a maximum sequence length that applies to subclasses of
lists, sets, frozensets, tuples and dicts. A trailing
comment that indicates the number of truncated elements.
Setting max_seq_len to ``None`` disables truncation.
:param sort_dict_keys: a ``bool`` value indicating if dict keys should be
sorted in the output. Defaults to ``False``, in
which case the default order is used, which is the
insertion order in CPython 3.6+.
:param style: one of ``'light'``, ``'dark'`` or a subclass
of ``pygments.styles.Style``. If omitted,
will use the default style. If the default style
is not changed by the user with :func:`~prettyprinter.set_default_style`,
the default is ``'dark'``.
"""
sdocs = python_to_sdocs(
object,
**_merge_defaults(
indent=indent,
width=width,
depth=depth,
ribbon_width=ribbon_width,
max_seq_len=max_seq_len,
sort_dict_keys=sort_dict_keys,
)
)
stream = (
# This is not in _default_config in case
# sys.stdout changes.
sys.stdout
if stream is _UNSET_SENTINEL
else stream
)
colored_render_to_stream(stream, sdocs, style=style)
if end:
stream.write(end)
|
[
"def",
"cpprint",
"(",
"object",
",",
"stream",
"=",
"_UNSET_SENTINEL",
",",
"indent",
"=",
"_UNSET_SENTINEL",
",",
"width",
"=",
"_UNSET_SENTINEL",
",",
"depth",
"=",
"_UNSET_SENTINEL",
",",
"*",
",",
"compact",
"=",
"False",
",",
"ribbon_width",
"=",
"_UNSET_SENTINEL",
",",
"max_seq_len",
"=",
"_UNSET_SENTINEL",
",",
"sort_dict_keys",
"=",
"_UNSET_SENTINEL",
",",
"style",
"=",
"None",
",",
"end",
"=",
"'\\n'",
")",
":",
"sdocs",
"=",
"python_to_sdocs",
"(",
"object",
",",
"*",
"*",
"_merge_defaults",
"(",
"indent",
"=",
"indent",
",",
"width",
"=",
"width",
",",
"depth",
"=",
"depth",
",",
"ribbon_width",
"=",
"ribbon_width",
",",
"max_seq_len",
"=",
"max_seq_len",
",",
"sort_dict_keys",
"=",
"sort_dict_keys",
",",
")",
")",
"stream",
"=",
"(",
"# This is not in _default_config in case",
"# sys.stdout changes.",
"sys",
".",
"stdout",
"if",
"stream",
"is",
"_UNSET_SENTINEL",
"else",
"stream",
")",
"colored_render_to_stream",
"(",
"stream",
",",
"sdocs",
",",
"style",
"=",
"style",
")",
"if",
"end",
":",
"stream",
".",
"write",
"(",
"end",
")"
] |
Pretty print a Python value ``object`` to ``stream``,
which defaults to sys.stdout. The output will be colored and
syntax highlighted.
:param indent: number of spaces to add for each level of nesting.
:param stream: the output stream, defaults to sys.stdout
:param width: a soft maximum allowed number of columns in the output,
which the layout algorithm attempts to stay under.
:param depth: maximum depth to print nested structures
:param ribbon_width: a soft maximum allowed number of columns in the output,
after indenting the line
:param max_seq_len: a maximum sequence length that applies to subclasses of
lists, sets, frozensets, tuples and dicts. A trailing
comment that indicates the number of truncated elements.
Setting max_seq_len to ``None`` disables truncation.
:param sort_dict_keys: a ``bool`` value indicating if dict keys should be
sorted in the output. Defaults to ``False``, in
which case the default order is used, which is the
insertion order in CPython 3.6+.
:param style: one of ``'light'``, ``'dark'`` or a subclass
of ``pygments.styles.Style``. If omitted,
will use the default style. If the default style
is not changed by the user with :func:`~prettyprinter.set_default_style`,
the default is ``'dark'``.
|
[
"Pretty",
"print",
"a",
"Python",
"value",
"object",
"to",
"stream",
"which",
"defaults",
"to",
"sys",
".",
"stdout",
".",
"The",
"output",
"will",
"be",
"colored",
"and",
"syntax",
"highlighted",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/__init__.py#L198-L257
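The colored variant accepts the same layout parameters plus a style; a minimal call:

from prettyprinter import cpprint

# Colored, syntax-highlighted output to sys.stdout using the bundled light theme.
cpprint({'status': 'ok', 'count': 3}, style='light', width=60)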
|
15,429
|
tommikaikkonen/prettyprinter
|
prettyprinter/__init__.py
|
install_extras
|
def install_extras(
include=ALL_EXTRAS,
*,
exclude=EMPTY_SET,
raise_on_error=False,
warn_on_error=True
):
"""Installs extras.
Installing an extra means registering pretty printers for objects from third
party libraries and/or enabling integrations with other python programs.
- ``'attrs'`` - automatically pretty prints classes created using the ``attrs`` package.
- ``'dataclasses'`` - automatically pretty prints classes created using the ``dataclasses``
module.
- ``'django'`` - automatically pretty prints Model and QuerySet subclasses defined in your
Django apps.
- ``numpy`` - automatically pretty prints numpy scalars with explicit types, and,
for numpy>=1.14, numpy arrays.
- ``'requests'`` - automatically pretty prints Requests, Responses, Sessions, etc.
- ``'ipython'`` - makes prettyprinter the default printer in the IPython shell.
- ``'python'`` - makes prettyprinter the default printer in the default Python shell.
- ``'ipython_repr_pretty'`` - automatically prints objects that define a ``_repr_pretty_``
method to integrate with `IPython.lib.pretty
<http://ipython.readthedocs.io/en/stable/api/generated/IPython.lib.pretty.html#extending>`_.
:param include: an iterable of strs representing the extras to include.
All extras are included by default.
:param exclude: an iterable of strs representing the extras to exclude.
""" # noqa
include = set(include)
exclude = set(exclude)
unexisting_extras = (include | exclude) - ALL_EXTRAS
if unexisting_extras:
raise ValueError(
"The following extras don't exist: {}".format(
', '.join(unexisting_extras)
)
)
extras_to_install = (ALL_EXTRAS & include) - exclude
for extra in extras_to_install:
module_name = 'prettyprinter.extras.' + extra
try:
extra_module = import_module(module_name)
except ImportError as e:
if raise_on_error:
raise e
if warn_on_error:
warnings.warn(
"Failed to import '{0}' PrettyPrinter extra. "
"If you don't need it, call install_extras with "
"exclude=['{0}']".format(extra)
)
else:
try:
extra_module.install()
except Exception as exc:
if raise_on_error:
raise exc
elif warn_on_error:
warnings.warn(
"Failed to install '{0}' PrettyPrinter extra. "
"If you don't need it, call install_extras with "
"exclude=['{0}']".format(extra)
)
|
python
|
def install_extras(
include=ALL_EXTRAS,
*,
exclude=EMPTY_SET,
raise_on_error=False,
warn_on_error=True
):
"""Installs extras.
Installing an extra means registering pretty printers for objects from third
party libraries and/or enabling integrations with other python programs.
- ``'attrs'`` - automatically pretty prints classes created using the ``attrs`` package.
- ``'dataclasses'`` - automatically pretty prints classes created using the ``dataclasses``
module.
- ``'django'`` - automatically pretty prints Model and QuerySet subclasses defined in your
Django apps.
- ``numpy`` - automatically pretty prints numpy scalars with explicit types, and,
for numpy>=1.14, numpy arrays.
- ``'requests'`` - automatically pretty prints Requests, Responses, Sessions, etc.
- ``'ipython'`` - makes prettyprinter the default printer in the IPython shell.
- ``'python'`` - makes prettyprinter the default printer in the default Python shell.
- ``'ipython_repr_pretty'`` - automatically prints objects that define a ``_repr_pretty_``
method to integrate with `IPython.lib.pretty
<http://ipython.readthedocs.io/en/stable/api/generated/IPython.lib.pretty.html#extending>`_.
:param include: an iterable of strs representing the extras to include.
All extras are included by default.
:param exclude: an iterable of strs representing the extras to exclude.
""" # noqa
include = set(include)
exclude = set(exclude)
unexisting_extras = (include | exclude) - ALL_EXTRAS
if unexisting_extras:
raise ValueError(
"The following extras don't exist: {}".format(
', '.join(unexisting_extras)
)
)
extras_to_install = (ALL_EXTRAS & include) - exclude
for extra in extras_to_install:
module_name = 'prettyprinter.extras.' + extra
try:
extra_module = import_module(module_name)
except ImportError as e:
if raise_on_error:
raise e
if warn_on_error:
warnings.warn(
"Failed to import '{0}' PrettyPrinter extra. "
"If you don't need it, call install_extras with "
"exclude=['{0}']".format(extra)
)
else:
try:
extra_module.install()
except Exception as exc:
if raise_on_error:
raise exc
elif warn_on_error:
warnings.warn(
"Failed to install '{0}' PrettyPrinter extra. "
"If you don't need it, call install_extras with "
"exclude=['{0}']".format(extra)
)
|
[
"def",
"install_extras",
"(",
"include",
"=",
"ALL_EXTRAS",
",",
"*",
",",
"exclude",
"=",
"EMPTY_SET",
",",
"raise_on_error",
"=",
"False",
",",
"warn_on_error",
"=",
"True",
")",
":",
"# noqa",
"include",
"=",
"set",
"(",
"include",
")",
"exclude",
"=",
"set",
"(",
"exclude",
")",
"unexisting_extras",
"=",
"(",
"include",
"|",
"exclude",
")",
"-",
"ALL_EXTRAS",
"if",
"unexisting_extras",
":",
"raise",
"ValueError",
"(",
"\"The following extras don't exist: {}\"",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"unexisting_extras",
")",
")",
")",
"extras_to_install",
"=",
"(",
"ALL_EXTRAS",
"&",
"include",
")",
"-",
"exclude",
"for",
"extra",
"in",
"extras_to_install",
":",
"module_name",
"=",
"'prettyprinter.extras.'",
"+",
"extra",
"try",
":",
"extra_module",
"=",
"import_module",
"(",
"module_name",
")",
"except",
"ImportError",
"as",
"e",
":",
"if",
"raise_on_error",
":",
"raise",
"e",
"if",
"warn_on_error",
":",
"warnings",
".",
"warn",
"(",
"\"Failed to import '{0}' PrettyPrinter extra. \"",
"\"If you don't need it, call install_extras with \"",
"\"exclude=['{0}']\"",
".",
"format",
"(",
"extra",
")",
")",
"else",
":",
"try",
":",
"extra_module",
".",
"install",
"(",
")",
"except",
"Exception",
"as",
"exc",
":",
"if",
"raise_on_error",
":",
"raise",
"exc",
"elif",
"warn_on_error",
":",
"warnings",
".",
"warn",
"(",
"\"Failed to install '{0}' PrettyPrinter extra. \"",
"\"If you don't need it, call install_extras with \"",
"\"exclude=['{0}']\"",
".",
"format",
"(",
"extra",
")",
")"
] |
Installs extras.
Installing an extra means registering pretty printers for objects from third
party libraries and/or enabling integrations with other python programs.
- ``'attrs'`` - automatically pretty prints classes created using the ``attrs`` package.
- ``'dataclasses'`` - automatically pretty prints classes created using the ``dataclasses``
module.
- ``'django'`` - automatically pretty prints Model and QuerySet subclasses defined in your
Django apps.
- ``numpy`` - automatically pretty prints numpy scalars with explicit types, and,
for numpy>=1.14, numpy arrays.
- ``'requests'`` - automatically pretty prints Requests, Responses, Sessions, etc.
- ``'ipython'`` - makes prettyprinter the default printer in the IPython shell.
- ``'python'`` - makes prettyprinter the default printer in the default Python shell.
- ``'ipython_repr_pretty'`` - automatically prints objects that define a ``_repr_pretty_``
method to integrate with `IPython.lib.pretty
<http://ipython.readthedocs.io/en/stable/api/generated/IPython.lib.pretty.html#extending>`_.
:param include: an iterable of strs representing the extras to include.
All extras are included by default.
:param exclude: an iterable of strs representing the extras to exclude.
|
[
"Installs",
"extras",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/__init__.py#L273-L341
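A typical call restricting the extras to the ones actually needed, mirroring the documented parameters:

import prettyprinter

# Register printers for dataclasses and attrs only, and stay silent when a
# third-party package backing an extra is not installed.
prettyprinter.install_extras(
    include=['dataclasses', 'attrs'],
    warn_on_error=False,
)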
|
15,430
|
tommikaikkonen/prettyprinter
|
prettyprinter/__init__.py
|
set_default_config
|
def set_default_config(
*,
style=_UNSET_SENTINEL,
max_seq_len=_UNSET_SENTINEL,
width=_UNSET_SENTINEL,
ribbon_width=_UNSET_SENTINEL,
depth=_UNSET_SENTINEL,
sort_dict_keys=_UNSET_SENTINEL
):
"""
Sets the default configuration values used when calling
`pprint`, `cpprint`, or `pformat`, if those values weren't
explicitly provided. Only overrides the values provided in
the keyword arguments.
"""
global _default_config
if style is not _UNSET_SENTINEL:
set_default_style(style)
new_defaults = {**_default_config}
if max_seq_len is not _UNSET_SENTINEL:
new_defaults['max_seq_len'] = max_seq_len
if width is not _UNSET_SENTINEL:
new_defaults['width'] = width
if ribbon_width is not _UNSET_SENTINEL:
new_defaults['ribbon_width'] = ribbon_width
if depth is not _UNSET_SENTINEL:
new_defaults['depth'] = depth
if sort_dict_keys is not _UNSET_SENTINEL:
new_defaults['sort_dict_keys'] = sort_dict_keys
_default_config = new_defaults
return new_defaults
|
python
|
def set_default_config(
*,
style=_UNSET_SENTINEL,
max_seq_len=_UNSET_SENTINEL,
width=_UNSET_SENTINEL,
ribbon_width=_UNSET_SENTINEL,
depth=_UNSET_SENTINEL,
sort_dict_keys=_UNSET_SENTINEL
):
"""
Sets the default configuration values used when calling
`pprint`, `cpprint`, or `pformat`, if those values weren't
explicitly provided. Only overrides the values provided in
the keyword arguments.
"""
global _default_config
if style is not _UNSET_SENTINEL:
set_default_style(style)
new_defaults = {**_default_config}
if max_seq_len is not _UNSET_SENTINEL:
new_defaults['max_seq_len'] = max_seq_len
if width is not _UNSET_SENTINEL:
new_defaults['width'] = width
if ribbon_width is not _UNSET_SENTINEL:
new_defaults['ribbon_width'] = ribbon_width
if depth is not _UNSET_SENTINEL:
new_defaults['depth'] = depth
if sort_dict_keys is not _UNSET_SENTINEL:
new_defaults['sort_dict_keys'] = sort_dict_keys
_default_config = new_defaults
return new_defaults
|
[
"def",
"set_default_config",
"(",
"*",
",",
"style",
"=",
"_UNSET_SENTINEL",
",",
"max_seq_len",
"=",
"_UNSET_SENTINEL",
",",
"width",
"=",
"_UNSET_SENTINEL",
",",
"ribbon_width",
"=",
"_UNSET_SENTINEL",
",",
"depth",
"=",
"_UNSET_SENTINEL",
",",
"sort_dict_keys",
"=",
"_UNSET_SENTINEL",
")",
":",
"global",
"_default_config",
"if",
"style",
"is",
"not",
"_UNSET_SENTINEL",
":",
"set_default_style",
"(",
"style",
")",
"new_defaults",
"=",
"{",
"*",
"*",
"_default_config",
"}",
"if",
"max_seq_len",
"is",
"not",
"_UNSET_SENTINEL",
":",
"new_defaults",
"[",
"'max_seq_len'",
"]",
"=",
"max_seq_len",
"if",
"width",
"is",
"not",
"_UNSET_SENTINEL",
":",
"new_defaults",
"[",
"'width'",
"]",
"=",
"width",
"if",
"ribbon_width",
"is",
"not",
"_UNSET_SENTINEL",
":",
"new_defaults",
"[",
"'ribbon_width'",
"]",
"=",
"ribbon_width",
"if",
"depth",
"is",
"not",
"_UNSET_SENTINEL",
":",
"new_defaults",
"[",
"'depth'",
"]",
"=",
"depth",
"if",
"sort_dict_keys",
"is",
"not",
"_UNSET_SENTINEL",
":",
"new_defaults",
"[",
"'sort_dict_keys'",
"]",
"=",
"sort_dict_keys",
"_default_config",
"=",
"new_defaults",
"return",
"new_defaults"
] |
Sets the default configuration values used when calling
`pprint`, `cpprint`, or `pformat`, if those values weren't
explicitly provided. Only overrides the values provided in
the keyword arguments.
|
[
"Sets",
"the",
"default",
"configuration",
"values",
"used",
"when",
"calling",
"pprint",
"cpprint",
"or",
"pformat",
"if",
"those",
"values",
"weren",
"t",
"explicitly",
"provided",
".",
"Only",
"overrides",
"the",
"values",
"provided",
"in",
"the",
"keyword",
"arguments",
"."
] |
6b405884b8085eaf867e81c02b7b662b463ac5a0
|
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/__init__.py#L344-L382
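A usage sketch showing how later calls pick up the overridden defaults:

from prettyprinter import pprint, set_default_config

set_default_config(width=100, ribbon_width=80, sort_dict_keys=True)

# This call now wraps at 100 columns and sorts keys, unless overridden per call.
pprint({'b': 2, 'a': 1})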
|
15,431
|
bcdev/jpy
|
setup.py
|
package_maven
|
def package_maven():
""" Run maven package lifecycle """
if not os.getenv('JAVA_HOME'):
# make sure Maven uses the same JDK which we have used to compile
# and link the C-code
os.environ['JAVA_HOME'] = jdk_home_dir
mvn_goal = 'package'
log.info("Executing Maven goal '" + mvn_goal + "'")
code = subprocess.call(['mvn', 'clean', mvn_goal, '-DskipTests'],
shell=platform.system() == 'Windows')
if code:
exit(code)
#
# Copy JAR results to lib/*.jar
#
if not os.path.exists(lib_dir):
os.mkdir(lib_dir)
target_dir = os.path.join(base_dir, 'target')
jar_files = glob.glob(os.path.join(target_dir, '*.jar'))
jar_files = [f for f in jar_files
if not (f.endswith('-sources.jar')
or f.endswith('-javadoc.jar'))]
if not jar_files:
log.error('Maven did not generate any JAR artifacts')
exit(1)
for jar_file in jar_files:
build_dir = _build_dir()
log.info("Copying " + jar_file + " -> " + build_dir + "")
shutil.copy(jar_file, build_dir)
|
python
|
def package_maven():
""" Run maven package lifecycle """
if not os.getenv('JAVA_HOME'):
# make sure Maven uses the same JDK which we have used to compile
# and link the C-code
os.environ['JAVA_HOME'] = jdk_home_dir
mvn_goal = 'package'
log.info("Executing Maven goal '" + mvn_goal + "'")
code = subprocess.call(['mvn', 'clean', mvn_goal, '-DskipTests'],
shell=platform.system() == 'Windows')
if code:
exit(code)
#
# Copy JAR results to lib/*.jar
#
if not os.path.exists(lib_dir):
os.mkdir(lib_dir)
target_dir = os.path.join(base_dir, 'target')
jar_files = glob.glob(os.path.join(target_dir, '*.jar'))
jar_files = [f for f in jar_files
if not (f.endswith('-sources.jar')
or f.endswith('-javadoc.jar'))]
if not jar_files:
log.error('Maven did not generate any JAR artifacts')
exit(1)
for jar_file in jar_files:
build_dir = _build_dir()
log.info("Copying " + jar_file + " -> " + build_dir + "")
shutil.copy(jar_file, build_dir)
|
[
"def",
"package_maven",
"(",
")",
":",
"if",
"not",
"os",
".",
"getenv",
"(",
"'JAVA_HOME'",
")",
":",
"# make sure Maven uses the same JDK which we have used to compile",
"# and link the C-code",
"os",
".",
"environ",
"[",
"'JAVA_HOME'",
"]",
"=",
"jdk_home_dir",
"mvn_goal",
"=",
"'package'",
"log",
".",
"info",
"(",
"\"Executing Maven goal '\"",
"+",
"mvn_goal",
"+",
"\"'\"",
")",
"code",
"=",
"subprocess",
".",
"call",
"(",
"[",
"'mvn'",
",",
"'clean'",
",",
"mvn_goal",
",",
"'-DskipTests'",
"]",
",",
"shell",
"=",
"platform",
".",
"system",
"(",
")",
"==",
"'Windows'",
")",
"if",
"code",
":",
"exit",
"(",
"code",
")",
"#",
"# Copy JAR results to lib/*.jar",
"#",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"lib_dir",
")",
":",
"os",
".",
"mkdir",
"(",
"lib_dir",
")",
"target_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"base_dir",
",",
"'target'",
")",
"jar_files",
"=",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"target_dir",
",",
"'*.jar'",
")",
")",
"jar_files",
"=",
"[",
"f",
"for",
"f",
"in",
"jar_files",
"if",
"not",
"(",
"f",
".",
"endswith",
"(",
"'-sources.jar'",
")",
"or",
"f",
".",
"endswith",
"(",
"'-javadoc.jar'",
")",
")",
"]",
"if",
"not",
"jar_files",
":",
"log",
".",
"error",
"(",
"'Maven did not generate any JAR artifacts'",
")",
"exit",
"(",
"1",
")",
"for",
"jar_file",
"in",
"jar_files",
":",
"build_dir",
"=",
"_build_dir",
"(",
")",
"log",
".",
"info",
"(",
"\"Copying \"",
"+",
"jar_file",
"+",
"\" -> \"",
"+",
"build_dir",
"+",
"\"\"",
")",
"shutil",
".",
"copy",
"(",
"jar_file",
",",
"build_dir",
")"
] |
Run maven package lifecycle
|
[
"Run",
"maven",
"package",
"lifecycle"
] |
ae813df536807fb839650a0b359aa90f8344dd79
|
https://github.com/bcdev/jpy/blob/ae813df536807fb839650a0b359aa90f8344dd79/setup.py#L153-L184
|
15,432
|
bcdev/jpy
|
setup.py
|
_write_jpy_config
|
def _write_jpy_config(target_dir=None, install_dir=None):
"""
Write out a well-formed jpyconfig.properties file for easier Java
integration in a given location.
"""
if not target_dir:
target_dir = _build_dir()
args = [sys.executable,
os.path.join(target_dir, 'jpyutil.py'),
'--jvm_dll', jvm_dll_file,
'--java_home', jdk_home_dir,
'--log_level', 'DEBUG',
'--req_java',
'--req_py']
if install_dir:
args.append('--install_dir')
args.append(install_dir)
log.info('Writing jpy configuration to %s using install_dir %s' % (target_dir, install_dir))
return subprocess.call(args)
|
python
|
def _write_jpy_config(target_dir=None, install_dir=None):
"""
Write out a well-formed jpyconfig.properties file for easier Java
integration in a given location.
"""
if not target_dir:
target_dir = _build_dir()
args = [sys.executable,
os.path.join(target_dir, 'jpyutil.py'),
'--jvm_dll', jvm_dll_file,
'--java_home', jdk_home_dir,
'--log_level', 'DEBUG',
'--req_java',
'--req_py']
if install_dir:
args.append('--install_dir')
args.append(install_dir)
log.info('Writing jpy configuration to %s using install_dir %s' % (target_dir, install_dir))
return subprocess.call(args)
|
[
"def",
"_write_jpy_config",
"(",
"target_dir",
"=",
"None",
",",
"install_dir",
"=",
"None",
")",
":",
"if",
"not",
"target_dir",
":",
"target_dir",
"=",
"_build_dir",
"(",
")",
"args",
"=",
"[",
"sys",
".",
"executable",
",",
"os",
".",
"path",
".",
"join",
"(",
"target_dir",
",",
"'jpyutil.py'",
")",
",",
"'--jvm_dll'",
",",
"jvm_dll_file",
",",
"'--java_home'",
",",
"jdk_home_dir",
",",
"'--log_level'",
",",
"'DEBUG'",
",",
"'--req_java'",
",",
"'--req_py'",
"]",
"if",
"install_dir",
":",
"args",
".",
"append",
"(",
"'--install_dir'",
")",
"args",
".",
"append",
"(",
"install_dir",
")",
"log",
".",
"info",
"(",
"'Writing jpy configuration to %s using install_dir %s'",
"%",
"(",
"target_dir",
",",
"install_dir",
")",
")",
"return",
"subprocess",
".",
"call",
"(",
"args",
")"
] |
Write out a well-formed jpyconfig.properties file for easier Java
integration in a given location.
|
[
"Write",
"out",
"a",
"well",
"-",
"formed",
"jpyconfig",
".",
"properties",
"file",
"for",
"easier",
"Java",
"integration",
"in",
"a",
"given",
"location",
"."
] |
ae813df536807fb839650a0b359aa90f8344dd79
|
https://github.com/bcdev/jpy/blob/ae813df536807fb839650a0b359aa90f8344dd79/setup.py#L216-L236
|
15,433
|
bcdev/jpy
|
jpyutil.py
|
_get_module_path
|
def _get_module_path(name, fail=False, install_path=None):
""" Find the path to the jpy jni modules. """
import imp
module = imp.find_module(name)
if not module and fail:
raise RuntimeError("can't find module '" + name + "'")
path = module[1]
if not path and fail:
raise RuntimeError("module '" + name + "' is missing a file path")
if install_path:
return os.path.join(install_path, os.path.split(path)[1])
return path
|
python
|
def _get_module_path(name, fail=False, install_path=None):
""" Find the path to the jpy jni modules. """
import imp
module = imp.find_module(name)
if not module and fail:
raise RuntimeError("can't find module '" + name + "'")
path = module[1]
if not path and fail:
raise RuntimeError("module '" + name + "' is missing a file path")
if install_path:
return os.path.join(install_path, os.path.split(path)[1])
return path
|
[
"def",
"_get_module_path",
"(",
"name",
",",
"fail",
"=",
"False",
",",
"install_path",
"=",
"None",
")",
":",
"import",
"imp",
"module",
"=",
"imp",
".",
"find_module",
"(",
"name",
")",
"if",
"not",
"module",
"and",
"fail",
":",
"raise",
"RuntimeError",
"(",
"\"can't find module '\"",
"+",
"name",
"+",
"\"'\"",
")",
"path",
"=",
"module",
"[",
"1",
"]",
"if",
"not",
"path",
"and",
"fail",
":",
"raise",
"RuntimeError",
"(",
"\"module '\"",
"+",
"name",
"+",
"\"' is missing a file path\"",
")",
"if",
"install_path",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"install_path",
",",
"os",
".",
"path",
".",
"split",
"(",
"path",
")",
"[",
"1",
"]",
")",
"return",
"path"
] |
Find the path to the jpy jni modules.
|
[
"Find",
"the",
"path",
"to",
"the",
"jpy",
"jni",
"modules",
"."
] |
ae813df536807fb839650a0b359aa90f8344dd79
|
https://github.com/bcdev/jpy/blob/ae813df536807fb839650a0b359aa90f8344dd79/jpyutil.py#L99-L113
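The helper relies on the long-deprecated imp module; an equivalent lookup with importlib would look roughly like the following (an alternative sketch, not the project's code):

import importlib.util
import os

def find_module_file(name, install_path=None):
    # Locate the file backing an importable module, mirroring _get_module_path.
    spec = importlib.util.find_spec(name)
    if spec is None or not spec.origin:
        raise RuntimeError("can't find module '" + name + "'")
    if install_path:
        return os.path.join(install_path, os.path.basename(spec.origin))
    return spec.origin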
|
15,434
|
bcdev/jpy
|
jpyutil.py
|
init_jvm
|
def init_jvm(java_home=None,
jvm_dll=None,
jvm_maxmem=None,
jvm_classpath=None,
jvm_properties=None,
jvm_options=None,
config_file=None,
config=None):
"""
Creates a configured Java virtual machine which will be used by jpy.
:param java_home: The Java JRE or JDK home directory used to search JVM shared library, if 'jvm_dll' is omitted.
    :param jvm_dll: The JVM shared library file. May be inferred from 'java_home'.
:param jvm_maxmem: The JVM maximum heap space, e.g. '400M', '8G'. Refer to the java executable '-Xmx' option.
:param jvm_classpath: The JVM search paths for Java class files. Separated by colons (Unix) or semicolons
(Windows). Refer to the java executable '-cp' option.
:param jvm_properties: A dictionary of key -> value pairs passed to the JVM as Java system properties.
Refer to the java executable '-D' option.
:param jvm_options: A list of extra options for the JVM. Refer to the java executable options.
:param config_file: Extra configuration file (e.g. 'jpyconfig.py') to be loaded if 'config' parameter is omitted.
:param config: An optional default configuration object providing default attributes
for the 'jvm_maxmem', 'jvm_classpath', 'jvm_properties', 'jvm_options' parameters.
:return: a tuple (cdll, actual_jvm_options) on success, None otherwise.
"""
if not config:
config = _get_python_api_config(config_file=config_file)
cdll = preload_jvm_dll(jvm_dll_file=jvm_dll,
java_home_dir=java_home,
config_file=config_file,
config=config,
fail=False)
import jpy
if not jpy.has_jvm():
jvm_options = get_jvm_options(jvm_maxmem=jvm_maxmem,
jvm_classpath=jvm_classpath,
jvm_properties=jvm_properties,
jvm_options=jvm_options,
config=config)
logger.debug('Creating JVM with options %s' % repr(jvm_options))
jpy.create_jvm(options=jvm_options)
else:
jvm_options = None
# print('jvm_dll =', jvm_dll)
# print('jvm_options =', jvm_options)
return cdll, jvm_options
|
python
|
def init_jvm(java_home=None,
jvm_dll=None,
jvm_maxmem=None,
jvm_classpath=None,
jvm_properties=None,
jvm_options=None,
config_file=None,
config=None):
"""
Creates a configured Java virtual machine which will be used by jpy.
:param java_home: The Java JRE or JDK home directory used to search JVM shared library, if 'jvm_dll' is omitted.
    :param jvm_dll: The JVM shared library file. May be inferred from 'java_home'.
:param jvm_maxmem: The JVM maximum heap space, e.g. '400M', '8G'. Refer to the java executable '-Xmx' option.
:param jvm_classpath: The JVM search paths for Java class files. Separated by colons (Unix) or semicolons
(Windows). Refer to the java executable '-cp' option.
:param jvm_properties: A dictionary of key -> value pairs passed to the JVM as Java system properties.
Refer to the java executable '-D' option.
:param jvm_options: A list of extra options for the JVM. Refer to the java executable options.
:param config_file: Extra configuration file (e.g. 'jpyconfig.py') to be loaded if 'config' parameter is omitted.
:param config: An optional default configuration object providing default attributes
for the 'jvm_maxmem', 'jvm_classpath', 'jvm_properties', 'jvm_options' parameters.
:return: a tuple (cdll, actual_jvm_options) on success, None otherwise.
"""
if not config:
config = _get_python_api_config(config_file=config_file)
cdll = preload_jvm_dll(jvm_dll_file=jvm_dll,
java_home_dir=java_home,
config_file=config_file,
config=config,
fail=False)
import jpy
if not jpy.has_jvm():
jvm_options = get_jvm_options(jvm_maxmem=jvm_maxmem,
jvm_classpath=jvm_classpath,
jvm_properties=jvm_properties,
jvm_options=jvm_options,
config=config)
logger.debug('Creating JVM with options %s' % repr(jvm_options))
jpy.create_jvm(options=jvm_options)
else:
jvm_options = None
# print('jvm_dll =', jvm_dll)
# print('jvm_options =', jvm_options)
return cdll, jvm_options
|
[
"def",
"init_jvm",
"(",
"java_home",
"=",
"None",
",",
"jvm_dll",
"=",
"None",
",",
"jvm_maxmem",
"=",
"None",
",",
"jvm_classpath",
"=",
"None",
",",
"jvm_properties",
"=",
"None",
",",
"jvm_options",
"=",
"None",
",",
"config_file",
"=",
"None",
",",
"config",
"=",
"None",
")",
":",
"if",
"not",
"config",
":",
"config",
"=",
"_get_python_api_config",
"(",
"config_file",
"=",
"config_file",
")",
"cdll",
"=",
"preload_jvm_dll",
"(",
"jvm_dll_file",
"=",
"jvm_dll",
",",
"java_home_dir",
"=",
"java_home",
",",
"config_file",
"=",
"config_file",
",",
"config",
"=",
"config",
",",
"fail",
"=",
"False",
")",
"import",
"jpy",
"if",
"not",
"jpy",
".",
"has_jvm",
"(",
")",
":",
"jvm_options",
"=",
"get_jvm_options",
"(",
"jvm_maxmem",
"=",
"jvm_maxmem",
",",
"jvm_classpath",
"=",
"jvm_classpath",
",",
"jvm_properties",
"=",
"jvm_properties",
",",
"jvm_options",
"=",
"jvm_options",
",",
"config",
"=",
"config",
")",
"logger",
".",
"debug",
"(",
"'Creating JVM with options %s'",
"%",
"repr",
"(",
"jvm_options",
")",
")",
"jpy",
".",
"create_jvm",
"(",
"options",
"=",
"jvm_options",
")",
"else",
":",
"jvm_options",
"=",
"None",
"# print('jvm_dll =', jvm_dll)",
"# print('jvm_options =', jvm_options)",
"return",
"cdll",
",",
"jvm_options"
] |
Creates a configured Java virtual machine which will be used by jpy.
:param java_home: The Java JRE or JDK home directory used to search JVM shared library, if 'jvm_dll' is omitted.
:param jvm_dll: The JVM shared library file. May be inferred from 'java_home'.
:param jvm_maxmem: The JVM maximum heap space, e.g. '400M', '8G'. Refer to the java executable '-Xmx' option.
:param jvm_classpath: The JVM search paths for Java class files. Separated by colons (Unix) or semicolons
(Windows). Refer to the java executable '-cp' option.
:param jvm_properties: A dictionary of key -> value pairs passed to the JVM as Java system properties.
Refer to the java executable '-D' option.
:param jvm_options: A list of extra options for the JVM. Refer to the java executable options.
:param config_file: Extra configuration file (e.g. 'jpyconfig.py') to be loaded if 'config' parameter is omitted.
:param config: An optional default configuration object providing default attributes
for the 'jvm_maxmem', 'jvm_classpath', 'jvm_properties', 'jvm_options' parameters.
:return: a tuple (cdll, actual_jvm_options) on success, None otherwise.
|
[
"Creates",
"a",
"configured",
"Java",
"virtual",
"machine",
"which",
"will",
"be",
"used",
"by",
"jpy",
"."
] |
ae813df536807fb839650a0b359aa90f8344dd79
|
https://github.com/bcdev/jpy/blob/ae813df536807fb839650a0b359aa90f8344dd79/jpyutil.py#L411-L459
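A minimal driver script built on this function; the classpath entry is a placeholder and the java.lang.String round trip simply confirms the JVM is up:

import jpyutil

# Preload the JVM shared library and start a JVM for jpy with a 512 MB heap.
jpyutil.init_jvm(jvm_maxmem='512M', jvm_classpath=['target/classes'])

import jpy
JString = jpy.get_type('java.lang.String')
print(JString('hello').toUpperCase())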
|
15,435
|
KeepSafe/android-resource-remover
|
android_clean_app.py
|
run_lint_command
|
def run_lint_command():
"""
Run lint command in the shell and save results to lint-result.xml
"""
lint, app_dir, lint_result, ignore_layouts = parse_args()
if not lint_result:
if not distutils.spawn.find_executable(lint):
raise Exception(
'`%s` executable could not be found and path to lint result not specified. See --help' % lint)
lint_result = os.path.join(app_dir, 'lint-result.xml')
call_result = subprocess.call([lint, app_dir, '--xml', lint_result])
if call_result > 0:
print('Running the command failed with result %s. Try running it from the console.'
' Arguments for subprocess.call: %s' % (call_result, [lint, app_dir, '--xml', lint_result]))
else:
if not os.path.isabs(lint_result):
lint_result = os.path.join(app_dir, lint_result)
lint_result = os.path.abspath(lint_result)
return lint_result, app_dir, ignore_layouts
|
python
|
def run_lint_command():
"""
Run lint command in the shell and save results to lint-result.xml
"""
lint, app_dir, lint_result, ignore_layouts = parse_args()
if not lint_result:
if not distutils.spawn.find_executable(lint):
raise Exception(
'`%s` executable could not be found and path to lint result not specified. See --help' % lint)
lint_result = os.path.join(app_dir, 'lint-result.xml')
call_result = subprocess.call([lint, app_dir, '--xml', lint_result])
if call_result > 0:
print('Running the command failed with result %s. Try running it from the console.'
' Arguments for subprocess.call: %s' % (call_result, [lint, app_dir, '--xml', lint_result]))
else:
if not os.path.isabs(lint_result):
lint_result = os.path.join(app_dir, lint_result)
lint_result = os.path.abspath(lint_result)
return lint_result, app_dir, ignore_layouts
|
[
"def",
"run_lint_command",
"(",
")",
":",
"lint",
",",
"app_dir",
",",
"lint_result",
",",
"ignore_layouts",
"=",
"parse_args",
"(",
")",
"if",
"not",
"lint_result",
":",
"if",
"not",
"distutils",
".",
"spawn",
".",
"find_executable",
"(",
"lint",
")",
":",
"raise",
"Exception",
"(",
"'`%s` executable could not be found and path to lint result not specified. See --help'",
"%",
"lint",
")",
"lint_result",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app_dir",
",",
"'lint-result.xml'",
")",
"call_result",
"=",
"subprocess",
".",
"call",
"(",
"[",
"lint",
",",
"app_dir",
",",
"'--xml'",
",",
"lint_result",
"]",
")",
"if",
"call_result",
">",
"0",
":",
"print",
"(",
"'Running the command failed with result %s. Try running it from the console.'",
"' Arguments for subprocess.call: %s'",
"%",
"(",
"call_result",
",",
"[",
"lint",
",",
"app_dir",
",",
"'--xml'",
",",
"lint_result",
"]",
")",
")",
"else",
":",
"if",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"lint_result",
")",
":",
"lint_result",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app_dir",
",",
"lint_result",
")",
"lint_result",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"lint_result",
")",
"return",
"lint_result",
",",
"app_dir",
",",
"ignore_layouts"
] |
Run lint command in the shell and save results to lint-result.xml
|
[
"Run",
"lint",
"command",
"in",
"the",
"shell",
"and",
"save",
"results",
"to",
"lint",
"-",
"result",
".",
"xml"
] |
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
|
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L87-L105
|
15,436
|
KeepSafe/android-resource-remover
|
android_clean_app.py
|
parse_lint_result
|
def parse_lint_result(lint_result_path, manifest_path):
"""
Parse lint-result.xml and create Issue for every problem found except unused strings referenced in AndroidManifest
"""
unused_string_pattern = re.compile('The resource `R\.string\.([^`]+)` appears to be unused')
mainfest_string_refs = get_manifest_string_refs(manifest_path)
root = etree.parse(lint_result_path).getroot()
issues = []
for issue_xml in root.findall('.//issue[@id="UnusedResources"]'):
message = issue_xml.get('message')
unused_string = re.match(unused_string_pattern, issue_xml.get('message'))
has_string_in_manifest = unused_string and unused_string.group(1) in mainfest_string_refs
if not has_string_in_manifest:
issues.extend(_get_issues_from_location(UnusedResourceIssue,
issue_xml.findall('location'),
message))
for issue_xml in root.findall('.//issue[@id="ExtraTranslation"]'):
message = issue_xml.get('message')
if re.findall(ExtraTranslationIssue.pattern, message):
issues.extend(_get_issues_from_location(ExtraTranslationIssue,
issue_xml.findall('location'),
message))
return issues
|
python
|
def parse_lint_result(lint_result_path, manifest_path):
"""
Parse lint-result.xml and create Issue for every problem found except unused strings referenced in AndroidManifest
"""
unused_string_pattern = re.compile('The resource `R\.string\.([^`]+)` appears to be unused')
mainfest_string_refs = get_manifest_string_refs(manifest_path)
root = etree.parse(lint_result_path).getroot()
issues = []
for issue_xml in root.findall('.//issue[@id="UnusedResources"]'):
message = issue_xml.get('message')
unused_string = re.match(unused_string_pattern, issue_xml.get('message'))
has_string_in_manifest = unused_string and unused_string.group(1) in mainfest_string_refs
if not has_string_in_manifest:
issues.extend(_get_issues_from_location(UnusedResourceIssue,
issue_xml.findall('location'),
message))
for issue_xml in root.findall('.//issue[@id="ExtraTranslation"]'):
message = issue_xml.get('message')
if re.findall(ExtraTranslationIssue.pattern, message):
issues.extend(_get_issues_from_location(ExtraTranslationIssue,
issue_xml.findall('location'),
message))
return issues
|
[
"def",
"parse_lint_result",
"(",
"lint_result_path",
",",
"manifest_path",
")",
":",
"unused_string_pattern",
"=",
"re",
".",
"compile",
"(",
"'The resource `R\\.string\\.([^`]+)` appears to be unused'",
")",
"mainfest_string_refs",
"=",
"get_manifest_string_refs",
"(",
"manifest_path",
")",
"root",
"=",
"etree",
".",
"parse",
"(",
"lint_result_path",
")",
".",
"getroot",
"(",
")",
"issues",
"=",
"[",
"]",
"for",
"issue_xml",
"in",
"root",
".",
"findall",
"(",
"'.//issue[@id=\"UnusedResources\"]'",
")",
":",
"message",
"=",
"issue_xml",
".",
"get",
"(",
"'message'",
")",
"unused_string",
"=",
"re",
".",
"match",
"(",
"unused_string_pattern",
",",
"issue_xml",
".",
"get",
"(",
"'message'",
")",
")",
"has_string_in_manifest",
"=",
"unused_string",
"and",
"unused_string",
".",
"group",
"(",
"1",
")",
"in",
"mainfest_string_refs",
"if",
"not",
"has_string_in_manifest",
":",
"issues",
".",
"extend",
"(",
"_get_issues_from_location",
"(",
"UnusedResourceIssue",
",",
"issue_xml",
".",
"findall",
"(",
"'location'",
")",
",",
"message",
")",
")",
"for",
"issue_xml",
"in",
"root",
".",
"findall",
"(",
"'.//issue[@id=\"ExtraTranslation\"]'",
")",
":",
"message",
"=",
"issue_xml",
".",
"get",
"(",
"'message'",
")",
"if",
"re",
".",
"findall",
"(",
"ExtraTranslationIssue",
".",
"pattern",
",",
"message",
")",
":",
"issues",
".",
"extend",
"(",
"_get_issues_from_location",
"(",
"ExtraTranslationIssue",
",",
"issue_xml",
".",
"findall",
"(",
"'location'",
")",
",",
"message",
")",
")",
"return",
"issues"
] |
Parse lint-result.xml and create Issue for every problem found except unused strings referenced in AndroidManifest
|
[
"Parse",
"lint",
"-",
"result",
".",
"xml",
"and",
"create",
"Issue",
"for",
"every",
"problem",
"found",
"except",
"unused",
"strings",
"referenced",
"in",
"AndroidManifest"
] |
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
|
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L138-L163
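The function leans on ElementTree-style attribute predicates in findall; a tiny self-contained illustration of the same query, using the standard library parser instead of lxml:

import xml.etree.ElementTree as etree

xml_text = """
<issues>
  <issue id="UnusedResources"
         message="The resource `R.string.title` appears to be unused">
    <location file="res/values/strings.xml" line="3"/>
  </issue>
</issues>
"""

root = etree.fromstring(xml_text)
for issue in root.findall('.//issue[@id="UnusedResources"]'):
    locations = [loc.get('file') for loc in issue.findall('location')]
    print(issue.get('message'), locations)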
|
15,437
|
KeepSafe/android-resource-remover
|
android_clean_app.py
|
remove_resource_file
|
def remove_resource_file(issue, filepath, ignore_layouts):
"""
Delete a file from the filesystem
"""
if os.path.exists(filepath) and (ignore_layouts is False or issue.elements[0][0] != 'layout'):
print('removing resource: {0}'.format(filepath))
os.remove(os.path.abspath(filepath))
|
python
|
def remove_resource_file(issue, filepath, ignore_layouts):
"""
Delete a file from the filesystem
"""
if os.path.exists(filepath) and (ignore_layouts is False or issue.elements[0][0] != 'layout'):
print('removing resource: {0}'.format(filepath))
os.remove(os.path.abspath(filepath))
|
[
"def",
"remove_resource_file",
"(",
"issue",
",",
"filepath",
",",
"ignore_layouts",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"filepath",
")",
"and",
"(",
"ignore_layouts",
"is",
"False",
"or",
"issue",
".",
"elements",
"[",
"0",
"]",
"[",
"0",
"]",
"!=",
"'layout'",
")",
":",
"print",
"(",
"'removing resource: {0}'",
".",
"format",
"(",
"filepath",
")",
")",
"os",
".",
"remove",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"filepath",
")",
")"
] |
Delete a file from the filesystem
|
[
"Delete",
"a",
"file",
"from",
"the",
"filesystem"
] |
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
|
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L166-L172
|
15,438
|
KeepSafe/android-resource-remover
|
android_clean_app.py
|
remove_resource_value
|
def remove_resource_value(issue, filepath):
"""
Read an xml file and remove an element which is unused, then save the file back to the filesystem
"""
if os.path.exists(filepath):
for element in issue.elements:
print('removing {0} from resource {1}'.format(element, filepath))
parser = etree.XMLParser(remove_blank_text=False, remove_comments=False,
remove_pis=False, strip_cdata=False, resolve_entities=False)
tree = etree.parse(filepath, parser)
root = tree.getroot()
for unused_value in root.findall('.//{0}[@name="{1}"]'.format(element[0], element[1])):
root.remove(unused_value)
with open(filepath, 'wb') as resource:
tree.write(resource, encoding='utf-8', xml_declaration=True)
|
python
|
def remove_resource_value(issue, filepath):
"""
Read an xml file and remove an element which is unused, then save the file back to the filesystem
"""
if os.path.exists(filepath):
for element in issue.elements:
print('removing {0} from resource {1}'.format(element, filepath))
parser = etree.XMLParser(remove_blank_text=False, remove_comments=False,
remove_pis=False, strip_cdata=False, resolve_entities=False)
tree = etree.parse(filepath, parser)
root = tree.getroot()
for unused_value in root.findall('.//{0}[@name="{1}"]'.format(element[0], element[1])):
root.remove(unused_value)
with open(filepath, 'wb') as resource:
tree.write(resource, encoding='utf-8', xml_declaration=True)
|
[
"def",
"remove_resource_value",
"(",
"issue",
",",
"filepath",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"filepath",
")",
":",
"for",
"element",
"in",
"issue",
".",
"elements",
":",
"print",
"(",
"'removing {0} from resource {1}'",
".",
"format",
"(",
"element",
",",
"filepath",
")",
")",
"parser",
"=",
"etree",
".",
"XMLParser",
"(",
"remove_blank_text",
"=",
"False",
",",
"remove_comments",
"=",
"False",
",",
"remove_pis",
"=",
"False",
",",
"strip_cdata",
"=",
"False",
",",
"resolve_entities",
"=",
"False",
")",
"tree",
"=",
"etree",
".",
"parse",
"(",
"filepath",
",",
"parser",
")",
"root",
"=",
"tree",
".",
"getroot",
"(",
")",
"for",
"unused_value",
"in",
"root",
".",
"findall",
"(",
"'.//{0}[@name=\"{1}\"]'",
".",
"format",
"(",
"element",
"[",
"0",
"]",
",",
"element",
"[",
"1",
"]",
")",
")",
":",
"root",
".",
"remove",
"(",
"unused_value",
")",
"with",
"open",
"(",
"filepath",
",",
"'wb'",
")",
"as",
"resource",
":",
"tree",
".",
"write",
"(",
"resource",
",",
"encoding",
"=",
"'utf-8'",
",",
"xml_declaration",
"=",
"True",
")"
] |
Read an xml file and remove an element which is unused, then save the file back to the filesystem
|
[
"Read",
"an",
"xml",
"file",
"and",
"remove",
"an",
"element",
"which",
"is",
"unused",
"then",
"save",
"the",
"file",
"back",
"to",
"the",
"filesystem"
] |
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
|
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L175-L189
|
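The entry above rewrites an Android resource XML in place with lxml. A minimal standalone sketch of the same parse/remove/write pattern, independent of the tool's Issue objects; the file path and resource name below are made up for illustration.

from lxml import etree

# Parse strings.xml without normalizing whitespace, drop one unused <string>,
# and write the file back with an XML declaration -- the same lxml calls used above.
parser = etree.XMLParser(remove_blank_text=False, remove_comments=False,
                         remove_pis=False, strip_cdata=False, resolve_entities=False)
tree = etree.parse('res/values/strings.xml', parser)              # hypothetical path
root = tree.getroot()
for unused in root.findall('.//string[@name="unused_label"]'):    # hypothetical name
    root.remove(unused)
with open('res/values/strings.xml', 'wb') as resource:
    tree.write(resource, encoding='utf-8', xml_declaration=True)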
15,439
|
KeepSafe/android-resource-remover
|
android_clean_app.py
|
remove_unused_resources
|
def remove_unused_resources(issues, app_dir, ignore_layouts):
"""
Remove the file or the value inside the file depending if the whole file is unused or not.
"""
for issue in issues:
filepath = os.path.join(app_dir, issue.filepath)
if issue.remove_file:
remove_resource_file(issue, filepath, ignore_layouts)
else:
remove_resource_value(issue, filepath)
|
python
|
def remove_unused_resources(issues, app_dir, ignore_layouts):
"""
Remove the file or the value inside the file depending if the whole file is unused or not.
"""
for issue in issues:
filepath = os.path.join(app_dir, issue.filepath)
if issue.remove_file:
remove_resource_file(issue, filepath, ignore_layouts)
else:
remove_resource_value(issue, filepath)
|
[
"def",
"remove_unused_resources",
"(",
"issues",
",",
"app_dir",
",",
"ignore_layouts",
")",
":",
"for",
"issue",
"in",
"issues",
":",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app_dir",
",",
"issue",
".",
"filepath",
")",
"if",
"issue",
".",
"remove_file",
":",
"remove_resource_file",
"(",
"issue",
",",
"filepath",
",",
"ignore_layouts",
")",
"else",
":",
"remove_resource_value",
"(",
"issue",
",",
"filepath",
")"
] |
Remove the file or the value inside the file depending if the whole file is unused or not.
|
[
"Remove",
"the",
"file",
"or",
"the",
"value",
"inside",
"the",
"file",
"depending",
"if",
"the",
"whole",
"file",
"is",
"unused",
"or",
"not",
"."
] |
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
|
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L192-L201
|
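The dispatcher above only reads three attributes from each issue object (filepath, remove_file, elements), so a small stand-in is enough to drive it by hand. Everything named below is hypothetical, and the import assumes the script is importable as android_clean_app.

import collections

from android_clean_app import remove_unused_resources  # assumed import path

# Minimal stand-in mirroring only the attributes remove_unused_resources reads.
Issue = collections.namedtuple('Issue', ['filepath', 'remove_file', 'elements'])

issues = [
    Issue('res/drawable/old_icon.png', True, [('drawable', 'old_icon')]),
    Issue('res/values/strings.xml', False, [('string', 'unused_label')]),
]
remove_unused_resources(issues, '/path/to/app', ignore_layouts=False)  # hypothetical app dir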
15,440
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/caches/__init__.py
|
_encryption_context_hash
|
def _encryption_context_hash(hasher, encryption_context):
"""Generates the expected hash for the provided encryption context.
:param hasher: Existing hasher to use
:type hasher: cryptography.hazmat.primitives.hashes.Hash
:param dict encryption_context: Encryption context to hash
:returns: Complete hash
:rtype: bytes
"""
serialized_encryption_context = serialize_encryption_context(encryption_context)
hasher.update(serialized_encryption_context)
return hasher.finalize()
|
python
|
def _encryption_context_hash(hasher, encryption_context):
"""Generates the expected hash for the provided encryption context.
:param hasher: Existing hasher to use
:type hasher: cryptography.hazmat.primitives.hashes.Hash
:param dict encryption_context: Encryption context to hash
:returns: Complete hash
:rtype: bytes
"""
serialized_encryption_context = serialize_encryption_context(encryption_context)
hasher.update(serialized_encryption_context)
return hasher.finalize()
|
[
"def",
"_encryption_context_hash",
"(",
"hasher",
",",
"encryption_context",
")",
":",
"serialized_encryption_context",
"=",
"serialize_encryption_context",
"(",
"encryption_context",
")",
"hasher",
".",
"update",
"(",
"serialized_encryption_context",
")",
"return",
"hasher",
".",
"finalize",
"(",
")"
] |
Generates the expected hash for the provided encryption context.
:param hasher: Existing hasher to use
:type hasher: cryptography.hazmat.primitives.hashes.Hash
:param dict encryption_context: Encryption context to hash
:returns: Complete hash
:rtype: bytes
|
[
"Generates",
"the",
"expected",
"hash",
"for",
"the",
"provided",
"encryption",
"context",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/caches/__init__.py#L51-L62
|
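The helper above hashes a canonical serialization of the encryption context so that logically equal dicts produce the same cache key. The following is a standalone illustration of that idea with hashlib and a simple sorted key/value serialization; it is not the SDK's wire format, only a demonstration of why canonical ordering matters.

import hashlib

def toy_context_hash(encryption_context):
    """Hash a dict deterministically by serializing sorted key/value pairs."""
    serialized = b"".join(
        k.encode("utf-8") + b"\x00" + v.encode("utf-8") + b"\x00"
        for k, v in sorted(encryption_context.items())
    )
    return hashlib.sha512(serialized).hexdigest()

# Equal contexts hash equally regardless of insertion order.
assert toy_context_hash({"a": "1", "b": "2"}) == toy_context_hash({"b": "2", "a": "1"})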
15,441
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/caches/__init__.py
|
build_encryption_materials_cache_key
|
def build_encryption_materials_cache_key(partition, request):
"""Generates a cache key for an encrypt request.
:param bytes partition: Partition name for which to generate key
:param request: Request for which to generate key
:type request: aws_encryption_sdk.materials_managers.EncryptionMaterialsRequest
:returns: cache key
:rtype: bytes
"""
if request.algorithm is None:
_algorithm_info = b"\x00"
else:
_algorithm_info = b"\x01" + request.algorithm.id_as_bytes()
hasher = _new_cache_key_hasher()
_partition_hash = _partition_name_hash(hasher=hasher.copy(), partition_name=partition)
_ec_hash = _encryption_context_hash(hasher=hasher.copy(), encryption_context=request.encryption_context)
hasher.update(_partition_hash)
hasher.update(_algorithm_info)
hasher.update(_ec_hash)
return hasher.finalize()
|
python
|
def build_encryption_materials_cache_key(partition, request):
"""Generates a cache key for an encrypt request.
:param bytes partition: Partition name for which to generate key
:param request: Request for which to generate key
:type request: aws_encryption_sdk.materials_managers.EncryptionMaterialsRequest
:returns: cache key
:rtype: bytes
"""
if request.algorithm is None:
_algorithm_info = b"\x00"
else:
_algorithm_info = b"\x01" + request.algorithm.id_as_bytes()
hasher = _new_cache_key_hasher()
_partition_hash = _partition_name_hash(hasher=hasher.copy(), partition_name=partition)
_ec_hash = _encryption_context_hash(hasher=hasher.copy(), encryption_context=request.encryption_context)
hasher.update(_partition_hash)
hasher.update(_algorithm_info)
hasher.update(_ec_hash)
return hasher.finalize()
|
[
"def",
"build_encryption_materials_cache_key",
"(",
"partition",
",",
"request",
")",
":",
"if",
"request",
".",
"algorithm",
"is",
"None",
":",
"_algorithm_info",
"=",
"b\"\\x00\"",
"else",
":",
"_algorithm_info",
"=",
"b\"\\x01\"",
"+",
"request",
".",
"algorithm",
".",
"id_as_bytes",
"(",
")",
"hasher",
"=",
"_new_cache_key_hasher",
"(",
")",
"_partition_hash",
"=",
"_partition_name_hash",
"(",
"hasher",
"=",
"hasher",
".",
"copy",
"(",
")",
",",
"partition_name",
"=",
"partition",
")",
"_ec_hash",
"=",
"_encryption_context_hash",
"(",
"hasher",
"=",
"hasher",
".",
"copy",
"(",
")",
",",
"encryption_context",
"=",
"request",
".",
"encryption_context",
")",
"hasher",
".",
"update",
"(",
"_partition_hash",
")",
"hasher",
".",
"update",
"(",
"_algorithm_info",
")",
"hasher",
".",
"update",
"(",
"_ec_hash",
")",
"return",
"hasher",
".",
"finalize",
"(",
")"
] |
Generates a cache key for an encrypt request.
:param bytes partition: Partition name for which to generate key
:param request: Request for which to generate key
:type request: aws_encryption_sdk.materials_managers.EncryptionMaterialsRequest
:returns: cache key
:rtype: bytes
|
[
"Generates",
"a",
"cache",
"key",
"for",
"an",
"encrypt",
"request",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/caches/__init__.py#L65-L86
|
15,442
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/caches/__init__.py
|
_encrypted_data_keys_hash
|
def _encrypted_data_keys_hash(hasher, encrypted_data_keys):
"""Generates the expected hash for the provided encrypted data keys.
:param hasher: Existing hasher to use
:type hasher: cryptography.hazmat.primitives.hashes.Hash
:param iterable encrypted_data_keys: Encrypted data keys to hash
:returns: Concatenated, sorted, list of all hashes
:rtype: bytes
"""
hashed_keys = []
for edk in encrypted_data_keys:
serialized_edk = serialize_encrypted_data_key(edk)
_hasher = hasher.copy()
_hasher.update(serialized_edk)
hashed_keys.append(_hasher.finalize())
return b"".join(sorted(hashed_keys))
|
python
|
def _encrypted_data_keys_hash(hasher, encrypted_data_keys):
"""Generates the expected hash for the provided encrypted data keys.
:param hasher: Existing hasher to use
:type hasher: cryptography.hazmat.primitives.hashes.Hash
:param iterable encrypted_data_keys: Encrypted data keys to hash
:returns: Concatenated, sorted, list of all hashes
:rtype: bytes
"""
hashed_keys = []
for edk in encrypted_data_keys:
serialized_edk = serialize_encrypted_data_key(edk)
_hasher = hasher.copy()
_hasher.update(serialized_edk)
hashed_keys.append(_hasher.finalize())
return b"".join(sorted(hashed_keys))
|
[
"def",
"_encrypted_data_keys_hash",
"(",
"hasher",
",",
"encrypted_data_keys",
")",
":",
"hashed_keys",
"=",
"[",
"]",
"for",
"edk",
"in",
"encrypted_data_keys",
":",
"serialized_edk",
"=",
"serialize_encrypted_data_key",
"(",
"edk",
")",
"_hasher",
"=",
"hasher",
".",
"copy",
"(",
")",
"_hasher",
".",
"update",
"(",
"serialized_edk",
")",
"hashed_keys",
".",
"append",
"(",
"_hasher",
".",
"finalize",
"(",
")",
")",
"return",
"b\"\"",
".",
"join",
"(",
"sorted",
"(",
"hashed_keys",
")",
")"
] |
Generates the expected hash for the provided encrypted data keys.
:param hasher: Existing hasher to use
:type hasher: cryptography.hazmat.primitives.hashes.Hash
:param iterable encrypted_data_keys: Encrypted data keys to hash
:returns: Concatenated, sorted, list of all hashes
:rtype: bytes
|
[
"Generates",
"the",
"expected",
"hash",
"for",
"the",
"provided",
"encrypted",
"data",
"keys",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/caches/__init__.py#L89-L104
|
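Note that the helper above hashes each encrypted data key separately and sorts the digests before concatenating, which makes the result independent of the order the keys appear in. A small hashlib-only sketch of that order-independence property (not the SDK's EDK serialization):

import hashlib

def toy_edk_set_hash(serialized_edks):
    """Hash each serialized key, then join the digests in sorted order."""
    digests = sorted(hashlib.sha512(edk).digest() for edk in serialized_edks)
    return b"".join(digests)

# The same set of keys yields the same combined hash in any order.
assert toy_edk_set_hash([b"key-a", b"key-b"]) == toy_edk_set_hash([b"key-b", b"key-a"])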
15,443
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/caches/__init__.py
|
build_decryption_materials_cache_key
|
def build_decryption_materials_cache_key(partition, request):
"""Generates a cache key for a decrypt request.
:param bytes partition: Partition name for which to generate key
:param request: Request for which to generate key
:type request: aws_encryption_sdk.materials_managers.DecryptionMaterialsRequest
:returns: cache key
:rtype: bytes
"""
hasher = _new_cache_key_hasher()
_partition_hash = _partition_name_hash(hasher=hasher.copy(), partition_name=partition)
_algorithm_info = request.algorithm.id_as_bytes()
_edks_hash = _encrypted_data_keys_hash(hasher=hasher.copy(), encrypted_data_keys=request.encrypted_data_keys)
_ec_hash = _encryption_context_hash(hasher=hasher.copy(), encryption_context=request.encryption_context)
hasher.update(_partition_hash)
hasher.update(_algorithm_info)
hasher.update(_edks_hash)
hasher.update(_512_BIT_PAD)
hasher.update(_ec_hash)
return hasher.finalize()
|
python
|
def build_decryption_materials_cache_key(partition, request):
"""Generates a cache key for a decrypt request.
:param bytes partition: Partition name for which to generate key
:param request: Request for which to generate key
:type request: aws_encryption_sdk.materials_managers.DecryptionMaterialsRequest
:returns: cache key
:rtype: bytes
"""
hasher = _new_cache_key_hasher()
_partition_hash = _partition_name_hash(hasher=hasher.copy(), partition_name=partition)
_algorithm_info = request.algorithm.id_as_bytes()
_edks_hash = _encrypted_data_keys_hash(hasher=hasher.copy(), encrypted_data_keys=request.encrypted_data_keys)
_ec_hash = _encryption_context_hash(hasher=hasher.copy(), encryption_context=request.encryption_context)
hasher.update(_partition_hash)
hasher.update(_algorithm_info)
hasher.update(_edks_hash)
hasher.update(_512_BIT_PAD)
hasher.update(_ec_hash)
return hasher.finalize()
|
[
"def",
"build_decryption_materials_cache_key",
"(",
"partition",
",",
"request",
")",
":",
"hasher",
"=",
"_new_cache_key_hasher",
"(",
")",
"_partition_hash",
"=",
"_partition_name_hash",
"(",
"hasher",
"=",
"hasher",
".",
"copy",
"(",
")",
",",
"partition_name",
"=",
"partition",
")",
"_algorithm_info",
"=",
"request",
".",
"algorithm",
".",
"id_as_bytes",
"(",
")",
"_edks_hash",
"=",
"_encrypted_data_keys_hash",
"(",
"hasher",
"=",
"hasher",
".",
"copy",
"(",
")",
",",
"encrypted_data_keys",
"=",
"request",
".",
"encrypted_data_keys",
")",
"_ec_hash",
"=",
"_encryption_context_hash",
"(",
"hasher",
"=",
"hasher",
".",
"copy",
"(",
")",
",",
"encryption_context",
"=",
"request",
".",
"encryption_context",
")",
"hasher",
".",
"update",
"(",
"_partition_hash",
")",
"hasher",
".",
"update",
"(",
"_algorithm_info",
")",
"hasher",
".",
"update",
"(",
"_edks_hash",
")",
"hasher",
".",
"update",
"(",
"_512_BIT_PAD",
")",
"hasher",
".",
"update",
"(",
"_ec_hash",
")",
"return",
"hasher",
".",
"finalize",
"(",
")"
] |
Generates a cache key for a decrypt request.
:param bytes partition: Partition name for which to generate key
:param request: Request for which to generate key
:type request: aws_encryption_sdk.materials_managers.DecryptionMaterialsRequest
:returns: cache key
:rtype: bytes
|
[
"Generates",
"a",
"cache",
"key",
"for",
"a",
"decrypt",
"request",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/caches/__init__.py#L111-L131
|
15,444
|
aws/aws-encryption-sdk-python
|
examples/src/basic_file_encryption_with_raw_key_provider.py
|
cycle_file
|
def cycle_file(source_plaintext_filename):
"""Encrypts and then decrypts a file under a custom static master key provider.
:param str source_plaintext_filename: Filename of file to encrypt
"""
# Create a static random master key provider
key_id = os.urandom(8)
master_key_provider = StaticRandomMasterKeyProvider()
master_key_provider.add_master_key(key_id)
ciphertext_filename = source_plaintext_filename + ".encrypted"
cycled_plaintext_filename = source_plaintext_filename + ".decrypted"
# Encrypt the plaintext source data
with open(source_plaintext_filename, "rb") as plaintext, open(ciphertext_filename, "wb") as ciphertext:
with aws_encryption_sdk.stream(mode="e", source=plaintext, key_provider=master_key_provider) as encryptor:
for chunk in encryptor:
ciphertext.write(chunk)
# Decrypt the ciphertext
with open(ciphertext_filename, "rb") as ciphertext, open(cycled_plaintext_filename, "wb") as plaintext:
with aws_encryption_sdk.stream(mode="d", source=ciphertext, key_provider=master_key_provider) as decryptor:
for chunk in decryptor:
plaintext.write(chunk)
# Verify that the "cycled" (encrypted, then decrypted) plaintext is identical to the source
# plaintext
assert filecmp.cmp(source_plaintext_filename, cycled_plaintext_filename)
# Verify that the encryption context used in the decrypt operation includes all key pairs from
# the encrypt operation
#
# In production, always use a meaningful encryption context. In this sample, we omit the
# encryption context (no key pairs).
assert all(
pair in decryptor.header.encryption_context.items() for pair in encryptor.header.encryption_context.items()
)
return ciphertext_filename, cycled_plaintext_filename
|
python
|
def cycle_file(source_plaintext_filename):
"""Encrypts and then decrypts a file under a custom static master key provider.
:param str source_plaintext_filename: Filename of file to encrypt
"""
# Create a static random master key provider
key_id = os.urandom(8)
master_key_provider = StaticRandomMasterKeyProvider()
master_key_provider.add_master_key(key_id)
ciphertext_filename = source_plaintext_filename + ".encrypted"
cycled_plaintext_filename = source_plaintext_filename + ".decrypted"
# Encrypt the plaintext source data
with open(source_plaintext_filename, "rb") as plaintext, open(ciphertext_filename, "wb") as ciphertext:
with aws_encryption_sdk.stream(mode="e", source=plaintext, key_provider=master_key_provider) as encryptor:
for chunk in encryptor:
ciphertext.write(chunk)
# Decrypt the ciphertext
with open(ciphertext_filename, "rb") as ciphertext, open(cycled_plaintext_filename, "wb") as plaintext:
with aws_encryption_sdk.stream(mode="d", source=ciphertext, key_provider=master_key_provider) as decryptor:
for chunk in decryptor:
plaintext.write(chunk)
# Verify that the "cycled" (encrypted, then decrypted) plaintext is identical to the source
# plaintext
assert filecmp.cmp(source_plaintext_filename, cycled_plaintext_filename)
# Verify that the encryption context used in the decrypt operation includes all key pairs from
# the encrypt operation
#
# In production, always use a meaningful encryption context. In this sample, we omit the
# encryption context (no key pairs).
assert all(
pair in decryptor.header.encryption_context.items() for pair in encryptor.header.encryption_context.items()
)
return ciphertext_filename, cycled_plaintext_filename
|
[
"def",
"cycle_file",
"(",
"source_plaintext_filename",
")",
":",
"# Create a static random master key provider",
"key_id",
"=",
"os",
".",
"urandom",
"(",
"8",
")",
"master_key_provider",
"=",
"StaticRandomMasterKeyProvider",
"(",
")",
"master_key_provider",
".",
"add_master_key",
"(",
"key_id",
")",
"ciphertext_filename",
"=",
"source_plaintext_filename",
"+",
"\".encrypted\"",
"cycled_plaintext_filename",
"=",
"source_plaintext_filename",
"+",
"\".decrypted\"",
"# Encrypt the plaintext source data",
"with",
"open",
"(",
"source_plaintext_filename",
",",
"\"rb\"",
")",
"as",
"plaintext",
",",
"open",
"(",
"ciphertext_filename",
",",
"\"wb\"",
")",
"as",
"ciphertext",
":",
"with",
"aws_encryption_sdk",
".",
"stream",
"(",
"mode",
"=",
"\"e\"",
",",
"source",
"=",
"plaintext",
",",
"key_provider",
"=",
"master_key_provider",
")",
"as",
"encryptor",
":",
"for",
"chunk",
"in",
"encryptor",
":",
"ciphertext",
".",
"write",
"(",
"chunk",
")",
"# Decrypt the ciphertext",
"with",
"open",
"(",
"ciphertext_filename",
",",
"\"rb\"",
")",
"as",
"ciphertext",
",",
"open",
"(",
"cycled_plaintext_filename",
",",
"\"wb\"",
")",
"as",
"plaintext",
":",
"with",
"aws_encryption_sdk",
".",
"stream",
"(",
"mode",
"=",
"\"d\"",
",",
"source",
"=",
"ciphertext",
",",
"key_provider",
"=",
"master_key_provider",
")",
"as",
"decryptor",
":",
"for",
"chunk",
"in",
"decryptor",
":",
"plaintext",
".",
"write",
"(",
"chunk",
")",
"# Verify that the \"cycled\" (encrypted, then decrypted) plaintext is identical to the source",
"# plaintext",
"assert",
"filecmp",
".",
"cmp",
"(",
"source_plaintext_filename",
",",
"cycled_plaintext_filename",
")",
"# Verify that the encryption context used in the decrypt operation includes all key pairs from",
"# the encrypt operation",
"#",
"# In production, always use a meaningful encryption context. In this sample, we omit the",
"# encryption context (no key pairs).",
"assert",
"all",
"(",
"pair",
"in",
"decryptor",
".",
"header",
".",
"encryption_context",
".",
"items",
"(",
")",
"for",
"pair",
"in",
"encryptor",
".",
"header",
".",
"encryption_context",
".",
"items",
"(",
")",
")",
"return",
"ciphertext_filename",
",",
"cycled_plaintext_filename"
] |
Encrypts and then decrypts a file under a custom static master key provider.
:param str source_plaintext_filename: Filename of file to encrypt
|
[
"Encrypts",
"and",
"then",
"decrypts",
"a",
"file",
"under",
"a",
"custom",
"static",
"master",
"key",
"provider",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/examples/src/basic_file_encryption_with_raw_key_provider.py#L51-L88
|
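A minimal driver for the example above, assuming it is importable as basic_file_encryption_with_raw_key_provider from the repo's examples/src directory; the temporary file contents are made up.

import os
import tempfile

from basic_file_encryption_with_raw_key_provider import cycle_file  # assumed import path

# Write a small plaintext file, round-trip it, then clean up all three files.
with tempfile.NamedTemporaryFile(delete=False) as handle:
    handle.write(b"hello aws-encryption-sdk\n" * 100)
    source = handle.name
try:
    ciphertext, decrypted = cycle_file(source)
    print(os.path.getsize(source), os.path.getsize(ciphertext), os.path.getsize(decrypted))
finally:
    for path in (source, source + ".encrypted", source + ".decrypted"):
        if os.path.exists(path):
            os.remove(path)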
15,445
|
aws/aws-encryption-sdk-python
|
examples/src/basic_file_encryption_with_raw_key_provider.py
|
StaticRandomMasterKeyProvider._get_raw_key
|
def _get_raw_key(self, key_id):
"""Returns a static, randomly-generated symmetric key for the specified key ID.
:param str key_id: Key ID
:returns: Wrapping key that contains the specified static key
:rtype: :class:`aws_encryption_sdk.internal.crypto.WrappingKey`
"""
try:
static_key = self._static_keys[key_id]
except KeyError:
static_key = os.urandom(32)
self._static_keys[key_id] = static_key
return WrappingKey(
wrapping_algorithm=WrappingAlgorithm.AES_256_GCM_IV12_TAG16_NO_PADDING,
wrapping_key=static_key,
wrapping_key_type=EncryptionKeyType.SYMMETRIC,
)
|
python
|
def _get_raw_key(self, key_id):
"""Returns a static, randomly-generated symmetric key for the specified key ID.
:param str key_id: Key ID
:returns: Wrapping key that contains the specified static key
:rtype: :class:`aws_encryption_sdk.internal.crypto.WrappingKey`
"""
try:
static_key = self._static_keys[key_id]
except KeyError:
static_key = os.urandom(32)
self._static_keys[key_id] = static_key
return WrappingKey(
wrapping_algorithm=WrappingAlgorithm.AES_256_GCM_IV12_TAG16_NO_PADDING,
wrapping_key=static_key,
wrapping_key_type=EncryptionKeyType.SYMMETRIC,
)
|
[
"def",
"_get_raw_key",
"(",
"self",
",",
"key_id",
")",
":",
"try",
":",
"static_key",
"=",
"self",
".",
"_static_keys",
"[",
"key_id",
"]",
"except",
"KeyError",
":",
"static_key",
"=",
"os",
".",
"urandom",
"(",
"32",
")",
"self",
".",
"_static_keys",
"[",
"key_id",
"]",
"=",
"static_key",
"return",
"WrappingKey",
"(",
"wrapping_algorithm",
"=",
"WrappingAlgorithm",
".",
"AES_256_GCM_IV12_TAG16_NO_PADDING",
",",
"wrapping_key",
"=",
"static_key",
",",
"wrapping_key_type",
"=",
"EncryptionKeyType",
".",
"SYMMETRIC",
",",
")"
] |
Returns a static, randomly-generated symmetric key for the specified key ID.
:param str key_id: Key ID
:returns: Wrapping key that contains the specified static key
:rtype: :class:`aws_encryption_sdk.internal.crypto.WrappingKey`
|
[
"Returns",
"a",
"static",
"randomly",
"-",
"generated",
"symmetric",
"key",
"for",
"the",
"specified",
"key",
"ID",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/examples/src/basic_file_encryption_with_raw_key_provider.py#L32-L48
|
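The provider in this entry also works with the SDK's one-shot helpers. A short sketch assuming the legacy aws_encryption_sdk 1.x module-level encrypt/decrypt API and that StaticRandomMasterKeyProvider is importable from the example module; both are assumptions, since newer SDK majors moved to a client object.

import os

import aws_encryption_sdk
from basic_file_encryption_with_raw_key_provider import StaticRandomMasterKeyProvider  # assumed import path

provider = StaticRandomMasterKeyProvider()
provider.add_master_key(os.urandom(8))

# One-shot round trip with the static random provider (aws-encryption-sdk 1.x API).
ciphertext, _header = aws_encryption_sdk.encrypt(source=b"secret data", key_provider=provider)
plaintext, _header = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=provider)
assert plaintext == b"secret data"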
15,446
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
_EncryptionStream.stream_length
|
def stream_length(self):
"""Returns the length of the source stream, determining it if not already known."""
if self._stream_length is None:
try:
current_position = self.source_stream.tell()
self.source_stream.seek(0, 2)
self._stream_length = self.source_stream.tell()
self.source_stream.seek(current_position, 0)
except Exception as error:
# Catch-all for unknown issues encountered trying to seek for stream length
raise NotSupportedError(error)
return self._stream_length
|
python
|
def stream_length(self):
"""Returns the length of the source stream, determining it if not already known."""
if self._stream_length is None:
try:
current_position = self.source_stream.tell()
self.source_stream.seek(0, 2)
self._stream_length = self.source_stream.tell()
self.source_stream.seek(current_position, 0)
except Exception as error:
# Catch-all for unknown issues encountered trying to seek for stream length
raise NotSupportedError(error)
return self._stream_length
|
[
"def",
"stream_length",
"(",
"self",
")",
":",
"if",
"self",
".",
"_stream_length",
"is",
"None",
":",
"try",
":",
"current_position",
"=",
"self",
".",
"source_stream",
".",
"tell",
"(",
")",
"self",
".",
"source_stream",
".",
"seek",
"(",
"0",
",",
"2",
")",
"self",
".",
"_stream_length",
"=",
"self",
".",
"source_stream",
".",
"tell",
"(",
")",
"self",
".",
"source_stream",
".",
"seek",
"(",
"current_position",
",",
"0",
")",
"except",
"Exception",
"as",
"error",
":",
"# Catch-all for unknown issues encountered trying to seek for stream length",
"raise",
"NotSupportedError",
"(",
"error",
")",
"return",
"self",
".",
"_stream_length"
] |
Returns the length of the source stream, determining it if not already known.
|
[
"Returns",
"the",
"length",
"of",
"the",
"source",
"stream",
"determining",
"it",
"if",
"not",
"already",
"known",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L173-L184
|
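The property above discovers the source length by seeking to the end and restoring the position, which is why it only works on seekable sources. The same tell/seek dance on a plain BytesIO, for reference (independent of the SDK):

import io

source = io.BytesIO(b"0123456789")
source.read(4)                       # simulate a partially consumed stream

position = source.tell()             # remember where we were (4)
source.seek(0, 2)                    # seek to the end (whence=2)
length = source.tell()               # total length (10)
source.seek(position, 0)             # restore the original position

assert (position, length, source.read()) == (4, 10, b"456789")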
15,447
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
_EncryptionStream.read
|
def read(self, b=-1):
"""Returns either the requested number of bytes or the entire stream.
:param int b: Number of bytes to read
:returns: Processed (encrypted or decrypted) bytes from source stream
:rtype: bytes
"""
# Any negative value for b is interpreted as a full read
# None is also accepted for legacy compatibility
if b is None or b < 0:
b = -1
_LOGGER.debug("Stream read called, requesting %d bytes", b)
output = io.BytesIO()
if not self._message_prepped:
self._prep_message()
if self.closed:
raise ValueError("I/O operation on closed file")
if b >= 0:
self._read_bytes(b)
output.write(self.output_buffer[:b])
self.output_buffer = self.output_buffer[b:]
else:
while True:
line = self.readline()
if not line:
break
output.write(line)
self.bytes_read += output.tell()
_LOGGER.debug("Returning %d bytes of %d bytes requested", output.tell(), b)
return output.getvalue()
|
python
|
def read(self, b=-1):
"""Returns either the requested number of bytes or the entire stream.
:param int b: Number of bytes to read
:returns: Processed (encrypted or decrypted) bytes from source stream
:rtype: bytes
"""
# Any negative value for b is interpreted as a full read
# None is also accepted for legacy compatibility
if b is None or b < 0:
b = -1
_LOGGER.debug("Stream read called, requesting %d bytes", b)
output = io.BytesIO()
if not self._message_prepped:
self._prep_message()
if self.closed:
raise ValueError("I/O operation on closed file")
if b >= 0:
self._read_bytes(b)
output.write(self.output_buffer[:b])
self.output_buffer = self.output_buffer[b:]
else:
while True:
line = self.readline()
if not line:
break
output.write(line)
self.bytes_read += output.tell()
_LOGGER.debug("Returning %d bytes of %d bytes requested", output.tell(), b)
return output.getvalue()
|
[
"def",
"read",
"(",
"self",
",",
"b",
"=",
"-",
"1",
")",
":",
"# Any negative value for b is interpreted as a full read",
"# None is also accepted for legacy compatibility",
"if",
"b",
"is",
"None",
"or",
"b",
"<",
"0",
":",
"b",
"=",
"-",
"1",
"_LOGGER",
".",
"debug",
"(",
"\"Stream read called, requesting %d bytes\"",
",",
"b",
")",
"output",
"=",
"io",
".",
"BytesIO",
"(",
")",
"if",
"not",
"self",
".",
"_message_prepped",
":",
"self",
".",
"_prep_message",
"(",
")",
"if",
"self",
".",
"closed",
":",
"raise",
"ValueError",
"(",
"\"I/O operation on closed file\"",
")",
"if",
"b",
">=",
"0",
":",
"self",
".",
"_read_bytes",
"(",
"b",
")",
"output",
".",
"write",
"(",
"self",
".",
"output_buffer",
"[",
":",
"b",
"]",
")",
"self",
".",
"output_buffer",
"=",
"self",
".",
"output_buffer",
"[",
"b",
":",
"]",
"else",
":",
"while",
"True",
":",
"line",
"=",
"self",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"break",
"output",
".",
"write",
"(",
"line",
")",
"self",
".",
"bytes_read",
"+=",
"output",
".",
"tell",
"(",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Returning %d bytes of %d bytes requested\"",
",",
"output",
".",
"tell",
"(",
")",
",",
"b",
")",
"return",
"output",
".",
"getvalue",
"(",
")"
] |
Returns either the requested number of bytes or the entire stream.
:param int b: Number of bytes to read
:returns: Processed (encrypted or decrypted) bytes from source stream
:rtype: bytes
|
[
"Returns",
"either",
"the",
"requested",
"number",
"of",
"bytes",
"or",
"the",
"entire",
"stream",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L220-L254
|
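In practice the read() above is usually driven through the streaming context manager. A hedged sketch of chunked encryption with the legacy aws_encryption_sdk.stream API (1.x), reusing the static provider from the example entries; the import path is assumed.

import io
import os

import aws_encryption_sdk
from basic_file_encryption_with_raw_key_provider import StaticRandomMasterKeyProvider  # assumed import path

provider = StaticRandomMasterKeyProvider()
provider.add_master_key(os.urandom(8))

plaintext = io.BytesIO(b"x" * 100000)
ciphertext = io.BytesIO()

# Fixed-size reads exercise the b >= 0 branch of read(); an empty chunk marks the end of the message.
with aws_encryption_sdk.stream(mode="e", source=plaintext, key_provider=provider) as encryptor:
    while True:
        chunk = encryptor.read(8192)
        if not chunk:
            break
        ciphertext.write(chunk)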
15,448
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
_EncryptionStream.readline
|
def readline(self):
"""Read a chunk of the output"""
_LOGGER.info("reading line")
line = self.read(self.line_length)
if len(line) < self.line_length:
_LOGGER.info("all lines read")
return line
|
python
|
def readline(self):
"""Read a chunk of the output"""
_LOGGER.info("reading line")
line = self.read(self.line_length)
if len(line) < self.line_length:
_LOGGER.info("all lines read")
return line
|
[
"def",
"readline",
"(",
"self",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"reading line\"",
")",
"line",
"=",
"self",
".",
"read",
"(",
"self",
".",
"line_length",
")",
"if",
"len",
"(",
"line",
")",
"<",
"self",
".",
"line_length",
":",
"_LOGGER",
".",
"info",
"(",
"\"all lines read\"",
")",
"return",
"line"
] |
Read a chunk of the output
|
[
"Read",
"a",
"chunk",
"of",
"the",
"output"
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L276-L282
|
15,449
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
_EncryptionStream.next
|
def next(self):
"""Provides hook for Python2 iterator functionality."""
_LOGGER.debug("reading next")
if self.closed:
_LOGGER.debug("stream is closed")
raise StopIteration()
line = self.readline()
if not line:
_LOGGER.debug("nothing more to read")
raise StopIteration()
return line
|
python
|
def next(self):
"""Provides hook for Python2 iterator functionality."""
_LOGGER.debug("reading next")
if self.closed:
_LOGGER.debug("stream is closed")
raise StopIteration()
line = self.readline()
if not line:
_LOGGER.debug("nothing more to read")
raise StopIteration()
return line
|
[
"def",
"next",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"reading next\"",
")",
"if",
"self",
".",
"closed",
":",
"_LOGGER",
".",
"debug",
"(",
"\"stream is closed\"",
")",
"raise",
"StopIteration",
"(",
")",
"line",
"=",
"self",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"_LOGGER",
".",
"debug",
"(",
"\"nothing more to read\"",
")",
"raise",
"StopIteration",
"(",
")",
"return",
"line"
] |
Provides hook for Python2 iterator functionality.
|
[
"Provides",
"hook",
"for",
"Python2",
"iterator",
"functionality",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L292-L304
|
15,450
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
StreamEncryptor.ciphertext_length
|
def ciphertext_length(self):
"""Returns the length of the resulting ciphertext message in bytes.
:rtype: int
"""
return aws_encryption_sdk.internal.formatting.ciphertext_length(
header=self.header, plaintext_length=self.stream_length
)
|
python
|
def ciphertext_length(self):
"""Returns the length of the resulting ciphertext message in bytes.
:rtype: int
"""
return aws_encryption_sdk.internal.formatting.ciphertext_length(
header=self.header, plaintext_length=self.stream_length
)
|
[
"def",
"ciphertext_length",
"(",
"self",
")",
":",
"return",
"aws_encryption_sdk",
".",
"internal",
".",
"formatting",
".",
"ciphertext_length",
"(",
"header",
"=",
"self",
".",
"header",
",",
"plaintext_length",
"=",
"self",
".",
"stream_length",
")"
] |
Returns the length of the resulting ciphertext message in bytes.
:rtype: int
|
[
"Returns",
"the",
"length",
"of",
"the",
"resulting",
"ciphertext",
"message",
"in",
"bytes",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L409-L416
|
15,451
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
StreamEncryptor._write_header
|
def _write_header(self):
"""Builds the message header and writes it to the output stream."""
self.output_buffer += serialize_header(header=self._header, signer=self.signer)
self.output_buffer += serialize_header_auth(
algorithm=self._encryption_materials.algorithm,
header=self.output_buffer,
data_encryption_key=self._derived_data_key,
signer=self.signer,
)
|
python
|
def _write_header(self):
"""Builds the message header and writes it to the output stream."""
self.output_buffer += serialize_header(header=self._header, signer=self.signer)
self.output_buffer += serialize_header_auth(
algorithm=self._encryption_materials.algorithm,
header=self.output_buffer,
data_encryption_key=self._derived_data_key,
signer=self.signer,
)
|
[
"def",
"_write_header",
"(",
"self",
")",
":",
"self",
".",
"output_buffer",
"+=",
"serialize_header",
"(",
"header",
"=",
"self",
".",
"_header",
",",
"signer",
"=",
"self",
".",
"signer",
")",
"self",
".",
"output_buffer",
"+=",
"serialize_header_auth",
"(",
"algorithm",
"=",
"self",
".",
"_encryption_materials",
".",
"algorithm",
",",
"header",
"=",
"self",
".",
"output_buffer",
",",
"data_encryption_key",
"=",
"self",
".",
"_derived_data_key",
",",
"signer",
"=",
"self",
".",
"signer",
",",
")"
] |
Builds the message header and writes it to the output stream.
|
[
"Builds",
"the",
"message",
"header",
"and",
"writes",
"it",
"to",
"the",
"output",
"stream",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L484-L492
|
15,452
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
StreamEncryptor._read_bytes_to_non_framed_body
|
def _read_bytes_to_non_framed_body(self, b):
"""Reads the requested number of bytes from source to a streaming non-framed message body.
:param int b: Number of bytes to read
:returns: Encrypted bytes from source stream
:rtype: bytes
"""
_LOGGER.debug("Reading %d bytes", b)
plaintext = self.__unframed_plaintext_cache.read(b)
plaintext_length = len(plaintext)
if self.tell() + len(plaintext) > MAX_NON_FRAMED_SIZE:
raise SerializationError("Source too large for non-framed message")
ciphertext = self.encryptor.update(plaintext)
self._bytes_encrypted += plaintext_length
if self.signer is not None:
self.signer.update(ciphertext)
if len(plaintext) < b:
_LOGGER.debug("Closing encryptor after receiving only %d bytes of %d bytes requested", plaintext_length, b)
closing = self.encryptor.finalize()
if self.signer is not None:
self.signer.update(closing)
closing += serialize_non_framed_close(tag=self.encryptor.tag, signer=self.signer)
if self.signer is not None:
closing += serialize_footer(self.signer)
self.__message_complete = True
return ciphertext + closing
return ciphertext
|
python
|
def _read_bytes_to_non_framed_body(self, b):
"""Reads the requested number of bytes from source to a streaming non-framed message body.
:param int b: Number of bytes to read
:returns: Encrypted bytes from source stream
:rtype: bytes
"""
_LOGGER.debug("Reading %d bytes", b)
plaintext = self.__unframed_plaintext_cache.read(b)
plaintext_length = len(plaintext)
if self.tell() + len(plaintext) > MAX_NON_FRAMED_SIZE:
raise SerializationError("Source too large for non-framed message")
ciphertext = self.encryptor.update(plaintext)
self._bytes_encrypted += plaintext_length
if self.signer is not None:
self.signer.update(ciphertext)
if len(plaintext) < b:
_LOGGER.debug("Closing encryptor after receiving only %d bytes of %d bytes requested", plaintext_length, b)
closing = self.encryptor.finalize()
if self.signer is not None:
self.signer.update(closing)
closing += serialize_non_framed_close(tag=self.encryptor.tag, signer=self.signer)
if self.signer is not None:
closing += serialize_footer(self.signer)
self.__message_complete = True
return ciphertext + closing
return ciphertext
|
[
"def",
"_read_bytes_to_non_framed_body",
"(",
"self",
",",
"b",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Reading %d bytes\"",
",",
"b",
")",
"plaintext",
"=",
"self",
".",
"__unframed_plaintext_cache",
".",
"read",
"(",
"b",
")",
"plaintext_length",
"=",
"len",
"(",
"plaintext",
")",
"if",
"self",
".",
"tell",
"(",
")",
"+",
"len",
"(",
"plaintext",
")",
">",
"MAX_NON_FRAMED_SIZE",
":",
"raise",
"SerializationError",
"(",
"\"Source too large for non-framed message\"",
")",
"ciphertext",
"=",
"self",
".",
"encryptor",
".",
"update",
"(",
"plaintext",
")",
"self",
".",
"_bytes_encrypted",
"+=",
"plaintext_length",
"if",
"self",
".",
"signer",
"is",
"not",
"None",
":",
"self",
".",
"signer",
".",
"update",
"(",
"ciphertext",
")",
"if",
"len",
"(",
"plaintext",
")",
"<",
"b",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Closing encryptor after receiving only %d bytes of %d bytes requested\"",
",",
"plaintext_length",
",",
"b",
")",
"closing",
"=",
"self",
".",
"encryptor",
".",
"finalize",
"(",
")",
"if",
"self",
".",
"signer",
"is",
"not",
"None",
":",
"self",
".",
"signer",
".",
"update",
"(",
"closing",
")",
"closing",
"+=",
"serialize_non_framed_close",
"(",
"tag",
"=",
"self",
".",
"encryptor",
".",
"tag",
",",
"signer",
"=",
"self",
".",
"signer",
")",
"if",
"self",
".",
"signer",
"is",
"not",
"None",
":",
"closing",
"+=",
"serialize_footer",
"(",
"self",
".",
"signer",
")",
"self",
".",
"__message_complete",
"=",
"True",
"return",
"ciphertext",
"+",
"closing",
"return",
"ciphertext"
] |
Reads the requested number of bytes from source to a streaming non-framed message body.
:param int b: Number of bytes to read
:returns: Encrypted bytes from source stream
:rtype: bytes
|
[
"Reads",
"the",
"requested",
"number",
"of",
"bytes",
"from",
"source",
"to",
"a",
"streaming",
"non",
"-",
"framed",
"message",
"body",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L529-L562
|
15,453
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
StreamEncryptor._read_bytes_to_framed_body
|
def _read_bytes_to_framed_body(self, b):
"""Reads the requested number of bytes from source to a streaming framed message body.
:param int b: Number of bytes to read
:returns: Bytes read from source stream, encrypted, and serialized
:rtype: bytes
"""
_LOGGER.debug("collecting %d bytes", b)
_b = b
if b > 0:
_frames_to_read = math.ceil(b / float(self.config.frame_length))
b = int(_frames_to_read * self.config.frame_length)
_LOGGER.debug("%d bytes requested; reading %d bytes after normalizing to frame length", _b, b)
plaintext = self.source_stream.read(b)
plaintext_length = len(plaintext)
_LOGGER.debug("%d bytes read from source", plaintext_length)
finalize = False
if b < 0 or plaintext_length < b:
_LOGGER.debug("Final plaintext read from source")
finalize = True
output = b""
final_frame_written = False
while (
# If not finalizing on this pass, exit when plaintext is exhausted
(not finalize and plaintext)
# If finalizing on this pass, wait until final frame is written
or (finalize and not final_frame_written)
):
current_plaintext_length = len(plaintext)
is_final_frame = finalize and current_plaintext_length < self.config.frame_length
bytes_in_frame = min(current_plaintext_length, self.config.frame_length)
_LOGGER.debug(
"Writing %d bytes into%s frame %d",
bytes_in_frame,
" final" if is_final_frame else "",
self.sequence_number,
)
self._bytes_encrypted += bytes_in_frame
ciphertext, plaintext = serialize_frame(
algorithm=self._encryption_materials.algorithm,
plaintext=plaintext,
message_id=self._header.message_id,
data_encryption_key=self._derived_data_key,
frame_length=self.config.frame_length,
sequence_number=self.sequence_number,
is_final_frame=is_final_frame,
signer=self.signer,
)
final_frame_written = is_final_frame
output += ciphertext
self.sequence_number += 1
if finalize:
_LOGGER.debug("Writing footer")
if self.signer is not None:
output += serialize_footer(self.signer)
self.__message_complete = True
return output
|
python
|
def _read_bytes_to_framed_body(self, b):
"""Reads the requested number of bytes from source to a streaming framed message body.
:param int b: Number of bytes to read
:returns: Bytes read from source stream, encrypted, and serialized
:rtype: bytes
"""
_LOGGER.debug("collecting %d bytes", b)
_b = b
if b > 0:
_frames_to_read = math.ceil(b / float(self.config.frame_length))
b = int(_frames_to_read * self.config.frame_length)
_LOGGER.debug("%d bytes requested; reading %d bytes after normalizing to frame length", _b, b)
plaintext = self.source_stream.read(b)
plaintext_length = len(plaintext)
_LOGGER.debug("%d bytes read from source", plaintext_length)
finalize = False
if b < 0 or plaintext_length < b:
_LOGGER.debug("Final plaintext read from source")
finalize = True
output = b""
final_frame_written = False
while (
# If not finalizing on this pass, exit when plaintext is exhausted
(not finalize and plaintext)
# If finalizing on this pass, wait until final frame is written
or (finalize and not final_frame_written)
):
current_plaintext_length = len(plaintext)
is_final_frame = finalize and current_plaintext_length < self.config.frame_length
bytes_in_frame = min(current_plaintext_length, self.config.frame_length)
_LOGGER.debug(
"Writing %d bytes into%s frame %d",
bytes_in_frame,
" final" if is_final_frame else "",
self.sequence_number,
)
self._bytes_encrypted += bytes_in_frame
ciphertext, plaintext = serialize_frame(
algorithm=self._encryption_materials.algorithm,
plaintext=plaintext,
message_id=self._header.message_id,
data_encryption_key=self._derived_data_key,
frame_length=self.config.frame_length,
sequence_number=self.sequence_number,
is_final_frame=is_final_frame,
signer=self.signer,
)
final_frame_written = is_final_frame
output += ciphertext
self.sequence_number += 1
if finalize:
_LOGGER.debug("Writing footer")
if self.signer is not None:
output += serialize_footer(self.signer)
self.__message_complete = True
return output
|
[
"def",
"_read_bytes_to_framed_body",
"(",
"self",
",",
"b",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"collecting %d bytes\"",
",",
"b",
")",
"_b",
"=",
"b",
"if",
"b",
">",
"0",
":",
"_frames_to_read",
"=",
"math",
".",
"ceil",
"(",
"b",
"/",
"float",
"(",
"self",
".",
"config",
".",
"frame_length",
")",
")",
"b",
"=",
"int",
"(",
"_frames_to_read",
"*",
"self",
".",
"config",
".",
"frame_length",
")",
"_LOGGER",
".",
"debug",
"(",
"\"%d bytes requested; reading %d bytes after normalizing to frame length\"",
",",
"_b",
",",
"b",
")",
"plaintext",
"=",
"self",
".",
"source_stream",
".",
"read",
"(",
"b",
")",
"plaintext_length",
"=",
"len",
"(",
"plaintext",
")",
"_LOGGER",
".",
"debug",
"(",
"\"%d bytes read from source\"",
",",
"plaintext_length",
")",
"finalize",
"=",
"False",
"if",
"b",
"<",
"0",
"or",
"plaintext_length",
"<",
"b",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Final plaintext read from source\"",
")",
"finalize",
"=",
"True",
"output",
"=",
"b\"\"",
"final_frame_written",
"=",
"False",
"while",
"(",
"# If not finalizing on this pass, exit when plaintext is exhausted",
"(",
"not",
"finalize",
"and",
"plaintext",
")",
"# If finalizing on this pass, wait until final frame is written",
"or",
"(",
"finalize",
"and",
"not",
"final_frame_written",
")",
")",
":",
"current_plaintext_length",
"=",
"len",
"(",
"plaintext",
")",
"is_final_frame",
"=",
"finalize",
"and",
"current_plaintext_length",
"<",
"self",
".",
"config",
".",
"frame_length",
"bytes_in_frame",
"=",
"min",
"(",
"current_plaintext_length",
",",
"self",
".",
"config",
".",
"frame_length",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Writing %d bytes into%s frame %d\"",
",",
"bytes_in_frame",
",",
"\" final\"",
"if",
"is_final_frame",
"else",
"\"\"",
",",
"self",
".",
"sequence_number",
",",
")",
"self",
".",
"_bytes_encrypted",
"+=",
"bytes_in_frame",
"ciphertext",
",",
"plaintext",
"=",
"serialize_frame",
"(",
"algorithm",
"=",
"self",
".",
"_encryption_materials",
".",
"algorithm",
",",
"plaintext",
"=",
"plaintext",
",",
"message_id",
"=",
"self",
".",
"_header",
".",
"message_id",
",",
"data_encryption_key",
"=",
"self",
".",
"_derived_data_key",
",",
"frame_length",
"=",
"self",
".",
"config",
".",
"frame_length",
",",
"sequence_number",
"=",
"self",
".",
"sequence_number",
",",
"is_final_frame",
"=",
"is_final_frame",
",",
"signer",
"=",
"self",
".",
"signer",
",",
")",
"final_frame_written",
"=",
"is_final_frame",
"output",
"+=",
"ciphertext",
"self",
".",
"sequence_number",
"+=",
"1",
"if",
"finalize",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Writing footer\"",
")",
"if",
"self",
".",
"signer",
"is",
"not",
"None",
":",
"output",
"+=",
"serialize_footer",
"(",
"self",
".",
"signer",
")",
"self",
".",
"__message_complete",
"=",
"True",
"return",
"output"
] |
Reads the requested number of bytes from source to a streaming framed message body.
:param int b: Number of bytes to read
:returns: Bytes read from source stream, encrypted, and serialized
:rtype: bytes
|
[
"Reads",
"the",
"requested",
"number",
"of",
"bytes",
"from",
"source",
"to",
"a",
"streaming",
"framed",
"message",
"body",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L564-L627
|
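The first thing the framed writer above does is round a positive read request up to a whole number of frames. That normalization step in isolation, as pure Python with no SDK state:

import math

def normalize_to_frame_length(requested_bytes, frame_length):
    """Round a positive read request up to a multiple of the frame length."""
    if requested_bytes <= 0:
        return requested_bytes          # non-positive requests mean "read everything"
    frames_to_read = math.ceil(requested_bytes / float(frame_length))
    return int(frames_to_read * frame_length)

assert normalize_to_frame_length(1, 4096) == 4096
assert normalize_to_frame_length(5000, 4096) == 8192
assert normalize_to_frame_length(-1, 4096) == -1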
15,454
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
StreamDecryptor._read_header
|
def _read_header(self):
"""Reads the message header from the input stream.
:returns: tuple containing deserialized header and header_auth objects
:rtype: tuple of aws_encryption_sdk.structures.MessageHeader
and aws_encryption_sdk.internal.structures.MessageHeaderAuthentication
:raises CustomMaximumValueExceeded: if frame length is greater than the custom max value
"""
header, raw_header = deserialize_header(self.source_stream)
self.__unframed_bytes_read += len(raw_header)
if (
self.config.max_body_length is not None
and header.content_type == ContentType.FRAMED_DATA
and header.frame_length > self.config.max_body_length
):
raise CustomMaximumValueExceeded(
"Frame Size in header found larger than custom value: {found:d} > {custom:d}".format(
found=header.frame_length, custom=self.config.max_body_length
)
)
decrypt_materials_request = DecryptionMaterialsRequest(
encrypted_data_keys=header.encrypted_data_keys,
algorithm=header.algorithm,
encryption_context=header.encryption_context,
)
decryption_materials = self.config.materials_manager.decrypt_materials(request=decrypt_materials_request)
if decryption_materials.verification_key is None:
self.verifier = None
else:
self.verifier = Verifier.from_key_bytes(
algorithm=header.algorithm, key_bytes=decryption_materials.verification_key
)
if self.verifier is not None:
self.verifier.update(raw_header)
header_auth = deserialize_header_auth(
stream=self.source_stream, algorithm=header.algorithm, verifier=self.verifier
)
self._derived_data_key = derive_data_encryption_key(
source_key=decryption_materials.data_key.data_key, algorithm=header.algorithm, message_id=header.message_id
)
validate_header(header=header, header_auth=header_auth, raw_header=raw_header, data_key=self._derived_data_key)
return header, header_auth
|
python
|
def _read_header(self):
"""Reads the message header from the input stream.
:returns: tuple containing deserialized header and header_auth objects
:rtype: tuple of aws_encryption_sdk.structures.MessageHeader
and aws_encryption_sdk.internal.structures.MessageHeaderAuthentication
:raises CustomMaximumValueExceeded: if frame length is greater than the custom max value
"""
header, raw_header = deserialize_header(self.source_stream)
self.__unframed_bytes_read += len(raw_header)
if (
self.config.max_body_length is not None
and header.content_type == ContentType.FRAMED_DATA
and header.frame_length > self.config.max_body_length
):
raise CustomMaximumValueExceeded(
"Frame Size in header found larger than custom value: {found:d} > {custom:d}".format(
found=header.frame_length, custom=self.config.max_body_length
)
)
decrypt_materials_request = DecryptionMaterialsRequest(
encrypted_data_keys=header.encrypted_data_keys,
algorithm=header.algorithm,
encryption_context=header.encryption_context,
)
decryption_materials = self.config.materials_manager.decrypt_materials(request=decrypt_materials_request)
if decryption_materials.verification_key is None:
self.verifier = None
else:
self.verifier = Verifier.from_key_bytes(
algorithm=header.algorithm, key_bytes=decryption_materials.verification_key
)
if self.verifier is not None:
self.verifier.update(raw_header)
header_auth = deserialize_header_auth(
stream=self.source_stream, algorithm=header.algorithm, verifier=self.verifier
)
self._derived_data_key = derive_data_encryption_key(
source_key=decryption_materials.data_key.data_key, algorithm=header.algorithm, message_id=header.message_id
)
validate_header(header=header, header_auth=header_auth, raw_header=raw_header, data_key=self._derived_data_key)
return header, header_auth
|
[
"def",
"_read_header",
"(",
"self",
")",
":",
"header",
",",
"raw_header",
"=",
"deserialize_header",
"(",
"self",
".",
"source_stream",
")",
"self",
".",
"__unframed_bytes_read",
"+=",
"len",
"(",
"raw_header",
")",
"if",
"(",
"self",
".",
"config",
".",
"max_body_length",
"is",
"not",
"None",
"and",
"header",
".",
"content_type",
"==",
"ContentType",
".",
"FRAMED_DATA",
"and",
"header",
".",
"frame_length",
">",
"self",
".",
"config",
".",
"max_body_length",
")",
":",
"raise",
"CustomMaximumValueExceeded",
"(",
"\"Frame Size in header found larger than custom value: {found:d} > {custom:d}\"",
".",
"format",
"(",
"found",
"=",
"header",
".",
"frame_length",
",",
"custom",
"=",
"self",
".",
"config",
".",
"max_body_length",
")",
")",
"decrypt_materials_request",
"=",
"DecryptionMaterialsRequest",
"(",
"encrypted_data_keys",
"=",
"header",
".",
"encrypted_data_keys",
",",
"algorithm",
"=",
"header",
".",
"algorithm",
",",
"encryption_context",
"=",
"header",
".",
"encryption_context",
",",
")",
"decryption_materials",
"=",
"self",
".",
"config",
".",
"materials_manager",
".",
"decrypt_materials",
"(",
"request",
"=",
"decrypt_materials_request",
")",
"if",
"decryption_materials",
".",
"verification_key",
"is",
"None",
":",
"self",
".",
"verifier",
"=",
"None",
"else",
":",
"self",
".",
"verifier",
"=",
"Verifier",
".",
"from_key_bytes",
"(",
"algorithm",
"=",
"header",
".",
"algorithm",
",",
"key_bytes",
"=",
"decryption_materials",
".",
"verification_key",
")",
"if",
"self",
".",
"verifier",
"is",
"not",
"None",
":",
"self",
".",
"verifier",
".",
"update",
"(",
"raw_header",
")",
"header_auth",
"=",
"deserialize_header_auth",
"(",
"stream",
"=",
"self",
".",
"source_stream",
",",
"algorithm",
"=",
"header",
".",
"algorithm",
",",
"verifier",
"=",
"self",
".",
"verifier",
")",
"self",
".",
"_derived_data_key",
"=",
"derive_data_encryption_key",
"(",
"source_key",
"=",
"decryption_materials",
".",
"data_key",
".",
"data_key",
",",
"algorithm",
"=",
"header",
".",
"algorithm",
",",
"message_id",
"=",
"header",
".",
"message_id",
")",
"validate_header",
"(",
"header",
"=",
"header",
",",
"header_auth",
"=",
"header_auth",
",",
"raw_header",
"=",
"raw_header",
",",
"data_key",
"=",
"self",
".",
"_derived_data_key",
")",
"return",
"header",
",",
"header_auth"
] |
Reads the message header from the input stream.
:returns: tuple containing deserialized header and header_auth objects
:rtype: tuple of aws_encryption_sdk.structures.MessageHeader
and aws_encryption_sdk.internal.structures.MessageHeaderAuthentication
:raises CustomMaximumValueExceeded: if frame length is greater than the custom max value
|
[
"Reads",
"the",
"message",
"header",
"from",
"the",
"input",
"stream",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L738-L782
|
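On the decrypt side, the header handling in the entry above happens lazily on the first read. A round-trip sketch under the same legacy-API and import assumptions as the earlier encrypt examples:

import io
import os

import aws_encryption_sdk
from basic_file_encryption_with_raw_key_provider import StaticRandomMasterKeyProvider  # assumed import path

provider = StaticRandomMasterKeyProvider()
provider.add_master_key(os.urandom(8))

ciphertext, _header = aws_encryption_sdk.encrypt(source=b"framed message", key_provider=provider)

# The message header is read and validated before any plaintext bytes are returned.
with aws_encryption_sdk.stream(mode="d", source=io.BytesIO(ciphertext), key_provider=provider) as decryptor:
    recovered = decryptor.read()
assert recovered == b"framed message"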
15,455
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
StreamDecryptor._read_bytes_from_non_framed_body
|
def _read_bytes_from_non_framed_body(self, b):
"""Reads the requested number of bytes from a streaming non-framed message body.
:param int b: Number of bytes to read
:returns: Decrypted bytes from source stream
:rtype: bytes
"""
_LOGGER.debug("starting non-framed body read")
# Always read the entire message for non-framed message bodies.
bytes_to_read = self.body_length
_LOGGER.debug("%d bytes requested; reading %d bytes", b, bytes_to_read)
ciphertext = self.source_stream.read(bytes_to_read)
if len(self.output_buffer) + len(ciphertext) < self.body_length:
raise SerializationError("Total message body contents less than specified in body description")
if self.verifier is not None:
self.verifier.update(ciphertext)
tag = deserialize_tag(stream=self.source_stream, header=self._header, verifier=self.verifier)
aad_content_string = aws_encryption_sdk.internal.utils.get_aad_content_string(
content_type=self._header.content_type, is_final_frame=True
)
associated_data = assemble_content_aad(
message_id=self._header.message_id,
aad_content_string=aad_content_string,
seq_num=1,
length=self.body_length,
)
self.decryptor = Decryptor(
algorithm=self._header.algorithm,
key=self._derived_data_key,
associated_data=associated_data,
iv=self._unframed_body_iv,
tag=tag,
)
plaintext = self.decryptor.update(ciphertext)
plaintext += self.decryptor.finalize()
self.footer = deserialize_footer(stream=self.source_stream, verifier=self.verifier)
return plaintext
|
python
|
def _read_bytes_from_non_framed_body(self, b):
"""Reads the requested number of bytes from a streaming non-framed message body.
:param int b: Number of bytes to read
:returns: Decrypted bytes from source stream
:rtype: bytes
"""
_LOGGER.debug("starting non-framed body read")
# Always read the entire message for non-framed message bodies.
bytes_to_read = self.body_length
_LOGGER.debug("%d bytes requested; reading %d bytes", b, bytes_to_read)
ciphertext = self.source_stream.read(bytes_to_read)
if len(self.output_buffer) + len(ciphertext) < self.body_length:
raise SerializationError("Total message body contents less than specified in body description")
if self.verifier is not None:
self.verifier.update(ciphertext)
tag = deserialize_tag(stream=self.source_stream, header=self._header, verifier=self.verifier)
aad_content_string = aws_encryption_sdk.internal.utils.get_aad_content_string(
content_type=self._header.content_type, is_final_frame=True
)
associated_data = assemble_content_aad(
message_id=self._header.message_id,
aad_content_string=aad_content_string,
seq_num=1,
length=self.body_length,
)
self.decryptor = Decryptor(
algorithm=self._header.algorithm,
key=self._derived_data_key,
associated_data=associated_data,
iv=self._unframed_body_iv,
tag=tag,
)
plaintext = self.decryptor.update(ciphertext)
plaintext += self.decryptor.finalize()
self.footer = deserialize_footer(stream=self.source_stream, verifier=self.verifier)
return plaintext
|
[
"def",
"_read_bytes_from_non_framed_body",
"(",
"self",
",",
"b",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"starting non-framed body read\"",
")",
"# Always read the entire message for non-framed message bodies.",
"bytes_to_read",
"=",
"self",
".",
"body_length",
"_LOGGER",
".",
"debug",
"(",
"\"%d bytes requested; reading %d bytes\"",
",",
"b",
",",
"bytes_to_read",
")",
"ciphertext",
"=",
"self",
".",
"source_stream",
".",
"read",
"(",
"bytes_to_read",
")",
"if",
"len",
"(",
"self",
".",
"output_buffer",
")",
"+",
"len",
"(",
"ciphertext",
")",
"<",
"self",
".",
"body_length",
":",
"raise",
"SerializationError",
"(",
"\"Total message body contents less than specified in body description\"",
")",
"if",
"self",
".",
"verifier",
"is",
"not",
"None",
":",
"self",
".",
"verifier",
".",
"update",
"(",
"ciphertext",
")",
"tag",
"=",
"deserialize_tag",
"(",
"stream",
"=",
"self",
".",
"source_stream",
",",
"header",
"=",
"self",
".",
"_header",
",",
"verifier",
"=",
"self",
".",
"verifier",
")",
"aad_content_string",
"=",
"aws_encryption_sdk",
".",
"internal",
".",
"utils",
".",
"get_aad_content_string",
"(",
"content_type",
"=",
"self",
".",
"_header",
".",
"content_type",
",",
"is_final_frame",
"=",
"True",
")",
"associated_data",
"=",
"assemble_content_aad",
"(",
"message_id",
"=",
"self",
".",
"_header",
".",
"message_id",
",",
"aad_content_string",
"=",
"aad_content_string",
",",
"seq_num",
"=",
"1",
",",
"length",
"=",
"self",
".",
"body_length",
",",
")",
"self",
".",
"decryptor",
"=",
"Decryptor",
"(",
"algorithm",
"=",
"self",
".",
"_header",
".",
"algorithm",
",",
"key",
"=",
"self",
".",
"_derived_data_key",
",",
"associated_data",
"=",
"associated_data",
",",
"iv",
"=",
"self",
".",
"_unframed_body_iv",
",",
"tag",
"=",
"tag",
",",
")",
"plaintext",
"=",
"self",
".",
"decryptor",
".",
"update",
"(",
"ciphertext",
")",
"plaintext",
"+=",
"self",
".",
"decryptor",
".",
"finalize",
"(",
")",
"self",
".",
"footer",
"=",
"deserialize_footer",
"(",
"stream",
"=",
"self",
".",
"source_stream",
",",
"verifier",
"=",
"self",
".",
"verifier",
")",
"return",
"plaintext"
] |
Reads the requested number of bytes from a streaming non-framed message body.
:param int b: Number of bytes to read
:returns: Decrypted bytes from source stream
:rtype: bytes
|
[
"Reads",
"the",
"requested",
"number",
"of",
"bytes",
"from",
"a",
"streaming",
"non",
"-",
"framed",
"message",
"body",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L814-L857
|
15,456
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
StreamDecryptor._read_bytes_from_framed_body
|
def _read_bytes_from_framed_body(self, b):
"""Reads the requested number of bytes from a streaming framed message body.
:param int b: Number of bytes to read
:returns: Bytes read from source stream and decrypted
:rtype: bytes
"""
plaintext = b""
final_frame = False
_LOGGER.debug("collecting %d bytes", b)
while len(plaintext) < b and not final_frame:
_LOGGER.debug("Reading frame")
frame_data, final_frame = deserialize_frame(
stream=self.source_stream, header=self._header, verifier=self.verifier
)
_LOGGER.debug("Read complete for frame %d", frame_data.sequence_number)
if frame_data.sequence_number != self.last_sequence_number + 1:
raise SerializationError("Malformed message: frames out of order")
self.last_sequence_number += 1
aad_content_string = aws_encryption_sdk.internal.utils.get_aad_content_string(
content_type=self._header.content_type, is_final_frame=frame_data.final_frame
)
associated_data = assemble_content_aad(
message_id=self._header.message_id,
aad_content_string=aad_content_string,
seq_num=frame_data.sequence_number,
length=len(frame_data.ciphertext),
)
plaintext += decrypt(
algorithm=self._header.algorithm,
key=self._derived_data_key,
encrypted_data=frame_data,
associated_data=associated_data,
)
plaintext_length = len(plaintext)
_LOGGER.debug("bytes collected: %d", plaintext_length)
if final_frame:
_LOGGER.debug("Reading footer")
self.footer = deserialize_footer(stream=self.source_stream, verifier=self.verifier)
return plaintext
|
python
|
def _read_bytes_from_framed_body(self, b):
"""Reads the requested number of bytes from a streaming framed message body.
:param int b: Number of bytes to read
:returns: Bytes read from source stream and decrypted
:rtype: bytes
"""
plaintext = b""
final_frame = False
_LOGGER.debug("collecting %d bytes", b)
while len(plaintext) < b and not final_frame:
_LOGGER.debug("Reading frame")
frame_data, final_frame = deserialize_frame(
stream=self.source_stream, header=self._header, verifier=self.verifier
)
_LOGGER.debug("Read complete for frame %d", frame_data.sequence_number)
if frame_data.sequence_number != self.last_sequence_number + 1:
raise SerializationError("Malformed message: frames out of order")
self.last_sequence_number += 1
aad_content_string = aws_encryption_sdk.internal.utils.get_aad_content_string(
content_type=self._header.content_type, is_final_frame=frame_data.final_frame
)
associated_data = assemble_content_aad(
message_id=self._header.message_id,
aad_content_string=aad_content_string,
seq_num=frame_data.sequence_number,
length=len(frame_data.ciphertext),
)
plaintext += decrypt(
algorithm=self._header.algorithm,
key=self._derived_data_key,
encrypted_data=frame_data,
associated_data=associated_data,
)
plaintext_length = len(plaintext)
_LOGGER.debug("bytes collected: %d", plaintext_length)
if final_frame:
_LOGGER.debug("Reading footer")
self.footer = deserialize_footer(stream=self.source_stream, verifier=self.verifier)
return plaintext
|
[
"def",
"_read_bytes_from_framed_body",
"(",
"self",
",",
"b",
")",
":",
"plaintext",
"=",
"b\"\"",
"final_frame",
"=",
"False",
"_LOGGER",
".",
"debug",
"(",
"\"collecting %d bytes\"",
",",
"b",
")",
"while",
"len",
"(",
"plaintext",
")",
"<",
"b",
"and",
"not",
"final_frame",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Reading frame\"",
")",
"frame_data",
",",
"final_frame",
"=",
"deserialize_frame",
"(",
"stream",
"=",
"self",
".",
"source_stream",
",",
"header",
"=",
"self",
".",
"_header",
",",
"verifier",
"=",
"self",
".",
"verifier",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Read complete for frame %d\"",
",",
"frame_data",
".",
"sequence_number",
")",
"if",
"frame_data",
".",
"sequence_number",
"!=",
"self",
".",
"last_sequence_number",
"+",
"1",
":",
"raise",
"SerializationError",
"(",
"\"Malformed message: frames out of order\"",
")",
"self",
".",
"last_sequence_number",
"+=",
"1",
"aad_content_string",
"=",
"aws_encryption_sdk",
".",
"internal",
".",
"utils",
".",
"get_aad_content_string",
"(",
"content_type",
"=",
"self",
".",
"_header",
".",
"content_type",
",",
"is_final_frame",
"=",
"frame_data",
".",
"final_frame",
")",
"associated_data",
"=",
"assemble_content_aad",
"(",
"message_id",
"=",
"self",
".",
"_header",
".",
"message_id",
",",
"aad_content_string",
"=",
"aad_content_string",
",",
"seq_num",
"=",
"frame_data",
".",
"sequence_number",
",",
"length",
"=",
"len",
"(",
"frame_data",
".",
"ciphertext",
")",
",",
")",
"plaintext",
"+=",
"decrypt",
"(",
"algorithm",
"=",
"self",
".",
"_header",
".",
"algorithm",
",",
"key",
"=",
"self",
".",
"_derived_data_key",
",",
"encrypted_data",
"=",
"frame_data",
",",
"associated_data",
"=",
"associated_data",
",",
")",
"plaintext_length",
"=",
"len",
"(",
"plaintext",
")",
"_LOGGER",
".",
"debug",
"(",
"\"bytes collected: %d\"",
",",
"plaintext_length",
")",
"if",
"final_frame",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Reading footer\"",
")",
"self",
".",
"footer",
"=",
"deserialize_footer",
"(",
"stream",
"=",
"self",
".",
"source_stream",
",",
"verifier",
"=",
"self",
".",
"verifier",
")",
"return",
"plaintext"
] |
Reads the requested number of bytes from a streaming framed message body.
:param int b: Number of bytes to read
:returns: Bytes read from source stream and decrypted
:rtype: bytes
|
[
"Reads",
"the",
"requested",
"number",
"of",
"bytes",
"from",
"a",
"streaming",
"framed",
"message",
"body",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L859-L899
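The loop above is essentially "read frames in order until enough plaintext has been collected or the final frame arrives". A toy, dependency-free sketch of that control flow follows; the tuple layout and names are made up for illustration, and real frames are AEAD-decrypted rather than plain payloads.

# Toy sketch of ordered frame consumption with an out-of-order check.
class OutOfOrderError(Exception):
    pass

def collect_frames(frames, wanted_bytes):
    """frames: iterable of (sequence_number, payload, is_final) tuples."""
    plaintext = b""
    last_seq = 0
    final_seen = False
    frames = iter(frames)
    while len(plaintext) < wanted_bytes and not final_seen:
        seq, payload, final_seen = next(frames)
        if seq != last_seq + 1:
            raise OutOfOrderError("Malformed message: frames out of order")
        last_seq = seq
        plaintext += payload
    return plaintext

assert collect_frames([(1, b"abc", False), (2, b"def", False), (3, b"ghi", True)], 5) == b"abcdef"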
|
15,457
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/streaming_client.py
|
StreamDecryptor.close
|
def close(self):
"""Closes out the stream."""
_LOGGER.debug("Closing stream")
if not hasattr(self, "footer"):
raise SerializationError("Footer not read")
super(StreamDecryptor, self).close()
|
python
|
def close(self):
"""Closes out the stream."""
_LOGGER.debug("Closing stream")
if not hasattr(self, "footer"):
raise SerializationError("Footer not read")
super(StreamDecryptor, self).close()
|
[
"def",
"close",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Closing stream\"",
")",
"if",
"not",
"hasattr",
"(",
"self",
",",
"\"footer\"",
")",
":",
"raise",
"SerializationError",
"(",
"\"Footer not read\"",
")",
"super",
"(",
"StreamDecryptor",
",",
"self",
")",
".",
"close",
"(",
")"
] |
Closes out the stream.
|
[
"Closes",
"out",
"the",
"stream",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/streaming_client.py#L923-L928
|
15,458
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
_region_from_key_id
|
def _region_from_key_id(key_id, default_region=None):
"""Determine the target region from a key ID, falling back to a default region if provided.
:param str key_id: AWS KMS key ID
:param str default_region: Region to use if no region found in key_id
:returns: region name
:rtype: str
:raises UnknownRegionError: if no region found in key_id and no default_region provided
"""
try:
region_name = key_id.split(":", 4)[3]
except IndexError:
if default_region is None:
raise UnknownRegionError(
"No default region found and no region determinable from key id: {}".format(key_id)
)
region_name = default_region
return region_name
|
python
|
def _region_from_key_id(key_id, default_region=None):
"""Determine the target region from a key ID, falling back to a default region if provided.
:param str key_id: AWS KMS key ID
:param str default_region: Region to use if no region found in key_id
:returns: region name
:rtype: str
:raises UnknownRegionError: if no region found in key_id and no default_region provided
"""
try:
region_name = key_id.split(":", 4)[3]
except IndexError:
if default_region is None:
raise UnknownRegionError(
"No default region found and no region determinable from key id: {}".format(key_id)
)
region_name = default_region
return region_name
|
[
"def",
"_region_from_key_id",
"(",
"key_id",
",",
"default_region",
"=",
"None",
")",
":",
"try",
":",
"region_name",
"=",
"key_id",
".",
"split",
"(",
"\":\"",
",",
"4",
")",
"[",
"3",
"]",
"except",
"IndexError",
":",
"if",
"default_region",
"is",
"None",
":",
"raise",
"UnknownRegionError",
"(",
"\"No default region found and no region determinable from key id: {}\"",
".",
"format",
"(",
"key_id",
")",
")",
"region_name",
"=",
"default_region",
"return",
"region_name"
] |
Determine the target region from a key ID, falling back to a default region if provided.
:param str key_id: AWS KMS key ID
:param str default_region: Region to use if no region found in key_id
:returns: region name
:rtype: str
:raises UnknownRegionError: if no region found in key_id and no default_region provided
|
[
"Determine",
"the",
"target",
"region",
"from",
"a",
"key",
"ID",
"falling",
"back",
"to",
"a",
"default",
"region",
"if",
"provided",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L35-L52
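Worked example of the split above: in a full key ARN the region is the fourth colon-separated field, while a bare key id or alias has no region component and triggers the fallback. The ARN below is a placeholder, not a real key.

# Region is field index 3 of "arn:aws:kms:<region>:<account>:key/<id>".
key_arn = "arn:aws:kms:us-east-1:111122223333:key/example-key-id"
print(key_arn.split(":", 4)[3])        # -> us-east-1

try:
    "alias/my-app-key".split(":", 4)[3]
except IndexError:
    print("no region in key id; fall back to default_region or raise")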
|
15,459
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
KMSMasterKeyProvider._process_config
|
def _process_config(self):
"""Traverses the config and adds master keys and regional clients as needed."""
self._user_agent_adding_config = botocore.config.Config(user_agent_extra=USER_AGENT_SUFFIX)
if self.config.region_names:
self.add_regional_clients_from_list(self.config.region_names)
self.default_region = self.config.region_names[0]
else:
self.default_region = self.config.botocore_session.get_config_variable("region")
if self.default_region is not None:
self.add_regional_client(self.default_region)
if self.config.key_ids:
self.add_master_keys_from_list(self.config.key_ids)
|
python
|
def _process_config(self):
"""Traverses the config and adds master keys and regional clients as needed."""
self._user_agent_adding_config = botocore.config.Config(user_agent_extra=USER_AGENT_SUFFIX)
if self.config.region_names:
self.add_regional_clients_from_list(self.config.region_names)
self.default_region = self.config.region_names[0]
else:
self.default_region = self.config.botocore_session.get_config_variable("region")
if self.default_region is not None:
self.add_regional_client(self.default_region)
if self.config.key_ids:
self.add_master_keys_from_list(self.config.key_ids)
|
[
"def",
"_process_config",
"(",
"self",
")",
":",
"self",
".",
"_user_agent_adding_config",
"=",
"botocore",
".",
"config",
".",
"Config",
"(",
"user_agent_extra",
"=",
"USER_AGENT_SUFFIX",
")",
"if",
"self",
".",
"config",
".",
"region_names",
":",
"self",
".",
"add_regional_clients_from_list",
"(",
"self",
".",
"config",
".",
"region_names",
")",
"self",
".",
"default_region",
"=",
"self",
".",
"config",
".",
"region_names",
"[",
"0",
"]",
"else",
":",
"self",
".",
"default_region",
"=",
"self",
".",
"config",
".",
"botocore_session",
".",
"get_config_variable",
"(",
"\"region\"",
")",
"if",
"self",
".",
"default_region",
"is",
"not",
"None",
":",
"self",
".",
"add_regional_client",
"(",
"self",
".",
"default_region",
")",
"if",
"self",
".",
"config",
".",
"key_ids",
":",
"self",
".",
"add_master_keys_from_list",
"(",
"self",
".",
"config",
".",
"key_ids",
")"
] |
Traverses the config and adds master keys and regional clients as needed.
|
[
"Traverses",
"the",
"config",
"and",
"adds",
"master",
"keys",
"and",
"regional",
"clients",
"as",
"needed",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L115-L128
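A small sketch of the same region-precedence rule using only botocore: explicit region names win, otherwise the botocore session's configured region is used. The helper name is made up, and the fallback value depends on the local AWS configuration (it may be None).

import botocore.session

def pick_default_region(region_names, botocore_session=None):
    # Explicit region_names win; otherwise ask the botocore session, which may
    # return None if nothing is configured in the environment or profile.
    if region_names:
        return region_names[0]
    session = botocore_session or botocore.session.Session()
    return session.get_config_variable("region")

print(pick_default_region(["eu-west-1", "us-east-1"]))   # -> eu-west-1
print(pick_default_region([]))                           # -> env/profile region, or None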
|
15,460
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
KMSMasterKeyProvider._wrap_client
|
def _wrap_client(self, region_name, method, *args, **kwargs):
"""Proxies all calls to a kms clients methods and removes misbehaving clients
:param str region_name: AWS Region ID (ex: us-east-1)
:param callable method: a method on the KMS client to proxy
:param tuple args: list of arguments to pass to the provided ``method``
    :param dict kwargs: dictionary of keyword arguments to pass to the provided ``method``
"""
try:
return method(*args, **kwargs)
except botocore.exceptions.BotoCoreError:
self._regional_clients.pop(region_name)
_LOGGER.error(
'Removing regional client "%s" from cache due to BotoCoreError on %s call', region_name, method.__name__
)
raise
|
python
|
def _wrap_client(self, region_name, method, *args, **kwargs):
"""Proxies all calls to a kms clients methods and removes misbehaving clients
:param str region_name: AWS Region ID (ex: us-east-1)
:param callable method: a method on the KMS client to proxy
:param tuple args: list of arguments to pass to the provided ``method``
    :param dict kwargs: dictionary of keyword arguments to pass to the provided ``method``
"""
try:
return method(*args, **kwargs)
except botocore.exceptions.BotoCoreError:
self._regional_clients.pop(region_name)
_LOGGER.error(
'Removing regional client "%s" from cache due to BotoCoreError on %s call', region_name, method.__name__
)
raise
|
[
"def",
"_wrap_client",
"(",
"self",
",",
"region_name",
",",
"method",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"return",
"method",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"botocore",
".",
"exceptions",
".",
"BotoCoreError",
":",
"self",
".",
"_regional_clients",
".",
"pop",
"(",
"region_name",
")",
"_LOGGER",
".",
"error",
"(",
"'Removing regional client \"%s\" from cache due to BotoCoreError on %s call'",
",",
"region_name",
",",
"method",
".",
"__name__",
")",
"raise"
] |
Proxies all calls to a KMS client's methods and removes misbehaving clients
:param str region_name: AWS Region ID (ex: us-east-1)
:param callable method: a method on the KMS client to proxy
:param tuple args: list of arguments to pass to the provided ``method``
:param dict kwargs: dictionary of keyword arguments to pass to the provided ``method``
|
[
"Proxies",
"all",
"calls",
"to",
"a",
"kms",
"clients",
"methods",
"and",
"removes",
"misbehaving",
"clients"
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L130-L145
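The same wrap-and-evict pattern in a self-contained form, with a plain dict standing in for the regional client cache; all names here are illustrative, not the SDK's.

import functools

class FlakyError(Exception):
    pass

clients = {"us-east-1": object()}

def wrap_call(cache, region_name, method, *args, **kwargs):
    try:
        return method(*args, **kwargs)
    except FlakyError:
        cache.pop(region_name, None)   # drop the misbehaving client, then re-raise
        raise

def failing_call():
    raise FlakyError("simulated transport failure")

bound = functools.partial(wrap_call, clients, "us-east-1", failing_call)
try:
    bound()
except FlakyError:
    pass
assert "us-east-1" not in clients      # the bad client was evicted from the cache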
|
15,461
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
KMSMasterKeyProvider._register_client
|
def _register_client(self, client, region_name):
"""Uses functools.partial to wrap all methods on a client with the self._wrap_client method
:param botocore.client.BaseClient client: the client to proxy
:param str region_name: AWS Region ID (ex: us-east-1)
"""
for item in client.meta.method_to_api_mapping:
method = getattr(client, item)
wrapped_method = functools.partial(self._wrap_client, region_name, method)
setattr(client, item, wrapped_method)
|
python
|
def _register_client(self, client, region_name):
"""Uses functools.partial to wrap all methods on a client with the self._wrap_client method
:param botocore.client.BaseClient client: the client to proxy
:param str region_name: AWS Region ID (ex: us-east-1)
"""
for item in client.meta.method_to_api_mapping:
method = getattr(client, item)
wrapped_method = functools.partial(self._wrap_client, region_name, method)
setattr(client, item, wrapped_method)
|
[
"def",
"_register_client",
"(",
"self",
",",
"client",
",",
"region_name",
")",
":",
"for",
"item",
"in",
"client",
".",
"meta",
".",
"method_to_api_mapping",
":",
"method",
"=",
"getattr",
"(",
"client",
",",
"item",
")",
"wrapped_method",
"=",
"functools",
".",
"partial",
"(",
"self",
".",
"_wrap_client",
",",
"region_name",
",",
"method",
")",
"setattr",
"(",
"client",
",",
"item",
",",
"wrapped_method",
")"
] |
Uses functools.partial to wrap all methods on a client with the self._wrap_client method
:param botocore.client.BaseClient client: the client to proxy
:param str region_name: AWS Region ID (ex: us-east-1)
|
[
"Uses",
"functools",
".",
"partial",
"to",
"wrap",
"all",
"methods",
"on",
"a",
"client",
"with",
"the",
"self",
".",
"_wrap_client",
"method"
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L147-L156
|
15,462
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
KMSMasterKeyProvider.add_regional_client
|
def add_regional_client(self, region_name):
"""Adds a regional client for the specified region if it does not already exist.
:param str region_name: AWS Region ID (ex: us-east-1)
"""
if region_name not in self._regional_clients:
session = boto3.session.Session(region_name=region_name, botocore_session=self.config.botocore_session)
client = session.client("kms", config=self._user_agent_adding_config)
self._register_client(client, region_name)
self._regional_clients[region_name] = client
|
python
|
def add_regional_client(self, region_name):
"""Adds a regional client for the specified region if it does not already exist.
:param str region_name: AWS Region ID (ex: us-east-1)
"""
if region_name not in self._regional_clients:
session = boto3.session.Session(region_name=region_name, botocore_session=self.config.botocore_session)
client = session.client("kms", config=self._user_agent_adding_config)
self._register_client(client, region_name)
self._regional_clients[region_name] = client
|
[
"def",
"add_regional_client",
"(",
"self",
",",
"region_name",
")",
":",
"if",
"region_name",
"not",
"in",
"self",
".",
"_regional_clients",
":",
"session",
"=",
"boto3",
".",
"session",
".",
"Session",
"(",
"region_name",
"=",
"region_name",
",",
"botocore_session",
"=",
"self",
".",
"config",
".",
"botocore_session",
")",
"client",
"=",
"session",
".",
"client",
"(",
"\"kms\"",
",",
"config",
"=",
"self",
".",
"_user_agent_adding_config",
")",
"self",
".",
"_register_client",
"(",
"client",
",",
"region_name",
")",
"self",
".",
"_regional_clients",
"[",
"region_name",
"]",
"=",
"client"
] |
Adds a regional client for the specified region if it does not already exist.
:param str region_name: AWS Region ID (ex: us-east-1)
|
[
"Adds",
"a",
"regional",
"client",
"for",
"the",
"specified",
"region",
"if",
"it",
"does",
"not",
"already",
"exist",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L158-L167
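Hedged sketch of the client construction shown above, using boto3/botocore directly; it requires boto3, the user-agent suffix and region are placeholders, and no request is actually sent here.

import boto3
import botocore.config

user_agent_adding_config = botocore.config.Config(user_agent_extra="MyApp/1.0")
session = boto3.session.Session(region_name="us-east-1")
kms_client = session.client("kms", config=user_agent_adding_config)
# Every request made through kms_client now carries the extra user-agent token,
# which is how callers (and the SDK) can tag their KMS traffic.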
|
15,463
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
KMSMasterKeyProvider._client
|
def _client(self, key_id):
"""Returns a Boto3 KMS client for the appropriate region.
:param str key_id: KMS CMK ID
"""
region_name = _region_from_key_id(key_id, self.default_region)
self.add_regional_client(region_name)
return self._regional_clients[region_name]
|
python
|
def _client(self, key_id):
"""Returns a Boto3 KMS client for the appropriate region.
:param str key_id: KMS CMK ID
"""
region_name = _region_from_key_id(key_id, self.default_region)
self.add_regional_client(region_name)
return self._regional_clients[region_name]
|
[
"def",
"_client",
"(",
"self",
",",
"key_id",
")",
":",
"region_name",
"=",
"_region_from_key_id",
"(",
"key_id",
",",
"self",
".",
"default_region",
")",
"self",
".",
"add_regional_client",
"(",
"region_name",
")",
"return",
"self",
".",
"_regional_clients",
"[",
"region_name",
"]"
] |
Returns a Boto3 KMS client for the appropriate region.
:param str key_id: KMS CMK ID
|
[
"Returns",
"a",
"Boto3",
"KMS",
"client",
"for",
"the",
"appropriate",
"region",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L177-L184
|
15,464
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
KMSMasterKeyProvider._new_master_key
|
def _new_master_key(self, key_id):
"""Returns a KMSMasterKey for the specified key_id.
:param bytes key_id: KMS CMK ID
:returns: KMS Master Key based on key_id
:rtype: aws_encryption_sdk.key_providers.kms.KMSMasterKey
:raises InvalidKeyIdError: if key_id is not a valid KMS CMK ID to which this key provider has access
"""
_key_id = to_str(key_id) # KMS client requires str, not bytes
return KMSMasterKey(config=KMSMasterKeyConfig(key_id=key_id, client=self._client(_key_id)))
|
python
|
def _new_master_key(self, key_id):
"""Returns a KMSMasterKey for the specified key_id.
:param bytes key_id: KMS CMK ID
:returns: KMS Master Key based on key_id
:rtype: aws_encryption_sdk.key_providers.kms.KMSMasterKey
:raises InvalidKeyIdError: if key_id is not a valid KMS CMK ID to which this key provider has access
"""
_key_id = to_str(key_id) # KMS client requires str, not bytes
return KMSMasterKey(config=KMSMasterKeyConfig(key_id=key_id, client=self._client(_key_id)))
|
[
"def",
"_new_master_key",
"(",
"self",
",",
"key_id",
")",
":",
"_key_id",
"=",
"to_str",
"(",
"key_id",
")",
"# KMS client requires str, not bytes",
"return",
"KMSMasterKey",
"(",
"config",
"=",
"KMSMasterKeyConfig",
"(",
"key_id",
"=",
"key_id",
",",
"client",
"=",
"self",
".",
"_client",
"(",
"_key_id",
")",
")",
")"
] |
Returns a KMSMasterKey for the specified key_id.
:param bytes key_id: KMS CMK ID
:returns: KMS Master Key based on key_id
:rtype: aws_encryption_sdk.key_providers.kms.KMSMasterKey
:raises InvalidKeyIdError: if key_id is not a valid KMS CMK ID to which this key provider has access
|
[
"Returns",
"a",
"KMSMasterKey",
"for",
"the",
"specified",
"key_id",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L186-L195
|
15,465
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
KMSMasterKey._generate_data_key
|
def _generate_data_key(self, algorithm, encryption_context=None):
"""Generates data key and returns plaintext and ciphertext of key.
:param algorithm: Algorithm on which to base data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to pass to KMS
:returns: Generated data key
:rtype: aws_encryption_sdk.structures.DataKey
"""
kms_params = {"KeyId": self._key_id, "NumberOfBytes": algorithm.kdf_input_len}
if encryption_context is not None:
kms_params["EncryptionContext"] = encryption_context
if self.config.grant_tokens:
kms_params["GrantTokens"] = self.config.grant_tokens
# Catch any boto3 errors and normalize to expected EncryptKeyError
try:
response = self.config.client.generate_data_key(**kms_params)
plaintext = response["Plaintext"]
ciphertext = response["CiphertextBlob"]
key_id = response["KeyId"]
except (ClientError, KeyError):
error_message = "Master Key {key_id} unable to generate data key".format(key_id=self._key_id)
_LOGGER.exception(error_message)
raise GenerateKeyError(error_message)
return DataKey(
key_provider=MasterKeyInfo(provider_id=self.provider_id, key_info=key_id),
data_key=plaintext,
encrypted_data_key=ciphertext,
)
|
python
|
def _generate_data_key(self, algorithm, encryption_context=None):
"""Generates data key and returns plaintext and ciphertext of key.
:param algorithm: Algorithm on which to base data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to pass to KMS
:returns: Generated data key
:rtype: aws_encryption_sdk.structures.DataKey
"""
kms_params = {"KeyId": self._key_id, "NumberOfBytes": algorithm.kdf_input_len}
if encryption_context is not None:
kms_params["EncryptionContext"] = encryption_context
if self.config.grant_tokens:
kms_params["GrantTokens"] = self.config.grant_tokens
# Catch any boto3 errors and normalize to expected EncryptKeyError
try:
response = self.config.client.generate_data_key(**kms_params)
plaintext = response["Plaintext"]
ciphertext = response["CiphertextBlob"]
key_id = response["KeyId"]
except (ClientError, KeyError):
error_message = "Master Key {key_id} unable to generate data key".format(key_id=self._key_id)
_LOGGER.exception(error_message)
raise GenerateKeyError(error_message)
return DataKey(
key_provider=MasterKeyInfo(provider_id=self.provider_id, key_info=key_id),
data_key=plaintext,
encrypted_data_key=ciphertext,
)
|
[
"def",
"_generate_data_key",
"(",
"self",
",",
"algorithm",
",",
"encryption_context",
"=",
"None",
")",
":",
"kms_params",
"=",
"{",
"\"KeyId\"",
":",
"self",
".",
"_key_id",
",",
"\"NumberOfBytes\"",
":",
"algorithm",
".",
"kdf_input_len",
"}",
"if",
"encryption_context",
"is",
"not",
"None",
":",
"kms_params",
"[",
"\"EncryptionContext\"",
"]",
"=",
"encryption_context",
"if",
"self",
".",
"config",
".",
"grant_tokens",
":",
"kms_params",
"[",
"\"GrantTokens\"",
"]",
"=",
"self",
".",
"config",
".",
"grant_tokens",
"# Catch any boto3 errors and normalize to expected EncryptKeyError",
"try",
":",
"response",
"=",
"self",
".",
"config",
".",
"client",
".",
"generate_data_key",
"(",
"*",
"*",
"kms_params",
")",
"plaintext",
"=",
"response",
"[",
"\"Plaintext\"",
"]",
"ciphertext",
"=",
"response",
"[",
"\"CiphertextBlob\"",
"]",
"key_id",
"=",
"response",
"[",
"\"KeyId\"",
"]",
"except",
"(",
"ClientError",
",",
"KeyError",
")",
":",
"error_message",
"=",
"\"Master Key {key_id} unable to generate data key\"",
".",
"format",
"(",
"key_id",
"=",
"self",
".",
"_key_id",
")",
"_LOGGER",
".",
"exception",
"(",
"error_message",
")",
"raise",
"GenerateKeyError",
"(",
"error_message",
")",
"return",
"DataKey",
"(",
"key_provider",
"=",
"MasterKeyInfo",
"(",
"provider_id",
"=",
"self",
".",
"provider_id",
",",
"key_info",
"=",
"key_id",
")",
",",
"data_key",
"=",
"plaintext",
",",
"encrypted_data_key",
"=",
"ciphertext",
",",
")"
] |
Generates data key and returns plaintext and ciphertext of key.
:param algorithm: Algorithm on which to base data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to pass to KMS
:returns: Generated data key
:rtype: aws_encryption_sdk.structures.DataKey
|
[
"Generates",
"data",
"key",
"and",
"returns",
"plaintext",
"and",
"ciphertext",
"of",
"key",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L244-L272
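For context, the underlying boto3 call looks roughly like this; it needs real credentials and a real CMK to run, and the ARN and encryption context below are placeholders.

import boto3

kms = boto3.client("kms", region_name="us-east-1")
response = kms.generate_data_key(
    KeyId="arn:aws:kms:us-east-1:111122223333:key/example-key-id",
    NumberOfBytes=32,                        # e.g. a 256-bit data key
    EncryptionContext={"purpose": "demo"},   # authenticated but not secret
)
plaintext_key = response["Plaintext"]        # use locally, then discard
encrypted_key = response["CiphertextBlob"]   # safe to store with the ciphertext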
|
15,466
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/kms.py
|
KMSMasterKey._encrypt_data_key
|
def _encrypt_data_key(self, data_key, algorithm, encryption_context=None):
"""Encrypts a data key and returns the ciphertext.
:param data_key: Unencrypted data key
:type data_key: :class:`aws_encryption_sdk.structures.RawDataKey`
or :class:`aws_encryption_sdk.structures.DataKey`
:param algorithm: Placeholder to maintain API compatibility with parent
:param dict encryption_context: Encryption context to pass to KMS
:returns: Data key containing encrypted data key
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
:raises EncryptKeyError: if Master Key is unable to encrypt data key
"""
kms_params = {"KeyId": self._key_id, "Plaintext": data_key.data_key}
if encryption_context:
kms_params["EncryptionContext"] = encryption_context
if self.config.grant_tokens:
kms_params["GrantTokens"] = self.config.grant_tokens
# Catch any boto3 errors and normalize to expected EncryptKeyError
try:
response = self.config.client.encrypt(**kms_params)
ciphertext = response["CiphertextBlob"]
key_id = response["KeyId"]
except (ClientError, KeyError):
error_message = "Master Key {key_id} unable to encrypt data key".format(key_id=self._key_id)
_LOGGER.exception(error_message)
raise EncryptKeyError(error_message)
return EncryptedDataKey(
key_provider=MasterKeyInfo(provider_id=self.provider_id, key_info=key_id), encrypted_data_key=ciphertext
)
|
python
|
def _encrypt_data_key(self, data_key, algorithm, encryption_context=None):
"""Encrypts a data key and returns the ciphertext.
:param data_key: Unencrypted data key
:type data_key: :class:`aws_encryption_sdk.structures.RawDataKey`
or :class:`aws_encryption_sdk.structures.DataKey`
:param algorithm: Placeholder to maintain API compatibility with parent
:param dict encryption_context: Encryption context to pass to KMS
:returns: Data key containing encrypted data key
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
:raises EncryptKeyError: if Master Key is unable to encrypt data key
"""
kms_params = {"KeyId": self._key_id, "Plaintext": data_key.data_key}
if encryption_context:
kms_params["EncryptionContext"] = encryption_context
if self.config.grant_tokens:
kms_params["GrantTokens"] = self.config.grant_tokens
# Catch any boto3 errors and normalize to expected EncryptKeyError
try:
response = self.config.client.encrypt(**kms_params)
ciphertext = response["CiphertextBlob"]
key_id = response["KeyId"]
except (ClientError, KeyError):
error_message = "Master Key {key_id} unable to encrypt data key".format(key_id=self._key_id)
_LOGGER.exception(error_message)
raise EncryptKeyError(error_message)
return EncryptedDataKey(
key_provider=MasterKeyInfo(provider_id=self.provider_id, key_info=key_id), encrypted_data_key=ciphertext
)
|
[
"def",
"_encrypt_data_key",
"(",
"self",
",",
"data_key",
",",
"algorithm",
",",
"encryption_context",
"=",
"None",
")",
":",
"kms_params",
"=",
"{",
"\"KeyId\"",
":",
"self",
".",
"_key_id",
",",
"\"Plaintext\"",
":",
"data_key",
".",
"data_key",
"}",
"if",
"encryption_context",
":",
"kms_params",
"[",
"\"EncryptionContext\"",
"]",
"=",
"encryption_context",
"if",
"self",
".",
"config",
".",
"grant_tokens",
":",
"kms_params",
"[",
"\"GrantTokens\"",
"]",
"=",
"self",
".",
"config",
".",
"grant_tokens",
"# Catch any boto3 errors and normalize to expected EncryptKeyError",
"try",
":",
"response",
"=",
"self",
".",
"config",
".",
"client",
".",
"encrypt",
"(",
"*",
"*",
"kms_params",
")",
"ciphertext",
"=",
"response",
"[",
"\"CiphertextBlob\"",
"]",
"key_id",
"=",
"response",
"[",
"\"KeyId\"",
"]",
"except",
"(",
"ClientError",
",",
"KeyError",
")",
":",
"error_message",
"=",
"\"Master Key {key_id} unable to encrypt data key\"",
".",
"format",
"(",
"key_id",
"=",
"self",
".",
"_key_id",
")",
"_LOGGER",
".",
"exception",
"(",
"error_message",
")",
"raise",
"EncryptKeyError",
"(",
"error_message",
")",
"return",
"EncryptedDataKey",
"(",
"key_provider",
"=",
"MasterKeyInfo",
"(",
"provider_id",
"=",
"self",
".",
"provider_id",
",",
"key_info",
"=",
"key_id",
")",
",",
"encrypted_data_key",
"=",
"ciphertext",
")"
] |
Encrypts a data key and returns the ciphertext.
:param data_key: Unencrypted data key
:type data_key: :class:`aws_encryption_sdk.structures.RawDataKey`
or :class:`aws_encryption_sdk.structures.DataKey`
:param algorithm: Placeholder to maintain API compatibility with parent
:param dict encryption_context: Encryption context to pass to KMS
:returns: Data key containing encrypted data key
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
:raises EncryptKeyError: if Master Key is unable to encrypt data key
|
[
"Encrypts",
"a",
"data",
"key",
"and",
"returns",
"the",
"ciphertext",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/kms.py#L274-L302
|
15,467
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_encrypted_data_key
|
def serialize_encrypted_data_key(encrypted_data_key):
"""Serializes an encrypted data key.
.. versionadded:: 1.3.0
:param encrypted_data_key: Encrypted data key to serialize
:type encrypted_data_key: aws_encryption_sdk.structures.EncryptedDataKey
:returns: Serialized encrypted data key
:rtype: bytes
"""
encrypted_data_key_format = (
">" # big endian
"H" # key provider ID length
"{provider_id_len}s" # key provider ID
"H" # key info length
"{provider_info_len}s" # key info
"H" # encrypted data key length
"{enc_data_key_len}s" # encrypted data key
)
return struct.pack(
encrypted_data_key_format.format(
provider_id_len=len(encrypted_data_key.key_provider.provider_id),
provider_info_len=len(encrypted_data_key.key_provider.key_info),
enc_data_key_len=len(encrypted_data_key.encrypted_data_key),
),
len(encrypted_data_key.key_provider.provider_id),
to_bytes(encrypted_data_key.key_provider.provider_id),
len(encrypted_data_key.key_provider.key_info),
to_bytes(encrypted_data_key.key_provider.key_info),
len(encrypted_data_key.encrypted_data_key),
encrypted_data_key.encrypted_data_key,
)
|
python
|
def serialize_encrypted_data_key(encrypted_data_key):
"""Serializes an encrypted data key.
.. versionadded:: 1.3.0
:param encrypted_data_key: Encrypted data key to serialize
:type encrypted_data_key: aws_encryption_sdk.structures.EncryptedDataKey
:returns: Serialized encrypted data key
:rtype: bytes
"""
encrypted_data_key_format = (
">" # big endian
"H" # key provider ID length
"{provider_id_len}s" # key provider ID
"H" # key info length
"{provider_info_len}s" # key info
"H" # encrypted data key length
"{enc_data_key_len}s" # encrypted data key
)
return struct.pack(
encrypted_data_key_format.format(
provider_id_len=len(encrypted_data_key.key_provider.provider_id),
provider_info_len=len(encrypted_data_key.key_provider.key_info),
enc_data_key_len=len(encrypted_data_key.encrypted_data_key),
),
len(encrypted_data_key.key_provider.provider_id),
to_bytes(encrypted_data_key.key_provider.provider_id),
len(encrypted_data_key.key_provider.key_info),
to_bytes(encrypted_data_key.key_provider.key_info),
len(encrypted_data_key.encrypted_data_key),
encrypted_data_key.encrypted_data_key,
)
|
[
"def",
"serialize_encrypted_data_key",
"(",
"encrypted_data_key",
")",
":",
"encrypted_data_key_format",
"=",
"(",
"\">\"",
"# big endian",
"\"H\"",
"# key provider ID length",
"\"{provider_id_len}s\"",
"# key provider ID",
"\"H\"",
"# key info length",
"\"{provider_info_len}s\"",
"# key info",
"\"H\"",
"# encrypted data key length",
"\"{enc_data_key_len}s\"",
"# encrypted data key",
")",
"return",
"struct",
".",
"pack",
"(",
"encrypted_data_key_format",
".",
"format",
"(",
"provider_id_len",
"=",
"len",
"(",
"encrypted_data_key",
".",
"key_provider",
".",
"provider_id",
")",
",",
"provider_info_len",
"=",
"len",
"(",
"encrypted_data_key",
".",
"key_provider",
".",
"key_info",
")",
",",
"enc_data_key_len",
"=",
"len",
"(",
"encrypted_data_key",
".",
"encrypted_data_key",
")",
",",
")",
",",
"len",
"(",
"encrypted_data_key",
".",
"key_provider",
".",
"provider_id",
")",
",",
"to_bytes",
"(",
"encrypted_data_key",
".",
"key_provider",
".",
"provider_id",
")",
",",
"len",
"(",
"encrypted_data_key",
".",
"key_provider",
".",
"key_info",
")",
",",
"to_bytes",
"(",
"encrypted_data_key",
".",
"key_provider",
".",
"key_info",
")",
",",
"len",
"(",
"encrypted_data_key",
".",
"encrypted_data_key",
")",
",",
"encrypted_data_key",
".",
"encrypted_data_key",
",",
")"
] |
Serializes an encrypted data key.
.. versionadded:: 1.3.0
:param encrypted_data_key: Encrypted data key to serialize
:type encrypted_data_key: aws_encryption_sdk.structures.EncryptedDataKey
:returns: Serialized encrypted data key
:rtype: bytes
|
[
"Serializes",
"an",
"encrypted",
"data",
"key",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L29-L60
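Self-contained illustration of the length-prefixed layout above, with a namedtuple standing in for the SDK's EncryptedDataKey/MasterKeyInfo structures; the field values are dummies.

import struct
from collections import namedtuple

KeyProvider = namedtuple("KeyProvider", "provider_id key_info")
EDK = namedtuple("EDK", "key_provider encrypted_data_key")

edk = EDK(KeyProvider(b"aws-kms", b"arn:aws:kms:us-east-1:111122223333:key/x"),
          b"\x00" * 24)

fmt = ">H{}sH{}sH{}s".format(
    len(edk.key_provider.provider_id),
    len(edk.key_provider.key_info),
    len(edk.encrypted_data_key),
)
serialized = struct.pack(
    fmt,
    len(edk.key_provider.provider_id), edk.key_provider.provider_id,
    len(edk.key_provider.key_info), edk.key_provider.key_info,
    len(edk.encrypted_data_key), edk.encrypted_data_key,
)
# Three 2-byte length prefixes plus the three variable-length fields:
assert len(serialized) == (
    6
    + len(edk.key_provider.provider_id)
    + len(edk.key_provider.key_info)
    + len(edk.encrypted_data_key)
)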
|
15,468
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_header
|
def serialize_header(header, signer=None):
"""Serializes a header object.
:param header: Header to serialize
:type header: aws_encryption_sdk.structures.MessageHeader
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized header
:rtype: bytes
"""
ec_serialized = aws_encryption_sdk.internal.formatting.encryption_context.serialize_encryption_context(
header.encryption_context
)
header_start_format = (
">" # big endian
"B" # version
"B" # type
"H" # algorithm ID
"16s" # message ID
"H" # encryption context length
"{}s" # serialized encryption context
).format(len(ec_serialized))
header_bytes = bytearray()
header_bytes.extend(
struct.pack(
header_start_format,
header.version.value,
header.type.value,
header.algorithm.algorithm_id,
header.message_id,
len(ec_serialized),
ec_serialized,
)
)
serialized_data_keys = bytearray()
for data_key in header.encrypted_data_keys:
serialized_data_keys.extend(serialize_encrypted_data_key(data_key))
header_bytes.extend(struct.pack(">H", len(header.encrypted_data_keys)))
header_bytes.extend(serialized_data_keys)
header_close_format = (
">" # big endian
"B" # content type (no framing vs framing)
"4x" # reserved (formerly content AAD length)
"B" # nonce/IV length, this applies to all IVs in this message
"I" # frame length
)
header_bytes.extend(
struct.pack(header_close_format, header.content_type.value, header.algorithm.iv_len, header.frame_length)
)
output = bytes(header_bytes)
if signer is not None:
signer.update(output)
return output
|
python
|
def serialize_header(header, signer=None):
"""Serializes a header object.
:param header: Header to serialize
:type header: aws_encryption_sdk.structures.MessageHeader
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized header
:rtype: bytes
"""
ec_serialized = aws_encryption_sdk.internal.formatting.encryption_context.serialize_encryption_context(
header.encryption_context
)
header_start_format = (
">" # big endian
"B" # version
"B" # type
"H" # algorithm ID
"16s" # message ID
"H" # encryption context length
"{}s" # serialized encryption context
).format(len(ec_serialized))
header_bytes = bytearray()
header_bytes.extend(
struct.pack(
header_start_format,
header.version.value,
header.type.value,
header.algorithm.algorithm_id,
header.message_id,
len(ec_serialized),
ec_serialized,
)
)
serialized_data_keys = bytearray()
for data_key in header.encrypted_data_keys:
serialized_data_keys.extend(serialize_encrypted_data_key(data_key))
header_bytes.extend(struct.pack(">H", len(header.encrypted_data_keys)))
header_bytes.extend(serialized_data_keys)
header_close_format = (
">" # big endian
"B" # content type (no framing vs framing)
"4x" # reserved (formerly content AAD length)
"B" # nonce/IV length, this applies to all IVs in this message
"I" # frame length
)
header_bytes.extend(
struct.pack(header_close_format, header.content_type.value, header.algorithm.iv_len, header.frame_length)
)
output = bytes(header_bytes)
if signer is not None:
signer.update(output)
return output
|
[
"def",
"serialize_header",
"(",
"header",
",",
"signer",
"=",
"None",
")",
":",
"ec_serialized",
"=",
"aws_encryption_sdk",
".",
"internal",
".",
"formatting",
".",
"encryption_context",
".",
"serialize_encryption_context",
"(",
"header",
".",
"encryption_context",
")",
"header_start_format",
"=",
"(",
"\">\"",
"# big endian",
"\"B\"",
"# version",
"\"B\"",
"# type",
"\"H\"",
"# algorithm ID",
"\"16s\"",
"# message ID",
"\"H\"",
"# encryption context length",
"\"{}s\"",
"# serialized encryption context",
")",
".",
"format",
"(",
"len",
"(",
"ec_serialized",
")",
")",
"header_bytes",
"=",
"bytearray",
"(",
")",
"header_bytes",
".",
"extend",
"(",
"struct",
".",
"pack",
"(",
"header_start_format",
",",
"header",
".",
"version",
".",
"value",
",",
"header",
".",
"type",
".",
"value",
",",
"header",
".",
"algorithm",
".",
"algorithm_id",
",",
"header",
".",
"message_id",
",",
"len",
"(",
"ec_serialized",
")",
",",
"ec_serialized",
",",
")",
")",
"serialized_data_keys",
"=",
"bytearray",
"(",
")",
"for",
"data_key",
"in",
"header",
".",
"encrypted_data_keys",
":",
"serialized_data_keys",
".",
"extend",
"(",
"serialize_encrypted_data_key",
"(",
"data_key",
")",
")",
"header_bytes",
".",
"extend",
"(",
"struct",
".",
"pack",
"(",
"\">H\"",
",",
"len",
"(",
"header",
".",
"encrypted_data_keys",
")",
")",
")",
"header_bytes",
".",
"extend",
"(",
"serialized_data_keys",
")",
"header_close_format",
"=",
"(",
"\">\"",
"# big endian",
"\"B\"",
"# content type (no framing vs framing)",
"\"4x\"",
"# reserved (formerly content AAD length)",
"\"B\"",
"# nonce/IV length, this applies to all IVs in this message",
"\"I\"",
"# frame length",
")",
"header_bytes",
".",
"extend",
"(",
"struct",
".",
"pack",
"(",
"header_close_format",
",",
"header",
".",
"content_type",
".",
"value",
",",
"header",
".",
"algorithm",
".",
"iv_len",
",",
"header",
".",
"frame_length",
")",
")",
"output",
"=",
"bytes",
"(",
"header_bytes",
")",
"if",
"signer",
"is",
"not",
"None",
":",
"signer",
".",
"update",
"(",
"output",
")",
"return",
"output"
] |
Serializes a header object.
:param header: Header to serialize
:type header: aws_encryption_sdk.structures.MessageHeader
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized header
:rtype: bytes
|
[
"Serializes",
"a",
"header",
"object",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L63-L118
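Toy illustration of the fixed header-start layout packed above (version, type, algorithm id, 16-byte message id, then the length-prefixed encryption context); the field values are placeholders and do not form a valid Encryption SDK header.

import os
import struct

serialized_ec = b"\x00\x00"            # stand-in for a serialized encryption context
fmt = ">BBH16sH{}s".format(len(serialized_ec))
header_start = struct.pack(
    fmt,
    1,                # version
    128,              # message type
    0x0378,           # algorithm id (placeholder value)
    os.urandom(16),   # message id
    len(serialized_ec),
    serialized_ec,
)
assert len(header_start) == 1 + 1 + 2 + 16 + 2 + len(serialized_ec)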
|
15,469
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_header_auth
|
def serialize_header_auth(algorithm, header, data_encryption_key, signer=None):
"""Creates serialized header authentication data.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes header: Serialized message header
:param bytes data_encryption_key: Data key with which to encrypt message
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.Signer
:returns: Serialized header authentication data
:rtype: bytes
"""
header_auth = encrypt(
algorithm=algorithm,
key=data_encryption_key,
plaintext=b"",
associated_data=header,
iv=header_auth_iv(algorithm),
)
output = struct.pack(
">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len, tag_len=algorithm.tag_len),
header_auth.iv,
header_auth.tag,
)
if signer is not None:
signer.update(output)
return output
|
python
|
def serialize_header_auth(algorithm, header, data_encryption_key, signer=None):
"""Creates serialized header authentication data.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes header: Serialized message header
:param bytes data_encryption_key: Data key with which to encrypt message
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.Signer
:returns: Serialized header authentication data
:rtype: bytes
"""
header_auth = encrypt(
algorithm=algorithm,
key=data_encryption_key,
plaintext=b"",
associated_data=header,
iv=header_auth_iv(algorithm),
)
output = struct.pack(
">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len, tag_len=algorithm.tag_len),
header_auth.iv,
header_auth.tag,
)
if signer is not None:
signer.update(output)
return output
|
[
"def",
"serialize_header_auth",
"(",
"algorithm",
",",
"header",
",",
"data_encryption_key",
",",
"signer",
"=",
"None",
")",
":",
"header_auth",
"=",
"encrypt",
"(",
"algorithm",
"=",
"algorithm",
",",
"key",
"=",
"data_encryption_key",
",",
"plaintext",
"=",
"b\"\"",
",",
"associated_data",
"=",
"header",
",",
"iv",
"=",
"header_auth_iv",
"(",
"algorithm",
")",
",",
")",
"output",
"=",
"struct",
".",
"pack",
"(",
"\">{iv_len}s{tag_len}s\"",
".",
"format",
"(",
"iv_len",
"=",
"algorithm",
".",
"iv_len",
",",
"tag_len",
"=",
"algorithm",
".",
"tag_len",
")",
",",
"header_auth",
".",
"iv",
",",
"header_auth",
".",
"tag",
",",
")",
"if",
"signer",
"is",
"not",
"None",
":",
"signer",
".",
"update",
"(",
"output",
")",
"return",
"output"
] |
Creates serialized header authentication data.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes header: Serialized message header
:param bytes data_encryption_key: Data key with which to encrypt message
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.Signer
:returns: Serialized header authentication data
:rtype: bytes
|
[
"Creates",
"serialized",
"header",
"authentication",
"data",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L121-L147
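Minimal sketch of the header-authentication idea: AES-GCM over an empty plaintext with the serialized header as AAD yields only an auth tag, and iv||tag is what gets written. This uses the `cryptography` package directly; the all-zero IV merely stands in for header_auth_iv() and the header bytes are dummies.

import struct
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

data_key = AESGCM.generate_key(bit_length=256)
serialized_header = b"example serialized header bytes"
iv = b"\x00" * 12                                           # stand-in header-auth IV
tag = AESGCM(data_key).encrypt(iv, b"", serialized_header)  # empty plaintext -> tag only
header_auth = struct.pack(">12s16s", iv, tag)
assert len(header_auth) == 12 + 16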
|
15,470
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_non_framed_open
|
def serialize_non_framed_open(algorithm, iv, plaintext_length, signer=None):
"""Serializes the opening block for a non-framed message body.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes iv: IV value used to encrypt body
:param int plaintext_length: Length of plaintext (and thus ciphertext) in body
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized body start block
:rtype: bytes
"""
body_start_format = (">" "{iv_length}s" "Q").format(iv_length=algorithm.iv_len) # nonce (IV) # content length
body_start = struct.pack(body_start_format, iv, plaintext_length)
if signer:
signer.update(body_start)
return body_start
|
python
|
def serialize_non_framed_open(algorithm, iv, plaintext_length, signer=None):
"""Serializes the opening block for a non-framed message body.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes iv: IV value used to encrypt body
:param int plaintext_length: Length of plaintext (and thus ciphertext) in body
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized body start block
:rtype: bytes
"""
body_start_format = (">" "{iv_length}s" "Q").format(iv_length=algorithm.iv_len) # nonce (IV) # content length
body_start = struct.pack(body_start_format, iv, plaintext_length)
if signer:
signer.update(body_start)
return body_start
|
[
"def",
"serialize_non_framed_open",
"(",
"algorithm",
",",
"iv",
",",
"plaintext_length",
",",
"signer",
"=",
"None",
")",
":",
"body_start_format",
"=",
"(",
"\">\"",
"\"{iv_length}s\"",
"\"Q\"",
")",
".",
"format",
"(",
"iv_length",
"=",
"algorithm",
".",
"iv_len",
")",
"# nonce (IV) # content length",
"body_start",
"=",
"struct",
".",
"pack",
"(",
"body_start_format",
",",
"iv",
",",
"plaintext_length",
")",
"if",
"signer",
":",
"signer",
".",
"update",
"(",
"body_start",
")",
"return",
"body_start"
] |
Serializes the opening block for a non-framed message body.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes iv: IV value used to encrypt body
:param int plaintext_length: Length of plaintext (and thus ciphertext) in body
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized body start block
:rtype: bytes
|
[
"Serializes",
"the",
"opening",
"block",
"for",
"a",
"non",
"-",
"framed",
"message",
"body",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L150-L166
|
15,471
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_non_framed_close
|
def serialize_non_framed_close(tag, signer=None):
"""Serializes the closing block for a non-framed message body.
:param bytes tag: Auth tag value from body encryptor
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized body close block
:rtype: bytes
"""
body_close = struct.pack("{auth_len}s".format(auth_len=len(tag)), tag)
if signer:
signer.update(body_close)
return body_close
|
python
|
def serialize_non_framed_close(tag, signer=None):
"""Serializes the closing block for a non-framed message body.
:param bytes tag: Auth tag value from body encryptor
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized body close block
:rtype: bytes
"""
body_close = struct.pack("{auth_len}s".format(auth_len=len(tag)), tag)
if signer:
signer.update(body_close)
return body_close
|
[
"def",
"serialize_non_framed_close",
"(",
"tag",
",",
"signer",
"=",
"None",
")",
":",
"body_close",
"=",
"struct",
".",
"pack",
"(",
"\"{auth_len}s\"",
".",
"format",
"(",
"auth_len",
"=",
"len",
"(",
"tag",
")",
")",
",",
"tag",
")",
"if",
"signer",
":",
"signer",
".",
"update",
"(",
"body_close",
")",
"return",
"body_close"
] |
Serializes the closing block for a non-framed message body.
:param bytes tag: Auth tag value from body encryptor
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized body close block
:rtype: bytes
|
[
"Serializes",
"the",
"closing",
"block",
"for",
"a",
"non",
"-",
"framed",
"message",
"body",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L169-L181
|
15,472
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_frame
|
def serialize_frame(
algorithm, plaintext, message_id, data_encryption_key, frame_length, sequence_number, is_final_frame, signer=None
):
"""Receives a message plaintext, breaks off a frame, encrypts and serializes
the frame, and returns the encrypted frame and the remaining plaintext.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes plaintext: Source plaintext to encrypt and serialize
:param bytes message_id: Message ID
:param bytes data_encryption_key: Data key with which to encrypt message
:param int frame_length: Length of the framed data
:param int sequence_number: Sequence number for frame to be generated
:param bool is_final_frame: Boolean stating whether or not this frame is a final frame
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.Signer
:returns: Serialized frame and remaining plaintext
:rtype: tuple of bytes
:raises SerializationError: if number of frames is too large
"""
if sequence_number < 1:
raise SerializationError("Frame sequence number must be greater than 0")
if sequence_number > aws_encryption_sdk.internal.defaults.MAX_FRAME_COUNT:
raise SerializationError("Max frame count exceeded")
if is_final_frame:
content_string = ContentAADString.FINAL_FRAME_STRING_ID
else:
content_string = ContentAADString.FRAME_STRING_ID
frame_plaintext = plaintext[:frame_length]
frame_ciphertext = encrypt(
algorithm=algorithm,
key=data_encryption_key,
plaintext=frame_plaintext,
associated_data=aws_encryption_sdk.internal.formatting.encryption_context.assemble_content_aad(
message_id=message_id,
aad_content_string=content_string,
seq_num=sequence_number,
length=len(frame_plaintext),
),
iv=frame_iv(algorithm, sequence_number),
)
plaintext = plaintext[frame_length:]
if is_final_frame:
_LOGGER.debug("Serializing final frame")
packed_frame = struct.pack(
">II{iv_len}sI{content_len}s{auth_len}s".format(
iv_len=algorithm.iv_len, content_len=len(frame_ciphertext.ciphertext), auth_len=algorithm.auth_len
),
SequenceIdentifier.SEQUENCE_NUMBER_END.value,
sequence_number,
frame_ciphertext.iv,
len(frame_ciphertext.ciphertext),
frame_ciphertext.ciphertext,
frame_ciphertext.tag,
)
else:
_LOGGER.debug("Serializing frame")
packed_frame = struct.pack(
">I{iv_len}s{content_len}s{auth_len}s".format(
iv_len=algorithm.iv_len, content_len=frame_length, auth_len=algorithm.auth_len
),
sequence_number,
frame_ciphertext.iv,
frame_ciphertext.ciphertext,
frame_ciphertext.tag,
)
if signer is not None:
signer.update(packed_frame)
return packed_frame, plaintext
|
python
|
def serialize_frame(
algorithm, plaintext, message_id, data_encryption_key, frame_length, sequence_number, is_final_frame, signer=None
):
"""Receives a message plaintext, breaks off a frame, encrypts and serializes
the frame, and returns the encrypted frame and the remaining plaintext.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes plaintext: Source plaintext to encrypt and serialize
:param bytes message_id: Message ID
:param bytes data_encryption_key: Data key with which to encrypt message
:param int frame_length: Length of the framed data
:param int sequence_number: Sequence number for frame to be generated
:param bool is_final_frame: Boolean stating whether or not this frame is a final frame
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.Signer
:returns: Serialized frame and remaining plaintext
:rtype: tuple of bytes
:raises SerializationError: if number of frames is too large
"""
if sequence_number < 1:
raise SerializationError("Frame sequence number must be greater than 0")
if sequence_number > aws_encryption_sdk.internal.defaults.MAX_FRAME_COUNT:
raise SerializationError("Max frame count exceeded")
if is_final_frame:
content_string = ContentAADString.FINAL_FRAME_STRING_ID
else:
content_string = ContentAADString.FRAME_STRING_ID
frame_plaintext = plaintext[:frame_length]
frame_ciphertext = encrypt(
algorithm=algorithm,
key=data_encryption_key,
plaintext=frame_plaintext,
associated_data=aws_encryption_sdk.internal.formatting.encryption_context.assemble_content_aad(
message_id=message_id,
aad_content_string=content_string,
seq_num=sequence_number,
length=len(frame_plaintext),
),
iv=frame_iv(algorithm, sequence_number),
)
plaintext = plaintext[frame_length:]
if is_final_frame:
_LOGGER.debug("Serializing final frame")
packed_frame = struct.pack(
">II{iv_len}sI{content_len}s{auth_len}s".format(
iv_len=algorithm.iv_len, content_len=len(frame_ciphertext.ciphertext), auth_len=algorithm.auth_len
),
SequenceIdentifier.SEQUENCE_NUMBER_END.value,
sequence_number,
frame_ciphertext.iv,
len(frame_ciphertext.ciphertext),
frame_ciphertext.ciphertext,
frame_ciphertext.tag,
)
else:
_LOGGER.debug("Serializing frame")
packed_frame = struct.pack(
">I{iv_len}s{content_len}s{auth_len}s".format(
iv_len=algorithm.iv_len, content_len=frame_length, auth_len=algorithm.auth_len
),
sequence_number,
frame_ciphertext.iv,
frame_ciphertext.ciphertext,
frame_ciphertext.tag,
)
if signer is not None:
signer.update(packed_frame)
return packed_frame, plaintext
|
[
"def",
"serialize_frame",
"(",
"algorithm",
",",
"plaintext",
",",
"message_id",
",",
"data_encryption_key",
",",
"frame_length",
",",
"sequence_number",
",",
"is_final_frame",
",",
"signer",
"=",
"None",
")",
":",
"if",
"sequence_number",
"<",
"1",
":",
"raise",
"SerializationError",
"(",
"\"Frame sequence number must be greater than 0\"",
")",
"if",
"sequence_number",
">",
"aws_encryption_sdk",
".",
"internal",
".",
"defaults",
".",
"MAX_FRAME_COUNT",
":",
"raise",
"SerializationError",
"(",
"\"Max frame count exceeded\"",
")",
"if",
"is_final_frame",
":",
"content_string",
"=",
"ContentAADString",
".",
"FINAL_FRAME_STRING_ID",
"else",
":",
"content_string",
"=",
"ContentAADString",
".",
"FRAME_STRING_ID",
"frame_plaintext",
"=",
"plaintext",
"[",
":",
"frame_length",
"]",
"frame_ciphertext",
"=",
"encrypt",
"(",
"algorithm",
"=",
"algorithm",
",",
"key",
"=",
"data_encryption_key",
",",
"plaintext",
"=",
"frame_plaintext",
",",
"associated_data",
"=",
"aws_encryption_sdk",
".",
"internal",
".",
"formatting",
".",
"encryption_context",
".",
"assemble_content_aad",
"(",
"message_id",
"=",
"message_id",
",",
"aad_content_string",
"=",
"content_string",
",",
"seq_num",
"=",
"sequence_number",
",",
"length",
"=",
"len",
"(",
"frame_plaintext",
")",
",",
")",
",",
"iv",
"=",
"frame_iv",
"(",
"algorithm",
",",
"sequence_number",
")",
",",
")",
"plaintext",
"=",
"plaintext",
"[",
"frame_length",
":",
"]",
"if",
"is_final_frame",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Serializing final frame\"",
")",
"packed_frame",
"=",
"struct",
".",
"pack",
"(",
"\">II{iv_len}sI{content_len}s{auth_len}s\"",
".",
"format",
"(",
"iv_len",
"=",
"algorithm",
".",
"iv_len",
",",
"content_len",
"=",
"len",
"(",
"frame_ciphertext",
".",
"ciphertext",
")",
",",
"auth_len",
"=",
"algorithm",
".",
"auth_len",
")",
",",
"SequenceIdentifier",
".",
"SEQUENCE_NUMBER_END",
".",
"value",
",",
"sequence_number",
",",
"frame_ciphertext",
".",
"iv",
",",
"len",
"(",
"frame_ciphertext",
".",
"ciphertext",
")",
",",
"frame_ciphertext",
".",
"ciphertext",
",",
"frame_ciphertext",
".",
"tag",
",",
")",
"else",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Serializing frame\"",
")",
"packed_frame",
"=",
"struct",
".",
"pack",
"(",
"\">I{iv_len}s{content_len}s{auth_len}s\"",
".",
"format",
"(",
"iv_len",
"=",
"algorithm",
".",
"iv_len",
",",
"content_len",
"=",
"frame_length",
",",
"auth_len",
"=",
"algorithm",
".",
"auth_len",
")",
",",
"sequence_number",
",",
"frame_ciphertext",
".",
"iv",
",",
"frame_ciphertext",
".",
"ciphertext",
",",
"frame_ciphertext",
".",
"tag",
",",
")",
"if",
"signer",
"is",
"not",
"None",
":",
"signer",
".",
"update",
"(",
"packed_frame",
")",
"return",
"packed_frame",
",",
"plaintext"
] |
Receives a message plaintext, breaks off a frame, encrypts and serializes
the frame, and returns the encrypted frame and the remaining plaintext.
:param algorithm: Algorithm to use for encryption
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes plaintext: Source plaintext to encrypt and serialize
:param bytes message_id: Message ID
:param bytes data_encryption_key: Data key with which to encrypt message
:param int frame_length: Length of the framed data
:param int sequence_number: Sequence number for frame to be generated
:param bool is_final_frame: Boolean stating whether or not this frame is a final frame
:param signer: Cryptographic signer object (optional)
:type signer: aws_encryption_sdk.Signer
:returns: Serialized frame and remaining plaintext
:rtype: tuple of bytes
:raises SerializationError: if number of frames is too large
|
[
"Receives",
"a",
"message",
"plaintext",
"breaks",
"off",
"a",
"frame",
"encrypts",
"and",
"serializes",
"the",
"frame",
"and",
"returns",
"the",
"encrypted",
"frame",
"and",
"the",
"remaining",
"plaintext",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L184-L252
|
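The serialize_frame entry above packs a final frame as an end-of-sequence marker, the frame sequence number, the IV, a 4-byte ciphertext length, the ciphertext and the authentication tag. A minimal standalone sketch of that layout (stdlib only; the 12-byte IV, 16-byte tag and the sentinel value are illustrative assumptions, not the SDK's constants):

import struct

# Minimal sketch of the final-frame layout; sizes and sentinel are assumptions.
SEQUENCE_NUMBER_END = 0xFFFFFFFF  # assumed marker identifying the final frame
iv = b"\x00" * 12
ciphertext = b"\xaa" * 5
tag = b"\xbb" * 16

packed = struct.pack(
    ">II12sI5s16s",
    SEQUENCE_NUMBER_END,  # final-frame marker
    1,                    # frame sequence number
    iv,
    len(ciphertext),      # encrypted content length
    ciphertext,
    tag,
)
print(len(packed))  # 4 + 4 + 12 + 4 + 5 + 16 = 45 bytes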
15,473
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_footer
|
def serialize_footer(signer):
"""Uses the signer object which has been used to sign the message to generate
the signature, then serializes that signature.
:param signer: Cryptographic signer object
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized footer
:rtype: bytes
"""
footer = b""
if signer is not None:
signature = signer.finalize()
footer = struct.pack(">H{sig_len}s".format(sig_len=len(signature)), len(signature), signature)
return footer
|
python
|
def serialize_footer(signer):
"""Uses the signer object which has been used to sign the message to generate
the signature, then serializes that signature.
:param signer: Cryptographic signer object
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized footer
:rtype: bytes
"""
footer = b""
if signer is not None:
signature = signer.finalize()
footer = struct.pack(">H{sig_len}s".format(sig_len=len(signature)), len(signature), signature)
return footer
|
[
"def",
"serialize_footer",
"(",
"signer",
")",
":",
"footer",
"=",
"b\"\"",
"if",
"signer",
"is",
"not",
"None",
":",
"signature",
"=",
"signer",
".",
"finalize",
"(",
")",
"footer",
"=",
"struct",
".",
"pack",
"(",
"\">H{sig_len}s\"",
".",
"format",
"(",
"sig_len",
"=",
"len",
"(",
"signature",
")",
")",
",",
"len",
"(",
"signature",
")",
",",
"signature",
")",
"return",
"footer"
] |
Uses the signer object which has been used to sign the message to generate
the signature, then serializes that signature.
:param signer: Cryptographic signer object
:type signer: aws_encryption_sdk.internal.crypto.Signer
:returns: Serialized footer
:rtype: bytes
|
[
"Uses",
"the",
"signer",
"object",
"which",
"has",
"been",
"used",
"to",
"sign",
"the",
"message",
"to",
"generate",
"the",
"signature",
"then",
"serializes",
"that",
"signature",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L255-L268
|
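The serialize_footer entry above emits a 2-byte signature length followed by the signature bytes. A small standalone sketch of that layout and how it reads back (dummy signature bytes, not a real ECDSA signature):

import struct

# Footer layout: 2-byte big-endian signature length, then the raw signature.
signature = b"\x01\x02\x03\x04"  # dummy data for illustration
footer = struct.pack(">H{sig_len}s".format(sig_len=len(signature)), len(signature), signature)

(sig_len,) = struct.unpack_from(">H", footer, 0)
assert footer[2:2 + sig_len] == signature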
15,474
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_raw_master_key_prefix
|
def serialize_raw_master_key_prefix(raw_master_key):
"""Produces the prefix that a RawMasterKey will always use for the
key_info value of keys which require additional information.
:param raw_master_key: RawMasterKey for which to produce a prefix
:type raw_master_key: aws_encryption_sdk.key_providers.raw.RawMasterKey
:returns: Serialized key_info prefix
:rtype: bytes
"""
if raw_master_key.config.wrapping_key.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC:
return to_bytes(raw_master_key.key_id)
return struct.pack(
">{}sII".format(len(raw_master_key.key_id)),
to_bytes(raw_master_key.key_id),
# Tag Length is stored in bits, not bytes
raw_master_key.config.wrapping_key.wrapping_algorithm.algorithm.tag_len * 8,
raw_master_key.config.wrapping_key.wrapping_algorithm.algorithm.iv_len,
)
|
python
|
def serialize_raw_master_key_prefix(raw_master_key):
"""Produces the prefix that a RawMasterKey will always use for the
key_info value of keys which require additional information.
:param raw_master_key: RawMasterKey for which to produce a prefix
:type raw_master_key: aws_encryption_sdk.key_providers.raw.RawMasterKey
:returns: Serialized key_info prefix
:rtype: bytes
"""
if raw_master_key.config.wrapping_key.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC:
return to_bytes(raw_master_key.key_id)
return struct.pack(
">{}sII".format(len(raw_master_key.key_id)),
to_bytes(raw_master_key.key_id),
# Tag Length is stored in bits, not bytes
raw_master_key.config.wrapping_key.wrapping_algorithm.algorithm.tag_len * 8,
raw_master_key.config.wrapping_key.wrapping_algorithm.algorithm.iv_len,
)
|
[
"def",
"serialize_raw_master_key_prefix",
"(",
"raw_master_key",
")",
":",
"if",
"raw_master_key",
".",
"config",
".",
"wrapping_key",
".",
"wrapping_algorithm",
".",
"encryption_type",
"is",
"EncryptionType",
".",
"ASYMMETRIC",
":",
"return",
"to_bytes",
"(",
"raw_master_key",
".",
"key_id",
")",
"return",
"struct",
".",
"pack",
"(",
"\">{}sII\"",
".",
"format",
"(",
"len",
"(",
"raw_master_key",
".",
"key_id",
")",
")",
",",
"to_bytes",
"(",
"raw_master_key",
".",
"key_id",
")",
",",
"# Tag Length is stored in bits, not bytes",
"raw_master_key",
".",
"config",
".",
"wrapping_key",
".",
"wrapping_algorithm",
".",
"algorithm",
".",
"tag_len",
"*",
"8",
",",
"raw_master_key",
".",
"config",
".",
"wrapping_key",
".",
"wrapping_algorithm",
".",
"algorithm",
".",
"iv_len",
",",
")"
] |
Produces the prefix that a RawMasterKey will always use for the
key_info value of keys which require additional information.
:param raw_master_key: RawMasterKey for which to produce a prefix
:type raw_master_key: aws_encryption_sdk.key_providers.raw.RawMasterKey
:returns: Serialized key_info prefix
:rtype: bytes
|
[
"Produces",
"the",
"prefix",
"that",
"a",
"RawMasterKey",
"will",
"always",
"use",
"for",
"the",
"key_info",
"value",
"of",
"keys",
"which",
"require",
"additional",
"information",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L271-L288
|
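The serialize_raw_master_key_prefix entry above concatenates the key id with the tag length in bits and the IV length in bytes. A standalone sketch with assumed sizes (16-byte tag, 12-byte IV):

import struct

# key_info prefix for a symmetric raw master key: key id, tag bits, IV length.
# The 16-byte tag and 12-byte IV are illustrative assumptions.
key_id = b"example-raw-key"
prefix = struct.pack(">{}sII".format(len(key_id)), key_id, 16 * 8, 12)
print(prefix)  # b'example-raw-key\x00\x00\x00\x80\x00\x00\x00\x0c'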
15,475
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/serialize.py
|
serialize_wrapped_key
|
def serialize_wrapped_key(key_provider, wrapping_algorithm, wrapping_key_id, encrypted_wrapped_key):
"""Serializes EncryptedData into a Wrapped EncryptedDataKey.
:param key_provider: Info for Wrapping MasterKey
:type key_provider: aws_encryption_sdk.structures.MasterKeyInfo
:param wrapping_algorithm: Wrapping Algorithm with which to wrap plaintext_data_key
:type wrapping_algorithm: aws_encryption_sdk.identifiers.WrappingAlgorithm
:param bytes wrapping_key_id: Key ID of wrapping MasterKey
:param encrypted_wrapped_key: Encrypted data key
:type encrypted_wrapped_key: aws_encryption_sdk.internal.structures.EncryptedData
:returns: Wrapped EncryptedDataKey
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
"""
if encrypted_wrapped_key.iv is None:
key_info = wrapping_key_id
key_ciphertext = encrypted_wrapped_key.ciphertext
else:
key_info = struct.pack(
">{key_id_len}sII{iv_len}s".format(
key_id_len=len(wrapping_key_id), iv_len=wrapping_algorithm.algorithm.iv_len
),
to_bytes(wrapping_key_id),
len(encrypted_wrapped_key.tag) * 8, # Tag Length is stored in bits, not bytes
wrapping_algorithm.algorithm.iv_len,
encrypted_wrapped_key.iv,
)
key_ciphertext = encrypted_wrapped_key.ciphertext + encrypted_wrapped_key.tag
return EncryptedDataKey(
key_provider=MasterKeyInfo(provider_id=key_provider.provider_id, key_info=key_info),
encrypted_data_key=key_ciphertext,
)
|
python
|
def serialize_wrapped_key(key_provider, wrapping_algorithm, wrapping_key_id, encrypted_wrapped_key):
"""Serializes EncryptedData into a Wrapped EncryptedDataKey.
:param key_provider: Info for Wrapping MasterKey
:type key_provider: aws_encryption_sdk.structures.MasterKeyInfo
:param wrapping_algorithm: Wrapping Algorithm with which to wrap plaintext_data_key
:type wrapping_algorithm: aws_encryption_sdk.identifiers.WrappingAlgorithm
:param bytes wrapping_key_id: Key ID of wrapping MasterKey
:param encrypted_wrapped_key: Encrypted data key
:type encrypted_wrapped_key: aws_encryption_sdk.internal.structures.EncryptedData
:returns: Wrapped EncryptedDataKey
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
"""
if encrypted_wrapped_key.iv is None:
key_info = wrapping_key_id
key_ciphertext = encrypted_wrapped_key.ciphertext
else:
key_info = struct.pack(
">{key_id_len}sII{iv_len}s".format(
key_id_len=len(wrapping_key_id), iv_len=wrapping_algorithm.algorithm.iv_len
),
to_bytes(wrapping_key_id),
len(encrypted_wrapped_key.tag) * 8, # Tag Length is stored in bits, not bytes
wrapping_algorithm.algorithm.iv_len,
encrypted_wrapped_key.iv,
)
key_ciphertext = encrypted_wrapped_key.ciphertext + encrypted_wrapped_key.tag
return EncryptedDataKey(
key_provider=MasterKeyInfo(provider_id=key_provider.provider_id, key_info=key_info),
encrypted_data_key=key_ciphertext,
)
|
[
"def",
"serialize_wrapped_key",
"(",
"key_provider",
",",
"wrapping_algorithm",
",",
"wrapping_key_id",
",",
"encrypted_wrapped_key",
")",
":",
"if",
"encrypted_wrapped_key",
".",
"iv",
"is",
"None",
":",
"key_info",
"=",
"wrapping_key_id",
"key_ciphertext",
"=",
"encrypted_wrapped_key",
".",
"ciphertext",
"else",
":",
"key_info",
"=",
"struct",
".",
"pack",
"(",
"\">{key_id_len}sII{iv_len}s\"",
".",
"format",
"(",
"key_id_len",
"=",
"len",
"(",
"wrapping_key_id",
")",
",",
"iv_len",
"=",
"wrapping_algorithm",
".",
"algorithm",
".",
"iv_len",
")",
",",
"to_bytes",
"(",
"wrapping_key_id",
")",
",",
"len",
"(",
"encrypted_wrapped_key",
".",
"tag",
")",
"*",
"8",
",",
"# Tag Length is stored in bits, not bytes",
"wrapping_algorithm",
".",
"algorithm",
".",
"iv_len",
",",
"encrypted_wrapped_key",
".",
"iv",
",",
")",
"key_ciphertext",
"=",
"encrypted_wrapped_key",
".",
"ciphertext",
"+",
"encrypted_wrapped_key",
".",
"tag",
"return",
"EncryptedDataKey",
"(",
"key_provider",
"=",
"MasterKeyInfo",
"(",
"provider_id",
"=",
"key_provider",
".",
"provider_id",
",",
"key_info",
"=",
"key_info",
")",
",",
"encrypted_data_key",
"=",
"key_ciphertext",
",",
")"
] |
Serializes EncryptedData into a Wrapped EncryptedDataKey.
:param key_provider: Info for Wrapping MasterKey
:type key_provider: aws_encryption_sdk.structures.MasterKeyInfo
:param wrapping_algorithm: Wrapping Algorithm with which to wrap plaintext_data_key
:type wrapping_algorithm: aws_encryption_sdk.identifiers.WrappingAlgorithm
:param bytes wrapping_key_id: Key ID of wrapping MasterKey
:param encrypted_wrapped_key: Encrypted data key
:type encrypted_wrapped_key: aws_encryption_sdk.internal.structures.EncryptedData
:returns: Wrapped EncryptedDataKey
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
|
[
"Serializes",
"EncryptedData",
"into",
"a",
"Wrapped",
"EncryptedDataKey",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/serialize.py#L291-L321
|
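The serialize_wrapped_key entry above builds the symmetric-case key_info as the wrapping key id, the tag length in bits, the IV length and the IV itself. A standalone sketch of that packing with assumed sizes:

import struct

# Wrapped-key key_info layout for the symmetric branch; sizes are assumptions.
wrapping_key_id = b"example-raw-key"
iv = b"\x00" * 12
tag = b"\xcc" * 16
key_info = struct.pack(
    ">{key_id_len}sII{iv_len}s".format(key_id_len=len(wrapping_key_id), iv_len=len(iv)),
    wrapping_key_id,
    len(tag) * 8,  # tag length is stored in bits, not bytes
    len(iv),
    iv,
)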
15,476
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/encryption_context.py
|
assemble_content_aad
|
def assemble_content_aad(message_id, aad_content_string, seq_num, length):
"""Assembles the Body AAD string for a message body structure.
:param message_id: Message ID
:type message_id: str
:param aad_content_string: ContentAADString object for frame type
:type aad_content_string: aws_encryption_sdk.identifiers.ContentAADString
:param seq_num: Sequence number of frame
:type seq_num: int
:param length: Content Length
:type length: int
:returns: Properly formatted AAD bytes for message body structure.
:rtype: bytes
:raises SerializationError: if aad_content_string is not known
"""
if not isinstance(aad_content_string, aws_encryption_sdk.identifiers.ContentAADString):
raise SerializationError("Unknown aad_content_string")
fmt = ">16s{}sIQ".format(len(aad_content_string.value))
return struct.pack(fmt, message_id, aad_content_string.value, seq_num, length)
|
python
|
def assemble_content_aad(message_id, aad_content_string, seq_num, length):
"""Assembles the Body AAD string for a message body structure.
:param message_id: Message ID
:type message_id: str
:param aad_content_string: ContentAADString object for frame type
:type aad_content_string: aws_encryption_sdk.identifiers.ContentAADString
:param seq_num: Sequence number of frame
:type seq_num: int
:param length: Content Length
:type length: int
:returns: Properly formatted AAD bytes for message body structure.
:rtype: bytes
:raises SerializationError: if aad_content_string is not known
"""
if not isinstance(aad_content_string, aws_encryption_sdk.identifiers.ContentAADString):
raise SerializationError("Unknown aad_content_string")
fmt = ">16s{}sIQ".format(len(aad_content_string.value))
return struct.pack(fmt, message_id, aad_content_string.value, seq_num, length)
|
[
"def",
"assemble_content_aad",
"(",
"message_id",
",",
"aad_content_string",
",",
"seq_num",
",",
"length",
")",
":",
"if",
"not",
"isinstance",
"(",
"aad_content_string",
",",
"aws_encryption_sdk",
".",
"identifiers",
".",
"ContentAADString",
")",
":",
"raise",
"SerializationError",
"(",
"\"Unknown aad_content_string\"",
")",
"fmt",
"=",
"\">16s{}sIQ\"",
".",
"format",
"(",
"len",
"(",
"aad_content_string",
".",
"value",
")",
")",
"return",
"struct",
".",
"pack",
"(",
"fmt",
",",
"message_id",
",",
"aad_content_string",
".",
"value",
",",
"seq_num",
",",
"length",
")"
] |
Assembles the Body AAD string for a message body structure.
:param message_id: Message ID
:type message_id: str
:param aad_content_string: ContentAADString object for frame type
:type aad_content_string: aws_encryption_sdk.identifiers.ContentAADString
:param seq_num: Sequence number of frame
:type seq_num: int
:param length: Content Length
:type length: int
:returns: Properly formatted AAD bytes for message body structure.
:rtype: bytes
:raises SerializationError: if aad_content_string is not known
|
[
"Assembles",
"the",
"Body",
"AAD",
"string",
"for",
"a",
"message",
"body",
"structure",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/encryption_context.py#L29-L47
|
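The assemble_content_aad entry above packs the message id, the frame-type content string, the sequence number and the content length into the body AAD. A standalone sketch of the same struct layout (the content string here is a placeholder, not the SDK's constant):

import struct

# Body AAD layout: 16-byte message id, content string, 4-byte seq num, 8-byte length.
message_id = b"\x00" * 16
aad_content_string = b"ExampleClient Frame"  # placeholder value for illustration
aad = struct.pack(
    ">16s{}sIQ".format(len(aad_content_string)),
    message_id,
    aad_content_string,
    1,     # frame sequence number
    4096,  # content length for this frame
)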
15,477
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/encryption_context.py
|
serialize_encryption_context
|
def serialize_encryption_context(encryption_context):
"""Serializes the contents of a dictionary into a byte string.
:param dict encryption_context: Dictionary of encryption context keys/values.
:returns: Serialized encryption context
:rtype: bytes
"""
if not encryption_context:
return bytes()
serialized_context = bytearray()
dict_size = len(encryption_context)
if dict_size > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
raise SerializationError("The encryption context contains too many elements.")
serialized_context.extend(struct.pack(">H", dict_size))
# Encode strings first to catch bad values.
encryption_context_list = []
for key, value in encryption_context.items():
try:
if isinstance(key, bytes):
key = codecs.decode(key)
if isinstance(value, bytes):
value = codecs.decode(value)
encryption_context_list.append(
(aws_encryption_sdk.internal.str_ops.to_bytes(key), aws_encryption_sdk.internal.str_ops.to_bytes(value))
)
except Exception:
raise SerializationError(
"Cannot encode dictionary key or value using {}.".format(aws_encryption_sdk.internal.defaults.ENCODING)
)
for key, value in sorted(encryption_context_list, key=lambda x: x[0]):
serialized_context.extend(
struct.pack(
">H{key_size}sH{value_size}s".format(key_size=len(key), value_size=len(value)),
len(key),
key,
len(value),
value,
)
)
if len(serialized_context) > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
raise SerializationError("The serialized context is too large.")
return bytes(serialized_context)
|
python
|
def serialize_encryption_context(encryption_context):
"""Serializes the contents of a dictionary into a byte string.
:param dict encryption_context: Dictionary of encryption context keys/values.
:returns: Serialized encryption context
:rtype: bytes
"""
if not encryption_context:
return bytes()
serialized_context = bytearray()
dict_size = len(encryption_context)
if dict_size > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
raise SerializationError("The encryption context contains too many elements.")
serialized_context.extend(struct.pack(">H", dict_size))
# Encode strings first to catch bad values.
encryption_context_list = []
for key, value in encryption_context.items():
try:
if isinstance(key, bytes):
key = codecs.decode(key)
if isinstance(value, bytes):
value = codecs.decode(value)
encryption_context_list.append(
(aws_encryption_sdk.internal.str_ops.to_bytes(key), aws_encryption_sdk.internal.str_ops.to_bytes(value))
)
except Exception:
raise SerializationError(
"Cannot encode dictionary key or value using {}.".format(aws_encryption_sdk.internal.defaults.ENCODING)
)
for key, value in sorted(encryption_context_list, key=lambda x: x[0]):
serialized_context.extend(
struct.pack(
">H{key_size}sH{value_size}s".format(key_size=len(key), value_size=len(value)),
len(key),
key,
len(value),
value,
)
)
if len(serialized_context) > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
raise SerializationError("The serialized context is too large.")
return bytes(serialized_context)
|
[
"def",
"serialize_encryption_context",
"(",
"encryption_context",
")",
":",
"if",
"not",
"encryption_context",
":",
"return",
"bytes",
"(",
")",
"serialized_context",
"=",
"bytearray",
"(",
")",
"dict_size",
"=",
"len",
"(",
"encryption_context",
")",
"if",
"dict_size",
">",
"aws_encryption_sdk",
".",
"internal",
".",
"defaults",
".",
"MAX_BYTE_ARRAY_SIZE",
":",
"raise",
"SerializationError",
"(",
"\"The encryption context contains too many elements.\"",
")",
"serialized_context",
".",
"extend",
"(",
"struct",
".",
"pack",
"(",
"\">H\"",
",",
"dict_size",
")",
")",
"# Encode strings first to catch bad values.",
"encryption_context_list",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"encryption_context",
".",
"items",
"(",
")",
":",
"try",
":",
"if",
"isinstance",
"(",
"key",
",",
"bytes",
")",
":",
"key",
"=",
"codecs",
".",
"decode",
"(",
"key",
")",
"if",
"isinstance",
"(",
"value",
",",
"bytes",
")",
":",
"value",
"=",
"codecs",
".",
"decode",
"(",
"value",
")",
"encryption_context_list",
".",
"append",
"(",
"(",
"aws_encryption_sdk",
".",
"internal",
".",
"str_ops",
".",
"to_bytes",
"(",
"key",
")",
",",
"aws_encryption_sdk",
".",
"internal",
".",
"str_ops",
".",
"to_bytes",
"(",
"value",
")",
")",
")",
"except",
"Exception",
":",
"raise",
"SerializationError",
"(",
"\"Cannot encode dictionary key or value using {}.\"",
".",
"format",
"(",
"aws_encryption_sdk",
".",
"internal",
".",
"defaults",
".",
"ENCODING",
")",
")",
"for",
"key",
",",
"value",
"in",
"sorted",
"(",
"encryption_context_list",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
")",
":",
"serialized_context",
".",
"extend",
"(",
"struct",
".",
"pack",
"(",
"\">H{key_size}sH{value_size}s\"",
".",
"format",
"(",
"key_size",
"=",
"len",
"(",
"key",
")",
",",
"value_size",
"=",
"len",
"(",
"value",
")",
")",
",",
"len",
"(",
"key",
")",
",",
"key",
",",
"len",
"(",
"value",
")",
",",
"value",
",",
")",
")",
"if",
"len",
"(",
"serialized_context",
")",
">",
"aws_encryption_sdk",
".",
"internal",
".",
"defaults",
".",
"MAX_BYTE_ARRAY_SIZE",
":",
"raise",
"SerializationError",
"(",
"\"The serialized context is too large.\"",
")",
"return",
"bytes",
"(",
"serialized_context",
")"
] |
Serializes the contents of a dictionary into a byte string.
:param dict encryption_context: Dictionary of encryption context keys/values.
:returns: Serialized encryption context
:rtype: bytes
|
[
"Serializes",
"the",
"contents",
"of",
"a",
"dictionary",
"into",
"a",
"byte",
"string",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/encryption_context.py#L50-L96
|
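The serialize_encryption_context entry above writes a 2-byte pair count followed by length-prefixed, key-sorted UTF-8 pairs. A stdlib-only sketch of that wire format, omitting the SDK's size checks:

import struct

# Encryption-context wire format: pair count, then (len, key, len, value) per pair,
# with pairs sorted by key. Size limits from the SDK are omitted in this sketch.
context = {"purpose": "test", "department": "research"}
pairs = sorted((k.encode("utf-8"), v.encode("utf-8")) for k, v in context.items())
out = bytearray(struct.pack(">H", len(pairs)))
for key, value in pairs:
    out.extend(
        struct.pack(">H{}sH{}s".format(len(key), len(value)), len(key), key, len(value), value)
    )
serialized = bytes(out)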
15,478
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/encryption_context.py
|
read_short
|
def read_short(source, offset):
"""Reads a number from a byte array.
:param bytes source: Source byte string
:param int offset: Point in byte string to start reading
:returns: Read number and offset at point after read data
:rtype: tuple of ints
:raises SerializationError: if unable to unpack
"""
try:
(short,) = struct.unpack_from(">H", source, offset)
return short, offset + struct.calcsize(">H")
except struct.error:
raise SerializationError("Bad format of serialized context.")
|
python
|
def read_short(source, offset):
"""Reads a number from a byte array.
:param bytes source: Source byte string
:param int offset: Point in byte string to start reading
:returns: Read number and offset at point after read data
:rtype: tuple of ints
:raises SerializationError: if unable to unpack
"""
try:
(short,) = struct.unpack_from(">H", source, offset)
return short, offset + struct.calcsize(">H")
except struct.error:
raise SerializationError("Bad format of serialized context.")
|
[
"def",
"read_short",
"(",
"source",
",",
"offset",
")",
":",
"try",
":",
"(",
"short",
",",
")",
"=",
"struct",
".",
"unpack_from",
"(",
"\">H\"",
",",
"source",
",",
"offset",
")",
"return",
"short",
",",
"offset",
"+",
"struct",
".",
"calcsize",
"(",
"\">H\"",
")",
"except",
"struct",
".",
"error",
":",
"raise",
"SerializationError",
"(",
"\"Bad format of serialized context.\"",
")"
] |
Reads a number from a byte array.
:param bytes source: Source byte string
:param int offset: Point in byte string to start reading
:returns: Read number and offset at point after read data
:rtype: tuple of ints
:raises SerializationError: if unable to unpack
|
[
"Reads",
"a",
"number",
"from",
"a",
"byte",
"array",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/encryption_context.py#L99-L112
|
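A short usage sketch of the read_short pattern above, tracking the offset the same way:

import struct

# Unpack one big-endian unsigned short and advance the offset past it.
source = struct.pack(">HH", 2, 513)
offset = 0
(short,) = struct.unpack_from(">H", source, offset)
offset += struct.calcsize(">H")
print(short, offset)  # 2 2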
15,479
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/encryption_context.py
|
read_string
|
def read_string(source, offset, length):
"""Reads a string from a byte string.
:param bytes source: Source byte string
:param int offset: Point in byte string to start reading
:param int length: Length of string to read
:returns: Read string and offset at point after read data
:rtype: tuple of str and int
:raises SerializationError: if unable to unpack
"""
end = offset + length
try:
return (codecs.decode(source[offset:end], aws_encryption_sdk.internal.defaults.ENCODING), end)
except Exception:
raise SerializationError("Bad format of serialized context.")
|
python
|
def read_string(source, offset, length):
"""Reads a string from a byte string.
:param bytes source: Source byte string
:param int offset: Point in byte string to start reading
:param int length: Length of string to read
:returns: Read string and offset at point after read data
:rtype: tuple of str and int
:raises SerializationError: if unable to unpack
"""
end = offset + length
try:
return (codecs.decode(source[offset:end], aws_encryption_sdk.internal.defaults.ENCODING), end)
except Exception:
raise SerializationError("Bad format of serialized context.")
|
[
"def",
"read_string",
"(",
"source",
",",
"offset",
",",
"length",
")",
":",
"end",
"=",
"offset",
"+",
"length",
"try",
":",
"return",
"(",
"codecs",
".",
"decode",
"(",
"source",
"[",
"offset",
":",
"end",
"]",
",",
"aws_encryption_sdk",
".",
"internal",
".",
"defaults",
".",
"ENCODING",
")",
",",
"end",
")",
"except",
"Exception",
":",
"raise",
"SerializationError",
"(",
"\"Bad format of serialized context.\"",
")"
] |
Reads a string from a byte string.
:param bytes source: Source byte string
:param int offset: Point in byte string to start reading
:param int length: Length of string to read
:returns: Read string and offset at point after read data
:rtype: tuple of str and int
:raises SerializationError: if unable to unpack
|
[
"Reads",
"a",
"string",
"from",
"a",
"byte",
"string",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/encryption_context.py#L115-L129
|
15,480
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/encryption_context.py
|
deserialize_encryption_context
|
def deserialize_encryption_context(serialized_encryption_context):
"""Deserializes the contents of a byte string into a dictionary.
:param bytes serialized_encryption_context: Source byte string containing serialized dictionary
:returns: Deserialized encryption context
:rtype: dict
:raises SerializationError: if serialized encryption context is too large
:raises SerializationError: if duplicate key found in serialized encryption context
:raises SerializationError: if malformed data found in serialized encryption context
"""
if len(serialized_encryption_context) > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
raise SerializationError("Serialized context is too long.")
if serialized_encryption_context == b"":
_LOGGER.debug("No encryption context data found")
return {}
deserialized_size = 0
encryption_context = {}
dict_size, deserialized_size = read_short(source=serialized_encryption_context, offset=deserialized_size)
_LOGGER.debug("Found %d keys", dict_size)
for _ in range(dict_size):
key_size, deserialized_size = read_short(source=serialized_encryption_context, offset=deserialized_size)
key, deserialized_size = read_string(
source=serialized_encryption_context, offset=deserialized_size, length=key_size
)
value_size, deserialized_size = read_short(source=serialized_encryption_context, offset=deserialized_size)
value, deserialized_size = read_string(
source=serialized_encryption_context, offset=deserialized_size, length=value_size
)
if key in encryption_context:
raise SerializationError("Duplicate key in serialized context.")
encryption_context[key] = value
if deserialized_size != len(serialized_encryption_context):
raise SerializationError("Formatting error: Extra data in serialized context.")
return encryption_context
|
python
|
def deserialize_encryption_context(serialized_encryption_context):
"""Deserializes the contents of a byte string into a dictionary.
:param bytes serialized_encryption_context: Source byte string containing serialized dictionary
:returns: Deserialized encryption context
:rtype: dict
:raises SerializationError: if serialized encryption context is too large
:raises SerializationError: if duplicate key found in serialized encryption context
:raises SerializationError: if malformed data found in serialized encryption context
"""
if len(serialized_encryption_context) > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
raise SerializationError("Serialized context is too long.")
if serialized_encryption_context == b"":
_LOGGER.debug("No encryption context data found")
return {}
deserialized_size = 0
encryption_context = {}
dict_size, deserialized_size = read_short(source=serialized_encryption_context, offset=deserialized_size)
_LOGGER.debug("Found %d keys", dict_size)
for _ in range(dict_size):
key_size, deserialized_size = read_short(source=serialized_encryption_context, offset=deserialized_size)
key, deserialized_size = read_string(
source=serialized_encryption_context, offset=deserialized_size, length=key_size
)
value_size, deserialized_size = read_short(source=serialized_encryption_context, offset=deserialized_size)
value, deserialized_size = read_string(
source=serialized_encryption_context, offset=deserialized_size, length=value_size
)
if key in encryption_context:
raise SerializationError("Duplicate key in serialized context.")
encryption_context[key] = value
if deserialized_size != len(serialized_encryption_context):
raise SerializationError("Formatting error: Extra data in serialized context.")
return encryption_context
|
[
"def",
"deserialize_encryption_context",
"(",
"serialized_encryption_context",
")",
":",
"if",
"len",
"(",
"serialized_encryption_context",
")",
">",
"aws_encryption_sdk",
".",
"internal",
".",
"defaults",
".",
"MAX_BYTE_ARRAY_SIZE",
":",
"raise",
"SerializationError",
"(",
"\"Serialized context is too long.\"",
")",
"if",
"serialized_encryption_context",
"==",
"b\"\"",
":",
"_LOGGER",
".",
"debug",
"(",
"\"No encryption context data found\"",
")",
"return",
"{",
"}",
"deserialized_size",
"=",
"0",
"encryption_context",
"=",
"{",
"}",
"dict_size",
",",
"deserialized_size",
"=",
"read_short",
"(",
"source",
"=",
"serialized_encryption_context",
",",
"offset",
"=",
"deserialized_size",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Found %d keys\"",
",",
"dict_size",
")",
"for",
"_",
"in",
"range",
"(",
"dict_size",
")",
":",
"key_size",
",",
"deserialized_size",
"=",
"read_short",
"(",
"source",
"=",
"serialized_encryption_context",
",",
"offset",
"=",
"deserialized_size",
")",
"key",
",",
"deserialized_size",
"=",
"read_string",
"(",
"source",
"=",
"serialized_encryption_context",
",",
"offset",
"=",
"deserialized_size",
",",
"length",
"=",
"key_size",
")",
"value_size",
",",
"deserialized_size",
"=",
"read_short",
"(",
"source",
"=",
"serialized_encryption_context",
",",
"offset",
"=",
"deserialized_size",
")",
"value",
",",
"deserialized_size",
"=",
"read_string",
"(",
"source",
"=",
"serialized_encryption_context",
",",
"offset",
"=",
"deserialized_size",
",",
"length",
"=",
"value_size",
")",
"if",
"key",
"in",
"encryption_context",
":",
"raise",
"SerializationError",
"(",
"\"Duplicate key in serialized context.\"",
")",
"encryption_context",
"[",
"key",
"]",
"=",
"value",
"if",
"deserialized_size",
"!=",
"len",
"(",
"serialized_encryption_context",
")",
":",
"raise",
"SerializationError",
"(",
"\"Formatting error: Extra data in serialized context.\"",
")",
"return",
"encryption_context"
] |
Deserializes the contents of a byte string into a dictionary.
:param bytes serialized_encryption_context: Source byte string containing serialized dictionary
:returns: Deserialized encryption context
:rtype: dict
:raises SerializationError: if serialized encryption context is too large
:raises SerializationError: if duplicate key found in serialized encryption context
:raises SerializationError: if malformed data found in serialized encryption context
|
[
"Deserializes",
"the",
"contents",
"of",
"a",
"byte",
"string",
"into",
"a",
"dictionary",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/encryption_context.py#L132-L170
|
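A stdlib-only round-trip sketch of the context format parsed by deserialize_encryption_context above, using the same length-prefixed layout and offset tracking (no SDK imports):

import struct

# Serialize one key/value pair, then read it back with offset tracking.
key, value = b"purpose", b"test"
blob = struct.pack(">HH{}sH{}s".format(len(key), len(value)), 1, len(key), key, len(value), value)

offset = 0
(count,) = struct.unpack_from(">H", blob, offset)
offset += 2
context = {}
for _ in range(count):
    (key_size,) = struct.unpack_from(">H", blob, offset)
    offset += 2
    k = blob[offset:offset + key_size].decode("utf-8")
    offset += key_size
    (value_size,) = struct.unpack_from(">H", blob, offset)
    offset += 2
    v = blob[offset:offset + value_size].decode("utf-8")
    offset += value_size
    context[k] = v
assert context == {"purpose": "test"}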
15,481
|
aws/aws-encryption-sdk-python
|
decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/key_providers/null.py
|
NullMasterKey.owns_data_key
|
def owns_data_key(self, data_key: DataKey) -> bool:
"""Determine whether the data key is owned by a ``null`` or ``zero`` provider.
:param data_key: Data key to evaluate
:type data_key: :class:`aws_encryption_sdk.structures.DataKey`,
:class:`aws_encryption_sdk.structures.RawDataKey`,
or :class:`aws_encryption_sdk.structures.EncryptedDataKey`
:returns: Boolean statement of ownership
:rtype: bool
"""
return data_key.key_provider.provider_id in self._allowed_provider_ids
|
python
|
def owns_data_key(self, data_key: DataKey) -> bool:
"""Determine whether the data key is owned by a ``null`` or ``zero`` provider.
:param data_key: Data key to evaluate
:type data_key: :class:`aws_encryption_sdk.structures.DataKey`,
:class:`aws_encryption_sdk.structures.RawDataKey`,
or :class:`aws_encryption_sdk.structures.EncryptedDataKey`
:returns: Boolean statement of ownership
:rtype: bool
"""
return data_key.key_provider.provider_id in self._allowed_provider_ids
|
[
"def",
"owns_data_key",
"(",
"self",
",",
"data_key",
":",
"DataKey",
")",
"->",
"bool",
":",
"return",
"data_key",
".",
"key_provider",
".",
"provider_id",
"in",
"self",
".",
"_allowed_provider_ids"
] |
Determine whether the data key is owned by a ``null`` or ``zero`` provider.
:param data_key: Data key to evaluate
:type data_key: :class:`aws_encryption_sdk.structures.DataKey`,
:class:`aws_encryption_sdk.structures.RawDataKey`,
or :class:`aws_encryption_sdk.structures.EncryptedDataKey`
:returns: Boolean statement of ownership
:rtype: bool
|
[
"Determine",
"whether",
"the",
"data",
"key",
"is",
"owned",
"by",
"a",
"null",
"or",
"zero",
"provider",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/decrypt_oracle/src/aws_encryption_sdk_decrypt_oracle/key_providers/null.py#L46-L56
|
15,482
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/crypto/iv.py
|
frame_iv
|
def frame_iv(algorithm, sequence_number):
"""Builds the deterministic IV for a body frame.
:param algorithm: Algorithm for which to build IV
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param int sequence_number: Frame sequence number
:returns: Generated IV
:rtype: bytes
:raises ActionNotAllowedError: if sequence number is out of bounds
"""
if sequence_number < 1 or sequence_number > MAX_FRAME_COUNT:
raise ActionNotAllowedError(
"Invalid frame sequence number: {actual}\nMust be between 1 and {max}".format(
actual=sequence_number, max=MAX_FRAME_COUNT
)
)
prefix_len = algorithm.iv_len - 4
prefix = b"\x00" * prefix_len
return prefix + struct.pack(">I", sequence_number)
|
python
|
def frame_iv(algorithm, sequence_number):
"""Builds the deterministic IV for a body frame.
:param algorithm: Algorithm for which to build IV
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param int sequence_number: Frame sequence number
:returns: Generated IV
:rtype: bytes
:raises ActionNotAllowedError: if sequence number is out of bounds
"""
if sequence_number < 1 or sequence_number > MAX_FRAME_COUNT:
raise ActionNotAllowedError(
"Invalid frame sequence number: {actual}\nMust be between 1 and {max}".format(
actual=sequence_number, max=MAX_FRAME_COUNT
)
)
prefix_len = algorithm.iv_len - 4
prefix = b"\x00" * prefix_len
return prefix + struct.pack(">I", sequence_number)
|
[
"def",
"frame_iv",
"(",
"algorithm",
",",
"sequence_number",
")",
":",
"if",
"sequence_number",
"<",
"1",
"or",
"sequence_number",
">",
"MAX_FRAME_COUNT",
":",
"raise",
"ActionNotAllowedError",
"(",
"\"Invalid frame sequence number: {actual}\\nMust be between 1 and {max}\"",
".",
"format",
"(",
"actual",
"=",
"sequence_number",
",",
"max",
"=",
"MAX_FRAME_COUNT",
")",
")",
"prefix_len",
"=",
"algorithm",
".",
"iv_len",
"-",
"4",
"prefix",
"=",
"b\"\\x00\"",
"*",
"prefix_len",
"return",
"prefix",
"+",
"struct",
".",
"pack",
"(",
"\">I\"",
",",
"sequence_number",
")"
] |
Builds the deterministic IV for a body frame.
:param algorithm: Algorithm for which to build IV
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param int sequence_number: Frame sequence number
:returns: Generated IV
:rtype: bytes
:raises ActionNotAllowedError: if sequence number is out of bounds
|
[
"Builds",
"the",
"deterministic",
"IV",
"for",
"a",
"body",
"frame",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/iv.py#L46-L64
|
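The frame_iv entry above derives the IV deterministically from the frame sequence number. A standalone sketch (iv_len=12 is an assumption matching common AES-GCM suites):

import struct

# Deterministic frame IV: zero padding up to iv_len, ending with the 4-byte
# big-endian frame sequence number. iv_len=12 is an assumed value.
iv_len = 12
sequence_number = 3
iv = b"\x00" * (iv_len - 4) + struct.pack(">I", sequence_number)
assert iv == b"\x00" * 8 + b"\x00\x00\x00\x03"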
15,483
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/identifiers.py
|
EncryptionSuite.valid_kdf
|
def valid_kdf(self, kdf):
"""Determine whether a KDFSuite can be used with this EncryptionSuite.
:param kdf: KDFSuite to evaluate
:type kdf: aws_encryption_sdk.identifiers.KDFSuite
:rtype: bool
"""
if kdf.input_length is None:
return True
if self.data_key_length > kdf.input_length(self):
raise InvalidAlgorithmError(
"Invalid Algorithm definition: data_key_len must not be greater than kdf_input_len"
)
return True
|
python
|
def valid_kdf(self, kdf):
"""Determine whether a KDFSuite can be used with this EncryptionSuite.
:param kdf: KDFSuite to evaluate
:type kdf: aws_encryption_sdk.identifiers.KDFSuite
:rtype: bool
"""
if kdf.input_length is None:
return True
if self.data_key_length > kdf.input_length(self):
raise InvalidAlgorithmError(
"Invalid Algorithm definition: data_key_len must not be greater than kdf_input_len"
)
return True
|
[
"def",
"valid_kdf",
"(",
"self",
",",
"kdf",
")",
":",
"if",
"kdf",
".",
"input_length",
"is",
"None",
":",
"return",
"True",
"if",
"self",
".",
"data_key_length",
">",
"kdf",
".",
"input_length",
"(",
"self",
")",
":",
"raise",
"InvalidAlgorithmError",
"(",
"\"Invalid Algorithm definition: data_key_len must not be greater than kdf_input_len\"",
")",
"return",
"True"
] |
Determine whether a KDFSuite can be used with this EncryptionSuite.
:param kdf: KDFSuite to evaluate
:type kdf: aws_encryption_sdk.identifiers.KDFSuite
:rtype: bool
|
[
"Determine",
"whether",
"a",
"KDFSuite",
"can",
"be",
"used",
"with",
"this",
"EncryptionSuite",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/identifiers.py#L63-L78
|
15,484
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/__init__.py
|
header_length
|
def header_length(header):
"""Calculates the ciphertext message header length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
"""
# Because encrypted data key lengths may not be knowable until the ciphertext
# is received from the providers, just serialize the header directly.
header_length = len(serialize_header(header))
header_length += header.algorithm.iv_len # Header Authentication IV
header_length += header.algorithm.auth_len # Header Authentication Tag
return header_length
|
python
|
def header_length(header):
"""Calculates the ciphertext message header length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
"""
# Because encrypted data key lengths may not be knowable until the ciphertext
# is received from the providers, just serialize the header directly.
header_length = len(serialize_header(header))
header_length += header.algorithm.iv_len # Header Authentication IV
header_length += header.algorithm.auth_len # Header Authentication Tag
return header_length
|
[
"def",
"header_length",
"(",
"header",
")",
":",
"# Because encrypted data key lengths may not be knowable until the ciphertext",
"# is received from the providers, just serialize the header directly.",
"header_length",
"=",
"len",
"(",
"serialize_header",
"(",
"header",
")",
")",
"header_length",
"+=",
"header",
".",
"algorithm",
".",
"iv_len",
"# Header Authentication IV",
"header_length",
"+=",
"header",
".",
"algorithm",
".",
"auth_len",
"# Header Authentication Tag",
"return",
"header_length"
] |
Calculates the ciphertext message header length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
|
[
"Calculates",
"the",
"ciphertext",
"message",
"header",
"length",
"given",
"a",
"complete",
"header",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/__init__.py#L17-L29
|
15,485
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/__init__.py
|
_non_framed_body_length
|
def _non_framed_body_length(header, plaintext_length):
"""Calculates the length of a non-framed message body, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
"""
body_length = header.algorithm.iv_len # IV
body_length += 8 # Encrypted Content Length
body_length += plaintext_length # Encrypted Content
body_length += header.algorithm.auth_len # Authentication Tag
return body_length
|
python
|
def _non_framed_body_length(header, plaintext_length):
"""Calculates the length of a non-framed message body, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
"""
body_length = header.algorithm.iv_len # IV
body_length += 8 # Encrypted Content Length
body_length += plaintext_length # Encrypted Content
body_length += header.algorithm.auth_len # Authentication Tag
return body_length
|
[
"def",
"_non_framed_body_length",
"(",
"header",
",",
"plaintext_length",
")",
":",
"body_length",
"=",
"header",
".",
"algorithm",
".",
"iv_len",
"# IV",
"body_length",
"+=",
"8",
"# Encrypted Content Length",
"body_length",
"+=",
"plaintext_length",
"# Encrypted Content",
"body_length",
"+=",
"header",
".",
"algorithm",
".",
"auth_len",
"# Authentication Tag",
"return",
"body_length"
] |
Calculates the length of a non-framed message body, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
|
[
"Calculates",
"the",
"length",
"of",
"a",
"non",
"-",
"framed",
"message",
"body",
"given",
"a",
"complete",
"header",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/__init__.py#L32-L44
|
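A worked example of the _non_framed_body_length arithmetic above, with assumed algorithm sizes:

# Assumes a 12-byte IV and a 16-byte authentication tag (typical AES-GCM values).
iv_len, auth_len, plaintext_length = 12, 16, 1000
body_length = iv_len + 8 + plaintext_length + auth_len
print(body_length)  # 1036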
15,486
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/__init__.py
|
_standard_frame_length
|
def _standard_frame_length(header):
"""Calculates the length of a standard ciphertext frame, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
"""
frame_length = 4 # Sequence Number
frame_length += header.algorithm.iv_len # IV
frame_length += header.frame_length # Encrypted Content
frame_length += header.algorithm.auth_len # Authentication Tag
return frame_length
|
python
|
def _standard_frame_length(header):
"""Calculates the length of a standard ciphertext frame, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
"""
frame_length = 4 # Sequence Number
frame_length += header.algorithm.iv_len # IV
frame_length += header.frame_length # Encrypted Content
frame_length += header.algorithm.auth_len # Authentication Tag
return frame_length
|
[
"def",
"_standard_frame_length",
"(",
"header",
")",
":",
"frame_length",
"=",
"4",
"# Sequence Number",
"frame_length",
"+=",
"header",
".",
"algorithm",
".",
"iv_len",
"# IV",
"frame_length",
"+=",
"header",
".",
"frame_length",
"# Encrypted Content",
"frame_length",
"+=",
"header",
".",
"algorithm",
".",
"auth_len",
"# Authentication Tag",
"return",
"frame_length"
] |
Calculates the length of a standard ciphertext frame, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
|
[
"Calculates",
"the",
"length",
"of",
"a",
"standard",
"ciphertext",
"frame",
"given",
"a",
"complete",
"header",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/__init__.py#L47-L58
|
15,487
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/__init__.py
|
_final_frame_length
|
def _final_frame_length(header, final_frame_bytes):
"""Calculates the length of a final ciphertext frame, given a complete header
and the number of bytes of ciphertext in the final frame.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int final_frame_bytes: Bytes of ciphertext in the final frame
:rtype: int
"""
final_frame_length = 4 # Sequence Number End
final_frame_length += 4 # Sequence Number
final_frame_length += header.algorithm.iv_len # IV
final_frame_length += 4 # Encrypted Content Length
final_frame_length += final_frame_bytes # Encrypted Content
final_frame_length += header.algorithm.auth_len # Authentication Tag
return final_frame_length
|
python
|
def _final_frame_length(header, final_frame_bytes):
"""Calculates the length of a final ciphertext frame, given a complete header
and the number of bytes of ciphertext in the final frame.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int final_frame_bytes: Bytes of ciphertext in the final frame
:rtype: int
"""
final_frame_length = 4 # Sequence Number End
final_frame_length += 4 # Sequence Number
final_frame_length += header.algorithm.iv_len # IV
final_frame_length += 4 # Encrypted Content Length
final_frame_length += final_frame_bytes # Encrypted Content
final_frame_length += header.algorithm.auth_len # Authentication Tag
return final_frame_length
|
[
"def",
"_final_frame_length",
"(",
"header",
",",
"final_frame_bytes",
")",
":",
"final_frame_length",
"=",
"4",
"# Sequence Number End",
"final_frame_length",
"+=",
"4",
"# Sequence Number",
"final_frame_length",
"+=",
"header",
".",
"algorithm",
".",
"iv_len",
"# IV",
"final_frame_length",
"+=",
"4",
"# Encrypted Content Length",
"final_frame_length",
"+=",
"final_frame_bytes",
"# Encrypted Content",
"final_frame_length",
"+=",
"header",
".",
"algorithm",
".",
"auth_len",
"# Authentication Tag",
"return",
"final_frame_length"
] |
Calculates the length of a final ciphertext frame, given a complete header
and the number of bytes of ciphertext in the final frame.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int final_frame_bytes: Bytes of ciphertext in the final frame
:rtype: int
|
[
"Calculates",
"the",
"length",
"of",
"a",
"final",
"ciphertext",
"frame",
"given",
"a",
"complete",
"header",
"and",
"the",
"number",
"of",
"bytes",
"of",
"ciphertext",
"in",
"the",
"final",
"frame",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/__init__.py#L61-L76
|
15,488
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/__init__.py
|
body_length
|
def body_length(header, plaintext_length):
"""Calculates the ciphertext message body length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
"""
body_length = 0
if header.frame_length == 0: # Non-framed
body_length += _non_framed_body_length(header, plaintext_length)
else: # Framed
frames, final_frame_bytes = divmod(plaintext_length, header.frame_length)
body_length += frames * _standard_frame_length(header)
body_length += _final_frame_length(header, final_frame_bytes) # Final frame is always written
return body_length
|
python
|
def body_length(header, plaintext_length):
"""Calculates the ciphertext message body length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
"""
body_length = 0
if header.frame_length == 0: # Non-framed
body_length += _non_framed_body_length(header, plaintext_length)
else: # Framed
frames, final_frame_bytes = divmod(plaintext_length, header.frame_length)
body_length += frames * _standard_frame_length(header)
body_length += _final_frame_length(header, final_frame_bytes) # Final frame is always written
return body_length
|
[
"def",
"body_length",
"(",
"header",
",",
"plaintext_length",
")",
":",
"body_length",
"=",
"0",
"if",
"header",
".",
"frame_length",
"==",
"0",
":",
"# Non-framed",
"body_length",
"+=",
"_non_framed_body_length",
"(",
"header",
",",
"plaintext_length",
")",
"else",
":",
"# Framed",
"frames",
",",
"final_frame_bytes",
"=",
"divmod",
"(",
"plaintext_length",
",",
"header",
".",
"frame_length",
")",
"body_length",
"+=",
"frames",
"*",
"_standard_frame_length",
"(",
"header",
")",
"body_length",
"+=",
"_final_frame_length",
"(",
"header",
",",
"final_frame_bytes",
")",
"# Final frame is always written",
"return",
"body_length"
] |
Calculates the ciphertext message body length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
|
[
"Calculates",
"the",
"ciphertext",
"message",
"body",
"length",
"given",
"a",
"complete",
"header",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/__init__.py#L79-L94
|
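A worked example of the framed body_length calculation above, with assumed algorithm sizes and frame length:

# Assumes a 12-byte IV, a 16-byte tag and a 4096-byte frame length.
iv_len, auth_len, frame_length, plaintext_length = 12, 16, 4096, 10000
standard_frame = 4 + iv_len + frame_length + auth_len               # 4128
frames, final_frame_bytes = divmod(plaintext_length, frame_length)  # 2, 1808
final_frame = 4 + 4 + iv_len + 4 + final_frame_bytes + auth_len     # 1848
print(frames * standard_frame + final_frame)  # 10104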
15,489
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/__init__.py
|
footer_length
|
def footer_length(header):
"""Calculates the ciphertext message footer length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
"""
footer_length = 0
if header.algorithm.signing_algorithm_info is not None:
footer_length += 2 # Signature Length
footer_length += header.algorithm.signature_len # Signature
return footer_length
|
python
|
def footer_length(header):
"""Calculates the ciphertext message footer length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
"""
footer_length = 0
if header.algorithm.signing_algorithm_info is not None:
footer_length += 2 # Signature Length
footer_length += header.algorithm.signature_len # Signature
return footer_length
|
[
"def",
"footer_length",
"(",
"header",
")",
":",
"footer_length",
"=",
"0",
"if",
"header",
".",
"algorithm",
".",
"signing_algorithm_info",
"is",
"not",
"None",
":",
"footer_length",
"+=",
"2",
"# Signature Length",
"footer_length",
"+=",
"header",
".",
"algorithm",
".",
"signature_len",
"# Signature",
"return",
"footer_length"
] |
Calculates the ciphertext message footer length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:rtype: int
|
[
"Calculates",
"the",
"ciphertext",
"message",
"footer",
"length",
"given",
"a",
"complete",
"header",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/__init__.py#L97-L108
|
15,490
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/formatting/__init__.py
|
ciphertext_length
|
def ciphertext_length(header, plaintext_length):
"""Calculates the complete ciphertext message length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
"""
ciphertext_length = header_length(header)
ciphertext_length += body_length(header, plaintext_length)
ciphertext_length += footer_length(header)
return ciphertext_length
|
python
|
def ciphertext_length(header, plaintext_length):
"""Calculates the complete ciphertext message length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
"""
ciphertext_length = header_length(header)
ciphertext_length += body_length(header, plaintext_length)
ciphertext_length += footer_length(header)
return ciphertext_length
|
[
"def",
"ciphertext_length",
"(",
"header",
",",
"plaintext_length",
")",
":",
"ciphertext_length",
"=",
"header_length",
"(",
"header",
")",
"ciphertext_length",
"+=",
"body_length",
"(",
"header",
",",
"plaintext_length",
")",
"ciphertext_length",
"+=",
"footer_length",
"(",
"header",
")",
"return",
"ciphertext_length"
] |
Calculates the complete ciphertext message length, given a complete header.
:param header: Complete message header object
:type header: aws_encryption_sdk.structures.MessageHeader
:param int plaintext_length: Length of plaintext in bytes
:rtype: int
|
[
"Calculates",
"the",
"complete",
"ciphertext",
"message",
"length",
"given",
"a",
"complete",
"header",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/formatting/__init__.py#L111-L122
|
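A worked example of the ciphertext_length sum above; the header and footer sizes are placeholder assumptions, since the real header length depends on the serialized header contents and the signing algorithm:

# Total ciphertext length = header + body + footer.
header_len = 150       # assumed serialized header plus header auth IV/tag
body_len = 10104       # framed body length from the earlier sketch
footer_len = 2 + 103   # signature length field plus an assumed signature size
print(header_len + body_len + footer_len)  # 10359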
15,491
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/raw.py
|
RawMasterKey.owns_data_key
|
def owns_data_key(self, data_key):
"""Determines if data_key object is owned by this RawMasterKey.
:param data_key: Data key to evaluate
:type data_key: :class:`aws_encryption_sdk.structures.DataKey`,
:class:`aws_encryption_sdk.structures.RawDataKey`,
or :class:`aws_encryption_sdk.structures.EncryptedDataKey`
:returns: Boolean statement of ownership
:rtype: bool
"""
expected_key_info_len = -1
if (
self.config.wrapping_key.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC
and data_key.key_provider == self.key_provider
):
return True
elif self.config.wrapping_key.wrapping_algorithm.encryption_type is EncryptionType.SYMMETRIC:
expected_key_info_len = (
len(self._key_info_prefix) + self.config.wrapping_key.wrapping_algorithm.algorithm.iv_len
)
if (
data_key.key_provider.provider_id == self.provider_id
and len(data_key.key_provider.key_info) == expected_key_info_len
and data_key.key_provider.key_info.startswith(self._key_info_prefix)
):
return True
_LOGGER.debug(
(
"RawMasterKey does not own data_key: %s\n"
"Expected provider_id: %s\n"
"Expected key_info len: %s\n"
"Expected key_info prefix: %s"
),
data_key,
self.provider_id,
expected_key_info_len,
self._key_info_prefix,
)
return False
|
python
|
def owns_data_key(self, data_key):
"""Determines if data_key object is owned by this RawMasterKey.
:param data_key: Data key to evaluate
:type data_key: :class:`aws_encryption_sdk.structures.DataKey`,
:class:`aws_encryption_sdk.structures.RawDataKey`,
or :class:`aws_encryption_sdk.structures.EncryptedDataKey`
:returns: Boolean statement of ownership
:rtype: bool
"""
expected_key_info_len = -1
if (
self.config.wrapping_key.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC
and data_key.key_provider == self.key_provider
):
return True
elif self.config.wrapping_key.wrapping_algorithm.encryption_type is EncryptionType.SYMMETRIC:
expected_key_info_len = (
len(self._key_info_prefix) + self.config.wrapping_key.wrapping_algorithm.algorithm.iv_len
)
if (
data_key.key_provider.provider_id == self.provider_id
and len(data_key.key_provider.key_info) == expected_key_info_len
and data_key.key_provider.key_info.startswith(self._key_info_prefix)
):
return True
_LOGGER.debug(
(
"RawMasterKey does not own data_key: %s\n"
"Expected provider_id: %s\n"
"Expected key_info len: %s\n"
"Expected key_info prefix: %s"
),
data_key,
self.provider_id,
expected_key_info_len,
self._key_info_prefix,
)
return False
|
[
"def",
"owns_data_key",
"(",
"self",
",",
"data_key",
")",
":",
"expected_key_info_len",
"=",
"-",
"1",
"if",
"(",
"self",
".",
"config",
".",
"wrapping_key",
".",
"wrapping_algorithm",
".",
"encryption_type",
"is",
"EncryptionType",
".",
"ASYMMETRIC",
"and",
"data_key",
".",
"key_provider",
"==",
"self",
".",
"key_provider",
")",
":",
"return",
"True",
"elif",
"self",
".",
"config",
".",
"wrapping_key",
".",
"wrapping_algorithm",
".",
"encryption_type",
"is",
"EncryptionType",
".",
"SYMMETRIC",
":",
"expected_key_info_len",
"=",
"(",
"len",
"(",
"self",
".",
"_key_info_prefix",
")",
"+",
"self",
".",
"config",
".",
"wrapping_key",
".",
"wrapping_algorithm",
".",
"algorithm",
".",
"iv_len",
")",
"if",
"(",
"data_key",
".",
"key_provider",
".",
"provider_id",
"==",
"self",
".",
"provider_id",
"and",
"len",
"(",
"data_key",
".",
"key_provider",
".",
"key_info",
")",
"==",
"expected_key_info_len",
"and",
"data_key",
".",
"key_provider",
".",
"key_info",
".",
"startswith",
"(",
"self",
".",
"_key_info_prefix",
")",
")",
":",
"return",
"True",
"_LOGGER",
".",
"debug",
"(",
"(",
"\"RawMasterKey does not own data_key: %s\\n\"",
"\"Expected provider_id: %s\\n\"",
"\"Expected key_info len: %s\\n\"",
"\"Expected key_info prefix: %s\"",
")",
",",
"data_key",
",",
"self",
".",
"provider_id",
",",
"expected_key_info_len",
",",
"self",
".",
"_key_info_prefix",
",",
")",
"return",
"False"
] |
Determines if data_key object is owned by this RawMasterKey.
:param data_key: Data key to evaluate
:type data_key: :class:`aws_encryption_sdk.structures.DataKey`,
:class:`aws_encryption_sdk.structures.RawDataKey`,
or :class:`aws_encryption_sdk.structures.EncryptedDataKey`
:returns: Boolean statement of ownership
:rtype: bool
|
[
"Determines",
"if",
"data_key",
"object",
"is",
"owned",
"by",
"this",
"RawMasterKey",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/raw.py#L75-L113
|
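For the symmetric branch above, ownership means: matching provider id, a key_info that is exactly prefix plus IV bytes long, and a key_info that starts with this key's prefix. A hedged standalone sketch of that check (plain dicts and made-up byte values, not the SDK's data structures):

# Standalone sketch of the symmetric ownership check; names and bytes are illustrative.
def owns_symmetric_data_key(provider_id, key_info_prefix, iv_len, data_key_provider):
    expected_len = len(key_info_prefix) + iv_len
    return (
        data_key_provider["provider_id"] == provider_id
        and len(data_key_provider["key_info"]) == expected_len
        and data_key_provider["key_info"].startswith(key_info_prefix)
    )

prefix = b"example-key-id||"                      # stand-in for the serialized key-info prefix
provider = {"provider_id": "raw-provider", "key_info": prefix + b"\x01" * 12}
print(owns_symmetric_data_key("raw-provider", prefix, 12, provider))  # True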
15,492
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/key_providers/raw.py
|
RawMasterKey._encrypt_data_key
|
def _encrypt_data_key(self, data_key, algorithm, encryption_context):
"""Performs the provider-specific key encryption actions.
:param data_key: Unencrypted data key
:type data_key: :class:`aws_encryption_sdk.structures.RawDataKey`
or :class:`aws_encryption_sdk.structures.DataKey`
:param algorithm: Algorithm object which directs how this Master Key will encrypt the data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to use in encryption
:returns: Decrypted data key
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
:raises EncryptKeyError: if Master Key is unable to encrypt data key
"""
# Raw key string to EncryptedData
encrypted_wrapped_key = self.config.wrapping_key.encrypt(
plaintext_data_key=data_key.data_key, encryption_context=encryption_context
)
# EncryptedData to EncryptedDataKey
return aws_encryption_sdk.internal.formatting.serialize.serialize_wrapped_key(
key_provider=self.key_provider,
wrapping_algorithm=self.config.wrapping_key.wrapping_algorithm,
wrapping_key_id=self.key_id,
encrypted_wrapped_key=encrypted_wrapped_key,
)
|
python
|
def _encrypt_data_key(self, data_key, algorithm, encryption_context):
"""Performs the provider-specific key encryption actions.
:param data_key: Unencrypted data key
:type data_key: :class:`aws_encryption_sdk.structures.RawDataKey`
or :class:`aws_encryption_sdk.structures.DataKey`
:param algorithm: Algorithm object which directs how this Master Key will encrypt the data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to use in encryption
:returns: Decrypted data key
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
:raises EncryptKeyError: if Master Key is unable to encrypt data key
"""
# Raw key string to EncryptedData
encrypted_wrapped_key = self.config.wrapping_key.encrypt(
plaintext_data_key=data_key.data_key, encryption_context=encryption_context
)
# EncryptedData to EncryptedDataKey
return aws_encryption_sdk.internal.formatting.serialize.serialize_wrapped_key(
key_provider=self.key_provider,
wrapping_algorithm=self.config.wrapping_key.wrapping_algorithm,
wrapping_key_id=self.key_id,
encrypted_wrapped_key=encrypted_wrapped_key,
)
|
[
"def",
"_encrypt_data_key",
"(",
"self",
",",
"data_key",
",",
"algorithm",
",",
"encryption_context",
")",
":",
"# Raw key string to EncryptedData",
"encrypted_wrapped_key",
"=",
"self",
".",
"config",
".",
"wrapping_key",
".",
"encrypt",
"(",
"plaintext_data_key",
"=",
"data_key",
".",
"data_key",
",",
"encryption_context",
"=",
"encryption_context",
")",
"# EncryptedData to EncryptedDataKey",
"return",
"aws_encryption_sdk",
".",
"internal",
".",
"formatting",
".",
"serialize",
".",
"serialize_wrapped_key",
"(",
"key_provider",
"=",
"self",
".",
"key_provider",
",",
"wrapping_algorithm",
"=",
"self",
".",
"config",
".",
"wrapping_key",
".",
"wrapping_algorithm",
",",
"wrapping_key_id",
"=",
"self",
".",
"key_id",
",",
"encrypted_wrapped_key",
"=",
"encrypted_wrapped_key",
",",
")"
] |
Performs the provider-specific key encryption actions.
:param data_key: Unencrypted data key
:type data_key: :class:`aws_encryption_sdk.structures.RawDataKey`
or :class:`aws_encryption_sdk.structures.DataKey`
:param algorithm: Algorithm object which directs how this Master Key will encrypt the data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to use in encryption
:returns: Decrypted data key
:rtype: aws_encryption_sdk.structures.EncryptedDataKey
:raises EncryptKeyError: if Master Key is unable to encrypt data key
|
[
"Performs",
"the",
"provider",
"-",
"specific",
"key",
"encryption",
"actions",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/key_providers/raw.py#L136-L159
|
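The method above does two things: wrap the plaintext data key with the configured wrapping key, then serialize the result into an EncryptedDataKey. A hedged, generic envelope-wrapping sketch of that idea using AES-GCM from the cryptography package; this is not the SDK's WrappingKey or serialize_wrapped_key API, and the "serialized" dict is a toy stand-in:

# Generic envelope wrapping: protect a data key under a wrapping key, keep the
# pieces a recipient needs to unwrap it later.
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

wrapping_key = AESGCM(AESGCM.generate_key(bit_length=256))
data_key = os.urandom(32)                    # plaintext data key to protect
encryption_context = b"purpose=example"      # bound as associated data

iv = os.urandom(12)
wrapped = wrapping_key.encrypt(iv, data_key, encryption_context)

encrypted_data_key = {"key_id": b"example-raw-key", "iv": iv, "wrapped_key": wrapped}
assert wrapping_key.decrypt(iv, wrapped, encryption_context) == data_key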
15,493
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/caches/null.py
|
NullCryptoMaterialsCache.put_encryption_materials
|
def put_encryption_materials(self, cache_key, encryption_materials, plaintext_length, entry_hints=None):
"""Does not add encryption materials to the cache since there is no cache to which to add them.
:param bytes cache_key: Identifier for entries in cache
:param encryption_materials: Encryption materials to add to cache
:type encryption_materials: aws_encryption_sdk.materials_managers.EncryptionMaterials
:param int plaintext_length: Length of plaintext associated with this request to the cache
:param entry_hints: Metadata to associate with entry (optional)
:type entry_hints: aws_encryption_sdk.caches.CryptoCacheEntryHints
:rtype: aws_encryption_sdk.caches.CryptoMaterialsCacheEntry
"""
return CryptoMaterialsCacheEntry(cache_key=cache_key, value=encryption_materials)
|
python
|
def put_encryption_materials(self, cache_key, encryption_materials, plaintext_length, entry_hints=None):
"""Does not add encryption materials to the cache since there is no cache to which to add them.
:param bytes cache_key: Identifier for entries in cache
:param encryption_materials: Encryption materials to add to cache
:type encryption_materials: aws_encryption_sdk.materials_managers.EncryptionMaterials
:param int plaintext_length: Length of plaintext associated with this request to the cache
:param entry_hints: Metadata to associate with entry (optional)
:type entry_hints: aws_encryption_sdk.caches.CryptoCacheEntryHints
:rtype: aws_encryption_sdk.caches.CryptoMaterialsCacheEntry
"""
return CryptoMaterialsCacheEntry(cache_key=cache_key, value=encryption_materials)
|
[
"def",
"put_encryption_materials",
"(",
"self",
",",
"cache_key",
",",
"encryption_materials",
",",
"plaintext_length",
",",
"entry_hints",
"=",
"None",
")",
":",
"return",
"CryptoMaterialsCacheEntry",
"(",
"cache_key",
"=",
"cache_key",
",",
"value",
"=",
"encryption_materials",
")"
] |
Does not add encryption materials to the cache since there is no cache to which to add them.
:param bytes cache_key: Identifier for entries in cache
:param encryption_materials: Encryption materials to add to cache
:type encryption_materials: aws_encryption_sdk.materials_managers.EncryptionMaterials
:param int plaintext_length: Length of plaintext associated with this request to the cache
:param entry_hints: Metadata to associate with entry (optional)
:type entry_hints: aws_encryption_sdk.caches.CryptoCacheEntryHints
:rtype: aws_encryption_sdk.caches.CryptoMaterialsCacheEntry
|
[
"Does",
"not",
"add",
"encryption",
"materials",
"to",
"the",
"cache",
"since",
"there",
"is",
"no",
"cache",
"to",
"which",
"to",
"add",
"them",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/caches/null.py#L25-L36
|
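This is the null-object pattern: the call returns a well-formed cache entry but nothing is retained, so every lookup misses. A minimal sketch of the same pattern with plain Python classes (names are illustrative, not the SDK's):

# Null-object cache: "put" fabricates an entry, "get" always misses.
class CacheMiss(KeyError):
    pass

class ToyCacheEntry:
    def __init__(self, cache_key, value):
        self.cache_key = cache_key
        self.value = value

class ToyNullCache:
    def put(self, cache_key, value):
        return ToyCacheEntry(cache_key, value)   # returned, never retained

    def get(self, cache_key):
        raise CacheMiss(cache_key)               # nothing is ever cached

entry = ToyNullCache().put(b"key", "materials")
print(entry.value)  # "materials"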
15,494
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/crypto/authentication.py
|
_PrehashingAuthenticator._set_signature_type
|
def _set_signature_type(self):
"""Ensures that the algorithm signature type is a known type and sets a reference value."""
try:
verify_interface(ec.EllipticCurve, self.algorithm.signing_algorithm_info)
return ec.EllipticCurve
except InterfaceNotImplemented:
raise NotSupportedError("Unsupported signing algorithm info")
|
python
|
def _set_signature_type(self):
"""Ensures that the algorithm signature type is a known type and sets a reference value."""
try:
verify_interface(ec.EllipticCurve, self.algorithm.signing_algorithm_info)
return ec.EllipticCurve
except InterfaceNotImplemented:
raise NotSupportedError("Unsupported signing algorithm info")
|
[
"def",
"_set_signature_type",
"(",
"self",
")",
":",
"try",
":",
"verify_interface",
"(",
"ec",
".",
"EllipticCurve",
",",
"self",
".",
"algorithm",
".",
"signing_algorithm_info",
")",
"return",
"ec",
".",
"EllipticCurve",
"except",
"InterfaceNotImplemented",
":",
"raise",
"NotSupportedError",
"(",
"\"Unsupported signing algorithm info\"",
")"
] |
Ensures that the algorithm signature type is a known type and sets a reference value.
|
[
"Ensures",
"that",
"the",
"algorithm",
"signature",
"type",
"is",
"a",
"known",
"type",
"and",
"sets",
"a",
"reference",
"value",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L48-L54
|
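The check above accepts only algorithm suites whose signing info is an elliptic-curve type. A hedged sketch of the same gate using a plain issubclass test instead of cryptography.utils.verify_interface (exception name is illustrative):

# Accept only elliptic-curve signing info; reject everything else.
from cryptography.hazmat.primitives.asymmetric import ec

class UnsupportedSigningAlgorithm(Exception):
    pass

def signature_type(signing_algorithm_info):
    if isinstance(signing_algorithm_info, type) and issubclass(signing_algorithm_info, ec.EllipticCurve):
        return ec.EllipticCurve
    raise UnsupportedSigningAlgorithm("Unsupported signing algorithm info")

print(signature_type(ec.SECP384R1))  # ec.EllipticCurve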
15,495
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/crypto/authentication.py
|
Signer.from_key_bytes
|
def from_key_bytes(cls, algorithm, key_bytes):
"""Builds a `Signer` from an algorithm suite and a raw signing key.
:param algorithm: Algorithm on which to base signer
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes key_bytes: Raw signing key
:rtype: aws_encryption_sdk.internal.crypto.Signer
"""
key = serialization.load_der_private_key(data=key_bytes, password=None, backend=default_backend())
return cls(algorithm, key)
|
python
|
def from_key_bytes(cls, algorithm, key_bytes):
"""Builds a `Signer` from an algorithm suite and a raw signing key.
:param algorithm: Algorithm on which to base signer
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes key_bytes: Raw signing key
:rtype: aws_encryption_sdk.internal.crypto.Signer
"""
key = serialization.load_der_private_key(data=key_bytes, password=None, backend=default_backend())
return cls(algorithm, key)
|
[
"def",
"from_key_bytes",
"(",
"cls",
",",
"algorithm",
",",
"key_bytes",
")",
":",
"key",
"=",
"serialization",
".",
"load_der_private_key",
"(",
"data",
"=",
"key_bytes",
",",
"password",
"=",
"None",
",",
"backend",
"=",
"default_backend",
"(",
")",
")",
"return",
"cls",
"(",
"algorithm",
",",
"key",
")"
] |
Builds a `Signer` from an algorithm suite and a raw signing key.
:param algorithm: Algorithm on which to base signer
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes key_bytes: Raw signing key
:rtype: aws_encryption_sdk.internal.crypto.Signer
|
[
"Builds",
"a",
"Signer",
"from",
"an",
"algorithm",
"suite",
"and",
"a",
"raw",
"signing",
"key",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L74-L83
|
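A hedged round-trip sketch of this constructor: generate an EC private key, serialize it to the DER/PKCS8 bytes that the key_bytes property in the next record produces, and load it back the way from_key_bytes does:

# DER/PKCS8 round trip for an EC signing key.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

private_key = ec.generate_private_key(ec.SECP384R1(), default_backend())

key_bytes = private_key.private_bytes(              # what key_bytes() returns
    encoding=serialization.Encoding.DER,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)

loaded = serialization.load_der_private_key(        # what from_key_bytes() does
    data=key_bytes, password=None, backend=default_backend()
)
assert loaded.private_numbers() == private_key.private_numbers()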
15,496
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/crypto/authentication.py
|
Signer.key_bytes
|
def key_bytes(self):
"""Returns the raw signing key.
:rtype: bytes
"""
return self.key.private_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption(),
)
|
python
|
def key_bytes(self):
"""Returns the raw signing key.
:rtype: bytes
"""
return self.key.private_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption(),
)
|
[
"def",
"key_bytes",
"(",
"self",
")",
":",
"return",
"self",
".",
"key",
".",
"private_bytes",
"(",
"encoding",
"=",
"serialization",
".",
"Encoding",
".",
"DER",
",",
"format",
"=",
"serialization",
".",
"PrivateFormat",
".",
"PKCS8",
",",
"encryption_algorithm",
"=",
"serialization",
".",
"NoEncryption",
"(",
")",
",",
")"
] |
Returns the raw signing key.
:rtype: bytes
|
[
"Returns",
"the",
"raw",
"signing",
"key",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L85-L94
|
15,497
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/crypto/authentication.py
|
Signer.finalize
|
def finalize(self):
"""Finalizes the signer and returns the signature.
:returns: Calculated signer signature
:rtype: bytes
"""
prehashed_digest = self._hasher.finalize()
return _ecc_static_length_signature(key=self.key, algorithm=self.algorithm, digest=prehashed_digest)
|
python
|
def finalize(self):
"""Finalizes the signer and returns the signature.
:returns: Calculated signer signature
:rtype: bytes
"""
prehashed_digest = self._hasher.finalize()
return _ecc_static_length_signature(key=self.key, algorithm=self.algorithm, digest=prehashed_digest)
|
[
"def",
"finalize",
"(",
"self",
")",
":",
"prehashed_digest",
"=",
"self",
".",
"_hasher",
".",
"finalize",
"(",
")",
"return",
"_ecc_static_length_signature",
"(",
"key",
"=",
"self",
".",
"key",
",",
"algorithm",
"=",
"self",
".",
"algorithm",
",",
"digest",
"=",
"prehashed_digest",
")"
] |
Finalizes the signer and returns the signature.
:returns: Calculated signer signature
:rtype: bytes
|
[
"Finalizes",
"the",
"signer",
"and",
"returns",
"the",
"signature",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L114-L121
|
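finalize() closes the running hash and signs the resulting digest. A hedged sketch of signing a prehashed SHA-384 digest with ECDSA via cryptography's Prehashed helper; the SDK's additional static-length padding of the signature is omitted here:

# Sign a prehashed SHA-384 digest with ECDSA, then verify it.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec, utils

private_key = ec.generate_private_key(ec.SECP384R1())

hasher = hashes.Hash(hashes.SHA384())
hasher.update(b"message body streamed in chunks")
digest = hasher.finalize()

signature = private_key.sign(digest, ec.ECDSA(utils.Prehashed(hashes.SHA384())))
private_key.public_key().verify(signature, digest, ec.ECDSA(utils.Prehashed(hashes.SHA384())))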
15,498
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/crypto/authentication.py
|
Verifier.from_encoded_point
|
def from_encoded_point(cls, algorithm, encoded_point):
"""Creates a Verifier object based on the supplied algorithm and encoded compressed ECC curve point.
:param algorithm: Algorithm on which to base verifier
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes encoded_point: ECC public point compressed and encoded with _ecc_encode_compressed_point
:returns: Instance of Verifier generated from encoded point
:rtype: aws_encryption_sdk.internal.crypto.Verifier
"""
return cls(
algorithm=algorithm,
key=_ecc_public_numbers_from_compressed_point(
curve=algorithm.signing_algorithm_info(), compressed_point=base64.b64decode(encoded_point)
).public_key(default_backend()),
)
|
python
|
def from_encoded_point(cls, algorithm, encoded_point):
"""Creates a Verifier object based on the supplied algorithm and encoded compressed ECC curve point.
:param algorithm: Algorithm on which to base verifier
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes encoded_point: ECC public point compressed and encoded with _ecc_encode_compressed_point
:returns: Instance of Verifier generated from encoded point
:rtype: aws_encryption_sdk.internal.crypto.Verifier
"""
return cls(
algorithm=algorithm,
key=_ecc_public_numbers_from_compressed_point(
curve=algorithm.signing_algorithm_info(), compressed_point=base64.b64decode(encoded_point)
).public_key(default_backend()),
)
|
[
"def",
"from_encoded_point",
"(",
"cls",
",",
"algorithm",
",",
"encoded_point",
")",
":",
"return",
"cls",
"(",
"algorithm",
"=",
"algorithm",
",",
"key",
"=",
"_ecc_public_numbers_from_compressed_point",
"(",
"curve",
"=",
"algorithm",
".",
"signing_algorithm_info",
"(",
")",
",",
"compressed_point",
"=",
"base64",
".",
"b64decode",
"(",
"encoded_point",
")",
")",
".",
"public_key",
"(",
"default_backend",
"(",
")",
")",
",",
")"
] |
Creates a Verifier object based on the supplied algorithm and encoded compressed ECC curve point.
:param algorithm: Algorithm on which to base verifier
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes encoded_point: ECC public point compressed and encoded with _ecc_encode_compressed_point
:returns: Instance of Verifier generated from encoded point
:rtype: aws_encryption_sdk.internal.crypto.Verifier
|
[
"Creates",
"a",
"Verifier",
"object",
"based",
"on",
"the",
"supplied",
"algorithm",
"and",
"encoded",
"compressed",
"ECC",
"curve",
"point",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L137-L151
|
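A hedged sketch of the compressed-point idea: compress an EC public point, base64-encode it for transport as in the record, and rebuild the key. It uses cryptography's public from_encoded_point constructor rather than the SDK's internal _ecc_public_numbers_from_compressed_point helper:

# Compress, transport (base64), and restore an EC public key.
import base64
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

public_key = ec.generate_private_key(ec.SECP384R1()).public_key()

compressed = public_key.public_bytes(
    encoding=serialization.Encoding.X962,
    format=serialization.PublicFormat.CompressedPoint,
)
encoded_point = base64.b64encode(compressed)

restored = ec.EllipticCurvePublicKey.from_encoded_point(
    ec.SECP384R1(), base64.b64decode(encoded_point)
)
assert restored.public_numbers() == public_key.public_numbers()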
15,499
|
aws/aws-encryption-sdk-python
|
src/aws_encryption_sdk/internal/crypto/authentication.py
|
Verifier.from_key_bytes
|
def from_key_bytes(cls, algorithm, key_bytes):
"""Creates a `Verifier` object based on the supplied algorithm and raw verification key.
:param algorithm: Algorithm on which to base verifier
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes encoded_point: Raw verification key
:returns: Instance of Verifier generated from encoded point
:rtype: aws_encryption_sdk.internal.crypto.Verifier
"""
return cls(
algorithm=algorithm, key=serialization.load_der_public_key(data=key_bytes, backend=default_backend())
)
|
python
|
def from_key_bytes(cls, algorithm, key_bytes):
"""Creates a `Verifier` object based on the supplied algorithm and raw verification key.
:param algorithm: Algorithm on which to base verifier
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes encoded_point: Raw verification key
:returns: Instance of Verifier generated from encoded point
:rtype: aws_encryption_sdk.internal.crypto.Verifier
"""
return cls(
algorithm=algorithm, key=serialization.load_der_public_key(data=key_bytes, backend=default_backend())
)
|
[
"def",
"from_key_bytes",
"(",
"cls",
",",
"algorithm",
",",
"key_bytes",
")",
":",
"return",
"cls",
"(",
"algorithm",
"=",
"algorithm",
",",
"key",
"=",
"serialization",
".",
"load_der_public_key",
"(",
"data",
"=",
"key_bytes",
",",
"backend",
"=",
"default_backend",
"(",
")",
")",
")"
] |
Creates a `Verifier` object based on the supplied algorithm and raw verification key.
:param algorithm: Algorithm on which to base verifier
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param bytes encoded_point: Raw verification key
:returns: Instance of Verifier generated from encoded point
:rtype: aws_encryption_sdk.internal.crypto.Verifier
|
[
"Creates",
"a",
"Verifier",
"object",
"based",
"on",
"the",
"supplied",
"algorithm",
"and",
"raw",
"verification",
"key",
"."
] |
d182155d5fb1ef176d9e7d0647679737d5146495
|
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/internal/crypto/authentication.py#L154-L165
|
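Note that the docstring in this record (copied verbatim from the source) labels the parameter encoded_point even though the signature names it key_bytes. A hedged sketch of what the constructor does: serialize a verification (public) key to DER and load it back with serialization.load_der_public_key:

# DER round trip for an EC verification key.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

public_key = ec.generate_private_key(ec.SECP384R1()).public_key()

key_bytes = public_key.public_bytes(
    encoding=serialization.Encoding.DER,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
loaded = serialization.load_der_public_key(data=key_bytes, backend=default_backend())
assert loaded.public_numbers() == public_key.public_numbers()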