id int64 0 190k | prompt stringlengths 21 13.4M | docstring stringlengths 1 12k ⌀ |
|---|---|---|
5,080 | import datetime
import logging
from copy import deepcopy
from typing import Any, Callable, Dict, Optional, Tuple
from aws_lambda_powertools.utilities.idempotency.config import IdempotencyConfig
from aws_lambda_powertools.utilities.idempotency.exceptions import (
IdempotencyAlreadyInProgressError,
IdempotencyInconsistentStateError,
IdempotencyItemAlreadyExistsError,
IdempotencyItemNotFoundError,
IdempotencyKeyError,
IdempotencyPersistenceLayerError,
IdempotencyValidationError,
)
from aws_lambda_powertools.utilities.idempotency.persistence.base import (
BasePersistenceLayer,
)
from aws_lambda_powertools.utilities.idempotency.persistence.datarecord import (
STATUS_CONSTANTS,
DataRecord,
)
from aws_lambda_powertools.utilities.idempotency.serialization.base import (
BaseIdempotencySerializer,
)
from aws_lambda_powertools.utilities.idempotency.serialization.no_op import (
NoOpSerializer,
)
The provided code snippet includes necessary dependencies for implementing the `_prepare_data` function. Write a Python function `def _prepare_data(data: Any) -> Any` to solve the following problem:
Prepare data for json serialization. We will convert Python dataclasses, pydantic models or event source data classes to a dict, otherwise return data as-is.
Here is the function:
def _prepare_data(data: Any) -> Any:
"""Prepare data for json serialization.
We will convert Python dataclasses, pydantic models or event source data classes to a dict,
otherwise return data as-is.
"""
if hasattr(data, "__dataclass_fields__"):
import dataclasses
return dataclasses.asdict(data)
if callable(getattr(data, "dict", None)):
return data.dict()
return getattr(data, "raw_event", data) | Prepare data for json serialization. We will convert Python dataclasses, pydantic models or event source data classes to a dict, otherwise return data as-is. |
5,081 |
The provided code snippet includes necessary dependencies for implementing the `aiohttp_trace_config` function. Write a Python function `def aiohttp_trace_config()` to solve the following problem:
aiohttp extension for X-Ray (aws_xray_trace_config) It expects you to have aiohttp as a dependency. Returns ------- TraceConfig aiohttp trace config
Here is the function:
def aiohttp_trace_config():
    """aiohttp extension for X-Ray (aws_xray_trace_config)

    It expects you to have aiohttp as a dependency.

    Returns
    -------
    TraceConfig
        aiohttp trace config
    """
    # Imported lazily so aws_xray_sdk's aiohttp extension (and aiohttp itself)
    # are only required when this helper is actually used.
    from aws_xray_sdk.ext.aiohttp.client import (
        aws_xray_trace_config,  # pragma: no cover
    )

    aws_xray_trace_config.__doc__ = "aiohttp extension for X-Ray (aws_xray_trace_config)"  # pragma: no cover
    return aws_xray_trace_config()  # pragma: no cover
5,082 | import inspect
import re
from typing import Any, Callable, Dict, ForwardRef, List, Optional, Set, Tuple, Type, cast
from pydantic import BaseModel
from aws_lambda_powertools.event_handler.openapi.compat import (
ModelField,
create_body_model,
evaluate_forwardref,
is_scalar_field,
is_scalar_sequence_field,
)
from aws_lambda_powertools.event_handler.openapi.params import (
Body,
Dependant,
Header,
Param,
ParamTypes,
Query,
_File,
_Form,
analyze_param,
create_response_field,
get_flat_dependant,
)
from aws_lambda_powertools.event_handler.openapi.types import OpenAPIResponse, OpenAPIResponseContentModel
def add_param_to_fields(
    *,
    field: ModelField,
    dependant: Dependant,
) -> None:
    """
    Adds a parameter to the list of parameters in the dependant model.

    Parameters
    ----------
    field: ModelField
        The field to add
    dependant: Dependant
        The dependant model to add the field to

    Raises
    ------
    AssertionError
        If the field's location is not path, query, header, or cookie.
    """
    field_info = cast(Param, field.field_info)

    # Resolve the destination list on the dependant model from the param location.
    destinations = {
        ParamTypes.path: dependant.path_params,
        ParamTypes.query: dependant.query_params,
        ParamTypes.header: dependant.header_params,
        ParamTypes.cookie: dependant.cookie_params,
    }

    target = destinations.get(field_info.in_)
    if target is None:
        raise AssertionError(f"Unsupported param type: {field_info.in_}")
    target.append(field)
def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature:
    """
    Returns a typed signature for a callable, resolving forward references.

    Parameters
    ----------
    call: Callable[..., Any]
        The callable to get the signature for

    Returns
    -------
    inspect.Signature
        The typed signature
    """
    signature = inspect.signature(call)

    # Gets the global namespace for the call. This is used to resolve forward references.
    # BUGFIX: the attribute is "__globals__"; the previous code read "__global__",
    # which never exists on functions, so the namespace was always {} and forward
    # references silently failed to resolve.
    globalns = getattr(call, "__globals__", {})

    typed_params = [
        inspect.Parameter(
            name=param.name,
            kind=param.kind,
            default=param.default,
            annotation=get_typed_annotation(param.annotation, globalns),
        )
        for param in signature.parameters.values()
    ]

    # If the return annotation is not empty, resolve it and add it to the signature.
    if signature.return_annotation is not inspect.Signature.empty:
        return inspect.Signature(
            typed_params,
            return_annotation=get_typed_annotation(signature.return_annotation, globalns),
        )
    return inspect.Signature(typed_params)
def get_path_param_names(path: str) -> Set[str]:
    """
    Returns the path parameter names from a path template. Those are the strings between { and }.

    Parameters
    ----------
    path: str
        The path template

    Returns
    -------
    Set[str]
        The path parameter names
    """
    # Non-greedy match so adjacent placeholders like /{a}/{b} split correctly.
    return {match.group(1) for match in re.finditer(r"{(.*?)}", path)}
def _add_extra_responses(dependant: Dependant, responses: Optional[Dict[int, OpenAPIResponse]]):
    # Also add the optional extra responses to the dependant model.
    if not responses:
        return

    # Every content schema that declares a "model" contributes a response model field.
    for response in responses.values():
        for schema in response.get("content", {}).values():
            if "model" not in schema:
                continue

            response_field = analyze_param(
                param_name="return",
                annotation=cast(OpenAPIResponseContentModel, schema)["model"],
                value=None,
                is_path_param=False,
                is_response_param=True,
            )
            if response_field is None:
                raise AssertionError("Response field is None for response model")

            dependant.response_extra_models.append(response_field)
def _add_return_annotation(dependant: Dependant, endpoint_signature: inspect.Signature):
    # If the handler has no return annotation there is nothing to record.
    return_annotation = endpoint_signature.return_annotation
    if return_annotation is inspect.Signature.empty:
        return

    param_field = analyze_param(
        param_name="return",
        annotation=return_annotation,
        value=None,
        is_path_param=False,
        is_response_param=True,
    )
    if param_field is None:
        raise AssertionError("Param field is None for return annotation")

    dependant.return_param = param_field
def is_body_param(*, param_field: ModelField, is_path_param: bool) -> bool:
    """
    Returns whether a parameter is a request body parameter, by checking if it is a scalar field or a body field.

    Parameters
    ----------
    param_field: ModelField
        The parameter field
    is_path_param: bool
        Whether the parameter is a path parameter

    Returns
    -------
    bool
        Whether the parameter is a request body parameter

    Raises
    ------
    AssertionError
        If a path param is not scalar, or a non-scalar param is not Body().
    """
    # Path params are never body params, but must be scalar.
    if is_path_param:
        if not is_scalar_field(field=param_field):
            raise AssertionError("Path params must be of one of the supported types")
        return False

    # Plain scalars map to query/header/cookie, not the body.
    if is_scalar_field(field=param_field):
        return False

    # Sequences of scalars are allowed for query strings and headers.
    if isinstance(param_field.field_info, (Query, Header)) and is_scalar_sequence_field(param_field):
        return False

    # Anything else must be an explicit Body() param.
    if not isinstance(param_field.field_info, Body):
        raise AssertionError(f"Param: {param_field.name} can only be a request body, use Body()")
    return True
class Dependant:
    """
    A class used internally to represent a dependency between path operation decorators and the path operation function.
    """
    def __init__(
        self,
        *,
        path_params: Optional[List[ModelField]] = None,
        query_params: Optional[List[ModelField]] = None,
        header_params: Optional[List[ModelField]] = None,
        cookie_params: Optional[List[ModelField]] = None,
        body_params: Optional[List[ModelField]] = None,
        return_param: Optional[ModelField] = None,
        response_extra_models: Optional[List[ModelField]] = None,
        name: Optional[str] = None,
        call: Optional[Callable[..., Any]] = None,
        request_param_name: Optional[str] = None,
        websocket_param_name: Optional[str] = None,
        http_connection_param_name: Optional[str] = None,
        response_param_name: Optional[str] = None,
        background_tasks_param_name: Optional[str] = None,
        path: Optional[str] = None,
    ) -> None:
        # Request parameter buckets, grouped by where each value comes from.
        # NOTE: "x or []" replaces any falsy argument (including a caller-supplied
        # empty list) with a fresh list, so instances never share list objects.
        self.path_params = path_params or []
        self.query_params = query_params or []
        self.header_params = header_params or []
        self.cookie_params = cookie_params or []
        self.body_params = body_params or []
        # Return annotation field and any extra declared response models.
        self.return_param = return_param or None
        self.response_extra_models = response_extra_models or []
        # Names of special framework-injected parameters, when present.
        self.request_param_name = request_param_name
        self.websocket_param_name = websocket_param_name
        self.http_connection_param_name = http_connection_param_name
        self.response_param_name = response_param_name
        self.background_tasks_param_name = background_tasks_param_name
        self.name = name
        self.call = call
        # Store the path to be able to re-generate a dependable from it in overrides
        self.path = path
        # Save the cache key at creation to optimize performance
        self.cache_key: CacheKey = self.call
def analyze_param(
    *,
    param_name: str,
    annotation: Any,
    value: Any,
    is_path_param: bool,
    is_response_param: bool,
) -> Optional[ModelField]:
    """
    Analyze a parameter annotation and value to determine the type and default value of the parameter.

    Parameters
    ----------
    param_name: str
        The name of the parameter
    annotation
        The annotation of the parameter
    value
        The value of the parameter
    is_path_param
        Whether the parameter is a path parameter
    is_response_param
        Whether the parameter is the return annotation

    Returns
    -------
    Optional[ModelField]
        The type annotation and the Pydantic field representing the parameter
    """
    field_info, type_annotation = get_field_info_and_type_annotation(annotation, value, is_path_param)

    # A FieldInfo passed as the default value takes precedence, but cannot be
    # combined with a FieldInfo already derived from the annotation itself.
    if isinstance(value, FieldInfo):
        if field_info is not None:
            raise AssertionError("Cannot use a FieldInfo as a parameter annotation and pass a FieldInfo as a value")
        field_info = value
        if PYDANTIC_V2:
            field_info.annotation = type_annotation  # type: ignore[attr-defined,unused-ignore]

    # Still no FieldInfo: synthesize one based on the param's location and shape.
    if field_info is None:
        default_value = Required if value is inspect.Signature.empty else value
        if is_path_param:
            field_info = Path(annotation=type_annotation)
        elif field_annotation_is_scalar(annotation=type_annotation):
            # Scalar non-path params default to query string.
            field_info = Query(annotation=type_annotation, default=default_value)
        else:
            field_info = Body(annotation=type_annotation, default=default_value)

    # Response fields are always required.
    if is_response_param:
        field_info.default = Required

    return _create_model_field(field_info, type_annotation, param_name, is_path_param)
class OpenAPIResponse(TypedDict):
    # Human-readable description of the response (required by the OpenAPI spec).
    description: str
    # Optional mapping of media type (e.g. "application/json") to either an
    # inline schema or a model-based content definition.
    content: NotRequired[Dict[str, Union[OpenAPIResponseContentSchema, OpenAPIResponseContentModel]]]
The provided code snippet includes necessary dependencies for implementing the `get_dependant` function. Write a Python function `def get_dependant( *, path: str, call: Callable[..., Any], name: Optional[str] = None, responses: Optional[Dict[int, OpenAPIResponse]] = None, ) -> Dependant` to solve the following problem:
Returns a dependant model for a handler function. A dependant model is a model that contains the parameters and return value of a handler function. Parameters ---------- path: str The path template call: Callable[..., Any] The handler function name: str, optional The name of the handler function responses: List[Dict[int, OpenAPIResponse]], optional The list of extra responses for the handler function Returns ------- Dependant The dependant model for the handler function
Here is the function:
def get_dependant(
    *,
    path: str,
    call: Callable[..., Any],
    name: Optional[str] = None,
    responses: Optional[Dict[int, OpenAPIResponse]] = None,
) -> Dependant:
    """
    Returns a dependant model for a handler function. A dependant model is a model that contains
    the parameters and return value of a handler function.

    Parameters
    ----------
    path: str
        The path template
    call: Callable[..., Any]
        The handler function
    name: str, optional
        The name of the handler function
    responses: Dict[int, OpenAPIResponse], optional
        Extra responses for the handler function, keyed by HTTP status code

    Returns
    -------
    Dependant
        The dependant model for the handler function
    """
    path_param_names = get_path_param_names(path)
    endpoint_signature = get_typed_signature(call)

    dependant = Dependant(
        call=call,
        name=name,
        path=path,
    )

    # Classify each handler parameter as a path/query/header/cookie or body param.
    for param_name, param in endpoint_signature.parameters.items():
        # A parameter named in the path template is a path parameter.
        is_path_param = param_name in path_param_names

        # Analyze the parameter to get the Pydantic field.
        param_field = analyze_param(
            param_name=param_name,
            annotation=param.annotation,
            value=param.default,
            is_path_param=is_path_param,
            is_response_param=False,
        )
        if param_field is None:
            raise AssertionError(f"Parameter field is None for param: {param_name}")

        if is_body_param(param_field=param_field, is_path_param=is_path_param):
            dependant.body_params.append(param_field)
        else:
            add_param_to_fields(field=param_field, dependant=dependant)

    # Attach the return annotation and any extra declared responses.
    _add_return_annotation(dependant, endpoint_signature)
    _add_extra_responses(dependant, responses)

    return dependant
5,083 | import inspect
import re
from typing import Any, Callable, Dict, ForwardRef, List, Optional, Set, Tuple, Type, cast
from pydantic import BaseModel
from aws_lambda_powertools.event_handler.openapi.compat import (
ModelField,
create_body_model,
evaluate_forwardref,
is_scalar_field,
is_scalar_sequence_field,
)
from aws_lambda_powertools.event_handler.openapi.params import (
Body,
Dependant,
Header,
Param,
ParamTypes,
Query,
_File,
_Form,
analyze_param,
create_response_field,
get_flat_dependant,
)
from aws_lambda_powertools.event_handler.openapi.types import OpenAPIResponse, OpenAPIResponseContentModel
class Dependant:
    """
    A class used internally to represent a dependency between path operation decorators and the path operation function.
    """
    def __init__(
        self,
        *,
        path_params: Optional[List[ModelField]] = None,
        query_params: Optional[List[ModelField]] = None,
        header_params: Optional[List[ModelField]] = None,
        cookie_params: Optional[List[ModelField]] = None,
        body_params: Optional[List[ModelField]] = None,
        return_param: Optional[ModelField] = None,
        response_extra_models: Optional[List[ModelField]] = None,
        name: Optional[str] = None,
        call: Optional[Callable[..., Any]] = None,
        request_param_name: Optional[str] = None,
        websocket_param_name: Optional[str] = None,
        http_connection_param_name: Optional[str] = None,
        response_param_name: Optional[str] = None,
        background_tasks_param_name: Optional[str] = None,
        path: Optional[str] = None,
    ) -> None:
        # Request parameter buckets, grouped by where each value comes from.
        # NOTE: "x or []" replaces any falsy argument (including a caller-supplied
        # empty list) with a fresh list, so instances never share list objects.
        self.path_params = path_params or []
        self.query_params = query_params or []
        self.header_params = header_params or []
        self.cookie_params = cookie_params or []
        self.body_params = body_params or []
        # Return annotation field and any extra declared response models.
        self.return_param = return_param or None
        self.response_extra_models = response_extra_models or []
        # Names of special framework-injected parameters, when present.
        self.request_param_name = request_param_name
        self.websocket_param_name = websocket_param_name
        self.http_connection_param_name = http_connection_param_name
        self.response_param_name = response_param_name
        self.background_tasks_param_name = background_tasks_param_name
        self.name = name
        self.call = call
        # Store the path to be able to re-generate a dependable from it in overrides
        self.path = path
        # Save the cache key at creation to optimize performance
        self.cache_key: CacheKey = self.call
def get_flat_dependant(
    dependant: Dependant,
    visited: Optional[List[CacheKey]] = None,
) -> Dependant:
    """
    Flatten a recursive Dependant model structure.

    This function recursively concatenates the parameter fields of a Dependant model and its dependencies into a flat
    Dependant structure. This is useful for scenarios like parameter validation where the nested structure is not
    relevant.

    Parameters
    ----------
    dependant: Dependant
        The dependant model to flatten
    visited: List[CacheKey], optional
        Keeps track of visited Dependents to avoid infinite recursion. Defaults to empty list.

    Returns
    -------
    Dependant
        The flattened Dependant model
    """
    visited = [] if visited is None else visited
    visited.append(dependant.cache_key)

    # Copy each parameter bucket so mutations of the flat model don't leak back.
    return Dependant(
        path_params=list(dependant.path_params),
        query_params=list(dependant.query_params),
        header_params=list(dependant.header_params),
        cookie_params=list(dependant.cookie_params),
        body_params=list(dependant.body_params),
        path=dependant.path,
    )
The provided code snippet includes necessary dependencies for implementing the `get_flat_params` function. Write a Python function `def get_flat_params(dependant: Dependant) -> List[ModelField]` to solve the following problem:
Get a list of all the parameters from a Dependant object. Parameters ---------- dependant : Dependant The Dependant object containing the parameters. Returns ------- List[ModelField] A list of ModelField objects containing the flat parameters from the Dependant object.
Here is the function:
def get_flat_params(dependant: Dependant) -> List[ModelField]:
    """
    Get a list of all the parameters from a Dependant object.

    Parameters
    ----------
    dependant : Dependant
        The Dependant object containing the parameters.

    Returns
    -------
    List[ModelField]
        A list of ModelField objects containing the flat parameters from the Dependant object.
    """
    flat_dependant = get_flat_dependant(dependant)
    # Concatenate in a stable order: path, query, header, cookie.
    return (
        flat_dependant.path_params
        + flat_dependant.query_params
        + flat_dependant.header_params
        + flat_dependant.cookie_params
    )
5,084 | import inspect
import re
from typing import Any, Callable, Dict, ForwardRef, List, Optional, Set, Tuple, Type, cast
from pydantic import BaseModel
from aws_lambda_powertools.event_handler.openapi.compat import (
ModelField,
create_body_model,
evaluate_forwardref,
is_scalar_field,
is_scalar_sequence_field,
)
from aws_lambda_powertools.event_handler.openapi.params import (
Body,
Dependant,
Header,
Param,
ParamTypes,
Query,
_File,
_Form,
analyze_param,
create_response_field,
get_flat_dependant,
)
from aws_lambda_powertools.event_handler.openapi.types import OpenAPIResponse, OpenAPIResponseContentModel
def get_body_field_info(
    *,
    body_model: Type[BaseModel],
    flat_dependant: Dependant,
    required: bool,
) -> Tuple[Type[Body], Dict[str, Any]]:
    """
    Get the Body field info and kwargs for a given body model.
    """
    kwargs: Dict[str, Any] = {"annotation": body_model, "alias": "body"}
    if not required:
        kwargs["default"] = None

    body_fields = flat_dependant.body_params

    # File/form bodies are not supported yet; fail loudly instead of silently.
    if any(isinstance(f.field_info, _File) for f in body_fields):
        # MAINTENANCE: body_field_info: Type[Body] = _File
        raise NotImplementedError("_File fields are not supported in request bodies")
    if any(isinstance(f.field_info, _Form) for f in body_fields):
        # MAINTENANCE: body_field_info: Type[Body] = _Form
        raise NotImplementedError("_Form fields are not supported in request bodies")

    # Propagate the media type only when all Body params agree on a single one.
    media_types = {f.field_info.media_type for f in body_fields if isinstance(f.field_info, Body)}
    if len(media_types) == 1:
        kwargs["media_type"] = media_types.pop()

    return Body, kwargs
class Dependant:
    """
    A class used internally to represent a dependency between path operation decorators and the path operation function.
    """
    def __init__(
        self,
        *,
        path_params: Optional[List[ModelField]] = None,
        query_params: Optional[List[ModelField]] = None,
        header_params: Optional[List[ModelField]] = None,
        cookie_params: Optional[List[ModelField]] = None,
        body_params: Optional[List[ModelField]] = None,
        return_param: Optional[ModelField] = None,
        response_extra_models: Optional[List[ModelField]] = None,
        name: Optional[str] = None,
        call: Optional[Callable[..., Any]] = None,
        request_param_name: Optional[str] = None,
        websocket_param_name: Optional[str] = None,
        http_connection_param_name: Optional[str] = None,
        response_param_name: Optional[str] = None,
        background_tasks_param_name: Optional[str] = None,
        path: Optional[str] = None,
    ) -> None:
        # Request parameter buckets, grouped by where each value comes from.
        # NOTE: "x or []" replaces any falsy argument (including a caller-supplied
        # empty list) with a fresh list, so instances never share list objects.
        self.path_params = path_params or []
        self.query_params = query_params or []
        self.header_params = header_params or []
        self.cookie_params = cookie_params or []
        self.body_params = body_params or []
        # Return annotation field and any extra declared response models.
        self.return_param = return_param or None
        self.response_extra_models = response_extra_models or []
        # Names of special framework-injected parameters, when present.
        self.request_param_name = request_param_name
        self.websocket_param_name = websocket_param_name
        self.http_connection_param_name = http_connection_param_name
        self.response_param_name = response_param_name
        self.background_tasks_param_name = background_tasks_param_name
        self.name = name
        self.call = call
        # Store the path to be able to re-generate a dependable from it in overrides
        self.path = path
        # Save the cache key at creation to optimize performance
        self.cache_key: CacheKey = self.call
def get_flat_dependant(
    dependant: Dependant,
    visited: Optional[List[CacheKey]] = None,
) -> Dependant:
    """
    Flatten a recursive Dependant model structure.

    This function recursively concatenates the parameter fields of a Dependant model and its dependencies into a flat
    Dependant structure. This is useful for scenarios like parameter validation where the nested structure is not
    relevant.

    Parameters
    ----------
    dependant: Dependant
        The dependant model to flatten
    visited: List[CacheKey], optional
        Keeps track of visited Dependents to avoid infinite recursion. Defaults to empty list.

    Returns
    -------
    Dependant
        The flattened Dependant model
    """
    visited = [] if visited is None else visited
    visited.append(dependant.cache_key)

    # Copy each parameter bucket so mutations of the flat model don't leak back.
    return Dependant(
        path_params=list(dependant.path_params),
        query_params=list(dependant.query_params),
        header_params=list(dependant.header_params),
        cookie_params=list(dependant.cookie_params),
        body_params=list(dependant.body_params),
        path=dependant.path,
    )
def create_response_field(
    name: str,
    type_: Type[Any],
    default: Optional[Any] = Undefined,
    required: Union[bool, UndefinedType] = Undefined,
    model_config: Type[BaseConfig] = BaseConfig,
    field_info: Optional[FieldInfo] = None,
    alias: Optional[str] = None,
    mode: Literal["validation", "serialization"] = "validation",
) -> ModelField:
    """
    Create a new response field. Raises if type_ is invalid.
    """
    # Pydantic v1 and v2 construct ModelField from different keyword sets.
    if PYDANTIC_V2:
        if field_info is None:
            field_info = FieldInfo(annotation=type_, default=default, alias=alias)
        kwargs: Dict[str, Any] = {"name": name, "field_info": field_info, "mode": mode}
    else:
        if field_info is None:
            field_info = FieldInfo()
        kwargs = {
            "name": name,
            "field_info": field_info,
            "type_": type_,
            "class_validators": {},
            "default": default,
            "required": required,
            "model_config": model_config,
            "alias": alias,
        }

    return ModelField(**kwargs)  # type: ignore[arg-type]
The provided code snippet includes necessary dependencies for implementing the `get_body_field` function. Write a Python function `def get_body_field(*, dependant: Dependant, name: str) -> Optional[ModelField]` to solve the following problem:
Get the Body field for a given Dependant object.
Here is the function:
def get_body_field(*, dependant: Dependant, name: str) -> Optional[ModelField]:
    """
    Get the Body field for a given Dependant object.

    Returns None when the handler declares no body params; returns the single
    param directly when exactly one non-embedded body param exists; otherwise
    synthesizes a wrapper model ("Body_<name>") embedding all body params.
    """
    flat_dependant = get_flat_dependant(dependant)
    if not flat_dependant.body_params:
        return None

    first_param = flat_dependant.body_params[0]
    field_info = first_param.field_info

    # Handle the case where there is only one body parameter and it is not embedded.
    embed = getattr(field_info, "embed", None)
    body_param_names_set = {param.name for param in flat_dependant.body_params}
    if len(body_param_names_set) == 1 and not embed:
        return first_param

    # If one field requires to embed, all have to be embedded.
    for param in flat_dependant.body_params:
        setattr(param.field_info, "embed", True)  # noqa: B010

    # Generate a custom body model for this endpoint.
    model_name = "Body_" + name
    body_model = create_body_model(fields=flat_dependant.body_params, model_name=model_name)

    # The synthesized body is required if any individual body param is required.
    required = any(f.required for f in flat_dependant.body_params)

    body_field_info, body_field_info_kwargs = get_body_field_info(
        body_model=body_model,
        flat_dependant=flat_dependant,
        required=required,
    )

    return create_response_field(
        name="body",
        type_=body_model,
        required=required,
        alias="body",
        field_info=body_field_info(**body_field_info_kwargs),
    )
5,085 | import dataclasses
import datetime
from collections import defaultdict, deque
from decimal import Decimal
from enum import Enum
from pathlib import Path, PurePath
from re import Pattern
from types import GeneratorType
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
from uuid import UUID
from pydantic import BaseModel
from pydantic.color import Color
from pydantic.types import SecretBytes, SecretStr
from aws_lambda_powertools.event_handler.openapi.compat import _model_dump
from aws_lambda_powertools.event_handler.openapi.types import IncEx
The provided code snippet includes necessary dependencies for implementing the `iso_format` function. Write a Python function `def iso_format(o: Union[datetime.date, datetime.time]) -> str` to solve the following problem:
ISO format for date and time
Here is the function:
def iso_format(o: Union[datetime.date, datetime.time]) -> str:
    """
    ISO 8601 format for date and time values.

    Parameters
    ----------
    o: Union[datetime.date, datetime.time]
        The date or time value to format.

    Returns
    -------
    str
        The ISO 8601 representation, e.g. "2021-05-04" or "13:45:30".
    """
    return o.isoformat()
5,086 | import dataclasses
import datetime
from collections import defaultdict, deque
from decimal import Decimal
from enum import Enum
from pathlib import Path, PurePath
from re import Pattern
from types import GeneratorType
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
from uuid import UUID
from pydantic import BaseModel
from pydantic.color import Color
from pydantic.types import SecretBytes, SecretStr
from aws_lambda_powertools.event_handler.openapi.compat import _model_dump
from aws_lambda_powertools.event_handler.openapi.types import IncEx
The provided code snippet includes necessary dependencies for implementing the `decimal_encoder` function. Write a Python function `def decimal_encoder(dec_value: Decimal) -> Union[int, float]` to solve the following problem:
Encodes a Decimal as int if there's no exponent, otherwise float. This is useful when we use ConstrainedDecimal to represent Numeric(x,0) where an integer (but not int typed) is used. Encoding this as a float results in failed round-tripping between encode and parse. >>> decimal_encoder(Decimal("1.0")) 1.0 >>> decimal_encoder(Decimal("1")) 1
Here is the function:
def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
    """
    Encodes a Decimal as int if there's no exponent, otherwise float.

    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
    where an integer (but not int typed) is used. Encoding this as a float
    results in failed round-tripping between encode and parse.

    >>> decimal_encoder(Decimal("1.0"))
    1.0
    >>> decimal_encoder(Decimal("1"))
    1
    """
    # A non-negative exponent means the value has no fractional digits.
    if dec_value.as_tuple().exponent >= 0:  # type: ignore[operator]
        return int(dec_value)
    return float(dec_value)
5,087 | import dataclasses
import datetime
from collections import defaultdict, deque
from decimal import Decimal
from enum import Enum
from pathlib import Path, PurePath
from re import Pattern
from types import GeneratorType
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
from uuid import UUID
from pydantic import BaseModel
from pydantic.color import Color
from pydantic.types import SecretBytes, SecretStr
from aws_lambda_powertools.event_handler.openapi.compat import _model_dump
from aws_lambda_powertools.event_handler.openapi.types import IncEx
def generate_encoders_by_class_tuples(
    type_encoder_map: Dict[Any, Callable[[Any], Any]],
) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]:
    """
    Invert a type->encoder mapping into encoder->(types, ...).

    Grouping all types sharing the same encoder into one tuple lets callers
    match many classes with a single isinstance(obj, types) call.
    """
    encoders: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple)
    for type_, encoder in type_encoder_map.items():
        encoders[encoder] += (type_,)
    return encoders
5,088 |
The provided code snippet includes necessary dependencies for implementing the `generate_swagger_html` function. Write a Python function `def generate_swagger_html(spec: str, path: str, swagger_js: str, swagger_css: str, swagger_base_url: str) -> str` to solve the following problem:
Generate Swagger UI HTML page Parameters ---------- spec: str The OpenAPI spec path: str The path to the Swagger documentation swagger_js: str The URL to (or inline content of) the Swagger UI JavaScript file swagger_css: str The URL to (or inline content of) the Swagger UI CSS file swagger_base_url: str Base URL for Swagger assets; when empty, JS/CSS are embedded inline
Here is the function:
def generate_swagger_html(spec: str, path: str, swagger_js: str, swagger_css: str, swagger_base_url: str) -> str:
    """
    Generate Swagger UI HTML page

    Parameters
    ----------
    spec: str
        The OpenAPI spec
    path: str
        The path to the Swagger documentation
    swagger_js: str
        The Swagger UI JavaScript: a URL when swagger_base_url is set, otherwise inline source
    swagger_css: str
        The Swagger UI CSS: a URL when swagger_base_url is set, otherwise inline source
    swagger_base_url: str
        Base URL for Swagger assets; when empty, JS/CSS are embedded directly in the page

    Returns
    -------
    str
        The rendered HTML page for the Swagger UI.
    """
    # If Swagger base URL is present, generate HTML content with linked CSS and JavaScript files
    # If no Swagger base URL is provided, include CSS and JavaScript directly in the HTML
    if swagger_base_url:
        swagger_css_content = f"<link rel='stylesheet' type='text/css' href='{swagger_css}'>"
        swagger_js_content = f"<script src='{swagger_js}'></script>"
    else:
        swagger_css_content = f"<style>{swagger_css}</style>"
        swagger_js_content = f"<script>{swagger_js}</script>"

    return f"""
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <title>Swagger UI</title>
    <meta
        http-equiv="Cache-control"
        content="no-cache, no-store, must-revalidate"
    />
    {swagger_css_content}
</head>
<body>
    <div id="swagger-ui">
        Loading...
    </div>
</body>
{swagger_js_content}
<script>
  var swaggerUIOptions = {{
    dom_id: "#swagger-ui",
    docExpansion: "list",
    deepLinking: true,
    filter: true,
    layout: "BaseLayout",
    showExtensions: true,
    showCommonExtensions: true,
    spec: {spec},
    presets: [
      SwaggerUIBundle.presets.apis,
      SwaggerUIBundle.SwaggerUIStandalonePreset
    ],
    plugins: [
      SwaggerUIBundle.plugins.DownloadUrl
    ]
  }}

  var ui = SwaggerUIBundle(swaggerUIOptions)
  ui.specActions.updateUrl('{path}?format=json');
</script>
</html>
""".strip()
5,089 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
def alias(self) -> str:
def required(self) -> bool:
def default(self) -> Any:
def type_(self) -> Any:
def __post_init__(self) -> None:
def get_default(self) -> Any:
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
def __hash__(self) -> int:
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
ModelNameMap = Dict[Union[Type["BaseModel"], Type[Enum]], str]
def get_schema_from_model_field(
*,
field: ModelField,
model_name_map: ModelNameMap,
field_mapping: Dict[
Tuple[ModelField, Literal["validation", "serialization"]],
JsonSchemaValue,
],
) -> Dict[str, Any]:
json_schema = field_mapping[(field, field.mode)]
if "$ref" not in json_schema:
# MAINTENANCE: remove when deprecating Pydantic v1
# Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207
json_schema["title"] = field.field_info.title or field.alias.title().replace("_", " ")
return json_schema | null |
5,090 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
def alias(self) -> str:
def required(self) -> bool:
def default(self) -> Any:
def type_(self) -> Any:
def __post_init__(self) -> None:
def get_default(self) -> Any:
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
def __hash__(self) -> int:
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
class GenerateJsonSchema:
ModelNameMap = Dict[Union[Type["BaseModel"], Type[Enum]], str]
def get_definitions(
*,
fields: List[ModelField],
schema_generator: GenerateJsonSchema,
model_name_map: ModelNameMap,
) -> Tuple[
Dict[
Tuple[ModelField, Literal["validation", "serialization"]],
Dict[str, Any],
],
Dict[str, Dict[str, Any]],
]:
inputs = [(field, field.mode, field._type_adapter.core_schema) for field in fields]
field_mapping, definitions = schema_generator.generate_definitions(inputs=inputs)
return field_mapping, definitions | null |
5,091 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
ModelNameMap = Dict[Union[Type["BaseModel"], Type[Enum]], str]
def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
return {} | null |
5,092 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
def model_rebuild(model: Type[BaseModel]) -> None:
model.model_rebuild() | null |
5,093 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool:
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
at_least_one_scalar_sequence = False
for arg in get_args(annotation):
if field_annotation_is_scalar_sequence(arg):
at_least_one_scalar_sequence = True
continue
elif not field_annotation_is_scalar(arg):
return False
return at_least_one_scalar_sequence
return field_annotation_is_sequence(annotation) and all(
field_annotation_is_scalar(sub_annotation) for sub_annotation in get_args(annotation)
)
def is_scalar_sequence_field(field: ModelField) -> bool:
return field_annotation_is_scalar_sequence(field.field_info.annotation) | null |
5,094 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def field_annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
return _annotation_is_sequence(annotation) or _annotation_is_sequence(get_origin(annotation))
def is_sequence_field(field: ModelField) -> bool:
return field_annotation_is_sequence(field.field_info.annotation) | null |
5,095 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool:
if lenient_issubclass(annotation, bytes):
return True
origin = get_origin(annotation)
if origin is Union or origin is UnionType:
for arg in get_args(annotation):
if lenient_issubclass(arg, bytes):
return True
return False
def is_bytes_field(field: ModelField) -> bool:
return is_bytes_or_nonable_bytes_annotation(field.type_) | null |
5,096 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
def alias(self) -> str:
def required(self) -> bool:
def default(self) -> Any:
def type_(self) -> Any:
def __post_init__(self) -> None:
def get_default(self) -> Any:
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
def __hash__(self) -> int:
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def is_bytes_sequence_annotation(annotation: Any) -> bool:
def is_bytes_sequence_field(field: ModelField) -> bool:
return is_bytes_sequence_annotation(field.type_) | null |
5,097 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
sequence_annotation_to_type = {
Sequence: list,
List: list,
list: list,
Tuple: tuple,
tuple: tuple,
Set: set,
set: set,
FrozenSet: frozenset,
frozenset: frozenset,
Deque: deque,
deque: deque,
}
sequence_types = tuple(sequence_annotation_to_type.keys())
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
def alias(self) -> str:
def required(self) -> bool:
def default(self) -> Any:
def type_(self) -> Any:
def __post_init__(self) -> None:
def get_default(self) -> Any:
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
def __hash__(self) -> int:
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
origin_type = get_origin(field.field_info.annotation) or field.field_info.annotation
if not issubclass(origin_type, sequence_types): # type: ignore[arg-type]
raise AssertionError(f"Expected sequence type, got {origin_type}")
return sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return] | null |
5,098 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def create_body_model(*, fields: Sequence[ModelField], model_name: str) -> Type[BaseModel]:
field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields}
model: Type[BaseModel] = create_model(model_name, **field_params)
return model | null |
5,099 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
def _model_dump(model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any) -> Any:
return model.model_dump(mode=mode, **kwargs) | null |
5,100 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
def model_json(model: BaseModel, **kwargs: Any) -> Any:
return model.model_dump_json(**kwargs) | null |
5,101 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
def alias(self) -> str:
def required(self) -> bool:
def default(self) -> Any:
def type_(self) -> Any:
def __post_init__(self) -> None:
def get_default(self) -> Any:
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
def __hash__(self) -> int:
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
ModelNameMap = Dict[Union[Type["BaseModel"], Type[Enum]], str]
COMPONENT_REF_PREFIX = "#/components/schemas/"
def get_schema_from_model_field(
*,
field: ModelField,
model_name_map: ModelNameMap,
field_mapping: Dict[
Tuple[ModelField, Literal["validation", "serialization"]],
JsonSchemaValue,
],
) -> Dict[str, Any]:
return field_schema(
field,
model_name_map=model_name_map,
ref_prefix=COMPONENT_REF_PREFIX,
)[0] | null |
5,102 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
class GenerateJsonSchema:
ref_template: str
def get_model_definitions(
*,
flat_models: Set[Union[Type[BaseModel], Type[Enum]]],
model_name_map: ModelNameMap,
) -> Dict[str, Any]:
definitions: Dict[str, Dict[str, Any]] = {}
for model in flat_models:
m_schema, m_definitions, _ = model_process_schema(
model,
model_name_map=model_name_map,
ref_prefix=COMPONENT_REF_PREFIX,
)
definitions.update(m_definitions)
model_name = model_name_map[model]
if "description" in m_schema:
m_schema["description"] = m_schema["description"].split("\f")[0]
definitions[model_name] = m_schema
return definitions
ModelNameMap = Dict[Union[Type["BaseModel"], Type[Enum]], str]
def get_definitions(
*,
fields: List[ModelField],
schema_generator: GenerateJsonSchema,
model_name_map: ModelNameMap,
) -> Tuple[
Dict[Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue],
Dict[str, Dict[str, Any]],
]:
models = get_flat_models_from_fields(fields, known_models=set())
return {}, get_model_definitions(flat_models=models, model_name_map=model_name_map) | null |
5,103 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
ModelNameMap = Dict[Union[Type["BaseModel"], Type[Enum]], str]
def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
models = get_flat_models_from_fields(fields, known_models=set())
return get_model_name_map(models) | null |
5,104 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
def model_rebuild(model: Type[BaseModel]) -> None:
model.update_forward_refs() | null |
5,105 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def is_pv1_scalar_sequence_field(field: ModelField) -> bool:
if (field.shape in sequence_shapes) and not lenient_issubclass(field.type_, BaseModel):
if field.sub_fields is not None:
for sub_field in field.sub_fields:
if not is_pv1_scalar_field(sub_field):
return False
return True
if _annotation_is_sequence(field.type_):
return True
return False
def is_scalar_sequence_field(field: ModelField) -> bool:
return is_pv1_scalar_sequence_field(field) | null |
5,106 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
def alias(self) -> str:
def required(self) -> bool:
def default(self) -> Any:
def type_(self) -> Any:
def __post_init__(self) -> None:
def get_default(self) -> Any:
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
def __hash__(self) -> int:
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
def is_sequence_field(field: ModelField) -> bool:
return field.shape in sequence_shapes or _annotation_is_sequence(field.type_) | null |
5,107 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def is_bytes_field(field: ModelField) -> bool:
return lenient_issubclass(field.type_, bytes) | null |
5,108 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def is_bytes_sequence_field(field: ModelField) -> bool:
return field.shape in sequence_shapes and lenient_issubclass(field.type_, bytes) # type: ignore[attr-defined] | null |
5,109 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
def alias(self) -> str:
def required(self) -> bool:
def default(self) -> Any:
def type_(self) -> Any:
def __post_init__(self) -> None:
def get_default(self) -> Any:
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
def __hash__(self) -> int:
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def create_body_model(*, fields: Sequence[ModelField], model_name: str) -> Type[BaseModel]:
body_model = create_model(model_name)
for f in fields:
body_model.__fields__[f.name] = f # type: ignore[index]
return body_model | null |
5,110 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
if PYDANTIC_V2:
from pydantic import TypeAdapter, ValidationError
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic.fields import FieldInfo
from pydantic._internal._utils import lenient_issubclass
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from aws_lambda_powertools.event_handler.openapi.types import IncEx
Undefined = PydanticUndefined
Required = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
class ModelField:
field_info: FieldInfo
name: str
mode: Literal["validation", "serialization"] = "validation"
def alias(self) -> str:
value = self.field_info.alias
return value if value is not None else self.name
def required(self) -> bool:
return self.field_info.is_required()
def default(self) -> Any:
return self.get_default()
def type_(self) -> Any:
return self.field_info.annotation
def __post_init__(self) -> None:
self._type_adapter: TypeAdapter[Any] = TypeAdapter(
Annotated[self.field_info.annotation, self.field_info],
)
def get_default(self) -> Any:
if self.field_info.is_required():
return Undefined
return self.field_info.get_default(call_default_factory=True)
def serialize(
self,
value: Any,
*,
mode: Literal["json", "python"] = "json",
include: Union[IncEx, None] = None,
exclude: Union[IncEx, None] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
return self._type_adapter.dump_python(
value,
mode=mode,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
def validate(
self, value: Any, values: Dict[str, Any] = {}, *, loc: Tuple[Union[int, str], ...] = ()
) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
try:
return (self._type_adapter.validate_python(value, from_attributes=True), None)
except ValidationError as exc:
return None, _regenerate_error_with_loc(errors=exc.errors(), loc_prefix=loc)
def __hash__(self) -> int:
# Each ModelField is unique for our purposes
return id(self)
else:
from pydantic import BaseModel, ValidationError
from pydantic.fields import (
ModelField,
Required,
Undefined,
UndefinedType,
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SINGLETON,
)
from pydantic.schema import (
field_schema,
get_annotation_from_field_info,
get_flat_models_from_fields,
get_model_name_map,
model_process_schema,
)
from pydantic.errors import MissingError
from pydantic.error_wrappers import ErrorWrapper
from pydantic.utils import lenient_issubclass
from pydantic.typing import evaluate_forwardref
JsonSchemaValue = Dict[str, Any]
sequence_shapes = [
SHAPE_LIST,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
]
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
return sequence_shape_to_type[field.shape](value) | null |
5,111 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
def _model_dump(model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any) -> Any:
return model.dict(**kwargs) | null |
5,112 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
def model_json(model: BaseModel, **kwargs: Any) -> Any:
return model.json(**kwargs) | null |
5,113 | from collections import deque
from copy import copy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Type, Union, FrozenSet, Deque, Sequence, Mapping
from typing_extensions import Annotated, Literal, get_origin, get_args
from pydantic import BaseModel, create_model
from pydantic.fields import FieldInfo
from aws_lambda_powertools.event_handler.openapi.pydantic_loader import PYDANTIC_V2
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
ModelNameMap,
UnionType,
)
sequence_types = tuple(sequence_annotation_to_type.keys())
def value_is_sequence(value: Any) -> bool:
return isinstance(value, sequence_types) and not isinstance(value, (str, bytes)) # type: ignore[arg-type] | null |
5,114 | import base64
import json
import logging
import re
import traceback
import warnings
import zlib
from abc import ABC, abstractmethod
from enum import Enum
from functools import partial
from http import HTTPStatus
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Generic,
List,
Mapping,
Match,
Optional,
Pattern,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from aws_lambda_powertools.event_handler import content_types
from aws_lambda_powertools.event_handler.exceptions import NotFoundError, ServiceError
from aws_lambda_powertools.event_handler.openapi.constants import DEFAULT_API_VERSION, DEFAULT_OPENAPI_VERSION
from aws_lambda_powertools.event_handler.openapi.exceptions import RequestValidationError
from aws_lambda_powertools.event_handler.openapi.swagger_ui.html import generate_swagger_html
from aws_lambda_powertools.event_handler.openapi.types import (
COMPONENT_REF_PREFIX,
METHODS_WITH_BODY,
OpenAPIResponse,
OpenAPIResponseContentModel,
OpenAPIResponseContentSchema,
validation_error_definition,
validation_error_response_definition,
)
from aws_lambda_powertools.event_handler.util import _FrozenDict
from aws_lambda_powertools.shared.cookies import Cookie
from aws_lambda_powertools.shared.functions import powertools_dev_is_set
from aws_lambda_powertools.shared.json_encoder import Encoder
from aws_lambda_powertools.shared.types import Literal
from aws_lambda_powertools.utilities.data_classes import (
ALBEvent,
APIGatewayProxyEvent,
APIGatewayProxyEventV2,
BedrockAgentEvent,
LambdaFunctionUrlEvent,
VPCLatticeEvent,
VPCLatticeEventV2,
)
from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent
from aws_lambda_powertools.utilities.typing import LambdaContext
logger = logging.getLogger(__name__)
class Response(Generic[ResponseT]):
"""Response data class that provides greater control over what is returned from the proxy event"""
def __init__(
self,
status_code: int,
content_type: Optional[str] = None,
body: Optional[ResponseT] = None,
headers: Optional[Mapping[str, Union[str, List[str]]]] = None,
cookies: Optional[List[Cookie]] = None,
compress: Optional[bool] = None,
):
"""
Parameters
----------
status_code: int
Http status code, example 200
content_type: str
Optionally set the Content-Type header, example "application/json". Note this will be merged into any
provided http headers
body: Union[str, bytes, None]
Optionally set the response body. Note: bytes body will be automatically base64 encoded
headers: Mapping[str, Union[str, List[str]]]
Optionally set specific http headers. Setting "Content-Type" here would override the `content_type` value.
cookies: list[Cookie]
Optionally set cookies.
"""
self.status_code = status_code
self.body = body
self.base64_encoded = False
self.headers: Dict[str, Union[str, List[str]]] = dict(headers) if headers else {}
self.cookies = cookies or []
self.compress = compress
self.content_type = content_type
if content_type:
self.headers.setdefault("Content-Type", content_type)
def is_json(self) -> bool:
"""
Returns True if the response is JSON, based on the Content-Type.
"""
content_type = self.headers.get("Content-Type", "")
if isinstance(content_type, list):
content_type = content_type[0]
return content_type.startswith("application/json")
The provided code snippet includes necessary dependencies for implementing the `_registered_api_adapter` function. Write a Python function `def _registered_api_adapter( app: "ApiGatewayResolver", next_middleware: Callable[..., Any], ) -> Union[Dict, Tuple, Response]` to solve the following problem:
Calls the registered API using the "_route_args" from the Resolver context to ensure the last call in the chain will match the API route function signature and ensure that Powertools passes the API route handler the expected arguments. **IMPORTANT: This internal middleware ensures the actual API route is called with the correct call signature and it MUST be the final frame in the middleware stack. This can only be removed when the API Route function accepts `app: BaseRouter` as the first argument - which is the breaking change. Parameters ---------- app: ApiGatewayResolver The API Gateway resolver next_middleware: Callable[..., Any] The function to handle the API Returns ------- Response The API Response Object
Here is the function:
def _registered_api_adapter(
app: "ApiGatewayResolver",
next_middleware: Callable[..., Any],
) -> Union[Dict, Tuple, Response]:
"""
Calls the registered API using the "_route_args" from the Resolver context to ensure the last call
in the chain will match the API route function signature and ensure that Powertools passes the API
route handler the expected arguments.
**IMPORTANT: This internal middleware ensures the actual API route is called with the correct call signature
and it MUST be the final frame in the middleware stack. This can only be removed when the API Route
function accepts `app: BaseRouter` as the first argument - which is the breaking change.
Parameters
----------
app: ApiGatewayResolver
The API Gateway resolver
next_middleware: Callable[..., Any]
The function to handle the API
Returns
-------
Response
The API Response Object
"""
route_args: Dict = app.context.get("_route_args", {})
logger.debug(f"Calling API Route Handler: {route_args}")
return app._to_response(next_middleware(**route_args)) | Calls the registered API using the "_route_args" from the Resolver context to ensure the last call in the chain will match the API route function signature and ensure that Powertools passes the API route handler the expected arguments. **IMPORTANT: This internal middleware ensures the actual API route is called with the correct call signature and it MUST be the final frame in the middleware stack. This can only be removed when the API Route function accepts `app: BaseRouter` as the first argument - which is the breaking change. Parameters ---------- app: ApiGatewayResolver The API Gateway resolver next_middleware: Callable[..., Any] The function to handle the API Returns ------- Response The API Response Object |
5,115 | import dataclasses
import json
import logging
from copy import deepcopy
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple
from pydantic import BaseModel
from aws_lambda_powertools.event_handler import Response
from aws_lambda_powertools.event_handler.api_gateway import Route
from aws_lambda_powertools.event_handler.middlewares import BaseMiddlewareHandler, NextMiddleware
from aws_lambda_powertools.event_handler.openapi.compat import (
ModelField,
_model_dump,
_normalize_errors,
_regenerate_error_with_loc,
get_missing_field_error,
)
from aws_lambda_powertools.event_handler.openapi.dependant import is_scalar_field
from aws_lambda_powertools.event_handler.openapi.encoders import jsonable_encoder
from aws_lambda_powertools.event_handler.openapi.exceptions import RequestValidationError
from aws_lambda_powertools.event_handler.openapi.params import Param
from aws_lambda_powertools.event_handler.openapi.types import IncEx
from aws_lambda_powertools.event_handler.types import EventHandlerInstance
def _validate_field(
*,
field: ModelField,
value: Any,
loc: Tuple[str, ...],
existing_errors: List[Dict[str, Any]],
):
"""
Validate a field, and append any errors to the existing_errors list.
"""
validated_value, errors = field.validate(value, value, loc=loc)
if isinstance(errors, list):
processed_errors = _regenerate_error_with_loc(errors=errors, loc_prefix=())
existing_errors.extend(processed_errors)
elif errors:
existing_errors.append(errors)
return validated_value
class Param(FieldInfo):
"""
A class used internally to represent a parameter in a path operation.
"""
in_: ParamTypes
def __init__(
self,
default: Any = Undefined,
*,
default_factory: Union[Callable[[], Any], None] = _Unset,
annotation: Optional[Any] = None,
alias: Optional[str] = None,
alias_priority: Union[int, None] = _Unset,
# MAINTENANCE: update when deprecating Pydantic v1, import these types
# MAINTENANCE: validation_alias: str | AliasPath | AliasChoices | None
validation_alias: Union[str, None] = None,
serialization_alias: Union[str, None] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
pattern: Optional[str] = None,
discriminator: Union[str, None] = None,
strict: Union[bool, None] = _Unset,
multiple_of: Union[float, None] = _Unset,
allow_inf_nan: Union[bool, None] = _Unset,
max_digits: Union[int, None] = _Unset,
decimal_places: Union[int, None] = _Unset,
examples: Optional[List[Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
json_schema_extra: Union[Dict[str, Any], None] = None,
**extra: Any,
):
"""
Constructs a new Param.
Parameters
----------
default: Any
The default value of the parameter
default_factory: Callable[[], Any], optional
Callable that will be called when a default value is needed for this field
annotation: Any, optional
The type annotation of the parameter
alias: str, optional
The public name of the field
alias_priority: int, optional
Priority of the alias. This affects whether an alias generator is used
validation_alias: str | AliasPath | AliasChoices | None, optional
Alias to be used for validation only
serialization_alias: str | AliasPath | AliasChoices | None, optional
Alias to be used for serialization only
title: str, optional
The title of the parameter
description: str, optional
The description of the parameter
gt: float, optional
Only applies to numbers, required the field to be "greater than"
ge: float, optional
Only applies to numbers, required the field to be "greater than or equal"
lt: float, optional
Only applies to numbers, required the field to be "less than"
le: float, optional
Only applies to numbers, required the field to be "less than or equal"
min_length: int, optional
Only applies to strings, required the field to have a minimum length
max_length: int, optional
Only applies to strings, required the field to have a maximum length
pattern: str, optional
Only applies to strings, requires the field match against a regular expression pattern string
discriminator: str, optional
Parameter field name for discriminating the type in a tagged union
strict: bool, optional
Enables Pydantic's strict mode for the field
multiple_of: float, optional
Only applies to numbers, requires the field to be a multiple of the given value
allow_inf_nan: bool, optional
Only applies to numbers, requires the field to allow infinity and NaN values
max_digits: int, optional
Only applies to Decimals, requires the field to have a maxmium number of digits within the decimal.
decimal_places: int, optional
Only applies to Decimals, requires the field to have at most a number of decimal places
examples: List[Any], optional
A list of examples for the parameter
deprecated: bool, optional
If `True`, the parameter will be marked as deprecated
include_in_schema: bool, optional
If `False`, the parameter will be excluded from the generated OpenAPI schema
json_schema_extra: Dict[str, Any], optional
Extra values to include in the generated OpenAPI schema
"""
self.deprecated = deprecated
self.include_in_schema = include_in_schema
kwargs = dict(
default=default,
default_factory=default_factory,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
discriminator=discriminator,
multiple_of=multiple_of,
allow_nan=allow_inf_nan,
max_digits=max_digits,
decimal_places=decimal_places,
**extra,
)
if examples is not None:
kwargs["examples"] = examples
current_json_schema_extra = json_schema_extra or extra
if PYDANTIC_V2:
kwargs.update(
{
"annotation": annotation,
"alias_priority": alias_priority,
"validation_alias": validation_alias,
"serialization_alias": serialization_alias,
"strict": strict,
"json_schema_extra": current_json_schema_extra,
"pattern": pattern,
},
)
else:
kwargs["regex"] = pattern
kwargs.update(**current_json_schema_extra)
use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset}
super().__init__(**use_kwargs)
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.default})"
The provided code snippet includes necessary dependencies for implementing the `_request_params_to_args` function. Write a Python function `def _request_params_to_args( required_params: Sequence[ModelField], received_params: Mapping[str, Any], ) -> Tuple[Dict[str, Any], List[Any]]` to solve the following problem:
Convert the request params to a dictionary of values using validation, and returns a list of errors.
Here is the function:
def _request_params_to_args(
required_params: Sequence[ModelField],
received_params: Mapping[str, Any],
) -> Tuple[Dict[str, Any], List[Any]]:
"""
Convert the request params to a dictionary of values using validation, and returns a list of errors.
"""
values = {}
errors = []
for field in required_params:
field_info = field.field_info
# To ensure early failure, we check if it's not an instance of Param.
if not isinstance(field_info, Param):
raise AssertionError(f"Expected Param field_info, got {field_info}")
value = received_params.get(field.alias)
loc = (field_info.in_.value, field.alias)
# If we don't have a value, see if it's required or has a default
if value is None:
if field.required:
errors.append(get_missing_field_error(loc=loc))
else:
values[field.name] = deepcopy(field.default)
continue
# Finally, validate the value
values[field.name] = _validate_field(field=field, value=value, loc=loc, existing_errors=errors)
return values, errors | Convert the request params to a dictionary of values using validation, and returns a list of errors. |
5,116 | import dataclasses
import json
import logging
from copy import deepcopy
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple
from pydantic import BaseModel
from aws_lambda_powertools.event_handler import Response
from aws_lambda_powertools.event_handler.api_gateway import Route
from aws_lambda_powertools.event_handler.middlewares import BaseMiddlewareHandler, NextMiddleware
from aws_lambda_powertools.event_handler.openapi.compat import (
ModelField,
_model_dump,
_normalize_errors,
_regenerate_error_with_loc,
get_missing_field_error,
)
from aws_lambda_powertools.event_handler.openapi.dependant import is_scalar_field
from aws_lambda_powertools.event_handler.openapi.encoders import jsonable_encoder
from aws_lambda_powertools.event_handler.openapi.exceptions import RequestValidationError
from aws_lambda_powertools.event_handler.openapi.params import Param
from aws_lambda_powertools.event_handler.openapi.types import IncEx
from aws_lambda_powertools.event_handler.types import EventHandlerInstance
def _validate_field(
*,
field: ModelField,
value: Any,
loc: Tuple[str, ...],
existing_errors: List[Dict[str, Any]],
):
"""
Validate a field, and append any errors to the existing_errors list.
"""
validated_value, errors = field.validate(value, value, loc=loc)
if isinstance(errors, list):
processed_errors = _regenerate_error_with_loc(errors=errors, loc_prefix=())
existing_errors.extend(processed_errors)
elif errors:
existing_errors.append(errors)
return validated_value
def _get_embed_body(
*,
field: ModelField,
required_params: List[ModelField],
received_body: Optional[Dict[str, Any]],
) -> Tuple[Optional[Dict[str, Any]], bool]:
field_info = field.field_info
embed = getattr(field_info, "embed", None)
# If the field is an embed, and the field alias is omitted, we need to wrap the received body in the field alias.
field_alias_omitted = len(required_params) == 1 and not embed
if field_alias_omitted:
received_body = {field.alias: received_body}
return received_body, field_alias_omitted
The provided code snippet includes necessary dependencies for implementing the `_request_body_to_args` function. Write a Python function `def _request_body_to_args( required_params: List[ModelField], received_body: Optional[Dict[str, Any]], ) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]` to solve the following problem:
Convert the request body to a dictionary of values using validation, and returns a list of errors.
Here is the function:
def _request_body_to_args(
required_params: List[ModelField],
received_body: Optional[Dict[str, Any]],
) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]:
"""
Convert the request body to a dictionary of values using validation, and returns a list of errors.
"""
values: Dict[str, Any] = {}
errors: List[Dict[str, Any]] = []
received_body, field_alias_omitted = _get_embed_body(
field=required_params[0],
required_params=required_params,
received_body=received_body,
)
for field in required_params:
# This sets the location to:
# { "user": { object } } if field.alias == user
# { { object } if field_alias is omitted
loc: Tuple[str, ...] = ("body", field.alias)
if field_alias_omitted:
loc = ("body",)
value: Optional[Any] = None
# Now that we know what to look for, try to get the value from the received body
if received_body is not None:
try:
value = received_body.get(field.alias)
except AttributeError:
errors.append(get_missing_field_error(loc))
continue
# Determine if the field is required
if value is None:
if field.required:
errors.append(get_missing_field_error(loc))
else:
values[field.name] = deepcopy(field.default)
continue
# MAINTENANCE: Handle byte and file fields
# Finally, validate the value
values[field.name] = _validate_field(field=field, value=value, loc=loc, existing_errors=errors)
return values, errors | Convert the request body to a dictionary of values using validation, and returns a list of errors. |
5,117 | import dataclasses
import json
import logging
from copy import deepcopy
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple
from pydantic import BaseModel
from aws_lambda_powertools.event_handler import Response
from aws_lambda_powertools.event_handler.api_gateway import Route
from aws_lambda_powertools.event_handler.middlewares import BaseMiddlewareHandler, NextMiddleware
from aws_lambda_powertools.event_handler.openapi.compat import (
ModelField,
_model_dump,
_normalize_errors,
_regenerate_error_with_loc,
get_missing_field_error,
)
from aws_lambda_powertools.event_handler.openapi.dependant import is_scalar_field
from aws_lambda_powertools.event_handler.openapi.encoders import jsonable_encoder
from aws_lambda_powertools.event_handler.openapi.exceptions import RequestValidationError
from aws_lambda_powertools.event_handler.openapi.params import Param
from aws_lambda_powertools.event_handler.openapi.types import IncEx
from aws_lambda_powertools.event_handler.types import EventHandlerInstance
The provided code snippet includes necessary dependencies for implementing the `_normalize_multi_query_string_with_param` function. Write a Python function `def _normalize_multi_query_string_with_param( query_string: Dict[str, List[str]], params: Sequence[ModelField], ) -> Dict[str, Any]` to solve the following problem:
Extract and normalize resolved_query_string_parameters Parameters ---------- query_string: Dict A dictionary containing the initial query string parameters. params: Sequence[ModelField] A sequence of ModelField objects representing parameters. Returns ------- A dictionary containing the processed multi_query_string_parameters.
Here is the function:
def _normalize_multi_query_string_with_param(
query_string: Dict[str, List[str]],
params: Sequence[ModelField],
) -> Dict[str, Any]:
"""
Extract and normalize resolved_query_string_parameters
Parameters
----------
query_string: Dict
A dictionary containing the initial query string parameters.
params: Sequence[ModelField]
A sequence of ModelField objects representing parameters.
Returns
-------
A dictionary containing the processed multi_query_string_parameters.
"""
resolved_query_string: Dict[str, Any] = query_string
for param in filter(is_scalar_field, params):
try:
# if the target parameter is a scalar, we keep the first value of the query string
# regardless if there are more in the payload
resolved_query_string[param.alias] = query_string[param.alias][0]
except KeyError:
pass
return resolved_query_string | Extract and normalize resolved_query_string_parameters Parameters ---------- query_string: Dict A dictionary containing the initial query string parameters. params: Sequence[ModelField] A sequence of ModelField objects representing parameters. Returns ------- A dictionary containing the processed multi_query_string_parameters. |
5,118 | import dataclasses
import json
import logging
from copy import deepcopy
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple
from pydantic import BaseModel
from aws_lambda_powertools.event_handler import Response
from aws_lambda_powertools.event_handler.api_gateway import Route
from aws_lambda_powertools.event_handler.middlewares import BaseMiddlewareHandler, NextMiddleware
from aws_lambda_powertools.event_handler.openapi.compat import (
ModelField,
_model_dump,
_normalize_errors,
_regenerate_error_with_loc,
get_missing_field_error,
)
from aws_lambda_powertools.event_handler.openapi.dependant import is_scalar_field
from aws_lambda_powertools.event_handler.openapi.encoders import jsonable_encoder
from aws_lambda_powertools.event_handler.openapi.exceptions import RequestValidationError
from aws_lambda_powertools.event_handler.openapi.params import Param
from aws_lambda_powertools.event_handler.openapi.types import IncEx
from aws_lambda_powertools.event_handler.types import EventHandlerInstance
The provided code snippet includes necessary dependencies for implementing the `_normalize_multi_header_values_with_param` function. Write a Python function `def _normalize_multi_header_values_with_param(headers: Optional[Dict[str, str]], params: Sequence[ModelField])` to solve the following problem:
Extract and normalize resolved_headers_field Parameters ---------- headers: Dict A dictionary containing the initial header parameters. params: Sequence[ModelField] A sequence of ModelField objects representing parameters. Returns ------- A dictionary containing the processed headers.
Here is the function:
def _normalize_multi_header_values_with_param(headers: Optional[Dict[str, str]], params: Sequence[ModelField]):
"""
Extract and normalize resolved_headers_field
Parameters
----------
headers: Dict
A dictionary containing the initial header parameters.
params: Sequence[ModelField]
A sequence of ModelField objects representing parameters.
Returns
-------
A dictionary containing the processed headers.
"""
if headers:
for param in filter(is_scalar_field, params):
try:
if len(headers[param.alias]) == 1:
# if the target parameter is a scalar and the list contains only 1 element
# we keep the first value of the headers regardless if there are more in the payload
headers[param.alias] = headers[param.alias][0]
except KeyError:
pass
return headers | Extract and normalize resolved_headers_field Parameters ---------- headers: Dict A dictionary containing the initial header parameters. params: Sequence[ModelField] A sequence of ModelField objects representing parameters. Returns ------- A dictionary containing the processed headers. |
5,119 | import functools
import inspect
import logging
import os
from typing import Any, Callable, Optional
from ..shared import constants
from ..shared.functions import resolve_truthy_env_var_choice
from ..tracing import Tracer
from .exceptions import MiddlewareInvalidArgumentError
logger = logging.getLogger(__name__)
def resolve_truthy_env_var_choice(env: str, choice: Optional[bool] = None) -> bool:
"""Pick explicit choice over truthy env value, if available, otherwise return truthy env value
NOTE: Environment variable should be resolved by the caller.
Parameters
----------
env : str
environment variable actual value
choice : bool
explicit choice
Returns
-------
choice : str
resolved choice as either bool or environment value
"""
return choice if choice is not None else strtobool(env)
class MiddlewareInvalidArgumentError(Exception):
"""When middleware receives non keyword=arguments"""
The provided code snippet includes necessary dependencies for implementing the `lambda_handler_decorator` function. Write a Python function `def lambda_handler_decorator(decorator: Optional[Callable] = None, trace_execution: Optional[bool] = None) -> Callable` to solve the following problem:
Decorator factory for decorating Lambda handlers. You can use lambda_handler_decorator to create your own middlewares, where your function signature follows: `fn(handler, event, context)` Custom keyword arguments are also supported e.g. `fn(handler, event, context, option=value)` Middlewares created by this factory supports tracing to help you quickly troubleshoot any overhead that custom middlewares may cause - They will appear as custom subsegments. **Non-key value params are not supported** e.g. `fn(handler, event, context, option)` Environment variables --------------------- POWERTOOLS_TRACE_MIDDLEWARES : str uses `aws_lambda_powertools.tracing.Tracer` to create sub-segments per middleware (e.g. `"true", "True", "TRUE"`) Parameters ---------- decorator: Callable Middleware to be wrapped by this factory trace_execution: bool Flag to explicitly enable trace execution for middlewares.\n `Env POWERTOOLS_TRACE_MIDDLEWARES="true"` Example ------- **Create a middleware no params** from aws_lambda_powertools.middleware_factory import lambda_handler_decorator @lambda_handler_decorator def log_response(handler, event, context): any_code_to_execute_before_lambda_handler() response = handler(event, context) any_code_to_execute_after_lambda_handler() print(f"Lambda handler response: {response}") @log_response def lambda_handler(event, context): return True **Create a middleware with params** from aws_lambda_powertools.middleware_factory import lambda_handler_decorator @lambda_handler_decorator def obfuscate_sensitive_data(handler, event, context, fields=None): # Obfuscate email before calling Lambda handler if fields: for field in fields: field = event.get(field, "") event[field] = obfuscate_pii(field) response = handler(event, context) print(f"Lambda handler response: {response}") @obfuscate_sensitive_data(fields=["email"]) def lambda_handler(event, context): return True **Trace execution of custom middleware** from aws_lambda_powertools import Tracer from 
aws_lambda_powertools.middleware_factory import lambda_handler_decorator tracer = Tracer(service="payment") # or via env var ... @lambda_handler_decorator(trace_execution=True) def log_response(handler, event, context): ... @tracer.capture_lambda_handler @log_response def lambda_handler(event, context): return True Limitations ----------- * Async middlewares not supported * Classes, class methods middlewares not supported Raises ------ MiddlewareInvalidArgumentError When middleware receives non keyword=arguments
Here is the function:
def lambda_handler_decorator(decorator: Optional[Callable] = None, trace_execution: Optional[bool] = None) -> Callable:
    """Decorator factory for building Lambda handler middlewares.

    Wrap a function with signature ``fn(handler, event, context, **kwargs)``
    to turn it into a reusable middleware decorator. Only keyword arguments
    are supported when parameterizing a middleware; positional extras raise.

    Middlewares created by this factory support tracing: each execution can
    appear as a custom X-Ray subsegment to help spot middleware overhead.

    Parameters
    ----------
    decorator : Callable, optional
        Middleware function to be wrapped by this factory.
    trace_execution : bool, optional
        Explicitly create a subsegment per middleware execution. Can also be
        enabled with the ``POWERTOOLS_TRACE_MIDDLEWARES`` environment variable.

    Raises
    ------
    MiddlewareInvalidArgumentError
        When a middleware receives non-keyword arguments.
    """
    # Parameterized usage (@lambda_handler_decorator(trace_execution=True))
    # calls the factory without ``decorator``; re-enter once it is known.
    if decorator is None:
        return functools.partial(lambda_handler_decorator, trace_execution=trace_execution)

    # Explicit argument wins over the environment variable.
    trace_execution = resolve_truthy_env_var_choice(
        env=os.getenv(constants.MIDDLEWARE_FACTORY_TRACE_ENV, "false"),
        choice=trace_execution,
    )

    @functools.wraps(decorator)
    def final_decorator(func: Optional[Callable] = None, **kwargs: Any):
        # If called with kwargs return new func with kwargs
        if func is None:
            return functools.partial(final_decorator, **kwargs)

        if not inspect.isfunction(func):
            # @custom_middleware(True) vs @custom_middleware(log_event=True)
            raise MiddlewareInvalidArgumentError(
                f"Only keyword arguments is supported for middlewares: {decorator.__qualname__} received {func}",  # type: ignore # noqa: E501
            )

        @functools.wraps(func)
        def wrapper(event, context, **handler_kwargs):
            try:
                # Defer execution so it can optionally run inside a subsegment.
                middleware = functools.partial(decorator, func, event, context, **kwargs, **handler_kwargs)
                if trace_execution:
                    tracer = Tracer(auto_patch=False)
                    with tracer.provider.in_subsegment(name=f"## {decorator.__qualname__}"):
                        response = middleware()
                else:
                    response = middleware()
                return response
            except Exception:
                logger.exception(f"Caught exception in {decorator.__qualname__}")
                raise

        return wrapper

    return final_decorator
5,120 | from __future__ import annotations
import os
from aws_lambda_powertools import Logger, Tracer
from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
from aws_lambda_powertools.utilities.typing import LambdaContext
KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "")
logger = Logger()
class AWSEncryptionSDKProvider(BaseProvider):
    """
    The AWSEncryptionSDKProvider is used as a provider for the DataMasking class.
    Usage
    -------
    ```
    from aws_lambda_powertools.utilities.data_masking import DataMasking
    from aws_lambda_powertools.utilities.data_masking.providers.kms.aws_encryption_sdk import (
        AWSEncryptionSDKProvider,
    )
    def lambda_handler(event, context):
        provider = AWSEncryptionSDKProvider(["arn:aws:kms:us-east-1:0123456789012:key/key-id"])
        data_masker = DataMasking(provider=provider)
        data = {
            "project": "powertools",
            "sensitive": "password"
        }
        encrypted = data_masker.encrypt(data)
        return encrypted
    ```
    """

    def __init__(
        self,
        keys: List[str],
        key_provider=None,
        local_cache_capacity: int = CACHE_CAPACITY,
        max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS,
        max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED,
        max_bytes_encrypted: int = MAX_BYTES_ENCRYPTED,
        json_serializer: Callable[..., str] = functools.partial(json.dumps, ensure_ascii=False),
        json_deserializer: Callable[[str], Any] = json.loads,
    ):
        # All key handling is delegated to the (injectable) key provider;
        # passing ``key_provider`` explicitly is mainly useful for testing.
        super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer)
        self._key_provider = key_provider or KMSKeyProvider(
            keys=keys,
            local_cache_capacity=local_cache_capacity,
            max_cache_age_seconds=max_cache_age_seconds,
            max_messages_encrypted=max_messages_encrypted,
            max_bytes_encrypted=max_bytes_encrypted,
            json_serializer=json_serializer,
            json_deserializer=json_deserializer,
        )

    def encrypt(self, data: Any, provider_options: dict | None = None, **encryption_context: str) -> str:
        # Thin pass-through to the key provider's encrypt implementation.
        return self._key_provider.encrypt(data=data, provider_options=provider_options, **encryption_context)

    def decrypt(self, data: str, provider_options: dict | None = None, **encryption_context: str) -> Any:
        # Thin pass-through to the key provider's decrypt implementation.
        return self._key_provider.decrypt(data=data, provider_options=provider_options, **encryption_context)
def lambda_handler(event: dict, context: LambdaContext) -> dict:
    """Round-trip the request body through KMS-backed encrypt/decrypt."""
    logger.info("Hello world function - HTTP 200")
    data = event["body"]
    data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]))
    encrypted = data_masker.encrypt(data)
    decrypted = data_masker.decrypt(encrypted)
    return {"Decrypted_json": decrypted}
5,121 | from __future__ import annotations
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.typing import LambdaContext
logger = Logger()
data_masker = DataMasking()
def lambda_handler(event: dict, context: LambdaContext) -> dict:
    """Erase selected sensitive fields from the request body."""
    payload: dict = event.get("body", {})
    logger.info("Erasing fields email, address.street, and company_address")
    return data_masker.erase(payload, fields=["email", "address.street", "company_address"])  # (1)!
5,122 | from __future__ import annotations
import os
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import (
AWSEncryptionSDKProvider,
)
from aws_lambda_powertools.utilities.typing import LambdaContext
data_masker = DataMasking(provider=encryption_provider)
logger = Logger()
def lambda_handler(event: dict, context: LambdaContext) -> dict:
    """Encrypt the whole request body using the module-level data masker."""
    data: dict = event.get("body", {})
    logger.info("Encrypting the whole object")
    encrypted = data_masker.encrypt(data)
    return {"body": encrypted}
5,123 | from __future__ import annotations
import os
from aws_encryption_sdk.identifiers import Algorithm
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
from aws_lambda_powertools.utilities.typing import LambdaContext
data_masker = DataMasking(provider=encryption_provider)
logger = Logger()
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Encrypt the request body with a non-default committing algorithm suite."""
    data: dict = event.get("body", {})
    logger.info("Encrypting whole object with a different algorithm")
    # Override the Encryption SDK's default algorithm via provider options.
    provider_options = {"algorithm": Algorithm.AES_256_GCM_HKDF_SHA512_COMMIT_KEY}
    encrypted = data_masker.encrypt(
        data,
        provider_options=provider_options,
    )
    return encrypted
5,124 | from __future__ import annotations
import os
import ujson
from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import (
AWSEncryptionSDKProvider,
)
from aws_lambda_powertools.utilities.typing import LambdaContext
data_masker = DataMasking(provider=encryption_provider)
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Encrypt the request body and return the ciphertext string."""
    payload: dict = event.get("body", {})
    return data_masker.encrypt(payload)
5,126 | from __future__ import annotations
import os
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
from aws_lambda_powertools.utilities.typing import LambdaContext
data_masker = DataMasking(provider=encryption_provider)
logger = Logger()
def lambda_handler(event: dict, context: LambdaContext) -> dict:
    """Decrypt the request body, supplying the bound encryption context."""
    data = event.get("body", {})
    logger.info("Decrypting whole object")
    # The encryption context must match the one used at encrypt time.
    decrypted: dict = data_masker.decrypt(
        data,
        data_classification="confidential",  # (1)!
        data_type="customer-data",
        tenant_id="a06bf973-0734-4b53-9072-39d7ac5b2cba",
    )
    return decrypted
5,127 | from __future__ import annotations
import os
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
from aws_lambda_powertools.utilities.typing import LambdaContext
data_masker = DataMasking(provider=encryption_provider)
logger = Logger()
def lambda_handler(event: dict, context: LambdaContext) -> dict:
    """Decrypt the whole request body with the module-level data masker."""
    data: dict = event.get("body", {})
    logger.info("Decrypting whole object")
    decrypted = data_masker.decrypt(data)
    return decrypted
5,129 | from __future__ import annotations
import os
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
from aws_lambda_powertools.utilities.typing import LambdaContext
data_masker = DataMasking(provider=encryption_provider)
logger = Logger()
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Encrypt the request body, attaching an encryption context."""
    data = event.get("body", {})
    logger.info("Encrypting whole object")
    encrypted: str = data_masker.encrypt(
        data,
        data_classification="confidential",  # (1)!
        data_type="customer-data",
        tenant_id="a06bf973-0734-4b53-9072-39d7ac5b2cba",
    )
    return encrypted
5,130 | from time import sleep
import requests
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.typing import LambdaContext
logger = Logger()
def lambda_handler(event, context: LambdaContext) -> dict:
    """Scrape an endpoint repeatedly, stopping shortly before Lambda timeout."""
    limit_execution: int = 1000  # milliseconds
    # scrape website and exit before lambda timeout
    while context.get_remaining_time_in_millis() > limit_execution:
        comments: requests.Response = requests.get("https://jsonplaceholder.typicode.com/comments")
        # add logic here and save the results of the request to an S3 bucket, for example.
        logger.info(
            {
                "operation": "scrape_website",
                "request_id": context.aws_request_id,
                "remaining_time": context.get_remaining_time_in_millis(),
                "comments": comments.json()[:2],
            },
        )
        sleep(1)
    return {"message": "Success"}
5,131 | from aws_lambda_powertools.utilities.typing import LambdaContext
def handler(event: dict, context: LambdaContext) -> dict:
    """Echo the received event straight back to the caller."""
    response: dict = event  # placeholder: insert business logic here
    return response
5,132 | import os
from aws_lambda_powertools import single_metric
from aws_lambda_powertools.metrics import MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
STAGE = os.getenv("STAGE", "dev")
def lambda_handler(event: dict, context: LambdaContext):
    """Emit one metric with default dimensions plus a per-call dimension."""
    with single_metric(
        name="RecordsCount",
        unit=MetricUnit.Count,
        value=10,
        default_dimensions={"environment": STAGE},
    ) as metric:
        metric.add_dimension(name="TableName", value="Users")
5,133 | from uuid import uuid4
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
metrics = Metrics()
def lambda_handler(event: dict, context: LambdaContext):
    """Record a booking metric and attach a fresh correlation id as metadata."""
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
    metrics.add_metadata(key="booking_id", value=f"{uuid4()}")
5,134 | from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricResolution, MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
metrics = Metrics()
def lambda_handler(event: dict, context: LambdaContext):
    """Record a booking metric at high (one-second) resolution."""
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1, resolution=MetricResolution.High)
5,135 | import os
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
metrics = Metrics()
metrics.set_default_dimensions(environment=STAGE, another="one")
def lambda_handler(event: dict, context: LambdaContext):
    """Record two turbine-read samples under the shared default dimensions."""
    metrics.add_metric(name="TurbineReads", unit=MetricUnit.Count, value=1)
    metrics.add_metric(name="TurbineReads", unit=MetricUnit.Count, value=8)
5,136 | import os
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
STAGE = os.getenv("STAGE", "dev")
metrics = Metrics()
def lambda_handler(event: dict, context: LambdaContext):
    """Record a booking metric tagged with the deployment environment."""
    metrics.add_dimension(name="environment", value=STAGE)
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
5,137 | from aws_lambda_powertools.metrics import EphemeralMetrics, MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
metrics = EphemeralMetrics()
def lambda_handler(event: dict, context: LambdaContext):
    """Record a booking metric using request-scoped (ephemeral) metrics."""
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
5,138 | import json
from dataclasses import dataclass
import assert_multiple_emf_blobs_module
import pytest
def lambda_context():
    """Return a minimal stand-in for the AWS Lambda context object."""

    @dataclass
    class LambdaContext:
        # Fixed values the test suite expects from a real context.
        function_name: str = "test"
        memory_limit_in_mb: int = 128
        invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test"
        aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72"

    fake_context = LambdaContext()
    return fake_context
5,139 | import json
from dataclasses import dataclass
import assert_multiple_emf_blobs_module
import pytest
def capture_metrics_output_multiple_emf_objects(capsys):
    """Parse each non-empty stdout line captured by pytest's capsys as JSON."""
    emf_blobs = []
    for raw_line in capsys.readouterr().out.split("\n"):
        if raw_line:
            emf_blobs.append(json.loads(raw_line.strip()))
    return emf_blobs
def test_log_metrics(capsys, lambda_context):
    """Assert cold-start and custom metrics flush as two separate EMF blobs."""
    assert_multiple_emf_blobs_module.lambda_handler({}, lambda_context)
    cold_start_blob, custom_metrics_blob = capture_metrics_output_multiple_emf_objects(capsys)
    # Since `capture_cold_start_metric` is used
    # we should have one JSON blob for cold start metric and one for the application
    assert cold_start_blob["ColdStart"] == [1.0]
    assert cold_start_blob["function_name"] == "test"
    assert "SuccessfulBooking" in custom_metrics_blob
5,140 | from aws_lambda_powertools import Metrics
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext):
    """Placeholder handler with no business logic yet."""
    ...
5,141 | import os
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
metrics = Metrics()
def lambda_handler(event: dict, context: LambdaContext):
    """Record two turbine-read samples in the same invocation."""
    metrics.add_metric(name="TurbineReads", unit=MetricUnit.Count, value=1)
    metrics.add_metric(name="TurbineReads", unit=MetricUnit.Count, value=8)
5,142 | import json
import add_metrics
def test_log_metrics(capsys):
    """Verify the handler flushes a valid EMF blob containing the metric."""
    add_metrics.lambda_handler({}, {})
    log = capsys.readouterr().out.strip()  # remove any extra line
    metrics_output = json.loads(log)  # deserialize JSON str
    # THEN we should have no exceptions
    # and a valid EMF object should be flushed correctly
    assert "SuccessfulBooking" in log  # basic string assertion in JSON str
    assert "SuccessfulBooking" in metrics_output["_aws"]["CloudWatchMetrics"][0]["Metrics"][0]["Name"]
5,143 | from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
metrics = Metrics()
def book_flight(flight_id: str, **kwargs):
    """Book a flight and record a success metric."""
    # logic to book flight
    ...
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)

def lambda_handler(event: dict, context: LambdaContext):
    """Book the requested flight, flushing metrics even if booking raises."""
    try:
        book_flight(flight_id=event.get("flight_id", ""))
    finally:
        # flush manually so buffered metrics are emitted even on failure
        metrics.flush_metrics()
5,144 | from aws_lambda_powertools.metrics import Metrics
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext):
    """Handler that intentionally records no metrics."""
    # no metrics being created will now raise SchemaValidationError
    ...
5,145 | import os
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
STAGE = os.getenv("STAGE", "dev")
metrics = Metrics()
def lambda_handler(event: dict, context: LambdaContext):
    """Record a turbine-read metric with an environment dimension."""
    metrics.add_dimension(name="environment", value=STAGE)
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
metrics.add_metric(name="TurbineReads", unit=MetricUnit.Count, value=8) | null |
5,146 | import os
from aws_lambda_powertools import single_metric
from aws_lambda_powertools.metrics import Metrics, MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
metrics = Metrics()
metrics.set_default_dimensions(environment=STAGE)
def lambda_handler(event: dict, context: LambdaContext):
    """Emit a single metric reusing the shared default dimensions."""
    with single_metric(
        name="RecordsCount",
        unit=MetricUnit.Count,
        value=10,
        default_dimensions=metrics.default_dimensions,
    ) as metric:
        metric.add_dimension(name="TableName", value="Users")
5,147 | import os
from aws_lambda_powertools import single_metric
from aws_lambda_powertools.metrics import MetricUnit
from aws_lambda_powertools.utilities.typing import LambdaContext
STAGE = os.getenv("STAGE", "dev")
def lambda_handler(event: dict, context: LambdaContext):
    """Emit one isolated metric tagged with the deployment environment."""
    with single_metric(name="MySingleMetric", unit=MetricUnit.Count, value=1) as metric:
        metric.add_dimension(name="environment", value=STAGE)
5,148 | from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
def collect_payment(charge_id: str) -> str:
    """Simulate collecting a payment, annotating the trace with the charge id."""
    # assumes a module-level ``tracer`` instance — not visible in this extract
    tracer.put_annotation(key="PaymentId", value=charge_id)
    return f"dummy payment collected for charge: {charge_id}"

def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Collect a payment for the charge id carried in the event."""
    charge_id = event.get("charge_id", "")
    return collect_payment(charge_id=charge_id)
5,150 | import contextlib
from collections.abc import Generator
from aws_lambda_powertools import Logger, Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
logger = Logger()
def collect_payment(charge_id: str) -> Generator[str, None, None]:
    """Yield a dummy receipt; annotate the trace on exit.

    NOTE(review): relies on @contextlib.contextmanager decoration (contextlib
    is imported above, the decorator appears stripped in this extract) —
    confirm against the original example.
    """
    try:
        yield f"dummy payment collected for charge: {charge_id}"
    finally:
        # assumes a module-level ``tracer`` instance — not visible in this extract
        tracer.put_annotation(key="PaymentId", value=charge_id)

def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Collect a payment inside a context manager and log the receipt."""
    charge_id = event.get("charge_id", "")
    with collect_payment(charge_id=charge_id) as receipt_id:
        logger.info(f"Processing payment collection for charge {charge_id} with receipt {receipt_id}")
        return receipt_id
5,151 | from tracer_reuse_module import collect_payment
from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
def collect_payment(charge_id: str) -> str:
    # NOTE(review): the body of this function was lost during extraction,
    # leaving a syntactically invalid ``def`` with no suite. Restored to the
    # dummy implementation used by the sibling tracer examples — confirm
    # against the original module (it is also imported above from
    # tracer_reuse_module, which this definition shadows).
    return f"dummy payment collected for charge: {charge_id}"

def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Collect a payment for the charge id carried in the event."""
    charge_id = event.get("charge_id", "")
    return collect_payment(charge_id=charge_id)
5,152 | from aws_lambda_powertools import Logger, Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
def collect_payment(charge_id: str) -> str:
    """Simulate a payment collection; annotates the trace and logs at debug."""
    # assumes module-level ``tracer``/``logger`` instances — not visible here
    tracer.put_annotation(key="PaymentId", value=charge_id)
    logger.debug("Returning sensitive information....")
    return f"dummy payment collected for charge: {charge_id}"

def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Collect a payment for the charge id carried in the event."""
    charge_id = event.get("charge_id", "")
    return collect_payment(charge_id=charge_id)
5,153 | from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
tracer = Tracer()
def collect_payment(charge_id: str) -> str:
    """Simulate collecting a payment and return a human-readable receipt."""
    receipt = "dummy payment collected for charge: {}".format(charge_id)
    return receipt
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Collect a payment and record the payment context as trace metadata."""
    payment_context = {
        "charge_id": event.get("charge_id", ""),
        "merchant_id": event.get("merchant_id", ""),
        "request_id": context.aws_request_id,
    }
    payment_context["receipt_id"] = collect_payment(charge_id=payment_context["charge_id"])
    tracer.put_metadata(key="payment_response", value=payment_context)
    return payment_context["receipt_id"]
5,154 | import asyncio
from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
async def collect_payment(charge_id: str) -> str:
    """Run two async subtasks concurrently, then return a dummy receipt."""
    # assumes ``another_async_task``/``another_async_task_2`` are defined
    # elsewhere in the original module — not visible in this extract
    await asyncio.gather(another_async_task(), another_async_task_2())
    return f"dummy payment collected for charge: {charge_id}"

def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Drive the async payment collection to completion."""
    charge_id = event.get("charge_id", "")
    return asyncio.run(collect_payment(charge_id=charge_id))
5,155 | from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
tracer = Tracer()
def collect_payment(charge_id: str) -> str:
    """Simulate a payment collection and return the receipt message."""
    return "dummy payment collected for charge: {}".format(charge_id)
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Collect a payment inside a manually created X-Ray subsegment."""
    charge_id = event.get("charge_id", "")
    with tracer.provider.in_subsegment("## collect_payment") as subsegment:
        subsegment.put_annotation(key="PaymentId", value=charge_id)
        ret = collect_payment(charge_id=charge_id)
        subsegment.put_metadata(key="payment_response", value=ret)
        return ret
5,156 | import requests
from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Call an external endpoint and return its JSON payload."""
    ret = requests.get("https://httpbin.org/get")
    ret.raise_for_status()
    return ret.json()
5,157 | from collections.abc import Generator
from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
def collect_payment(charge_id: str) -> Generator[str, None, None]:
    """Yield a single dummy receipt string for the given charge id."""
    receipt = "dummy payment collected for charge: {}".format(charge_id)
    yield receipt
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Pull the first (and only) receipt from the payment generator."""
    charge_id = event.get("charge_id", "")
    return next(collect_payment(charge_id=charge_id))
5,158 | import os
import boto3
from botocore.response import StreamingBody
from aws_lambda_powertools import Logger, Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
def fetch_payment_report(payment_id: str) -> StreamingBody:
    """Fetch a payment report object from S3 and return its streaming body.

    Bug fix: boto3's ``get_object`` response keys are capitalized — the
    payload lives under ``"Body"``, not ``"body"``, so the original lookup
    raised KeyError on every call.
    """
    ret = s3.get_object(Bucket=BUCKET, Key=f"{REPORT_KEY}/{payment_id}")
    logger.debug("Returning streaming body from S3 object....")
    return ret["Body"]
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Fetch the payment report for the given id and return it as text."""
    payment_id = event.get("payment_id", "")
    report = fetch_payment_report(payment_id=payment_id)
    return report.read().decode()
5,159 | from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
def collect_payment(charge_id: str) -> str:
    """Simulate a payment collection and return the receipt message."""
    message = "dummy payment collected for charge: {}".format(charge_id)
    return message
def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Collect a payment for the charge id carried in the event."""
    charge_id = event.get("charge_id", "")
    return collect_payment(charge_id=charge_id)
5,160 | import asyncio
from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
async def collect_payment(charge_id: str) -> str:
    # NOTE(review): the original coroutine body was lost during extraction
    # (a bare ``async def`` header with no suite is a syntax error). Restored
    # to the dummy implementation used by the sibling examples — confirm
    # against the upstream example.
    return f"dummy payment collected for charge: {charge_id}"

def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Run the async payment collection to completion and return the receipt."""
    charge_id = event.get("charge_id", "")
    return asyncio.run(collect_payment(charge_id=charge_id))
5,161 | import asyncio
import os
import aiohttp
from aws_lambda_powertools import Tracer
from aws_lambda_powertools.tracing import aiohttp_trace_config
from aws_lambda_powertools.utilities.typing import LambdaContext
async def collect_payment(charge_id: str) -> dict:
    """Call the collect endpoint over aiohttp with Powertools tracing wired in."""
    # ``ENDPOINT`` is defined at module level in the original example (stripped here)
    async with aiohttp.ClientSession(trace_configs=[aiohttp_trace_config()]) as session:
        async with session.get(f"{ENDPOINT}/collect") as resp:
            return await resp.json()

def lambda_handler(event: dict, context: LambdaContext) -> dict:
    """Drive the async payment collection to completion."""
    charge_id = event.get("charge_id", "")
    return asyncio.run(collect_payment(charge_id=charge_id))
5,162 | import os
import requests
from aws_lambda_powertools import Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
def collect_payment(charge_id: str) -> dict:
    """POST the charge to the collect endpoint; wrap HTTP failures.

    # ``ENDPOINT``/``PaymentError`` come from the original module (stripped here)
    """
    try:
        ret = requests.post(url=f"{ENDPOINT}/collect", data={"charge_id": charge_id})
        ret.raise_for_status()
        return ret.json()
    except requests.HTTPError as e:
        # Re-raise as a domain error, chaining the original HTTP failure.
        raise PaymentError(f"Unable to collect payment for charge {charge_id}") from e

def lambda_handler(event: dict, context: LambdaContext) -> str:
    """Collect a payment and return the receipt id (empty string if absent)."""
    charge_id = event.get("charge_id", "")
    ret = collect_payment(charge_id=charge_id)
    return ret.get("receipt_id", "")
5,164 | from typing import Any
import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext):
    """Fetch an AppConfig value (bypassing cache) and use it as an endpoint."""
    try:
        # Retrieve a single parameter
        endpoint_comments: Any = parameters.get_app_config(
            name="config",
            environment="dev",
            application="comments",
            force_fetch=True,
        )
        # the value of this parameter is https://jsonplaceholder.typicode.com/comments/
        comments: requests.Response = requests.get(endpoint_comments)
        return {"comments": comments.json()[:10], "statusCode": 200}
    except parameters.exceptions.GetParameterError as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
5,165 | import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext):
    """Fetch all SSM parameters under /dev and call the comments endpoint."""
    try:
        # Retrieve all parameters within a path e.g., /dev
        # Say, you had two parameters under `/dev`: /dev/config, /dev/webhook/config
        all_parameters: dict = parameters.get_parameters("/dev", max_age=20)
        endpoint_comments = None
        # We strip the path prefix name for readability and memory usage in deeply nested paths
        # all_parameters would then look like:
        ## all_parameters["config"] = value # noqa: ERA001
        ## all_parameters["webhook/config"] = value # noqa: ERA001
        for parameter, value in all_parameters.items():
            if parameter == "endpoint_comments":
                endpoint_comments = value
        if endpoint_comments is None:
            return {"comments": None}
        # the value of parameter is https://jsonplaceholder.typicode.com/comments/
        comments: requests.Response = requests.get(endpoint_comments)
        return {"comments": comments.json()[:10]}
    except parameters.exceptions.GetParameterError as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
5,166 | from typing import Any
import hvac
import requests
from custom_provider_vault import VaultProvider
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.typing import LambdaContext
logger = Logger()
vault_provider = VaultProvider(vault_url="http://192.168.68.105:8200/", vault_token="YOUR_TOKEN")
def lambda_handler(event: dict, context: LambdaContext):
    """Fetch parameters from a custom HashiCorp Vault provider and call the endpoint."""
    try:
        # Retrieve a single parameter
        endpoint_comments: Any = vault_provider.get("comments_endpoint")
        # you can get all parameters using get_multiple and specifying vault mount point
        # # for testing purposes we will not use it
        all_parameters: Any = vault_provider.get_multiple("/")
        logger.info(all_parameters)
        # the value of this parameter is https://jsonplaceholder.typicode.com/comments/
        comments: requests.Response = requests.get(endpoint_comments["url"])
        return {"comments": comments.json()[:10], "statusCode": 200}
    except hvac.exceptions.InvalidPath as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
    # general exception
    except Exception as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
5,167 | from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
ssm_provider = parameters.SSMProvider()
def lambda_handler(event: dict, context: LambdaContext):
    """Fetch all parameters under /param, auto-transforming by name suffix."""
    values = ssm_provider.get_multiple("/param", transform="auto")
    return values
5,168 | from typing import Any
import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext):
    """Fetch the comments endpoint from AppConfig and return the first ten comments."""
    try:
        # Single configuration value; resolves to
        # https://jsonplaceholder.typicode.com/comments/
        endpoint_url: Any = parameters.get_app_config(
            name="config",
            environment="dev",
            application="comments",
        )
        response: requests.Response = requests.get(endpoint_url)
        return {"comments": response.json()[:10], "statusCode": 200}
    except parameters.exceptions.GetParameterError as error:
        # Configuration could not be retrieved; report instead of raising.
        return {"comments": None, "message": str(error), "statusCode": 400}
5,169 | from typing import Any
import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
ssm_provider = parameters.SSMProvider()
class ConfigNotFound(Exception):
    """Raised when the expected URL query parameter is absent under /config."""


def lambda_handler(event: dict, context: LambdaContext):
    """Build the comments URL from /config parameters and return the comments.

    Raises ConfigNotFound when no parameter name containing "query" is found
    (expected here, since the lookup is deliberately non-recursive).
    """
    try:
        # Retrieve multiple parameters from a path prefix
        # /config = root
        # /config/endpoint = url
        # /config/endpoint/query = querystring
        all_parameters: Any = ssm_provider.get_multiple("/config", recursive=False)
        endpoint_comments = "https://jsonplaceholder.typicode.com/comments/"

        for parameter, value in all_parameters.items():
            # query parameter is used to query endpoint
            if "query" in parameter:
                endpoint_comments = f"{endpoint_comments}{value}"
                break
        else:
            # scheme config was not found because get_multiple is not recursive
            raise ConfigNotFound("URL query parameter was not found")

        comments: requests.Response = requests.get(endpoint_comments)
        return {"comments": comments.json()}
    except parameters.exceptions.GetParameterError as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
5,170 | from typing import Any
import requests
from botocore.config import Config
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
ssm_provider = parameters.SSMProvider(config=config)
def lambda_handler(event: dict, context: LambdaContext):
    """Look up the comments endpoint in SSM and return the first ten comments."""
    try:
        # Resolves to https://jsonplaceholder.typicode.com/comments/
        endpoint_url: Any = ssm_provider.get("/lambda-powertools/endpoint_comments")
        response: requests.Response = requests.get(endpoint_url)
        return {"comments": response.json()[:10], "statusCode": 200}
    except parameters.exceptions.GetParameterError as error:
        # Parameter retrieval failed; report instead of raising.
        return {"comments": None, "message": str(error), "statusCode": 400}
5,171 | from typing import Any
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
ssm_provider = parameters.SSMProvider()
def lambda_handler(event: dict, context: LambdaContext):
    """Demonstrate JSON transform of /param values, then the strict variant.

    By default a value that fails to parse is returned as None; with
    raise_on_transform_error=True a TransformParameterError is raised instead.
    """
    # This will display:
    # /param/a: [some value]
    # /param/b: [some value]
    # /param/c: None
    decoded: Any = ssm_provider.get_multiple("/param", transform="json")
    for name, parsed in decoded.items():
        print(f"{name}: {parsed}")

    try:
        # Strict mode: a malformed value now raises instead of yielding None.
        decoded = ssm_provider.get_multiple("/param", transform="json", raise_on_transform_error=True)
    except parameters.exceptions.TransformParameterError:
        ...
5,172 | from typing import Any
import requests
from custom_provider_s3 import S3Provider
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.typing import LambdaContext
logger = Logger()
s3_provider = S3Provider(bucket_name="bucket_name")
def lambda_handler(event: dict, context: LambdaContext):
    """Resolve the comments API URL from S3 and return the first ten comments."""
    try:
        # Single value lookup by key.
        endpoint_url: Any = s3_provider.get("comments_endpoint")
        # Demonstrate bulk retrieval by bucket prefix; logged for inspection only.
        everything: Any = s3_provider.get_multiple("/")
        logger.info(everything)
        # The stored value is https://jsonplaceholder.typicode.com/comments/
        response: requests.Response = requests.get(endpoint_url)
        return {"comments": response.json()[:10], "statusCode": 200}
    except Exception as error:
        # Catch-all so the function always returns a well-formed payload.
        return {"comments": None, "message": str(error), "statusCode": 400}
5,173 | from typing import Any
import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext):
    """Fetch the comments endpoint (cached for 20s) and return ten comments."""
    try:
        # max_age=20 keeps the value in the local cache for 20 seconds,
        # avoiding an SSM round-trip on warm invocations.
        endpoint_url: Any = parameters.get_parameter("/lambda-powertools/endpoint_comments", max_age=20)
        # Resolves to https://jsonplaceholder.typicode.com/comments/
        response: requests.Response = requests.get(endpoint_url)
        return {"comments": response.json()[:10], "statusCode": 200}
    except parameters.exceptions.GetParameterError as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
5,174 | from typing import Any
from uuid import uuid4
import boto3
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
ec2 = boto3.resource("ec2")
ssm_provider = parameters.SSMProvider()
def lambda_handler(event: dict, context: LambdaContext):
    """Import a key pair from a SecureString parameter and launch one EC2 instance.

    Returns a success/failure message dict; never raises on a missing parameter.
    """
    try:
        # SecureString parameter: decrypt=True returns the plaintext PEM.
        pem_material: Any = ssm_provider.get("/lambda-powertools/ec2_pem", decrypt=True)

        # Unique name so repeated invocations do not collide.
        key_pair_name = f"kp_{uuid4()}"
        ec2.import_key_pair(KeyName=key_pair_name, PublicKeyMaterial=pem_material)

        ec2.create_instances(
            ImageId="ami-026b57f3c383c2eec",
            InstanceType="t2.micro",
            MinCount=1,
            MaxCount=1,
            KeyName=key_pair_name,
        )
        return {"message": "EC2 created", "success": True}
    except parameters.exceptions.GetParameterError as error:
        return {"message": f"Error creating EC2 => {str(error)}", "success": False}
5,175 | from typing import Any
import requests
from botocore.config import Config
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
ssm_provider = parameters.SecretsProvider(config=config)
def lambda_handler(event: dict, context: LambdaContext):
    """Call the comments API using an endpoint from SSM and a key from Secrets Manager."""
    try:
        # Usually an endpoint is not sensitive data, so we store it in SSM Parameters
        endpoint_url: Any = parameters.get_parameter("/lambda-powertools/endpoint_comments")
        # An API-KEY is a sensitive data and should be stored in SecretsManager
        secret_key: Any = ssm_provider.get("/lambda-powertools/api-key")

        request_headers: dict = {"X-API-Key": secret_key}
        response: requests.Response = requests.get(endpoint_url, headers=request_headers)
        return {"comments": response.json()[:10], "statusCode": 200}
    except parameters.exceptions.GetParameterError as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
5,176 | import boto3
from aws_lambda_powertools.utilities import parameters
ssm_provider = parameters.SSMProvider(boto3_session=boto3_session)
def handler(event, context):
    """Return the value of a single SSM parameter via the session-backed provider."""
    return ssm_provider.get("/my/parameter")
5,177 | import boto3
from aws_lambda_powertools.utilities import parameters
ssm_provider = parameters.SSMProvider(boto3_client=boto3_client)
def handler(event, context):
    """Return the value of a single SSM parameter via the client-backed provider."""
    return ssm_provider.get("/my/parameter")
5,178 | from typing import Any
import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
dynamodb_provider = parameters.DynamoDBProvider(table_name="ParameterTable", endpoint_url="http://localhost:8000")
def lambda_handler(event: dict, context: LambdaContext):
    """Resolve the comments endpoint from a DynamoDB table and return ten comments."""
    try:
        # Usually an endpoint is not sensitive data, so we store it in DynamoDB Table
        endpoint_url: Any = dynamodb_provider.get("comments_endpoint")
        response: requests.Response = requests.get(endpoint_url)
        return {"comments": response.json()[:10], "statusCode": 200}
    except Exception as error:
        # Catch-all so the function always returns a well-formed payload.
        return {"comments": None, "message": str(error), "statusCode": 400}
5,179 | from typing import Any
import requests
from botocore.config import Config
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
appconf_provider = parameters.AppConfigProvider(environment="dev", application="comments", config=config)
def lambda_handler(event: dict, context: LambdaContext):
    """Fetch the comments endpoint from the AppConfig provider and return ten comments."""
    try:
        # Resolves to https://jsonplaceholder.typicode.com/comments/
        endpoint_url: Any = appconf_provider.get("config")
        response: requests.Response = requests.get(endpoint_url)
        return {"comments": response.json()[:10], "statusCode": 200}
    except parameters.exceptions.GetParameterError as error:
        # Configuration could not be retrieved; report instead of raising.
        return {"comments": None, "message": str(error), "statusCode": 400}
5,180 | from botocore.config import Config
from aws_lambda_powertools.utilities import parameters
ssm_provider = parameters.SSMProvider(config=boto_config)
def handler(event, context):
    """Return the value of a single SSM parameter via the botocore-configured provider."""
    return ssm_provider.get("/my/parameter")
5,181 | from typing import Any
import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
dynamodb_provider = parameters.DynamoDBProvider(table_name="ParameterTable")
def lambda_handler(event: dict, context: LambdaContext):
    """Read endpoint and limit from DynamoDB and return up to ``limit`` comments.

    Falls back to a deliberately-broken default URL and a limit of 2 when the
    corresponding items are absent; any failure returns a 400-style payload.
    """
    try:
        # Retrieve multiple parameters sharing the same HASH KEY ("config").
        all_parameters: Any = dynamodb_provider.get_multiple("config")
        endpoint_comments = "https://jsonplaceholder.typicode.com/noexists/"
        limit = 2

        for parameter, value in all_parameters.items():
            if parameter == "endpoint_comments":
                endpoint_comments = value
            if parameter == "limit":
                # Stored as a string; used below as a slice bound.
                limit = int(value)

        # the value of parameter is https://jsonplaceholder.typicode.com/comments/
        comments: requests.Response = requests.get(endpoint_comments)
        # Slice to at most `limit` comments (was `[limit]`, which returned a
        # single element and ignored the variable's intent as a count).
        return {"comments": comments.json()[:limit]}
    except Exception as error:
        # Catch-all so the function always returns a well-formed payload.
        return {"comments": None, "message": str(error), "statusCode": 400}
5,182 | from typing import Any
import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext):
    """Call the comments API with an SSM endpoint and a Secrets Manager API key."""
    try:
        # Usually an endpoint is not sensitive data, so we store it in SSM Parameters
        endpoint_url: Any = parameters.get_parameter("/lambda-powertools/endpoint_comments")
        # An API-KEY is a sensitive data and should be stored in SecretsManager
        secret_key: Any = parameters.get_secret("/lambda-powertools/api-key")

        request_headers: dict = {"X-API-Key": secret_key}
        response: requests.Response = requests.get(endpoint_url, headers=request_headers)
        return {"comments": response.json()[:10], "statusCode": 200}
    except parameters.exceptions.GetParameterError as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
5,183 | from typing import Any
import requests
from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
def lambda_handler(event: dict, context: LambdaContext) -> dict:
    """Fetch a JSON-transformed parameter and return the first ten comments."""
    try:
        # transform="json" deserializes the stored string before returning it.
        endpoint_url: Any = parameters.get_parameter("/lambda-powertools/endpoint_comments", transform="json")
        # Resolves to https://jsonplaceholder.typicode.com/comments/
        response: requests.Response = requests.get(endpoint_url)
        return {"comments": response.json()[:10], "statusCode": 200}
    except parameters.exceptions.GetParameterError as error:
        return {"comments": None, "message": str(error), "statusCode": 400}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.