Module aws_lambda_powertools.utilities.parser.models
Expand source code
from aws_lambda_powertools.utilities.parser.compat import disable_pydantic_v2_warning
disable_pydantic_v2_warning()
from .alb import AlbModel, AlbRequestContext, AlbRequestContextData
from .apigw import (
APIGatewayEventAuthorizer,
APIGatewayEventIdentity,
APIGatewayEventRequestContext,
APIGatewayProxyEventModel,
)
from .apigwv2 import (
APIGatewayProxyEventV2Model,
RequestContextV2,
RequestContextV2Authorizer,
RequestContextV2AuthorizerIam,
RequestContextV2AuthorizerIamCognito,
RequestContextV2AuthorizerJwt,
RequestContextV2Http,
)
from .bedrock_agent import (
BedrockAgentEventModel,
BedrockAgentModel,
BedrockAgentPropertyModel,
BedrockAgentRequestBodyModel,
BedrockAgentRequestMediaModel,
)
from .cloudformation_custom_resource import (
CloudFormationCustomResourceBaseModel,
CloudFormationCustomResourceCreateModel,
CloudFormationCustomResourceDeleteModel,
CloudFormationCustomResourceUpdateModel,
)
from .cloudwatch import (
CloudWatchLogsData,
CloudWatchLogsDecode,
CloudWatchLogsLogEvent,
CloudWatchLogsModel,
)
from .dynamodb import (
DynamoDBStreamChangedRecordModel,
DynamoDBStreamModel,
DynamoDBStreamRecordModel,
)
from .event_bridge import EventBridgeModel
from .kafka import (
KafkaBaseEventModel,
KafkaMskEventModel,
KafkaRecordModel,
KafkaSelfManagedEventModel,
)
from .kinesis import (
KinesisDataStreamModel,
KinesisDataStreamRecord,
KinesisDataStreamRecordPayload,
)
from .kinesis_firehose import (
KinesisFirehoseModel,
KinesisFirehoseRecord,
KinesisFirehoseRecordMetadata,
)
from .kinesis_firehose_sqs import KinesisFirehoseSqsModel, KinesisFirehoseSqsRecord
from .lambda_function_url import LambdaFunctionUrlModel
from .s3 import (
S3EventNotificationEventBridgeDetailModel,
S3EventNotificationEventBridgeModel,
S3EventNotificationObjectModel,
S3Model,
S3RecordModel,
)
from .s3_event_notification import (
S3SqsEventNotificationModel,
S3SqsEventNotificationRecordModel,
)
from .s3_object_event import (
S3ObjectConfiguration,
S3ObjectContext,
S3ObjectLambdaEvent,
S3ObjectSessionAttributes,
S3ObjectSessionContext,
S3ObjectSessionIssuer,
S3ObjectUserIdentity,
S3ObjectUserRequest,
)
from .ses import (
SesMail,
SesMailCommonHeaders,
SesMailHeaders,
SesMessage,
SesModel,
SesReceipt,
SesReceiptAction,
SesReceiptVerdict,
SesRecordModel,
)
from .sns import SnsModel, SnsNotificationModel, SnsRecordModel
from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel
from .vpc_lattice import VpcLatticeModel
from .vpc_latticev2 import VpcLatticeV2Model
__all__ = [
"APIGatewayProxyEventV2Model",
"RequestContextV2",
"RequestContextV2Http",
"RequestContextV2Authorizer",
"RequestContextV2AuthorizerJwt",
"RequestContextV2AuthorizerIam",
"RequestContextV2AuthorizerIamCognito",
"CloudWatchLogsData",
"CloudWatchLogsDecode",
"CloudWatchLogsLogEvent",
"CloudWatchLogsModel",
"AlbModel",
"AlbRequestContext",
"AlbRequestContextData",
"DynamoDBStreamModel",
"EventBridgeModel",
"DynamoDBStreamChangedRecordModel",
"DynamoDBStreamRecordModel",
"DynamoDBStreamChangedRecordModel",
"KinesisDataStreamModel",
"KinesisDataStreamRecord",
"KinesisDataStreamRecordPayload",
"KinesisFirehoseModel",
"KinesisFirehoseRecord",
"KinesisFirehoseRecordMetadata",
"LambdaFunctionUrlModel",
"S3Model",
"S3RecordModel",
"S3ObjectLambdaEvent",
"S3ObjectUserIdentity",
"S3ObjectSessionContext",
"S3ObjectSessionAttributes",
"S3ObjectSessionIssuer",
"S3ObjectUserRequest",
"S3ObjectConfiguration",
"S3ObjectContext",
"S3EventNotificationObjectModel",
"S3EventNotificationEventBridgeModel",
"S3EventNotificationEventBridgeDetailModel",
"SesModel",
"SesRecordModel",
"SesMessage",
"SesMail",
"SesMailCommonHeaders",
"SesMailHeaders",
"SesReceipt",
"SesReceiptAction",
"SesReceiptVerdict",
"SnsModel",
"SnsNotificationModel",
"SnsRecordModel",
"SqsModel",
"SqsRecordModel",
"SqsMsgAttributeModel",
"SqsAttributesModel",
"S3SqsEventNotificationModel",
"S3SqsEventNotificationRecordModel",
"APIGatewayProxyEventModel",
"APIGatewayEventRequestContext",
"APIGatewayEventAuthorizer",
"APIGatewayEventIdentity",
"KafkaSelfManagedEventModel",
"KafkaRecordModel",
"KafkaMskEventModel",
"KafkaBaseEventModel",
"KinesisFirehoseSqsModel",
"KinesisFirehoseSqsRecord",
"CloudFormationCustomResourceUpdateModel",
"CloudFormationCustomResourceDeleteModel",
"CloudFormationCustomResourceCreateModel",
"CloudFormationCustomResourceBaseModel",
"VpcLatticeModel",
"VpcLatticeV2Model",
"BedrockAgentModel",
"BedrockAgentPropertyModel",
"BedrockAgentEventModel",
"BedrockAgentRequestBodyModel",
"BedrockAgentRequestMediaModel",
]
Sub-modules
aws_lambda_powertools.utilities.parser.models.alb
aws_lambda_powertools.utilities.parser.models.apigw
aws_lambda_powertools.utilities.parser.models.apigwv2
aws_lambda_powertools.utilities.parser.models.bedrock_agent
aws_lambda_powertools.utilities.parser.models.cloudformation_custom_resource
aws_lambda_powertools.utilities.parser.models.cloudwatch
aws_lambda_powertools.utilities.parser.models.dynamodb
aws_lambda_powertools.utilities.parser.models.event_bridge
aws_lambda_powertools.utilities.parser.models.kafka
aws_lambda_powertools.utilities.parser.models.kinesis
aws_lambda_powertools.utilities.parser.models.kinesis_firehose
aws_lambda_powertools.utilities.parser.models.kinesis_firehose_sqs
aws_lambda_powertools.utilities.parser.models.lambda_function_url
aws_lambda_powertools.utilities.parser.models.s3
aws_lambda_powertools.utilities.parser.models.s3_event_notification
aws_lambda_powertools.utilities.parser.models.s3_object_event
aws_lambda_powertools.utilities.parser.models.ses
aws_lambda_powertools.utilities.parser.models.sns
aws_lambda_powertools.utilities.parser.models.sqs
aws_lambda_powertools.utilities.parser.models.vpc_lattice
aws_lambda_powertools.utilities.parser.models.vpc_latticev2
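These models plug into the parser utility's parse function and event_parser decorator. A minimal usage sketch, assuming a standard SQS trigger payload:

from aws_lambda_powertools.utilities.parser import event_parser, parse
from aws_lambda_powertools.utilities.parser.models import SqsModel
from aws_lambda_powertools.utilities.typing import LambdaContext

def handler(event: dict, context: LambdaContext):
    # Validate the raw event against SqsModel; raises ValidationError on mismatch
    parsed: SqsModel = parse(event=event, model=SqsModel)
    return [record.messageId for record in parsed.Records]

# Equivalent decorator form: the handler receives an already-parsed model
@event_parser(model=SqsModel)
def decorated_handler(event: SqsModel, context: LambdaContext):
    return [record.messageId for record in event.Records]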
Classes
class APIGatewayEventAuthorizer (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayEventAuthorizer(BaseModel):
    claims: Optional[Dict[str, Any]] = None
    scopes: Optional[List[str]] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var claims : Optional[Dict[str, Any]]
var scopes : Optional[List[str]]
class APIGatewayEventIdentity (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayEventIdentity(BaseModel):
    accessKey: Optional[str] = None
    accountId: Optional[str] = None
    apiKey: Optional[str] = None
    apiKeyId: Optional[str] = None
    caller: Optional[str] = None
    cognitoAuthenticationProvider: Optional[str] = None
    cognitoAuthenticationType: Optional[str] = None
    cognitoIdentityId: Optional[str] = None
    cognitoIdentityPoolId: Optional[str] = None
    principalOrgId: Optional[str] = None
    # see #1562, temp workaround until API Gateway fixes it the Test button payload
    # removing it will not be considered a regression in the future
    sourceIp: Union[IPvAnyNetwork, Literal["test-invoke-source-ip"]]
    user: Optional[str] = None
    userAgent: Optional[str] = None
    userArn: Optional[str] = None
    clientCert: Optional[ApiGatewayUserCert] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accessKey : Optional[str]
var accountId : Optional[str]
var apiKey : Optional[str]
var apiKeyId : Optional[str]
var caller : Optional[str]
var clientCert : Optional[ApiGatewayUserCert]
var cognitoAuthenticationProvider : Optional[str]
var cognitoAuthenticationType : Optional[str]
var cognitoIdentityId : Optional[str]
var cognitoIdentityPoolId : Optional[str]
var principalOrgId : Optional[str]
var sourceIp : Union[pydantic.networks.IPvAnyNetwork, Literal['test-invoke-source-ip']]
var user : Optional[str]
var userAgent : Optional[str]
var userArn : Optional[str]
class APIGatewayEventRequestContext (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayEventRequestContext(BaseModel):
    accountId: str
    apiId: str
    authorizer: Optional[APIGatewayEventAuthorizer] = None
    stage: str
    protocol: str
    identity: APIGatewayEventIdentity
    requestId: str
    requestTime: str
    requestTimeEpoch: datetime
    resourceId: Optional[str] = None
    resourcePath: str
    domainName: Optional[str] = None
    domainPrefix: Optional[str] = None
    extendedRequestId: Optional[str] = None
    httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"]
    path: str
    connectedAt: Optional[datetime] = None
    connectionId: Optional[str] = None
    eventType: Optional[Literal["CONNECT", "MESSAGE", "DISCONNECT"]] = None
    messageDirection: Optional[str] = None
    messageId: Optional[str] = None
    routeKey: Optional[str] = None
    operationName: Optional[str] = None

    @root_validator(allow_reuse=True, skip_on_failure=True)
    def check_message_id(cls, values):
        message_id, event_type = values.get("messageId"), values.get("eventType")
        if message_id is not None and event_type != "MESSAGE":
            raise ValueError("messageId is available only when the `eventType` is `MESSAGE`")
        return values
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accountId : str
var apiId : str
var connectedAt : Optional[datetime.datetime]
var connectionId : Optional[str]
var domainName : Optional[str]
var domainPrefix : Optional[str]
var eventType : Optional[Literal['CONNECT', 'MESSAGE', 'DISCONNECT']]
var extendedRequestId : Optional[str]
var httpMethod : Literal['DELETE', 'GET', 'HEAD', 'OPTIONS', 'PATCH', 'POST', 'PUT']
var identity : APIGatewayEventIdentity
var messageDirection : Optional[str]
var messageId : Optional[str]
var operationName : Optional[str]
var path : str
var protocol : str
var requestId : str
var requestTime : str
var requestTimeEpoch : datetime.datetime
var resourceId : Optional[str]
var resourcePath : str
var routeKey : Optional[str]
var stage : str
Static methods
def check_message_id(values)
-
Expand source code
@root_validator(allow_reuse=True, skip_on_failure=True)
def check_message_id(cls, values):
    message_id, event_type = values.get("messageId"), values.get("eventType")
    if message_id is not None and event_type != "MESSAGE":
        raise ValueError("messageId is available only when the `eventType` is `MESSAGE`")
    return values
class APIGatewayProxyEventModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayProxyEventModel(BaseModel):
    version: Optional[str] = None
    resource: str
    path: str
    httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"]
    headers: Dict[str, str]
    multiValueHeaders: Dict[str, List[str]]
    queryStringParameters: Optional[Dict[str, str]] = None
    multiValueQueryStringParameters: Optional[Dict[str, List[str]]] = None
    requestContext: APIGatewayEventRequestContext
    pathParameters: Optional[Dict[str, str]] = None
    stageVariables: Optional[Dict[str, str]] = None
    isBase64Encoded: bool
    body: Optional[Union[str, Type[BaseModel]]] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var body : Union[str, Type[pydantic.main.BaseModel], ForwardRef(None)]
var headers : Dict[str, str]
var httpMethod : Literal['DELETE', 'GET', 'HEAD', 'OPTIONS', 'PATCH', 'POST', 'PUT']
var isBase64Encoded : bool
var multiValueHeaders : Dict[str, List[str]]
var multiValueQueryStringParameters : Optional[Dict[str, List[str]]]
var path : str
var pathParameters : Optional[Dict[str, str]]
var queryStringParameters : Optional[Dict[str, str]]
var requestContext : APIGatewayEventRequestContext
var resource : str
var stageVariables : Optional[Dict[str, str]]
var version : Optional[str]
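A hedged sketch of pairing this model with an envelope so only the JSON body of a REST API proxy event is validated; Order is a hypothetical payload model and envelopes.ApiGatewayEnvelope is assumed to be available from the parser utility:

from aws_lambda_powertools.utilities.parser import BaseModel, envelopes, parse

class Order(BaseModel):
    # hypothetical business payload carried in the request body
    order_id: int
    quantity: int

def handler(event: dict, context):
    # The envelope strips the API Gateway wrapper and validates only `body`
    order: Order = parse(event=event, model=Order, envelope=envelopes.ApiGatewayEnvelope)
    return order.quantity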
class APIGatewayProxyEventV2Model (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayProxyEventV2Model(BaseModel):
    version: str
    routeKey: str
    rawPath: str
    rawQueryString: str
    cookies: Optional[List[str]] = None
    headers: Dict[str, str]
    queryStringParameters: Optional[Dict[str, str]] = None
    pathParameters: Optional[Dict[str, str]] = None
    stageVariables: Optional[Dict[str, str]] = None
    requestContext: RequestContextV2
    body: Optional[Union[str, Type[BaseModel]]] = None
    isBase64Encoded: bool
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- LambdaFunctionUrlModel
Class variables
var body : Union[str, Type[pydantic.main.BaseModel], ForwardRef(None)]
var headers : Dict[str, str]
var isBase64Encoded : bool
var pathParameters : Optional[Dict[str, str]]
var queryStringParameters : Optional[Dict[str, str]]
var rawPath : str
var rawQueryString : str
var requestContext : RequestContextV2
var routeKey : str
var stageVariables : Optional[Dict[str, str]]
var version : str
class AlbModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class AlbModel(BaseModel):
    httpMethod: str
    path: str
    body: Union[str, Type[BaseModel]]
    isBase64Encoded: bool
    headers: Dict[str, str]
    queryStringParameters: Dict[str, str]
    requestContext: AlbRequestContext
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var body : Union[str, Type[pydantic.main.BaseModel]]
var headers : Dict[str, str]
var httpMethod : str
var isBase64Encoded : bool
var path : str
var queryStringParameters : Dict[str, str]
var requestContext : AlbRequestContext
class AlbRequestContext (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class AlbRequestContext(BaseModel): elb: AlbRequestContextData
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var elb : AlbRequestContextData
class AlbRequestContextData (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class AlbRequestContextData(BaseModel): targetGroupArn: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var targetGroupArn : str
class BedrockAgentEventModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class BedrockAgentEventModel(BaseModel):
    message_version: str = Field(..., alias="messageVersion")
    input_text: str = Field(..., alias="inputText")
    session_id: str = Field(..., alias="sessionId")
    action_group: str = Field(..., alias="actionGroup")
    api_path: str = Field(..., alias="apiPath")
    http_method: str = Field(..., alias="httpMethod")
    session_attributes: Dict[str, str] = Field({}, alias="sessionAttributes")
    prompt_session_attributes: Dict[str, str] = Field({}, alias="promptSessionAttributes")
    agent: BedrockAgentModel
    parameters: Optional[List[BedrockAgentPropertyModel]] = None
    request_body: Optional[BedrockAgentRequestBodyModel] = Field(None, alias="requestBody")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var action_group : str
var agent : BedrockAgentModel
var api_path : str
var http_method : str
var input_text : str
var message_version : str
var parameters : Optional[List[BedrockAgentPropertyModel]]
var prompt_session_attributes : Dict[str, str]
var request_body : Optional[BedrockAgentRequestBodyModel]
var session_attributes : Dict[str, str]
var session_id : str
class BedrockAgentModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class BedrockAgentModel(BaseModel):
    name: str
    id_: str = Field(..., alias="id")
    alias: str
    version: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var alias : str
var id_ : str
var name : str
var version : str
class BedrockAgentPropertyModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class BedrockAgentPropertyModel(BaseModel):
    name: str
    type_: str = Field(..., alias="type")
    value: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var name : str
var type_ : str
var value : str
class BedrockAgentRequestBodyModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class BedrockAgentRequestBodyModel(BaseModel): content: Dict[str, BedrockAgentRequestMediaModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var content : Dict[str, BedrockAgentRequestMediaModel]
class BedrockAgentRequestMediaModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class BedrockAgentRequestMediaModel(BaseModel): properties: List[BedrockAgentPropertyModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var properties : List[BedrockAgentPropertyModel]
class CloudFormationCustomResourceBaseModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudFormationCustomResourceBaseModel(BaseModel):
    request_type: str = Field(..., alias="RequestType")
    service_token: str = Field(..., alias="ServiceToken")
    response_url: HttpUrl = Field(..., alias="ResponseURL")
    stack_id: str = Field(..., alias="StackId")
    request_id: str = Field(..., alias="RequestId")
    logical_resource_id: str = Field(..., alias="LogicalResourceId")
    resource_type: str = Field(..., alias="ResourceType")
    resource_properties: Union[Dict[str, Any], BaseModel, None] = Field(None, alias="ResourceProperties")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- CloudFormationCustomResourceCreateModel
- CloudFormationCustomResourceDeleteModel
- CloudFormationCustomResourceUpdateModel
Class variables
var logical_resource_id : str
var request_id : str
var request_type : str
var resource_properties : Union[Dict[str, Any], pydantic.main.BaseModel, ForwardRef(None)]
var resource_type : str
var response_url : pydantic.networks.HttpUrl
var service_token : str
var stack_id : str
class CloudFormationCustomResourceCreateModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudFormationCustomResourceCreateModel(CloudFormationCustomResourceBaseModel): request_type: Literal["Create"] = Field(..., alias="RequestType")
Ancestors
- CloudFormationCustomResourceBaseModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var request_type : Literal['Create']
class CloudFormationCustomResourceDeleteModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudFormationCustomResourceDeleteModel(CloudFormationCustomResourceBaseModel): request_type: Literal["Delete"] = Field(..., alias="RequestType")
Ancestors
- CloudFormationCustomResourceBaseModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var request_type : Literal['Delete']
class CloudFormationCustomResourceUpdateModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudFormationCustomResourceUpdateModel(CloudFormationCustomResourceBaseModel):
    request_type: Literal["Update"] = Field(..., alias="RequestType")
    old_resource_properties: Union[Dict[str, Any], BaseModel, None] = Field(None, alias="OldResourceProperties")
Ancestors
- CloudFormationCustomResourceBaseModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var old_resource_properties : Union[Dict[str, Any], pydantic.main.BaseModel, ForwardRef(None)]
var request_type : Literal['Update']
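Because each concrete model narrows request_type to a Literal, a handler can pick the model from the raw RequestType field before parsing. A sketch, assuming a standard custom resource payload:

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import (
    CloudFormationCustomResourceCreateModel,
    CloudFormationCustomResourceDeleteModel,
    CloudFormationCustomResourceUpdateModel,
)

MODEL_BY_REQUEST_TYPE = {
    "Create": CloudFormationCustomResourceCreateModel,
    "Update": CloudFormationCustomResourceUpdateModel,
    "Delete": CloudFormationCustomResourceDeleteModel,
}

def handler(event: dict, context):
    # Parsing with the wrong model would raise ValidationError on the Literal field
    resource = parse(event=event, model=MODEL_BY_REQUEST_TYPE[event["RequestType"]])
    return {"LogicalResourceId": resource.logical_resource_id}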
class CloudWatchLogsData (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudWatchLogsData(BaseModel):
    decoded_data: CloudWatchLogsDecode = Field(None, alias="data")

    @validator("decoded_data", pre=True, allow_reuse=True)
    def prepare_data(cls, value):
        try:
            logger.debug("Decoding base64 cloudwatch log data before parsing")
            payload = base64.b64decode(value)
            logger.debug("Decompressing cloudwatch log data before parsing")
            uncompressed = zlib.decompress(payload, zlib.MAX_WBITS | 32)
            return json.loads(uncompressed.decode("utf-8"))
        except Exception:
            raise ValueError("unable to decompress data")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var decoded_data : CloudWatchLogsDecode
Static methods
def prepare_data(value)
-
Expand source code
@validator("decoded_data", pre=True, allow_reuse=True) def prepare_data(cls, value): try: logger.debug("Decoding base64 cloudwatch log data before parsing") payload = base64.b64decode(value) logger.debug("Decompressing cloudwatch log data before parsing") uncompressed = zlib.decompress(payload, zlib.MAX_WBITS | 32) return json.loads(uncompressed.decode("utf-8")) except Exception: raise ValueError("unable to decompress data")
class CloudWatchLogsDecode (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudWatchLogsDecode(BaseModel):
    messageType: str
    owner: str
    logGroup: str
    logStream: str
    subscriptionFilters: List[str]
    logEvents: List[CloudWatchLogsLogEvent]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var logEvents : List[CloudWatchLogsLogEvent]
var logGroup : str
var logStream : str
var messageType : str
var owner : str
var subscriptionFilters : List[str]
class CloudWatchLogsLogEvent (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudWatchLogsLogEvent(BaseModel):
    id: str  # noqa AA03 VNE003
    timestamp: datetime
    message: Union[str, Type[BaseModel]]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var id : str
var message : Union[str, Type[pydantic.main.BaseModel]]
var timestamp : datetime.datetime
class CloudWatchLogsModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudWatchLogsModel(BaseModel): awslogs: CloudWatchLogsData
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var awslogs : CloudWatchLogsData
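The awslogs.data field arrives base64-encoded and gzip-compressed; the prepare_data validator above decodes it transparently. A sketch with a synthetic payload:

import base64
import gzip
import json

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import CloudWatchLogsModel

decoded = {
    "messageType": "DATA_MESSAGE",
    "owner": "123456789012",
    "logGroup": "/aws/lambda/example",
    "logStream": "2024/01/01/[$LATEST]example",
    "subscriptionFilters": ["example-filter"],
    "logEvents": [{"id": "1", "timestamp": 1704067200000, "message": "hello"}],
}
# CloudWatch Logs subscriptions deliver the payload gzip-compressed and base64-encoded
event = {"awslogs": {"data": base64.b64encode(gzip.compress(json.dumps(decoded).encode())).decode()}}

parsed = parse(event=event, model=CloudWatchLogsModel)
print(parsed.awslogs.decoded_data.logEvents[0].message)  # "hello"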
class DynamoDBStreamChangedRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class DynamoDBStreamChangedRecordModel(BaseModel):
    ApproximateCreationDateTime: Optional[datetime] = None
    Keys: Dict[str, Dict[str, Any]]
    NewImage: Optional[Union[Dict[str, Any], Type[BaseModel], BaseModel]] = None
    OldImage: Optional[Union[Dict[str, Any], Type[BaseModel], BaseModel]] = None
    SequenceNumber: str
    SizeBytes: int
    StreamViewType: Literal["NEW_AND_OLD_IMAGES", "KEYS_ONLY", "NEW_IMAGE", "OLD_IMAGE"]

    # context on why it's commented: https://github.com/aws-powertools/powertools-lambda-python/pull/118
    # since both images are optional, they can both be None. However, at least one must
    # exist in a legal model of NEW_AND_OLD_IMAGES type
    # @root_validator
    # def check_one_image_exists(cls, values):  # noqa: ERA001
    #     new_img, old_img = values.get("NewImage"), values.get("OldImage")  # noqa: ERA001
    #     stream_type = values.get("StreamViewType")  # noqa: ERA001
    #     if stream_type == "NEW_AND_OLD_IMAGES" and not new_img and not old_img:  # noqa: ERA001
    #         raise TypeError("DynamoDB streams model failed validation, missing both new & old stream images")  # noqa: ERA001,E501
    #     return values  # noqa: ERA001
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var ApproximateCreationDateTime : Optional[datetime.datetime]
var Keys : Dict[str, Dict[str, Any]]
var NewImage : Union[Dict[str, Any], Type[pydantic.main.BaseModel], pydantic.main.BaseModel, ForwardRef(None)]
var OldImage : Union[Dict[str, Any], Type[pydantic.main.BaseModel], pydantic.main.BaseModel, ForwardRef(None)]
var SequenceNumber : str
var SizeBytes : int
var StreamViewType : Literal['NEW_AND_OLD_IMAGES', 'KEYS_ONLY', 'NEW_IMAGE', 'OLD_IMAGE']
class DynamoDBStreamModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class DynamoDBStreamModel(BaseModel): Records: List[DynamoDBStreamRecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[DynamoDBStreamRecordModel]
class DynamoDBStreamRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class DynamoDBStreamRecordModel(BaseModel):
    eventID: str
    eventName: Literal["INSERT", "MODIFY", "REMOVE"]
    eventVersion: float
    eventSource: Literal["aws:dynamodb"]
    awsRegion: str
    eventSourceARN: str
    dynamodb: DynamoDBStreamChangedRecordModel
    userIdentity: Optional[UserIdentity] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var awsRegion : str
var dynamodb : DynamoDBStreamChangedRecordModel
var eventID : str
var eventName : Literal['INSERT', 'MODIFY', 'REMOVE']
var eventSource : Literal['aws:dynamodb']
var eventSourceARN : str
var eventVersion : float
var userIdentity : Optional[UserIdentity]
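A sketch of filtering a DynamoDB stream batch by event name; field names follow the models above:

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamModel

@event_parser(model=DynamoDBStreamModel)
def handler(event: DynamoDBStreamModel, context):
    # Collect the keys of newly inserted items; NewImage/OldImage stay as raw dicts
    return [
        record.dynamodb.Keys
        for record in event.Records
        if record.eventName == "INSERT"
    ]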
class EventBridgeModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class EventBridgeModel(BaseModel):
    version: str
    id: str  # noqa: A003,VNE003
    source: str
    account: str
    time: datetime
    region: str
    resources: List[str]
    detail_type: str = Field(None, alias="detail-type")
    detail: RawDictOrModel
    replay_name: Optional[str] = Field(None, alias="replay-name")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- S3EventNotificationEventBridgeModel
Class variables
var account : str
var detail : Union[Dict[str, Any], Type[pydantic.main.BaseModel], pydantic.main.BaseModel]
var detail_type : str
var id : str
var region : str
var replay_name : Optional[str]
var resources : List[str]
var source : str
var time : datetime.datetime
var version : str
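detail accepts a raw dict or a nested model, so a typed event can be expressed by subclassing, mirroring S3EventNotificationEventBridgeModel below. A sketch with a hypothetical OrderDetail model:

from aws_lambda_powertools.utilities.parser import BaseModel, parse
from aws_lambda_powertools.utilities.parser.models import EventBridgeModel

class OrderDetail(BaseModel):
    # hypothetical detail payload
    order_id: str
    amount: float

class OrderEventModel(EventBridgeModel):
    detail: OrderDetail

def handler(event: dict, context):
    order_event = parse(event=event, model=OrderEventModel)
    return order_event.detail.order_id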
class KafkaBaseEventModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaBaseEventModel(BaseModel):
    bootstrapServers: List[str]
    records: Dict[str, List[KafkaRecordModel]]

    @validator("bootstrapServers", pre=True, allow_reuse=True)
    def split_servers(cls, value):
        return None if not value else value.split(SERVERS_DELIMITER)
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- KafkaMskEventModel
- KafkaSelfManagedEventModel
Class variables
var bootstrapServers : List[str]
var records : Dict[str, List[KafkaRecordModel]]
Static methods
def split_servers(value)
-
Expand source code
@validator("bootstrapServers", pre=True, allow_reuse=True) def split_servers(cls, value): return None if not value else value.split(SERVERS_DELIMITER)
class KafkaMskEventModel (**data: Any)
-
Fully-managed AWS Apache Kafka event trigger. Documentation: https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaMskEventModel(KafkaBaseEventModel):
    """Fully-managed AWS Apache Kafka event trigger
    Documentation:
    --------------
    - https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html
    """

    eventSource: Literal["aws:kafka"]
    eventSourceArn: str
Ancestors
- KafkaBaseEventModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var eventSource : Literal['aws:kafka']
var eventSourceArn : str
class KafkaRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaRecordModel(BaseModel):
    topic: str
    partition: int
    offset: int
    timestamp: datetime
    timestampType: str
    key: bytes
    value: Union[str, Type[BaseModel]]
    headers: List[Dict[str, bytes]]

    # Added type ignore to keep compatibility between Pydantic v1 and v2
    _decode_key = validator("key", allow_reuse=True)(base64_decode)  # type: ignore[type-var, unused-ignore]

    @validator("value", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        as_bytes = base64_decode(value)
        return bytes_to_string(as_bytes)

    @validator("headers", pre=True, allow_reuse=True)
    def decode_headers_list(cls, value):
        for header in value:
            for key, values in header.items():
                header[key] = bytes(values)
        return value
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var headers : List[Dict[str, bytes]]
var key : bytes
var offset : int
var partition : int
var timestamp : datetime.datetime
var timestampType : str
var topic : str
var value : Union[str, Type[pydantic.main.BaseModel]]
Static methods
def data_base64_decode(value)
-
Expand source code
@validator("value", pre=True, allow_reuse=True) def data_base64_decode(cls, value): as_bytes = base64_decode(value) return bytes_to_string(as_bytes)
def decode_headers_list(value)
-
Expand source code
@validator("headers", pre=True, allow_reuse=True) def decode_headers_list(cls, value): for header in value: for key, values in header.items(): header[key] = bytes(values) return value
class KafkaSelfManagedEventModel (**data: Any)
-
Self-managed Apache Kafka event trigger. Documentation: https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaSelfManagedEventModel(KafkaBaseEventModel):
    """Self-managed Apache Kafka event trigger
    Documentation:
    --------------
    - https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html
    """

    eventSource: Literal["aws:SelfManagedKafka"]
Ancestors
- KafkaBaseEventModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var eventSource : Literal['aws:SelfManagedKafka']
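A sketch of iterating MSK records; value is already base64-decoded to a string by the validators on KafkaRecordModel:

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import KafkaMskEventModel

@event_parser(model=KafkaMskEventModel)
def handler(event: KafkaMskEventModel, context):
    values = []
    # records maps "topic-partition" keys to lists of KafkaRecordModel
    for topic_partition, records in event.records.items():
        values.extend(record.value for record in records)
    return values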
class KinesisDataStreamModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisDataStreamModel(BaseModel): Records: List[KinesisDataStreamRecord]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[KinesisDataStreamRecord]
class KinesisDataStreamRecord (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisDataStreamRecord(BaseModel):
    eventSource: Literal["aws:kinesis"]
    eventVersion: str
    eventID: str
    eventName: Literal["aws:kinesis:record"]
    invokeIdentityArn: str
    awsRegion: str
    eventSourceARN: str
    kinesis: KinesisDataStreamRecordPayload

    def decompress_zlib_record_data_as_json(self) -> Dict:
        """Decompress Kinesis Record bytes data zlib compressed to JSON"""
        if not isinstance(self.kinesis.data, bytes):
            raise ValueError("We can only decompress bytes data, not custom models.")
        return json.loads(zlib.decompress(self.kinesis.data, zlib.MAX_WBITS | 32))
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var awsRegion : str
var eventID : str
var eventName : Literal['aws:kinesis:record']
var eventSource : Literal['aws:kinesis']
var eventSourceARN : str
var eventVersion : str
var invokeIdentityArn : str
var kinesis : KinesisDataStreamRecordPayload
Methods
def decompress_zlib_record_data_as_json(self) ‑> Dict
-
Decompress Kinesis Record bytes data zlib compressed to JSON
Expand source code
def decompress_zlib_record_data_as_json(self) -> Dict:
    """Decompress Kinesis Record bytes data zlib compressed to JSON"""
    if not isinstance(self.kinesis.data, bytes):
        raise ValueError("We can only decompress bytes data, not custom models.")
    return json.loads(zlib.decompress(self.kinesis.data, zlib.MAX_WBITS | 32))
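A sketch of reading record payloads; kinesis.data is already base64-decoded to bytes by the model, and the helper above additionally inflates zlib/gzip-compressed data (for example, CloudWatch Logs streamed into Kinesis):

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamModel

def handler(event: dict, context):
    parsed = parse(event=event, model=KinesisDataStreamModel)
    decoded = []
    for record in parsed.Records:
        raw: bytes = record.kinesis.data  # base64 already decoded by the model
        # Only valid when the producer compressed the payload with zlib/gzip
        decoded.append(record.decompress_zlib_record_data_as_json())
    return decoded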
class KinesisDataStreamRecordPayload (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisDataStreamRecordPayload(BaseModel):
    kinesisSchemaVersion: str
    partitionKey: str
    sequenceNumber: str
    data: Union[bytes, Type[BaseModel], BaseModel]  # base64 encoded str is parsed into bytes
    approximateArrivalTimestamp: float

    @validator("data", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        return base64_decode(value)
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var approximateArrivalTimestamp : float
var data : Union[bytes, Type[pydantic.main.BaseModel], pydantic.main.BaseModel]
var kinesisSchemaVersion : str
var partitionKey : str
var sequenceNumber : str
Static methods
def data_base64_decode(value)
-
Expand source code
@validator("data", pre=True, allow_reuse=True) def data_base64_decode(cls, value): return base64_decode(value)
class KinesisFirehoseModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisFirehoseModel(BaseModel):
    invocationId: str
    deliveryStreamArn: str
    region: str
    sourceKinesisStreamArn: Optional[str] = None
    records: List[KinesisFirehoseRecord]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var deliveryStreamArn : str
var invocationId : str
var records : List[KinesisFirehoseRecord]
var region : str
var sourceKinesisStreamArn : Optional[str]
class KinesisFirehoseRecord (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisFirehoseRecord(BaseModel):
    data: Union[bytes, Type[BaseModel]]  # base64 encoded str is parsed into bytes
    recordId: str
    approximateArrivalTimestamp: PositiveInt
    kinesisRecordMetadata: Optional[KinesisFirehoseRecordMetadata] = None

    @validator("data", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        return base64_decode(value)
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var approximateArrivalTimestamp : pydantic.types.PositiveInt
var data : Union[bytes, Type[pydantic.main.BaseModel]]
var kinesisRecordMetadata : Optional[KinesisFirehoseRecordMetadata]
var recordId : str
Static methods
def data_base64_decode(value)
-
Expand source code
@validator("data", pre=True, allow_reuse=True) def data_base64_decode(cls, value): return base64_decode(value)
class KinesisFirehoseRecordMetadata (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisFirehoseRecordMetadata(BaseModel):
    shardId: str
    partitionKey: str
    approximateArrivalTimestamp: PositiveInt
    sequenceNumber: str
    subsequenceNumber: int
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var approximateArrivalTimestamp : pydantic.types.PositiveInt
var partitionKey : str
var sequenceNumber : str
var shardId : str
var subsequenceNumber : int
class KinesisFirehoseSqsModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisFirehoseSqsModel(BaseModel):
    invocationId: str
    deliveryStreamArn: str
    region: str
    sourceKinesisStreamArn: Optional[str] = None
    records: List[KinesisFirehoseSqsRecord]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var deliveryStreamArn : str
var invocationId : str
var records : List[KinesisFirehoseSqsRecord]
var region : str
var sourceKinesisStreamArn : Optional[str]
class KinesisFirehoseSqsRecord (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisFirehoseSqsRecord(BaseModel):
    data: SqsRecordModel
    recordId: str
    approximateArrivalTimestamp: PositiveInt
    kinesisRecordMetadata: Optional[KinesisFirehoseRecordMetadata] = None

    @validator("data", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        # Firehose payload is encoded
        return json.loads(base64_decode(value))
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var approximateArrivalTimestamp : pydantic.types.PositiveInt
var data : SqsRecordModel
var kinesisRecordMetadata : Optional[KinesisFirehoseRecordMetadata]
var recordId : str
Static methods
def data_base64_decode(value)
-
Expand source code
@validator("data", pre=True, allow_reuse=True) def data_base64_decode(cls, value): # Firehose payload is encoded return json.loads(base64_decode(value))
class LambdaFunctionUrlModel (**data: Any)
-
AWS Lambda Function URL model
Notes:
Lambda Function URL follows the API Gateway HTTP APIs Payload Format Version 2.0.
Keys related to API Gateway features not available in Function URL use a sentinel value (e.g. routeKey, stage).
Documentation:
- https://docs.aws.amazon.com/lambda/latest/dg/urls-configuration.html
- https://docs.aws.amazon.com/lambda/latest/dg/urls-invocation.html#urls-payloads
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class LambdaFunctionUrlModel(APIGatewayProxyEventV2Model):
    """AWS Lambda Function URL model

    Notes:
    -----
    Lambda Function URL follows the API Gateway HTTP APIs Payload Format Version 2.0.

    Keys related to API Gateway features not available in Function URL use a sentinel value (e.g.`routeKey`, `stage`).

    Documentation:
    - https://docs.aws.amazon.com/lambda/latest/dg/urls-configuration.html
    - https://docs.aws.amazon.com/lambda/latest/dg/urls-invocation.html#urls-payloads
    """

    pass
Ancestors
- APIGatewayProxyEventV2Model
- pydantic.main.BaseModel
- pydantic.utils.Representation
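Since the payload shape matches HTTP API v2, handling a Function URL event looks the same as handling an APIGatewayProxyEventV2Model. A minimal sketch:

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import LambdaFunctionUrlModel

@event_parser(model=LambdaFunctionUrlModel)
def handler(event: LambdaFunctionUrlModel, context):
    return {"path": event.rawPath, "method": event.requestContext.http.method}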
class RequestContextV2 (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2(BaseModel):
    accountId: str
    apiId: str
    authorizer: Optional[RequestContextV2Authorizer] = None
    domainName: str
    domainPrefix: str
    requestId: str
    routeKey: str
    stage: str
    time: str
    timeEpoch: datetime
    http: RequestContextV2Http
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accountId : str
var apiId : str
var domainName : str
var domainPrefix : str
var http : RequestContextV2Http
var requestId : str
var routeKey : str
var stage : str
var time : str
var timeEpoch : datetime.datetime
class RequestContextV2Authorizer (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2Authorizer(BaseModel):
    jwt: Optional[RequestContextV2AuthorizerJwt] = None
    iam: Optional[RequestContextV2AuthorizerIam] = None
    lambda_value: Optional[Dict[str, Any]] = Field(None, alias="lambda")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var iam : Optional[RequestContextV2AuthorizerIam]
var jwt : Optional[RequestContextV2AuthorizerJwt]
var lambda_value : Optional[Dict[str, Any]]
class RequestContextV2AuthorizerIam (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2AuthorizerIam(BaseModel):
    accessKey: Optional[str] = None
    accountId: Optional[str] = None
    callerId: Optional[str] = None
    principalOrgId: Optional[str] = None
    userArn: Optional[str] = None
    userId: Optional[str] = None
    cognitoIdentity: Optional[RequestContextV2AuthorizerIamCognito] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accessKey : Optional[str]
var accountId : Optional[str]
var callerId : Optional[str]
var cognitoIdentity : Optional[RequestContextV2AuthorizerIamCognito]
var principalOrgId : Optional[str]
var userArn : Optional[str]
var userId : Optional[str]
class RequestContextV2AuthorizerIamCognito (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2AuthorizerIamCognito(BaseModel):
    amr: List[str]
    identityId: str
    identityPoolId: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var amr : List[str]
var identityId : str
var identityPoolId : str
class RequestContextV2AuthorizerJwt (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2AuthorizerJwt(BaseModel):
    claims: Dict[str, Any]
    scopes: Optional[List[str]] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var claims : Dict[str, Any]
var scopes : Optional[List[str]]
class RequestContextV2Http (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2Http(BaseModel):
    method: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"]
    path: str
    protocol: str
    sourceIp: IPvAnyNetwork
    userAgent: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var method : Literal['DELETE', 'GET', 'HEAD', 'OPTIONS', 'PATCH', 'POST', 'PUT']
var path : str
var protocol : str
var sourceIp : pydantic.networks.IPvAnyNetwork
var userAgent : str
class S3EventNotificationEventBridgeDetailModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3EventNotificationEventBridgeDetailModel(BaseModel):
    version: str
    bucket: S3EventNotificationEventBridgeBucketModel
    object: S3EventNotificationObjectModel  # noqa: A003,VNE003
    request_id: str = Field(None, alias="request-id")
    requester: str
    source_ip_address: str = Field(None, alias="source-ip-address")
    reason: Optional[str] = None
    deletion_type: Optional[str] = Field(None, alias="deletion-type")
    restore_expiry_time: Optional[str] = Field(None, alias="restore-expiry-time")
    source_storage_class: Optional[str] = Field(None, alias="source-storage-class")
    destination_storage_class: Optional[str] = Field(None, alias="destination-storage-class")
    destination_access_tier: Optional[str] = Field(None, alias="destination-access-tier")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var bucket : S3EventNotificationEventBridgeBucketModel
var deletion_type : Optional[str]
var destination_access_tier : Optional[str]
var destination_storage_class : Optional[str]
var object : S3EventNotificationObjectModel
var reason : Optional[str]
var request_id : str
var requester : str
var restore_expiry_time : Optional[str]
var source_ip_address : str
var source_storage_class : Optional[str]
var version : str
class S3EventNotificationEventBridgeModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3EventNotificationEventBridgeModel(EventBridgeModel): detail: S3EventNotificationEventBridgeDetailModel
Ancestors
- EventBridgeModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var detail : S3EventNotificationEventBridgeDetailModel
class S3EventNotificationObjectModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3EventNotificationObjectModel(BaseModel):
    key: str
    size: Optional[NonNegativeFloat] = None
    etag: str
    version_id: str = Field(None, alias="version-id")
    sequencer: Optional[str] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var etag : str
var key : str
var sequencer : Optional[str]
var size : Optional[pydantic.types.NonNegativeFloat]
var version_id : str
class S3Model (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3Model(BaseModel): Records: List[S3RecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[S3RecordModel]
class S3ObjectConfiguration (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectConfiguration(BaseModel):
    accessPointArn: str
    supportingAccessPointArn: str
    payload: Union[str, Type[BaseModel]]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accessPointArn : str
var payload : Union[str, Type[pydantic.main.BaseModel]]
var supportingAccessPointArn : str
class S3ObjectContext (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectContext(BaseModel):
    inputS3Url: HttpUrl
    outputRoute: str
    outputToken: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var inputS3Url : pydantic.networks.HttpUrl
var outputRoute : str
var outputToken : str
class S3ObjectLambdaEvent (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectLambdaEvent(BaseModel):
    xAmzRequestId: str
    getObjectContext: S3ObjectContext
    configuration: S3ObjectConfiguration
    userRequest: S3ObjectUserRequest
    userIdentity: S3ObjectUserIdentity
    protocolVersion: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var configuration : S3ObjectConfiguration
var getObjectContext : S3ObjectContext
var protocolVersion : str
var userIdentity : S3ObjectUserIdentity
var userRequest : S3ObjectUserRequest
var xAmzRequestId : str
class S3ObjectSessionAttributes (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectSessionAttributes(BaseModel):
    creationDate: str
    mfaAuthenticated: bool
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var creationDate : str
var mfaAuthenticated : bool
class S3ObjectSessionContext (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectSessionContext(BaseModel):
    sessionIssuer: S3ObjectSessionIssuer
    attributes: S3ObjectSessionAttributes
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var attributes : S3ObjectSessionAttributes
var sessionIssuer : S3ObjectSessionIssuer
class S3ObjectSessionIssuer (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectSessionIssuer(BaseModel):
    type: str  # noqa: A003, VNE003
    userName: Optional[str] = None
    principalId: str
    arn: str
    accountId: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accountId : str
var arn : str
var principalId : str
var type : str
var userName : Optional[str]
class S3ObjectUserIdentity (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectUserIdentity(BaseModel):
    type: str  # noqa: A003
    accountId: str
    accessKeyId: str
    userName: Optional[str] = None
    principalId: str
    arn: str
    sessionContext: Optional[S3ObjectSessionContext] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accessKeyId : str
var accountId : str
var arn : str
var principalId : str
var sessionContext : Optional[S3ObjectSessionContext]
var type : str
var userName : Optional[str]
class S3ObjectUserRequest (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectUserRequest(BaseModel):
    url: str
    headers: Dict[str, str]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var headers : Dict[str, str]
var url : str
class S3RecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3RecordModel(BaseModel):
    eventVersion: str
    eventSource: Literal["aws:s3"]
    awsRegion: str
    eventTime: datetime
    eventName: str
    userIdentity: S3Identity
    requestParameters: S3RequestParameters
    responseElements: S3ResponseElements
    s3: S3Message
    glacierEventData: Optional[S3EventRecordGlacierEventData] = None

    @root_validator(allow_reuse=True, skip_on_failure=True)
    def validate_s3_object(cls, values):
        event_name = values.get("eventName")
        s3_object = values.get("s3").object
        if "ObjectRemoved" not in event_name:
            if s3_object.size is None or s3_object.eTag is None:
                raise ValueError("S3Object.size and S3Object.eTag are required for non-ObjectRemoved events")
        return values
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var awsRegion : str
var eventName : str
var eventSource : Literal['aws:s3']
var eventTime : datetime.datetime
var eventVersion : str
var glacierEventData : Optional[S3EventRecordGlacierEventData]
var requestParameters : S3RequestParameters
var responseElements : S3ResponseElements
var s3 : S3Message
var userIdentity : S3Identity
Static methods
def validate_s3_object(values)
-
Expand source code
@root_validator(allow_reuse=True, skip_on_failure=True)
def validate_s3_object(cls, values):
    event_name = values.get("eventName")
    s3_object = values.get("s3").object
    if "ObjectRemoved" not in event_name:
        if s3_object.size is None or s3_object.eTag is None:
            raise ValueError("S3Object.size and S3Object.eTag are required for non-ObjectRemoved events")
    return values
class S3SqsEventNotificationModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3SqsEventNotificationModel(SqsModel): Records: List[S3SqsEventNotificationRecordModel]
Ancestors
- SqsModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[S3SqsEventNotificationRecordModel]
class S3SqsEventNotificationRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3SqsEventNotificationRecordModel(SqsRecordModel): body: Json[S3Model]
Ancestors
- SqsRecordModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var body : Json[S3Model]
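S3 notifications delivered through SQS carry the S3 event as a JSON string in each record body; Json[S3Model] parses that string in place. A sketch:

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import S3SqsEventNotificationModel

@event_parser(model=S3SqsEventNotificationModel)
def handler(event: S3SqsEventNotificationModel, context):
    keys = []
    for sqs_record in event.Records:
        # body is already an S3Model instance at this point
        for s3_record in sqs_record.body.Records:
            keys.append(s3_record.s3.object.key)
    return keys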
class SesMail (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesMail(BaseModel):
    timestamp: datetime
    source: str
    messageId: str
    destination: List[str]
    headersTruncated: bool
    headers: List[SesMailHeaders]
    commonHeaders: SesMailCommonHeaders
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var commonHeaders : SesMailCommonHeaders
var destination : List[str]
var headers : List[SesMailHeaders]
var headersTruncated : bool
var messageId : str
var source : str
var timestamp : datetime.datetime
class SesMailCommonHeaders (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesMailCommonHeaders(BaseModel):
    header_from: List[str] = Field(None, alias="from")
    to: List[str]
    cc: Optional[List[str]] = None
    bcc: Optional[List[str]] = None
    sender: Optional[List[str]] = None
    reply_to: Optional[List[str]] = Field(None, alias="reply-to")
    returnPath: str
    messageId: str
    date: str
    subject: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var bcc : Optional[List[str]]
var cc : Optional[List[str]]
var date : str
var header_from : List[str]
var messageId : str
var reply_to : Optional[List[str]]
var returnPath : str
var sender : Optional[List[str]]
var subject : str
var to : List[str]
class SesMailHeaders (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesMailHeaders(BaseModel):
    name: str
    value: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var name : str
var value : str
class SesMessage (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesMessage(BaseModel):
    mail: SesMail
    receipt: SesReceipt
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var mail : SesMail
var receipt : SesReceipt
class SesModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesModel(BaseModel):
    Records: List[SesRecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[SesRecordModel]
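A brief, illustrative handler for inbound SES events built on the models above; the handler body is a sketch and not part of the library:
from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import SesModel
from aws_lambda_powertools.utilities.typing import LambdaContext

@event_parser(model=SesModel)
def lambda_handler(event: SesModel, context: LambdaContext):
    for record in event.Records:
        mail = record.ses.mail
        # destination is a list of recipient addresses
        print(f"{mail.messageId} from {mail.source} to {mail.destination}")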
class SesReceipt (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesReceipt(BaseModel):
    timestamp: datetime
    processingTimeMillis: PositiveInt
    recipients: List[str]
    spamVerdict: SesReceiptVerdict
    virusVerdict: SesReceiptVerdict
    spfVerdict: SesReceiptVerdict
    dmarcVerdict: SesReceiptVerdict
    action: SesReceiptAction
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var action : SesReceiptAction
var dmarcVerdict : SesReceiptVerdict
var processingTimeMillis : pydantic.types.PositiveInt
var recipients : List[str]
var spamVerdict : SesReceiptVerdict
var spfVerdict : SesReceiptVerdict
var timestamp : datetime.datetime
var virusVerdict : SesReceiptVerdict
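Each verdict is a nested SesReceiptVerdict, so a receipt can be screened by comparing the status literals; a small sketch (the helper function is hypothetical):
from aws_lambda_powertools.utilities.parser.models import SesReceipt

def is_clean(receipt: SesReceipt) -> bool:
    # treat the message as clean only if both spam and virus checks passed
    return receipt.spamVerdict.status == "PASS" and receipt.virusVerdict.status == "PASS"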
class SesReceiptAction (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesReceiptAction(BaseModel):
    type: Literal["Lambda"]  # noqa A003,VNE003
    invocationType: Literal["Event"]
    functionArn: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var functionArn : str
var invocationType : Literal['Event']
var type : Literal['Lambda']
class SesReceiptVerdict (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesReceiptVerdict(BaseModel):
    status: Literal["PASS", "FAIL", "GRAY", "PROCESSING_FAILED"]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var status : Literal['PASS', 'FAIL', 'GRAY', 'PROCESSING_FAILED']
class SesRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesRecordModel(BaseModel):
    eventSource: Literal["aws:ses"]
    eventVersion: str
    ses: SesMessage
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var eventSource : Literal['aws:ses']
var eventVersion : str
var ses : SesMessage
class SnsModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SnsModel(BaseModel):
    Records: List[SnsRecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[SnsRecordModel]
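A usage sketch for SNS-triggered functions: instead of walking SnsModel manually, the parser's SnsEnvelope can extract each record's Message into your own model. The Order model and its field below are hypothetical:
from typing import List
from pydantic import BaseModel
from aws_lambda_powertools.utilities.parser import envelopes, event_parser
from aws_lambda_powertools.utilities.typing import LambdaContext

class Order(BaseModel):  # hypothetical business payload carried in Sns.Message
    order_id: str

@event_parser(model=Order, envelope=envelopes.SnsEnvelope)
def lambda_handler(orders: List[Order], context: LambdaContext):
    for order in orders:
        print(order.order_id)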
class SnsNotificationModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SnsNotificationModel(BaseModel):
    Subject: Optional[str] = None
    TopicArn: str
    UnsubscribeUrl: HttpUrl
    Type: Literal["Notification"]
    MessageAttributes: Optional[Dict[str, SnsMsgAttributeModel]] = None
    Message: Union[str, TypingType[BaseModel]]
    MessageId: str
    SigningCertUrl: Optional[HttpUrl] = None  # NOTE: FIFO opt-in removes attribute
    Signature: Optional[str] = None  # NOTE: FIFO opt-in removes attribute
    Timestamp: datetime
    SignatureVersion: Optional[str] = None  # NOTE: FIFO opt-in removes attribute

    @root_validator(pre=True, allow_reuse=True)
    def check_sqs_protocol(cls, values):
        sqs_rewritten_keys = ("UnsubscribeURL", "SigningCertURL")
        if any(key in sqs_rewritten_keys for key in values):
            # The sentinel value 'None' forces the validator to fail with
            # ValidationError instead of KeyError when the key is missing from
            # the SQS payload
            values["UnsubscribeUrl"] = values.pop("UnsubscribeURL", None)
            values["SigningCertUrl"] = values.pop("SigningCertURL", None)
        return values
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Message : Union[str, Type[pydantic.main.BaseModel]]
var MessageAttributes : Optional[Dict[str, SnsMsgAttributeModel]]
var MessageId : str
var Signature : Optional[str]
var SignatureVersion : Optional[str]
var SigningCertUrl : Optional[pydantic.networks.HttpUrl]
var Subject : Optional[str]
var Timestamp : datetime.datetime
var TopicArn : str
var Type : Literal['Notification']
var UnsubscribeUrl : pydantic.networks.HttpUrl
Static methods
def check_sqs_protocol(values)
-
Expand source code
@root_validator(pre=True, allow_reuse=True)
def check_sqs_protocol(cls, values):
    sqs_rewritten_keys = ("UnsubscribeURL", "SigningCertURL")
    if any(key in sqs_rewritten_keys for key in values):
        # The sentinel value 'None' forces the validator to fail with
        # ValidationError instead of KeyError when the key is missing from
        # the SQS payload
        values["UnsubscribeUrl"] = values.pop("UnsubscribeURL", None)
        values["SigningCertUrl"] = values.pop("SigningCertURL", None)
    return values
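This pre-validator exists because SNS notifications delivered through an SQS subscription spell the URL fields UnsubscribeURL and SigningCertURL, while direct SNS invocations use UnsubscribeUrl and SigningCertUrl; the validator normalises the former into the latter. A small illustration of that renaming, with placeholder ARN, ID and URL values:
from aws_lambda_powertools.utilities.parser.models import SnsNotificationModel

# SQS-delivered payloads use "UnsubscribeURL"; the pre-validator maps it to UnsubscribeUrl
notification = SnsNotificationModel(
    Type="Notification",
    MessageId="11111111-2222-3333-4444-555555555555",  # placeholder
    TopicArn="arn:aws:sns:us-east-1:123456789012:my-topic",  # placeholder
    Message='{"hello": "world"}',
    Timestamp="2024-01-01T00:00:00.000Z",
    UnsubscribeURL="https://sns.us-east-1.amazonaws.com/?Action=Unsubscribe",  # SQS spelling
)
assert str(notification.UnsubscribeUrl).startswith("https://sns.")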
class SnsRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SnsRecordModel(BaseModel):
    EventSource: Literal["aws:sns"]
    EventVersion: str
    EventSubscriptionArn: str
    Sns: SnsNotificationModel
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var EventSource : Literal['aws:sns']
var EventSubscriptionArn : str
var EventVersion : str
var Sns : SnsNotificationModel
class SqsAttributesModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SqsAttributesModel(BaseModel):
    ApproximateReceiveCount: str
    ApproximateFirstReceiveTimestamp: datetime
    MessageDeduplicationId: Optional[str] = None
    MessageGroupId: Optional[str] = None
    SenderId: str
    SentTimestamp: datetime
    SequenceNumber: Optional[str] = None
    AWSTraceHeader: Optional[str] = None
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var AWSTraceHeader : Optional[str]
var ApproximateFirstReceiveTimestamp : datetime.datetime
var ApproximateReceiveCount : str
var MessageDeduplicationId : Optional[str]
var MessageGroupId : Optional[str]
var SenderId : str
var SentTimestamp : datetime.datetime
var SequenceNumber : Optional[str]
class SqsModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SqsModel(BaseModel):
    Records: Sequence[SqsRecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- S3SqsEventNotificationModel
Class variables
var Records : Sequence[SqsRecordModel]
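An illustrative handler parsing a raw SQS event with this model (handler name and print are placeholders):
from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import SqsModel
from aws_lambda_powertools.utilities.typing import LambdaContext

@event_parser(model=SqsModel)
def lambda_handler(event: SqsModel, context: LambdaContext):
    for record in event.Records:
        # body stays a raw string unless you subclass SqsRecordModel (see SqsRecordModel below)
        print(record.messageId, record.body)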
class SqsMsgAttributeModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SqsMsgAttributeModel(BaseModel):
    stringValue: Optional[str] = None
    binaryValue: Optional[str] = None
    stringListValues: List[str] = []
    binaryListValues: List[str] = []
    dataType: str

    # context on why it's commented: https://github.com/aws-powertools/powertools-lambda-python/pull/118
    # Amazon SQS supports the logical data types String, Number, and Binary with optional custom data type
    # labels with the format .custom-data-type.
    # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-message-metadata.html#sqs-message-attributes
    # @validator("dataType")
    # def valid_type(cls, v):  # noqa: VNE001,ERA001
    #     pattern = re.compile("Number.*|String.*|Binary.*")  # noqa: ERA001
    #     if not pattern.match(v):  # noqa: ERA001
    #         raise TypeError("data type is invalid")  # noqa: ERA001
    #     return v  # noqa: ERA001
    #
    # # validate that dataType and value are not None and match
    # @root_validator
    # def check_str_and_binary_values(cls, values):  # noqa: ERA001
    #     binary_val, str_val = values.get("binaryValue", ""), values.get("stringValue", "")  # noqa: ERA001
    #     data_type = values.get("dataType")  # noqa: ERA001
    #     if not str_val and not binary_val:  # noqa: ERA001
    #         raise TypeError("both binaryValue and stringValue are missing")  # noqa: ERA001
    #     if data_type.startswith("Binary") and not binary_val:  # noqa: ERA001
    #         raise TypeError("binaryValue is missing")  # noqa: ERA001
    #     if (data_type.startswith("String") or data_type.startswith("Number")) and not str_val:  # noqa: ERA001
    #         raise TypeError("stringValue is missing")  # noqa: ERA001
    #     return values  # noqa: ERA001
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var binaryListValues : List[str]
var binaryValue : Optional[str]
var dataType : str
var stringListValues : List[str]
var stringValue : Optional[str]
class SqsRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SqsRecordModel(BaseModel):
    messageId: str
    receiptHandle: str
    body: Union[str, Type[BaseModel], BaseModel]
    attributes: SqsAttributesModel
    messageAttributes: Dict[str, SqsMsgAttributeModel]
    md5OfBody: str
    md5OfMessageAttributes: Optional[str] = None
    eventSource: Literal["aws:sqs"]
    eventSourceARN: str
    awsRegion: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- S3SqsEventNotificationRecordModel
Class variables
var attributes : SqsAttributesModel
var awsRegion : str
var body : Union[str, Type[pydantic.main.BaseModel], pydantic.main.BaseModel]
var eventSource : Literal['aws:sqs']
var eventSourceARN : str
var md5OfBody : str
var md5OfMessageAttributes : Optional[str]
var messageAttributes : Dict[str, SqsMsgAttributeModel]
var messageId : str
var receiptHandle : str
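Because body is typed loosely here, a common pattern is to subclass SqsRecordModel and SqsModel so the body string is JSON-decoded into your own model, exactly as S3SqsEventNotificationModel above does. A sketch with a hypothetical Order payload:
from typing import List
from pydantic import BaseModel, Json
from aws_lambda_powertools.utilities.parser.models import SqsModel, SqsRecordModel

class Order(BaseModel):  # hypothetical business payload
    order_id: str

class OrderSqsRecord(SqsRecordModel):
    body: Json[Order]  # decode the SQS body string into Order

class OrderSqsEvent(SqsModel):
    Records: List[OrderSqsRecord]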
class VpcLatticeModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class VpcLatticeModel(BaseModel):
    method: str
    raw_path: str
    body: Union[str, Type[BaseModel]]
    is_base64_encoded: bool
    headers: Dict[str, str]
    query_string_parameters: Dict[str, str]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var body : Union[str, Type[pydantic.main.BaseModel]]
var headers : Dict[str, str]
var is_base64_encoded : bool
var method : str
var query_string_parameters : Dict[str, str]
var raw_path : str
class VpcLatticeV2Model (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class VpcLatticeV2Model(BaseModel):
    version: str
    path: str
    method: str
    headers: Dict[str, str]
    query_string_parameters: Optional[Dict[str, str]] = Field(None, alias="queryStringParameters")
    body: Optional[Union[str, Type[BaseModel]]] = None
    is_base64_encoded: Optional[bool] = Field(None, alias="isBase64Encoded")
    request_context: VpcLatticeV2RequestContext = Field(None, alias="requestContext")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var body : Optional[Union[str, Type[pydantic.main.BaseModel]]]
var headers : Dict[str, str]
var is_base64_encoded : Optional[bool]
var method : str
var path : str
var query_string_parameters : Optional[Dict[str, str]]
var request_context : VpcLatticeV2RequestContext
var version : str
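Note that query_string_parameters, is_base64_encoded and request_context are populated from the camelCase aliases in the raw event. A short usage sketch (handler name and print are illustrative):
from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import VpcLatticeV2Model
from aws_lambda_powertools.utilities.typing import LambdaContext

@event_parser(model=VpcLatticeV2Model)
def lambda_handler(event: VpcLatticeV2Model, context: LambdaContext):
    # aliases such as "queryStringParameters" are mapped onto the snake_case attributes
    params = event.query_string_parameters or {}
    print(event.method, event.path, params)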