Module aws_lambda_powertools.utilities.parser.models
Expand source code
from .alb import AlbModel, AlbRequestContext, AlbRequestContextData
from .apigw import (
APIGatewayEventAuthorizer,
APIGatewayEventIdentity,
APIGatewayEventRequestContext,
APIGatewayProxyEventModel,
)
from .apigwv2 import (
APIGatewayProxyEventV2Model,
RequestContextV2,
RequestContextV2Authorizer,
RequestContextV2AuthorizerIam,
RequestContextV2AuthorizerIamCognito,
RequestContextV2AuthorizerJwt,
RequestContextV2Http,
)
from .cloudwatch import (
CloudWatchLogsData,
CloudWatchLogsDecode,
CloudWatchLogsLogEvent,
CloudWatchLogsModel,
)
from .dynamodb import (
DynamoDBStreamChangedRecordModel,
DynamoDBStreamModel,
DynamoDBStreamRecordModel,
)
from .event_bridge import EventBridgeModel
from .kafka import (
KafkaBaseEventModel,
KafkaMskEventModel,
KafkaRecordModel,
KafkaSelfManagedEventModel,
)
from .kinesis import (
KinesisDataStreamModel,
KinesisDataStreamRecord,
KinesisDataStreamRecordPayload,
)
from .kinesis_firehose import (
KinesisFirehoseModel,
KinesisFirehoseRecord,
KinesisFirehoseRecordMetadata,
)
from .lambda_function_url import LambdaFunctionUrlModel
from .s3 import S3Model, S3RecordModel
from .s3_object_event import (
S3ObjectConfiguration,
S3ObjectContext,
S3ObjectLambdaEvent,
S3ObjectSessionAttributes,
S3ObjectSessionContext,
S3ObjectSessionIssuer,
S3ObjectUserIdentity,
S3ObjectUserRequest,
)
from .ses import (
SesMail,
SesMailCommonHeaders,
SesMailHeaders,
SesMessage,
SesModel,
SesReceipt,
SesReceiptAction,
SesReceiptVerdict,
SesRecordModel,
)
from .sns import SnsModel, SnsNotificationModel, SnsRecordModel
from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel
__all__ = [
"APIGatewayProxyEventV2Model",
"RequestContextV2",
"RequestContextV2Http",
"RequestContextV2Authorizer",
"RequestContextV2AuthorizerJwt",
"RequestContextV2AuthorizerIam",
"RequestContextV2AuthorizerIamCognito",
"CloudWatchLogsData",
"CloudWatchLogsDecode",
"CloudWatchLogsLogEvent",
"CloudWatchLogsModel",
"AlbModel",
"AlbRequestContext",
"AlbRequestContextData",
"DynamoDBStreamModel",
"EventBridgeModel",
"DynamoDBStreamChangedRecordModel",
"DynamoDBStreamRecordModel",
"DynamoDBStreamChangedRecordModel",
"KinesisDataStreamModel",
"KinesisDataStreamRecord",
"KinesisDataStreamRecordPayload",
"KinesisFirehoseModel",
"KinesisFirehoseRecord",
"KinesisFirehoseRecordMetadata",
"LambdaFunctionUrlModel",
"S3Model",
"S3RecordModel",
"S3ObjectLambdaEvent",
"S3ObjectUserIdentity",
"S3ObjectSessionContext",
"S3ObjectSessionAttributes",
"S3ObjectSessionIssuer",
"S3ObjectUserRequest",
"S3ObjectConfiguration",
"S3ObjectContext",
"SesModel",
"SesRecordModel",
"SesMessage",
"SesMail",
"SesMailCommonHeaders",
"SesMailHeaders",
"SesReceipt",
"SesReceiptAction",
"SesReceiptVerdict",
"SnsModel",
"SnsNotificationModel",
"SnsRecordModel",
"SqsModel",
"SqsRecordModel",
"SqsMsgAttributeModel",
"SqsAttributesModel",
"APIGatewayProxyEventModel",
"APIGatewayEventRequestContext",
"APIGatewayEventAuthorizer",
"APIGatewayEventIdentity",
"KafkaSelfManagedEventModel",
"KafkaRecordModel",
"KafkaMskEventModel",
"KafkaBaseEventModel",
]
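These models are typically used together with the parser utility, either through the parse function or the event_parser decorator. The sketch below is illustrative, assuming the parse/event_parser helpers exported by aws_lambda_powertools.utilities.parser and the LambdaContext typing helper; the handler name is made up.

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import SqsModel
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=SqsModel)
def lambda_handler(event: SqsModel, context: LambdaContext):
    # event is already validated; each record is an SqsRecordModel
    for record in event.Records:
        print(record.messageId, record.body)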
Sub-modules
aws_lambda_powertools.utilities.parser.models.alb
aws_lambda_powertools.utilities.parser.models.apigw
aws_lambda_powertools.utilities.parser.models.apigwv2
aws_lambda_powertools.utilities.parser.models.cloudwatch
aws_lambda_powertools.utilities.parser.models.dynamodb
aws_lambda_powertools.utilities.parser.models.event_bridge
aws_lambda_powertools.utilities.parser.models.kafka
aws_lambda_powertools.utilities.parser.models.kinesis
aws_lambda_powertools.utilities.parser.models.kinesis_firehose
aws_lambda_powertools.utilities.parser.models.lambda_function_url
aws_lambda_powertools.utilities.parser.models.s3
aws_lambda_powertools.utilities.parser.models.s3_object_event
aws_lambda_powertools.utilities.parser.models.ses
aws_lambda_powertools.utilities.parser.models.sns
aws_lambda_powertools.utilities.parser.models.sqs
Classes
class APIGatewayEventAuthorizer (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayEventAuthorizer(BaseModel):
    claims: Optional[Dict[str, Any]]
    scopes: Optional[List[str]]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var claims : Optional[Dict[str, Any]]
var scopes : Optional[List[str]]
class APIGatewayEventIdentity (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayEventIdentity(BaseModel):
    accessKey: Optional[str]
    accountId: Optional[str]
    apiKey: Optional[str]
    apiKeyId: Optional[str]
    caller: Optional[str]
    cognitoAuthenticationProvider: Optional[str]
    cognitoAuthenticationType: Optional[str]
    cognitoIdentityId: Optional[str]
    cognitoIdentityPoolId: Optional[str]
    principalOrgId: Optional[str]
    sourceIp: IPvAnyNetwork
    user: Optional[str]
    userAgent: Optional[str]
    userArn: Optional[str]
    clientCert: Optional[ApiGatewayUserCert]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accessKey : Optional[str]
var accountId : Optional[str]
var apiKey : Optional[str]
var apiKeyId : Optional[str]
var caller : Optional[str]
var clientCert : Optional[ApiGatewayUserCert]
var cognitoAuthenticationProvider : Optional[str]
var cognitoAuthenticationType : Optional[str]
var cognitoIdentityId : Optional[str]
var cognitoIdentityPoolId : Optional[str]
var principalOrgId : Optional[str]
var sourceIp : pydantic.networks.IPvAnyNetwork
var user : Optional[str]
var userAgent : Optional[str]
var userArn : Optional[str]
class APIGatewayEventRequestContext (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayEventRequestContext(BaseModel):
    accountId: str
    apiId: str
    authorizer: Optional[APIGatewayEventAuthorizer]
    stage: str
    protocol: str
    identity: APIGatewayEventIdentity
    requestId: str
    requestTime: str
    requestTimeEpoch: datetime
    resourceId: Optional[str]
    resourcePath: str
    domainName: Optional[str]
    domainPrefix: Optional[str]
    extendedRequestId: Optional[str]
    httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"]
    path: str
    connectedAt: Optional[datetime]
    connectionId: Optional[str]
    eventType: Optional[Literal["CONNECT", "MESSAGE", "DISCONNECT"]]
    messageDirection: Optional[str]
    messageId: Optional[str]
    routeKey: Optional[str]
    operationName: Optional[str]

    @root_validator(allow_reuse=True)
    def check_message_id(cls, values):
        message_id, event_type = values.get("messageId"), values.get("eventType")
        if message_id is not None and event_type != "MESSAGE":
            raise TypeError("messageId is available only when the `eventType` is `MESSAGE`")
        return values
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accountId : str
var apiId : str
var connectedAt : Optional[datetime.datetime]
var connectionId : Optional[str]
var domainName : Optional[str]
var domainPrefix : Optional[str]
var eventType : Optional[Literal['CONNECT', 'MESSAGE', 'DISCONNECT']]
var extendedRequestId : Optional[str]
var httpMethod : Literal['DELETE', 'GET', 'HEAD', 'OPTIONS', 'PATCH', 'POST', 'PUT']
var identity : APIGatewayEventIdentity
var messageDirection : Optional[str]
var messageId : Optional[str]
var operationName : Optional[str]
var path : str
var protocol : str
var requestId : str
var requestTime : str
var requestTimeEpoch : datetime.datetime
var resourceId : Optional[str]
var resourcePath : str
var routeKey : Optional[str]
var stage : str
Static methods
def check_message_id(values)
-
Expand source code
@root_validator(allow_reuse=True)
def check_message_id(cls, values):
    message_id, event_type = values.get("messageId"), values.get("eventType")
    if message_id is not None and event_type != "MESSAGE":
        raise TypeError("messageId is available only when the `eventType` is `MESSAGE`")
    return values
class APIGatewayProxyEventModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayProxyEventModel(BaseModel):
    version: Optional[str]
    resource: str
    path: str
    httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"]
    headers: Dict[str, str]
    multiValueHeaders: Dict[str, List[str]]
    queryStringParameters: Optional[Dict[str, str]]
    multiValueQueryStringParameters: Optional[Dict[str, List[str]]]
    requestContext: APIGatewayEventRequestContext
    pathParameters: Optional[Dict[str, str]]
    stageVariables: Optional[Dict[str, str]]
    isBase64Encoded: bool
    body: Optional[Union[str, Type[BaseModel]]]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var body : Union[str, Type[pydantic.main.BaseModel], None]
var headers : Dict[str, str]
var httpMethod : Literal['DELETE', 'GET', 'HEAD', 'OPTIONS', 'PATCH', 'POST', 'PUT']
var isBase64Encoded : bool
var multiValueHeaders : Dict[str, List[str]]
var multiValueQueryStringParameters : Optional[Dict[str, List[str]]]
var path : str
var pathParameters : Optional[Dict[str, str]]
var queryStringParameters : Optional[Dict[str, str]]
var requestContext : APIGatewayEventRequestContext
var resource : str
var stageVariables : Optional[Dict[str, str]]
var version : Optional[str]
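A sketch of using this model with the event_parser decorator for a REST API (payload format v1) proxy integration; the handler name and return payload are illustrative, and field access mirrors the class variables listed above.

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventModel
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=APIGatewayProxyEventModel)
def lambda_handler(event: APIGatewayProxyEventModel, context: LambdaContext):
    # httpMethod is constrained to the Literal above; identity.sourceIp is an IPvAnyNetwork
    print(event.httpMethod, event.path, event.requestContext.identity.sourceIp)
    return {"statusCode": 200, "body": "ok"}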
class APIGatewayProxyEventV2Model (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class APIGatewayProxyEventV2Model(BaseModel):
    version: str
    routeKey: str
    rawPath: str
    rawQueryString: str
    cookies: Optional[List[str]]
    headers: Dict[str, str]
    queryStringParameters: Optional[Dict[str, str]]
    pathParameters: Optional[Dict[str, str]]
    stageVariables: Optional[Dict[str, str]]
    requestContext: RequestContextV2
    body: Optional[Union[str, Type[BaseModel]]]
    isBase64Encoded: bool
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- LambdaFunctionUrlModel
Class variables
var body : Union[str, Type[pydantic.main.BaseModel], None]
var headers : Dict[str, str]
var isBase64Encoded : bool
var pathParameters : Optional[Dict[str, str]]
var queryStringParameters : Optional[Dict[str, str]]
var rawPath : str
var rawQueryString : str
var requestContext : RequestContextV2
var routeKey : str
var stageVariables : Optional[Dict[str, str]]
var version : str
class AlbModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class AlbModel(BaseModel):
    httpMethod: str
    path: str
    body: Union[str, Type[BaseModel]]
    isBase64Encoded: bool
    headers: Dict[str, str]
    queryStringParameters: Dict[str, str]
    requestContext: AlbRequestContext
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var body : Union[str, Type[pydantic.main.BaseModel]]
var headers : Dict[str, str]
var httpMethod : str
var isBase64Encoded : bool
var path : str
var queryStringParameters : Dict[str, str]
var requestContext : AlbRequestContext
class AlbRequestContext (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class AlbRequestContext(BaseModel):
    elb: AlbRequestContextData
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var elb : AlbRequestContextData
class AlbRequestContextData (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class AlbRequestContextData(BaseModel):
    targetGroupArn: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var targetGroupArn : str
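Because the ALB model is small, it can be exercised directly with parse. The payload below is a minimal illustrative dictionary with made-up values, not an official sample event.

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import AlbModel

# Illustrative ALB target-group event; all values are placeholders
alb_event = {
    "httpMethod": "GET",
    "path": "/ping",
    "body": "",
    "isBase64Encoded": False,
    "headers": {"host": "lambda-alb-123578498.us-east-2.elb.amazonaws.com"},
    "queryStringParameters": {"query": "1234ABCD"},
    "requestContext": {
        "elb": {
            "targetGroupArn": "arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-target/abc123"
        }
    },
}

parsed = parse(event=alb_event, model=AlbModel)
print(parsed.requestContext.elb.targetGroupArn)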
class CloudWatchLogsData (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudWatchLogsData(BaseModel):
    decoded_data: CloudWatchLogsDecode = Field(None, alias="data")

    @validator("decoded_data", pre=True, allow_reuse=True)
    def prepare_data(cls, value):
        try:
            logger.debug("Decoding base64 cloudwatch log data before parsing")
            payload = base64.b64decode(value)
            logger.debug("Decompressing cloudwatch log data before parsing")
            uncompressed = zlib.decompress(payload, zlib.MAX_WBITS | 32)
            return json.loads(uncompressed.decode("utf-8"))
        except Exception:
            raise ValueError("unable to decompress data")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var decoded_data : CloudWatchLogsDecode
Static methods
def prepare_data(value)
-
Expand source code
@validator("decoded_data", pre=True, allow_reuse=True) def prepare_data(cls, value): try: logger.debug("Decoding base64 cloudwatch log data before parsing") payload = base64.b64decode(value) logger.debug("Decompressing cloudwatch log data before parsing") uncompressed = zlib.decompress(payload, zlib.MAX_WBITS | 32) return json.loads(uncompressed.decode("utf-8")) except Exception: raise ValueError("unable to decompress data")
class CloudWatchLogsDecode (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudWatchLogsDecode(BaseModel):
    messageType: str
    owner: str
    logGroup: str
    logStream: str
    subscriptionFilters: List[str]
    logEvents: List[CloudWatchLogsLogEvent]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var logEvents : List[CloudWatchLogsLogEvent]
var logGroup : str
var logStream : str
var messageType : str
var owner : str
var subscriptionFilters : List[str]
class CloudWatchLogsLogEvent (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudWatchLogsLogEvent(BaseModel):
    id: str  # noqa AA03 VNE003
    timestamp: datetime
    message: Union[str, Type[BaseModel]]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var id : str
var message : Union[str, Type[pydantic.main.BaseModel]]
var timestamp : datetime.datetime
class CloudWatchLogsModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class CloudWatchLogsModel(BaseModel):
    awslogs: CloudWatchLogsData
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var awslogs : CloudWatchLogsData
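The prepare_data validator above base64-decodes and gunzips awslogs.data before validation, so a raw subscription-filter payload can be fed straight to parse. The sketch below uses made-up field values and assumes standard-library gzip compression is compatible with what CloudWatch Logs delivers (the validator accepts both zlib and gzip framing).

import base64
import gzip
import json

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import CloudWatchLogsModel

# Illustrative decoded subscription-filter payload
decoded = {
    "messageType": "DATA_MESSAGE",
    "owner": "123456789012",
    "logGroup": "/aws/lambda/example",
    "logStream": "2024/01/01/[$LATEST]abcdef",
    "subscriptionFilters": ["example-filter"],
    "logEvents": [{"id": "1", "timestamp": 1700000000000, "message": "hello"}],
}

# CloudWatch Logs delivers the payload gzip-compressed and base64-encoded
raw_event = {
    "awslogs": {"data": base64.b64encode(gzip.compress(json.dumps(decoded).encode())).decode()}
}

parsed = parse(event=raw_event, model=CloudWatchLogsModel)
print(parsed.awslogs.decoded_data.logEvents[0].message)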
class DynamoDBStreamChangedRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class DynamoDBStreamChangedRecordModel(BaseModel):
    ApproximateCreationDateTime: Optional[date]
    Keys: Dict[str, Dict[str, Any]]
    NewImage: Optional[Union[Dict[str, Any], Type[BaseModel]]]
    OldImage: Optional[Union[Dict[str, Any], Type[BaseModel]]]
    SequenceNumber: str
    SizeBytes: int
    StreamViewType: Literal["NEW_AND_OLD_IMAGES", "KEYS_ONLY", "NEW_IMAGE", "OLD_IMAGE"]

    # context on why it's commented: https://github.com/awslabs/aws-lambda-powertools-python/pull/118
    # since both images are optional, they can both be None. However, at least one must
    # exist in a legal model of NEW_AND_OLD_IMAGES type
    # @root_validator
    # def check_one_image_exists(cls, values):  # noqa: E800
    #     new_img, old_img = values.get("NewImage"), values.get("OldImage")  # noqa: E800
    #     stream_type = values.get("StreamViewType")  # noqa: E800
    #     if stream_type == "NEW_AND_OLD_IMAGES" and not new_img and not old_img:  # noqa: E800
    #         raise TypeError("DynamoDB streams model failed validation, missing both new & old stream images")  # noqa: E800,E501
    #     return values  # noqa: E800
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var ApproximateCreationDateTime : Optional[datetime.date]
var Keys : Dict[str, Dict[str, Any]]
var NewImage : Union[Dict[str, Any], Type[pydantic.main.BaseModel], None]
var OldImage : Union[Dict[str, Any], Type[pydantic.main.BaseModel], None]
var SequenceNumber : str
var SizeBytes : int
var StreamViewType : Literal['NEW_AND_OLD_IMAGES', 'KEYS_ONLY', 'NEW_IMAGE', 'OLD_IMAGE']
class DynamoDBStreamModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class DynamoDBStreamModel(BaseModel):
    Records: List[DynamoDBStreamRecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[DynamoDBStreamRecordModel]
class DynamoDBStreamRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class DynamoDBStreamRecordModel(BaseModel):
    eventID: str
    eventName: Literal["INSERT", "MODIFY", "REMOVE"]
    eventVersion: float
    eventSource: Literal["aws:dynamodb"]
    awsRegion: str
    eventSourceARN: str
    dynamodb: DynamoDBStreamChangedRecordModel
    userIdentity: Optional[UserIdentity]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var awsRegion : str
var dynamodb : DynamoDBStreamChangedRecordModel
var eventID : str
var eventName : Literal['INSERT', 'MODIFY', 'REMOVE']
var eventSource : Literal['aws:dynamodb']
var eventSourceARN : str
var eventVersion : float
var userIdentity : Optional[UserIdentity]
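A handler sketch using this model; NewImage and OldImage stay as raw DynamoDB attribute-value dictionaries unless you extend the model with your own types. Names are illustrative.

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamModel
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=DynamoDBStreamModel)
def lambda_handler(event: DynamoDBStreamModel, context: LambdaContext):
    for record in event.Records:
        # eventName is one of INSERT, MODIFY, REMOVE
        if record.eventName == "INSERT" and record.dynamodb.NewImage:
            print(record.dynamodb.Keys, record.dynamodb.NewImage)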
class EventBridgeModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class EventBridgeModel(BaseModel):
    version: str
    id: str  # noqa: A003,VNE003
    source: str
    account: str
    time: datetime
    region: str
    resources: List[str]
    detail_type: str = Field(None, alias="detail-type")
    detail: Union[Dict[str, Any], Type[BaseModel]]
    replay_name: Optional[str] = Field(None, alias="replay-name")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var account : str
var detail : Union[Dict[str, Any], Type[pydantic.main.BaseModel]]
var detail_type : str
var id : str
var region : str
var replay_name : Optional[str]
var resources : List[str]
var source : str
var time : datetime.datetime
var version : str
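The detail-type and replay-name aliases mean the raw event keys are used as-is when parsing. An illustrative example using parse directly; every value in the payload is made up.

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import EventBridgeModel

# Illustrative EventBridge event; values are placeholders
raw_event = {
    "version": "0",
    "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718",
    "detail-type": "EC2 Instance State-change Notification",
    "source": "aws.ec2",
    "account": "123456789012",
    "time": "2024-01-01T00:00:00Z",
    "region": "us-east-1",
    "resources": ["arn:aws:ec2:us-east-1:123456789012:instance/i-0123456789abcdef0"],
    "detail": {"instance-id": "i-0123456789abcdef0", "state": "terminated"},
}

parsed = parse(event=raw_event, model=EventBridgeModel)
print(parsed.detail_type, parsed.detail["state"])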
class KafkaBaseEventModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaBaseEventModel(BaseModel):
    bootstrapServers: List[str]
    records: Dict[str, List[KafkaRecordModel]]

    @validator("bootstrapServers", pre=True, allow_reuse=True)
    def split_servers(cls, value):
        return None if not value else value.split(SERVERS_DELIMITER)
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- KafkaMskEventModel
- KafkaSelfManagedEventModel
Class variables
var bootstrapServers : List[str]
var records : Dict[str, List[KafkaRecordModel]]
Static methods
def split_servers(value)
-
Expand source code
@validator("bootstrapServers", pre=True, allow_reuse=True) def split_servers(cls, value): return None if not value else value.split(SERVERS_DELIMITER)
class KafkaMskEventModel (**data: Any)
-
Fully-managed AWS Apache Kafka (Amazon MSK) event trigger. Documentation: https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaMskEventModel(KafkaBaseEventModel):
    """Fully-managed AWS Apache Kafka event trigger
    Documentation:
    --------------
    - https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html
    """

    eventSource: Literal["aws:kafka"]
    eventSourceArn: str
Ancestors
- KafkaBaseEventModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var eventSource : Literal['aws:kafka']
var eventSourceArn : str
class KafkaRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaRecordModel(BaseModel):
    topic: str
    partition: int
    offset: int
    timestamp: datetime
    timestampType: str
    key: bytes
    value: Union[str, Type[BaseModel]]
    headers: List[Dict[str, bytes]]

    # validators
    _decode_key = validator("key", allow_reuse=True)(base64_decode)

    @validator("value", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        as_bytes = base64_decode(value)
        return bytes_to_string(as_bytes)

    @validator("headers", pre=True, allow_reuse=True)
    def decode_headers_list(cls, value):
        for header in value:
            for key, values in header.items():
                header[key] = bytes(values)
        return value
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var headers : List[Dict[str, bytes]]
var key : bytes
var offset : int
var partition : int
var timestamp : datetime.datetime
var timestampType : str
var topic : str
var value : Union[str, Type[pydantic.main.BaseModel]]
Static methods
def data_base64_decode(value)
-
Expand source code
@validator("value", pre=True, allow_reuse=True) def data_base64_decode(cls, value): as_bytes = base64_decode(value) return bytes_to_string(as_bytes)
def decode_headers_list(value)
-
Expand source code
@validator("headers", pre=True, allow_reuse=True) def decode_headers_list(cls, value): for header in value: for key, values in header.items(): header[key] = bytes(values) return value
class KafkaSelfManagedEventModel (**data: Any)
-
Self-managed Apache Kafka event trigger. Documentation: https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaSelfManagedEventModel(KafkaBaseEventModel):
    """Self-managed Apache Kafka event trigger
    Documentation:
    --------------
    - https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html
    """

    eventSource: Literal["aws:SelfManagedKafka"]
Ancestors
- KafkaBaseEventModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var eventSource : Literal['aws:SelfManagedKafka']
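Both Kafka models inherit records and bootstrapServers from KafkaBaseEventModel; keys, values, and headers are base64-decoded by the validators on KafkaRecordModel. A handler sketch for an MSK trigger; names are illustrative.

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import KafkaMskEventModel
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=KafkaMskEventModel)
def lambda_handler(event: KafkaMskEventModel, context: LambdaContext):
    # records is keyed by topic-partition; each value is a list of KafkaRecordModel
    for topic_partition, records in event.records.items():
        for record in records:
            # record.key is decoded bytes; record.value is a decoded string by default
            print(topic_partition, record.offset, record.value)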
class KinesisDataStreamModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisDataStreamModel(BaseModel):
    Records: List[KinesisDataStreamRecord]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[KinesisDataStreamRecord]
class KinesisDataStreamRecord (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisDataStreamRecord(BaseModel):
    eventSource: Literal["aws:kinesis"]
    eventVersion: str
    eventID: str
    eventName: Literal["aws:kinesis:record"]
    invokeIdentityArn: str
    awsRegion: str
    eventSourceARN: str
    kinesis: KinesisDataStreamRecordPayload

    def decompress_zlib_record_data_as_json(self) -> Dict:
        """Decompress Kinesis Record bytes data zlib compressed to JSON"""
        if not isinstance(self.kinesis.data, bytes):
            raise ValueError("We can only decompress bytes data, not custom models.")
        return json.loads(zlib.decompress(self.kinesis.data, zlib.MAX_WBITS | 32))
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var awsRegion : str
var eventID : str
var eventName : Literal['aws:kinesis:record']
var eventSource : Literal['aws:kinesis']
var eventSourceARN : str
var eventVersion : str
var invokeIdentityArn : str
var kinesis : KinesisDataStreamRecordPayload
Methods
def decompress_zlib_record_data_as_json(self) ‑> Dict
-
Decompress Kinesis Record bytes data zlib compressed to JSON
Expand source code
def decompress_zlib_record_data_as_json(self) -> Dict:
    """Decompress Kinesis Record bytes data zlib compressed to JSON"""
    if not isinstance(self.kinesis.data, bytes):
        raise ValueError("We can only decompress bytes data, not custom models.")
    return json.loads(zlib.decompress(self.kinesis.data, zlib.MAX_WBITS | 32))
class KinesisDataStreamRecordPayload (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisDataStreamRecordPayload(BaseModel):
    kinesisSchemaVersion: str
    partitionKey: str
    sequenceNumber: str
    data: Union[bytes, Type[BaseModel]]  # base64 encoded str is parsed into bytes
    approximateArrivalTimestamp: float

    @validator("data", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        return base64_decode(value)
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var approximateArrivalTimestamp : float
var data : Union[bytes, Type[pydantic.main.BaseModel]]
var kinesisSchemaVersion : str
var partitionKey : str
var sequenceNumber : str
Static methods
def data_base64_decode(value)
-
Expand source code
@validator("data", pre=True, allow_reuse=True) def data_base64_decode(cls, value): return base64_decode(value)
class KinesisFirehoseModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisFirehoseModel(BaseModel):
    invocationId: str
    deliveryStreamArn: str
    region: str
    sourceKinesisStreamArn: Optional[str]
    records: List[KinesisFirehoseRecord]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var deliveryStreamArn : str
var invocationId : str
var records : List[KinesisFirehoseRecord]
var region : str
var sourceKinesisStreamArn : Optional[str]
class KinesisFirehoseRecord (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisFirehoseRecord(BaseModel):
    data: Union[bytes, Type[BaseModel]]  # base64 encoded str is parsed into bytes
    recordId: str
    approximateArrivalTimestamp: PositiveInt
    kinesisRecordMetadata: Optional[KinesisFirehoseRecordMetadata]

    @validator("data", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        return base64_decode(value)
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var approximateArrivalTimestamp : pydantic.types.PositiveInt
var data : Union[bytes, Type[pydantic.main.BaseModel]]
var kinesisRecordMetadata : Optional[KinesisFirehoseRecordMetadata]
var recordId : str
Static methods
def data_base64_decode(value)
-
Expand source code
@validator("data", pre=True, allow_reuse=True) def data_base64_decode(cls, value): return base64_decode(value)
class KinesisFirehoseRecordMetadata (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KinesisFirehoseRecordMetadata(BaseModel):
    shardId: str
    partitionKey: str
    approximateArrivalTimestamp: PositiveInt
    sequenceNumber: str
    subsequenceNumber: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var approximateArrivalTimestamp : pydantic.types.PositiveInt
var partitionKey : str
var sequenceNumber : str
var shardId : str
var subsequenceNumber : str
class LambdaFunctionUrlModel (**data: Any)
-
AWS Lambda Function URL model
Notes:
Lambda Function URL follows the API Gateway HTTP APIs Payload Format Version 2.0.
Keys related to API Gateway features not available in Function URL use a sentinel value (e.g. routeKey, stage).
Documentation:
- https://docs.aws.amazon.com/lambda/latest/dg/urls-configuration.html
- https://docs.aws.amazon.com/lambda/latest/dg/urls-invocation.html#urls-payloads
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class LambdaFunctionUrlModel(APIGatewayProxyEventV2Model):
    """AWS Lambda Function URL model

    Notes:
    -----
    Lambda Function URL follows the API Gateway HTTP APIs Payload Format Version 2.0.

    Keys related to API Gateway features not available in Function URL use a sentinel value (e.g. `routeKey`, `stage`).

    Documentation:
    - https://docs.aws.amazon.com/lambda/latest/dg/urls-configuration.html
    - https://docs.aws.amazon.com/lambda/latest/dg/urls-invocation.html#urls-payloads
    """

    pass
Ancestors
- APIGatewayProxyEventV2Model
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var body : Union[str, Type[pydantic.main.BaseModel], None]
var headers : Dict[str, str]
var isBase64Encoded : bool
var pathParameters : Optional[Dict[str, str]]
var queryStringParameters : Optional[Dict[str, str]]
var rawPath : str
var rawQueryString : str
var requestContext : RequestContextV2
var routeKey : str
var stageVariables : Optional[Dict[str, str]]
var version : str
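Because LambdaFunctionUrlModel only subclasses APIGatewayProxyEventV2Model, handlers read the same payload v2 fields. A sketch with illustrative names:

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import LambdaFunctionUrlModel
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=LambdaFunctionUrlModel)
def lambda_handler(event: LambdaFunctionUrlModel, context: LambdaContext):
    # Same shape as the API Gateway HTTP API payload format 2.0
    print(event.requestContext.http.method, event.rawPath)
    return {"statusCode": 200, "body": "ok"}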
class RequestContextV2 (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2(BaseModel):
    accountId: str
    apiId: str
    authorizer: Optional[RequestContextV2Authorizer]
    domainName: str
    domainPrefix: str
    requestId: str
    routeKey: str
    stage: str
    time: str
    timeEpoch: datetime
    http: RequestContextV2Http
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accountId : str
var apiId : str
var domainName : str
var domainPrefix : str
var http : RequestContextV2Http
var requestId : str
var routeKey : str
var stage : str
var time : str
var timeEpoch : datetime.datetime
class RequestContextV2Authorizer (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2Authorizer(BaseModel):
    jwt: Optional[RequestContextV2AuthorizerJwt]
    iam: Optional[RequestContextV2AuthorizerIam]
    lambda_value: Optional[Dict[str, Any]] = Field(None, alias="lambda")
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var iam : Optional[RequestContextV2AuthorizerIam]
var jwt : Optional[RequestContextV2AuthorizerJwt]
var lambda_value : Optional[Dict[str, Any]]
class RequestContextV2AuthorizerIam (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2AuthorizerIam(BaseModel):
    accessKey: Optional[str]
    accountId: Optional[str]
    callerId: Optional[str]
    principalOrgId: Optional[str]
    userArn: Optional[str]
    userId: Optional[str]
    cognitoIdentity: Optional[RequestContextV2AuthorizerIamCognito]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accessKey : Optional[str]
var accountId : Optional[str]
var callerId : Optional[str]
var cognitoIdentity : Optional[RequestContextV2AuthorizerIamCognito]
var principalOrgId : Optional[str]
var userArn : Optional[str]
var userId : Optional[str]
class RequestContextV2AuthorizerIamCognito (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2AuthorizerIamCognito(BaseModel):
    amr: List[str]
    identityId: str
    identityPoolId: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var amr : List[str]
var identityId : str
var identityPoolId : str
class RequestContextV2AuthorizerJwt (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2AuthorizerJwt(BaseModel):
    claims: Dict[str, Any]
    scopes: List[str]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var claims : Dict[str, Any]
var scopes : List[str]
class RequestContextV2Http (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class RequestContextV2Http(BaseModel):
    method: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"]
    path: str
    protocol: str
    sourceIp: IPvAnyNetwork
    userAgent: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var method : Literal['DELETE', 'GET', 'HEAD', 'OPTIONS', 'PATCH', 'POST', 'PUT']
var path : str
var protocol : str
var sourceIp : pydantic.networks.IPvAnyNetwork
var userAgent : str
class S3Model (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3Model(BaseModel):
    Records: List[S3RecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[S3RecordModel]
class S3ObjectConfiguration (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectConfiguration(BaseModel):
    accessPointArn: str
    supportingAccessPointArn: str
    payload: Union[str, Type[BaseModel]]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accessPointArn : str
var payload : Union[str, Type[pydantic.main.BaseModel]]
var supportingAccessPointArn : str
class S3ObjectContext (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectContext(BaseModel):
    inputS3Url: HttpUrl
    outputRoute: str
    outputToken: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var inputS3Url : pydantic.networks.HttpUrl
var outputRoute : str
var outputToken : str
class S3ObjectLambdaEvent (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectLambdaEvent(BaseModel):
    xAmzRequestId: str
    getObjectContext: S3ObjectContext
    configuration: S3ObjectConfiguration
    userRequest: S3ObjectUserRequest
    userIdentity: S3ObjectUserIdentity
    protocolVersion: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var configuration : S3ObjectConfiguration
var getObjectContext : S3ObjectContext
var protocolVersion : str
var userIdentity : S3ObjectUserIdentity
var userRequest : S3ObjectUserRequest
var xAmzRequestId : str
class S3ObjectSessionAttributes (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectSessionAttributes(BaseModel):
    creationDate: str
    mfaAuthenticated: bool
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var creationDate : str
var mfaAuthenticated : bool
class S3ObjectSessionContext (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectSessionContext(BaseModel):
    sessionIssuer: S3ObjectSessionIssuer
    attributes: S3ObjectSessionAttributes
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var attributes : S3ObjectSessionAttributes
var sessionIssuer : S3ObjectSessionIssuer
class S3ObjectSessionIssuer (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectSessionIssuer(BaseModel):
    type: str  # noqa: A003, VNE003
    userName: Optional[str]
    principalId: str
    arn: str
    accountId: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accountId : str
var arn : str
var principalId : str
var type : str
var userName : Optional[str]
class S3ObjectUserIdentity (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectUserIdentity(BaseModel):
    type: str  # noqa003
    accountId: str
    accessKeyId: str
    userName: Optional[str]
    principalId: str
    arn: str
    sessionContext: Optional[S3ObjectSessionContext]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var accessKeyId : str
var accountId : str
var arn : str
var principalId : str
var sessionContext : Optional[S3ObjectSessionContext]
var type : str
var userName : Optional[str]
class S3ObjectUserRequest (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3ObjectUserRequest(BaseModel):
    url: str
    headers: Dict[str, str]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var headers : Dict[str, str]
var url : str
class S3RecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class S3RecordModel(BaseModel):
    eventVersion: str
    eventSource: Literal["aws:s3"]
    awsRegion: str
    eventTime: datetime
    eventName: str
    userIdentity: S3Identity
    requestParameters: S3RequestParameters
    responseElements: S3ResponseElements
    s3: S3Message
    glacierEventData: Optional[S3EventRecordGlacierEventData]

    @root_validator
    def validate_s3_object(cls, values):
        event_name = values.get("eventName")
        s3_object = values.get("s3").object
        if "ObjectRemoved" not in event_name:
            if s3_object.size is None or s3_object.eTag is None:
                raise ValueError("S3Object.size and S3Object.eTag are required for non-ObjectRemoved events")
        return values
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var awsRegion : str
var eventName : str
var eventSource : Literal['aws:s3']
var eventTime : datetime.datetime
var eventVersion : str
var glacierEventData : Optional[S3EventRecordGlacierEventData]
var requestParameters : S3RequestParameters
var responseElements : S3ResponseElements
var s3 : S3Message
var userIdentity : S3Identity
Static methods
def validate_s3_object(values)
-
Expand source code
@root_validator
def validate_s3_object(cls, values):
    event_name = values.get("eventName")
    s3_object = values.get("s3").object
    if "ObjectRemoved" not in event_name:
        if s3_object.size is None or s3_object.eTag is None:
            raise ValueError("S3Object.size and S3Object.eTag are required for non-ObjectRemoved events")
    return values
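A handler sketch iterating S3 notification records. The nested S3Message, bucket, and object models live in the s3 sub-module and are assumed here to follow the standard S3 event structure (bucket.name, object.key); handler names are illustrative.

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import S3Model
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=S3Model)
def lambda_handler(event: S3Model, context: LambdaContext):
    for record in event.Records:
        # bucket.name / object.key come from the nested S3Message model (see the s3 sub-module)
        print(record.eventName, record.s3.bucket.name, record.s3.object.key)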
class SesMail (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesMail(BaseModel):
    timestamp: datetime
    source: str
    messageId: str
    destination: List[str]
    headersTruncated: bool
    headers: List[SesMailHeaders]
    commonHeaders: SesMailCommonHeaders
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var commonHeaders : SesMailCommonHeaders
var destination : List[str]
var headers : List[SesMailHeaders]
var headersTruncated : bool
var messageId : str
var source : str
var timestamp : datetime.datetime
class SesMailCommonHeaders (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesMailCommonHeaders(BaseModel):
    header_from: List[str] = Field(None, alias="from")
    to: List[str]
    cc: Optional[List[str]]
    bcc: Optional[List[str]]
    sender: Optional[List[str]]
    reply_to: Optional[List[str]] = Field(None, alias="reply-to")
    returnPath: str
    messageId: str
    date: str
    subject: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var bcc : Optional[List[str]]
var cc : Optional[List[str]]
var date : str
var header_from : List[str]
var messageId : str
var reply_to : Optional[List[str]]
var returnPath : str
var sender : Optional[List[str]]
var subject : str
var to : List[str]
class SesMailHeaders (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesMailHeaders(BaseModel):
    name: str
    value: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var name : str
var value : str
class SesMessage (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesMessage(BaseModel):
    mail: SesMail
    receipt: SesReceipt
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var mail : SesMail
var receipt : SesReceipt
class SesModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesModel(BaseModel):
    Records: List[SesRecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[SesRecordModel]
class SesReceipt (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesReceipt(BaseModel):
    timestamp: datetime
    processingTimeMillis: PositiveInt
    recipients: List[str]
    spamVerdict: SesReceiptVerdict
    virusVerdict: SesReceiptVerdict
    spfVerdict: SesReceiptVerdict
    dmarcVerdict: SesReceiptVerdict
    action: SesReceiptAction
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var action : SesReceiptAction
var dmarcVerdict : SesReceiptVerdict
var processingTimeMillis : pydantic.types.PositiveInt
var recipients : List[str]
var spamVerdict : SesReceiptVerdict
var spfVerdict : SesReceiptVerdict
var timestamp : datetime.datetime
var virusVerdict : SesReceiptVerdict
class SesReceiptAction (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesReceiptAction(BaseModel):
    type: Literal["Lambda"]  # noqa A003,VNE003
    invocationType: Literal["Event"]
    functionArn: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var functionArn : str
var invocationType : Literal['Event']
var type : Literal['Lambda']
class SesReceiptVerdict (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesReceiptVerdict(BaseModel):
    status: Literal["PASS", "FAIL", "GRAY", "PROCESSING_FAILED"]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var status : Literal['PASS', 'FAIL', 'GRAY', 'PROCESSING_FAILED']
class SesRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SesRecordModel(BaseModel):
    eventSource: Literal["aws:ses"]
    eventVersion: str
    ses: SesMessage
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var eventSource : Literal['aws:ses']
var eventVersion : str
var ses : SesMessage
class SnsModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SnsModel(BaseModel):
    Records: List[SnsRecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[SnsRecordModel]
class SnsNotificationModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SnsNotificationModel(BaseModel):
    Subject: Optional[str]
    TopicArn: str
    UnsubscribeUrl: HttpUrl
    Type: Literal["Notification"]
    MessageAttributes: Optional[Dict[str, SnsMsgAttributeModel]]
    Message: Union[str, TypingType[BaseModel]]
    MessageId: str
    SigningCertUrl: Optional[HttpUrl]  # NOTE: FIFO opt-in removes attribute
    Signature: Optional[str]  # NOTE: FIFO opt-in removes attribute
    Timestamp: datetime
    SignatureVersion: Optional[str]  # NOTE: FIFO opt-in removes attribute

    @root_validator(pre=True, allow_reuse=True)
    def check_sqs_protocol(cls, values):
        sqs_rewritten_keys = ("UnsubscribeURL", "SigningCertURL")
        if any(key in sqs_rewritten_keys for key in values):
            # The sentinel value 'None' forces the validator to fail with
            # ValidatorError instead of KeyError when the key is missing from
            # the SQS payload
            values["UnsubscribeUrl"] = values.pop("UnsubscribeURL", None)
            values["SigningCertUrl"] = values.pop("SigningCertURL", None)
        return values
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Message : Union[str, Type[pydantic.main.BaseModel]]
var MessageAttributes : Optional[Dict[str, SnsMsgAttributeModel]]
var MessageId : str
var Signature : Optional[str]
var SignatureVersion : Optional[str]
var SigningCertUrl : Optional[pydantic.networks.HttpUrl]
var Subject : Optional[str]
var Timestamp : datetime.datetime
var TopicArn : str
var Type : Literal['Notification']
var UnsubscribeUrl : pydantic.networks.HttpUrl
Static methods
def check_sqs_protocol(values)
-
Expand source code
@root_validator(pre=True, allow_reuse=True)
def check_sqs_protocol(cls, values):
    sqs_rewritten_keys = ("UnsubscribeURL", "SigningCertURL")
    if any(key in sqs_rewritten_keys for key in values):
        # The sentinel value 'None' forces the validator to fail with
        # ValidatorError instead of KeyError when the key is missing from
        # the SQS payload
        values["UnsubscribeUrl"] = values.pop("UnsubscribeURL", None)
        values["SigningCertUrl"] = values.pop("SigningCertURL", None)
    return values
class SnsRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SnsRecordModel(BaseModel):
    EventSource: Literal["aws:sns"]
    EventVersion: str
    EventSubscriptionArn: str
    Sns: SnsNotificationModel
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var EventSource : Literal['aws:sns']
var EventSubscriptionArn : str
var EventVersion : str
var Sns : SnsNotificationModel
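The check_sqs_protocol root validator rewrites UnsubscribeURL/SigningCertURL keys, which appear when an SNS notification is embedded inside an SQS message body, so the same notification model covers both delivery paths. A sketch for a direct SNS trigger; names are illustrative.

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import SnsModel
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=SnsModel)
def lambda_handler(event: SnsModel, context: LambdaContext):
    for record in event.Records:
        notification = record.Sns
        print(notification.TopicArn, notification.Subject, notification.Message)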
class SqsAttributesModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SqsAttributesModel(BaseModel):
    ApproximateReceiveCount: str
    ApproximateFirstReceiveTimestamp: datetime
    MessageDeduplicationId: Optional[str]
    MessageGroupId: Optional[str]
    SenderId: str
    SentTimestamp: datetime
    SequenceNumber: Optional[str]
    AWSTraceHeader: Optional[str]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var AWSTraceHeader : Optional[str]
var ApproximateFirstReceiveTimestamp : datetime.datetime
var ApproximateReceiveCount : str
var MessageDeduplicationId : Optional[str]
var MessageGroupId : Optional[str]
var SenderId : str
var SentTimestamp : datetime.datetime
var SequenceNumber : Optional[str]
class SqsModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SqsModel(BaseModel):
    Records: List[SqsRecordModel]
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var Records : List[SqsRecordModel]
class SqsMsgAttributeModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SqsMsgAttributeModel(BaseModel):
    stringValue: Optional[str]
    binaryValue: Optional[str]
    stringListValues: List[str] = []
    binaryListValues: List[str] = []
    dataType: str

    # context on why it's commented: https://github.com/awslabs/aws-lambda-powertools-python/pull/118
    # Amazon SQS supports the logical data types String, Number, and Binary with optional custom data type
    # labels with the format .custom-data-type.
    # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-message-metadata.html#sqs-message-attributes
    # @validator("dataType")
    # def valid_type(cls, v):  # noqa: VNE001,E800 # noqa: E800
    #     pattern = re.compile("Number.*|String.*|Binary.*")  # noqa: E800
    #     if not pattern.match(v):  # noqa: E800
    #         raise TypeError("data type is invalid")  # noqa: E800
    #     return v  # noqa: E800
    #
    # # validate that dataType and value are not None and match
    # @root_validator
    # def check_str_and_binary_values(cls, values):  # noqa: E800
    #     binary_val, str_val = values.get("binaryValue", ""), values.get("stringValue", "")  # noqa: E800
    #     data_type = values.get("dataType")  # noqa: E800
    #     if not str_val and not binary_val:  # noqa: E800
    #         raise TypeError("both binaryValue and stringValue are missing")  # noqa: E800
    #     if data_type.startswith("Binary") and not binary_val:  # noqa: E800
    #         raise TypeError("binaryValue is missing")  # noqa: E800
    #     if (data_type.startswith("String") or data_type.startswith("Number")) and not str_val:  # noqa: E800
    #         raise TypeError("stringValue is missing")  # noqa: E800
    #     return values  # noqa: E800
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var binaryListValues : List[str]
var binaryValue : Optional[str]
var dataType : str
var stringListValues : List[str]
var stringValue : Optional[str]
class SqsRecordModel (**data: Any)
-
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class SqsRecordModel(BaseModel):
    messageId: str
    receiptHandle: str
    body: Union[str, Type[BaseModel]]
    attributes: SqsAttributesModel
    messageAttributes: Dict[str, SqsMsgAttributeModel]
    md5OfBody: str
    md5OfMessageAttributes: Optional[str]
    eventSource: Literal["aws:sqs"]
    eventSourceARN: str
    awsRegion: str
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var attributes : SqsAttributesModel
var awsRegion : str
var body : Union[str, Type[pydantic.main.BaseModel]]
var eventSource : Literal['aws:sqs']
var eventSourceARN : str
var md5OfBody : str
var md5OfMessageAttributes : Optional[str]
var messageAttributes : Dict[str, SqsMsgAttributeModel]
var messageId : str
var receiptHandle : str