Module aws_lambda_powertools.utilities.parser.models.kafka
Expand source code
from datetime import datetime
from typing import Dict, List, Type, Union

from pydantic import BaseModel, validator

from aws_lambda_powertools.shared.functions import base64_decode, bytes_to_string
from aws_lambda_powertools.utilities.parser.types import Literal

SERVERS_DELIMITER = ","


class KafkaRecordModel(BaseModel):
    topic: str
    partition: int
    offset: int
    timestamp: datetime
    timestampType: str
    key: bytes
    value: Union[str, Type[BaseModel]]
    headers: List[Dict[str, bytes]]

    # Added type ignore to keep compatibility between Pydantic v1 and v2
    _decode_key = validator("key", allow_reuse=True)(base64_decode)  # type: ignore[type-var, unused-ignore]

    @validator("value", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        as_bytes = base64_decode(value)
        return bytes_to_string(as_bytes)

    @validator("headers", pre=True, allow_reuse=True)
    def decode_headers_list(cls, value):
        for header in value:
            for key, values in header.items():
                header[key] = bytes(values)
        return value


class KafkaBaseEventModel(BaseModel):
    bootstrapServers: List[str]
    records: Dict[str, List[KafkaRecordModel]]

    @validator("bootstrapServers", pre=True, allow_reuse=True)
    def split_servers(cls, value):
        return None if not value else value.split(SERVERS_DELIMITER)


class KafkaSelfManagedEventModel(KafkaBaseEventModel):
    """Self-managed Apache Kafka event trigger

    Documentation:
    --------------
    - https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html
    """

    eventSource: Literal["aws:SelfManagedKafka"]


class KafkaMskEventModel(KafkaBaseEventModel):
    """Fully-managed AWS Apache Kafka event trigger

    Documentation:
    --------------
    - https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html
    """

    eventSource: Literal["aws:kafka"]
    eventSourceArn: str
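As a quick orientation before the per-class reference, here is a minimal usage sketch (not part of this module; the event values are illustrative) showing how the three validators cooperate when an Amazon MSK event is parsed with the utility's parse function:

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import KafkaMskEventModel

event = {
    "eventSource": "aws:kafka",
    "eventSourceArn": "arn:aws:kafka:us-east-1:123456789012:cluster/demo-cluster/abcd1234",
    "bootstrapServers": "b-1.demo:9092,b-2.demo:9092",
    "records": {
        "mytopic-0": [
            {
                "topic": "mytopic",
                "partition": 0,
                "offset": 15,
                "timestamp": 1545084650987,
                "timestampType": "CREATE_TIME",
                "key": "cmVjb3JkS2V5",            # base64("recordKey")
                "value": "eyJrZXkiOiJ2YWx1ZSJ9",  # base64('{"key":"value"}')
                "headers": [{"headerKey": [104, 101, 97, 100, 101, 114]}],
            }
        ]
    },
}

parsed = parse(event=event, model=KafkaMskEventModel)
record = parsed.records["mytopic-0"][0]
assert record.key == b"recordKey"                 # decoded by the "key" validator
assert record.value == '{"key":"value"}'          # decoded by data_base64_decode
assert record.headers == [{"headerKey": b"header"}]  # int lists become bytes
assert parsed.bootstrapServers == ["b-1.demo:9092", "b-2.demo:9092"]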
Classes
class KafkaBaseEventModel (**data: Any)
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaBaseEventModel(BaseModel):
    bootstrapServers: List[str]
    records: Dict[str, List[KafkaRecordModel]]

    @validator("bootstrapServers", pre=True, allow_reuse=True)
    def split_servers(cls, value):
        return None if not value else value.split(SERVERS_DELIMITER)
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Subclasses
- KafkaSelfManagedEventModel
- KafkaMskEventModel
Class variables
var bootstrapServers : List[str]
var records : Dict[str, List[KafkaRecordModel]]
Static methods
def split_servers(value)
Expand source code
@validator("bootstrapServers", pre=True, allow_reuse=True) def split_servers(cls, value): return None if not value else value.split(SERVERS_DELIMITER)
class KafkaMskEventModel (**data: Any)
Fully-managed AWS Apache Kafka event trigger.
Documentation: https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaMskEventModel(KafkaBaseEventModel):
    """Fully-managed AWS Apache Kafka event trigger

    Documentation:
    --------------
    - https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html
    """

    eventSource: Literal["aws:kafka"]
    eventSourceArn: str
Ancestors
- KafkaBaseEventModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var eventSource : Literal['aws:kafka']
var eventSourceArn : str
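Beyond parsing the full event shape, the parser utility also ships a Kafka envelope (defined outside this module). A sketch, assuming envelopes.KafkaEnvelope and a hypothetical Order payload model, of parsing each record's decoded value directly into a user-defined model:

from typing import List

from aws_lambda_powertools.utilities.parser import BaseModel, envelopes, event_parser
from aws_lambda_powertools.utilities.typing import LambdaContext

class Order(BaseModel):  # hypothetical model for the JSON carried in record values
    item: str
    quantity: int

@event_parser(model=Order, envelope=envelopes.KafkaEnvelope)
def handler(orders: List[Order], context: LambdaContext):
    # the envelope extracts and parses every record value across all topics
    for order in orders:
        print(order.item, order.quantity)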
class KafkaRecordModel (**data: Any)
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaRecordModel(BaseModel):
    topic: str
    partition: int
    offset: int
    timestamp: datetime
    timestampType: str
    key: bytes
    value: Union[str, Type[BaseModel]]
    headers: List[Dict[str, bytes]]

    # Added type ignore to keep compatibility between Pydantic v1 and v2
    _decode_key = validator("key", allow_reuse=True)(base64_decode)  # type: ignore[type-var, unused-ignore]

    @validator("value", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        as_bytes = base64_decode(value)
        return bytes_to_string(as_bytes)

    @validator("headers", pre=True, allow_reuse=True)
    def decode_headers_list(cls, value):
        for header in value:
            for key, values in header.items():
                header[key] = bytes(values)
        return value
Ancestors
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var headers : List[Dict[str, bytes]]
var key : bytes
var offset : int
var partition : int
var timestamp : datetime.datetime
var timestampType : str
var topic : str
var value : Union[str, Type[pydantic.main.BaseModel]]
Static methods
def data_base64_decode(value)
Expand source code
@validator("value", pre=True, allow_reuse=True) def data_base64_decode(cls, value): as_bytes = base64_decode(value) return bytes_to_string(as_bytes)
def decode_headers_list(value)
Expand source code
@validator("headers", pre=True, allow_reuse=True) def decode_headers_list(cls, value): for header in value: for key, values in header.items(): header[key] = bytes(values) return value
class KafkaSelfManagedEventModel (**data: Any)
Self-managed Apache Kafka event trigger.
Documentation: https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
Expand source code
class KafkaSelfManagedEventModel(KafkaBaseEventModel):
    """Self-managed Apache Kafka event trigger

    Documentation:
    --------------
    - https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html
    """

    eventSource: Literal["aws:SelfManagedKafka"]
Ancestors
- KafkaBaseEventModel
- pydantic.main.BaseModel
- pydantic.utils.Representation
Class variables
var eventSource : Literal['aws:SelfManagedKafka']
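As with the MSK model, a self-managed event can be parsed directly; a minimal sketch with illustrative values (note this model has no eventSourceArn field, and the eventSource literal differs):

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import KafkaSelfManagedEventModel

event = {
    "eventSource": "aws:SelfManagedKafka",
    "bootstrapServers": "kafka-1.internal:9092",  # single server, no comma to split
    "records": {"mytopic-0": []},
}

parsed = parse(event=event, model=KafkaSelfManagedEventModel)
assert parsed.eventSource == "aws:SelfManagedKafka"
assert parsed.bootstrapServers == ["kafka-1.internal:9092"]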