Module aws_lambda_powertools.utilities.parser.models.kinesis

import json
import zlib
from typing import Dict, List, Type, Union

from pydantic import BaseModel, validator

from aws_lambda_powertools.shared.functions import base64_decode
from aws_lambda_powertools.utilities.parser.models.cloudwatch import (
    CloudWatchLogsDecode,
)
from aws_lambda_powertools.utilities.parser.types import Literal


class KinesisDataStreamRecordPayload(BaseModel):
    kinesisSchemaVersion: str
    partitionKey: str
    sequenceNumber: str
    data: Union[bytes, Type[BaseModel]]  # base64 encoded str is parsed into bytes
    approximateArrivalTimestamp: float

    @validator("data", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        return base64_decode(value)


class KinesisDataStreamRecord(BaseModel):
    eventSource: Literal["aws:kinesis"]
    eventVersion: str
    eventID: str
    eventName: Literal["aws:kinesis:record"]
    invokeIdentityArn: str
    awsRegion: str
    eventSourceARN: str
    kinesis: KinesisDataStreamRecordPayload

    def decompress_zlib_record_data_as_json(self) -> Dict:
        """Decompress Kinesis Record bytes data zlib compressed to JSON"""
        if not isinstance(self.kinesis.data, bytes):
            raise ValueError("We can only decompress bytes data, not custom models.")

        return json.loads(zlib.decompress(self.kinesis.data, zlib.MAX_WBITS | 32))


class KinesisDataStreamModel(BaseModel):
    Records: List[KinesisDataStreamRecord]


def extract_cloudwatch_logs_from_event(event: KinesisDataStreamModel) -> List[CloudWatchLogsDecode]:
    return [CloudWatchLogsDecode(**record.decompress_zlib_record_data_as_json()) for record in event.Records]


def extract_cloudwatch_logs_from_record(record: KinesisDataStreamRecord) -> CloudWatchLogsDecode:
    return CloudWatchLogsDecode(**record.decompress_zlib_record_data_as_json())
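
As a hedged usage sketch (not part of the module source), a raw Kinesis Data Streams event can be parsed into KinesisDataStreamModel; the event payload below is a trimmed, purely illustrative example:

import base64

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamModel

# Illustrative event: a single record whose data field is base64-encoded JSON
sample_event = {
    "Records": [
        {
            "eventSource": "aws:kinesis",
            "eventVersion": "1.0",
            "eventID": "shardId-000000000006:49590338271490256608559692538361571095921575989136588898",
            "eventName": "aws:kinesis:record",
            "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-kinesis-role",
            "awsRegion": "us-east-2",
            "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream",
            "kinesis": {
                "kinesisSchemaVersion": "1.0",
                "partitionKey": "1",
                "sequenceNumber": "49590338271490256608559692538361571095921575989136588898",
                "data": base64.b64encode(b'{"message": "hello"}').decode(),
                "approximateArrivalTimestamp": 1545084650.987,
            },
        }
    ]
}

parsed = parse(event=sample_event, model=KinesisDataStreamModel)
assert parsed.Records[0].kinesis.data == b'{"message": "hello"}'  # data was base64-decoded into bytes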

Functions

def extract_cloudwatch_logs_from_event(event: KinesisDataStreamModel) -> List[CloudWatchLogsDecode]
def extract_cloudwatch_logs_from_event(event: KinesisDataStreamModel) -> List[CloudWatchLogsDecode]:
    return [CloudWatchLogsDecode(**record.decompress_zlib_record_data_as_json()) for record in event.Records]
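
A hedged handler sketch for the common case where CloudWatch Logs are subscribed to the Kinesis stream; the handler name is illustrative:

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamModel
from aws_lambda_powertools.utilities.parser.models.kinesis import (
    extract_cloudwatch_logs_from_event,
)

@event_parser(model=KinesisDataStreamModel)
def handler(event: KinesisDataStreamModel, context):
    # Each item is a CloudWatchLogsDecode model (logGroup, logStream, logEvents, ...)
    for logs in extract_cloudwatch_logs_from_event(event):
        for log_event in logs.logEvents:
            print(log_event.message)
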
def extract_cloudwatch_logs_from_record(record: KinesisDataStreamRecord) -> CloudWatchLogsDecode
def extract_cloudwatch_logs_from_record(record: KinesisDataStreamRecord) -> CloudWatchLogsDecode:
    return CloudWatchLogsDecode(**record.decompress_zlib_record_data_as_json())
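
The per-record variant fits when records are processed individually; a minimal sketch under the same assumptions as above:

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamModel
from aws_lambda_powertools.utilities.parser.models.kinesis import (
    extract_cloudwatch_logs_from_record,
)

@event_parser(model=KinesisDataStreamModel)
def handler(event: KinesisDataStreamModel, context):
    for record in event.Records:
        logs = extract_cloudwatch_logs_from_record(record)
        print(logs.logGroup, len(logs.logEvents))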

Classes

class KinesisDataStreamModel (**data: Any)

Create a new model by parsing and validating input data from keyword arguments.

Raises ValidationError if the input data cannot be parsed to form a valid model.

class KinesisDataStreamModel(BaseModel):
    Records: List[KinesisDataStreamRecord]

Ancestors

  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

var Records : List[KinesisDataStreamRecord]
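
As a hedged sketch, validation follows standard pydantic behaviour; an event that does not match the model raises ValidationError (field values below are illustrative):

from pydantic import ValidationError

from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamModel

try:
    KinesisDataStreamModel.parse_obj({"Records": [{"eventSource": "aws:sqs"}]})
except ValidationError as exc:
    print(exc.errors())  # eventSource must be the literal "aws:kinesis"; other fields are missing
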
class KinesisDataStreamRecord (**data: Any)

Create a new model by parsing and validating input data from keyword arguments.

Raises ValidationError if the input data cannot be parsed to form a valid model.

class KinesisDataStreamRecord(BaseModel):
    eventSource: Literal["aws:kinesis"]
    eventVersion: str
    eventID: str
    eventName: Literal["aws:kinesis:record"]
    invokeIdentityArn: str
    awsRegion: str
    eventSourceARN: str
    kinesis: KinesisDataStreamRecordPayload

    def decompress_zlib_record_data_as_json(self) -> Dict:
        """Decompress Kinesis Record bytes data zlib compressed to JSON"""
        if not isinstance(self.kinesis.data, bytes):
            raise ValueError("We can only decompress bytes data, not custom models.")

        return json.loads(zlib.decompress(self.kinesis.data, zlib.MAX_WBITS | 32))

Ancestors

  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

var awsRegion : str
var eventID : str
var eventName : Literal['aws:kinesis:record']
var eventSource : Literal['aws:kinesis']
var eventSourceARN : str
var eventVersion : str
var invokeIdentityArn : str
var kinesis : KinesisDataStreamRecordPayload

Methods

def decompress_zlib_record_data_as_json(self) -> Dict

Decompress Kinesis Record bytes data zlib compressed to JSON

def decompress_zlib_record_data_as_json(self) -> Dict:
    """Decompress Kinesis Record bytes data zlib compressed to JSON"""
    if not isinstance(self.kinesis.data, bytes):
        raise ValueError("We can only decompress bytes data, not custom models.")

    return json.loads(zlib.decompress(self.kinesis.data, zlib.MAX_WBITS | 32))
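
A hedged round-trip sketch: the method expects the record data to be zlib/gzip-compressed JSON (as produced by CloudWatch Logs subscription filters) that was base64-encoded on the wire; all field values below are illustrative:

import base64
import gzip
import json

from aws_lambda_powertools.utilities.parser.models.kinesis import KinesisDataStreamRecord

# gzip-compressed JSON; zlib.decompress with MAX_WBITS | 32 accepts both zlib and gzip headers
compressed = gzip.compress(json.dumps({"messageType": "DATA_MESSAGE"}).encode())
record = KinesisDataStreamRecord(
    eventSource="aws:kinesis",
    eventVersion="1.0",
    eventID="shardId-000000000006:49590338271490256608559692538361571095921575989136588898",
    eventName="aws:kinesis:record",
    invokeIdentityArn="arn:aws:iam::123456789012:role/lambda-kinesis-role",
    awsRegion="us-east-2",
    eventSourceARN="arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream",
    kinesis={
        "kinesisSchemaVersion": "1.0",
        "partitionKey": "1",
        "sequenceNumber": "49590338271490256608559692538361571095921575989136588898",
        "data": base64.b64encode(compressed).decode(),
        "approximateArrivalTimestamp": 1545084650.987,
    },
)
assert record.decompress_zlib_record_data_as_json() == {"messageType": "DATA_MESSAGE"}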
class KinesisDataStreamRecordPayload (**data: Any)

Create a new model by parsing and validating input data from keyword arguments.

Raises ValidationError if the input data cannot be parsed to form a valid model.

class KinesisDataStreamRecordPayload(BaseModel):
    kinesisSchemaVersion: str
    partitionKey: str
    sequenceNumber: str
    data: Union[bytes, Type[BaseModel]]  # base64 encoded str is parsed into bytes
    approximateArrivalTimestamp: float

    @validator("data", pre=True, allow_reuse=True)
    def data_base64_decode(cls, value):
        return base64_decode(value)

Ancestors

  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

var approximateArrivalTimestamp : float
var data : Union[bytes, Type[pydantic.main.BaseModel]]
var kinesisSchemaVersion : str
var partitionKey : str
var sequenceNumber : str

Static methods

def data_base64_decode(value)
@validator("data", pre=True, allow_reuse=True)
def data_base64_decode(cls, value):
    return base64_decode(value)
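
A hedged sketch of the pre-validator's effect: a base64-encoded string supplied as data is decoded to bytes before the field itself is validated (values below are illustrative):

import base64

from aws_lambda_powertools.utilities.parser.models.kinesis import KinesisDataStreamRecordPayload

payload = KinesisDataStreamRecordPayload(
    kinesisSchemaVersion="1.0",
    partitionKey="1",
    sequenceNumber="49590338271490256608559692538361571095921575989136588898",
    data=base64.b64encode(b"Hello, world").decode(),
    approximateArrivalTimestamp=1545084650.987,
)
assert payload.data == b"Hello, world"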