This is not necessary if you're installing Powertools for AWS Lambda (Python) via Lambda Layer/SAR
Add aws-lambda-powertools[validation] as a dependency in your preferred tool (e.g., requirements.txt, pyproject.toml). This ensures you have the required dependencies in place before using the Validation utility.
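For example, the extra could be declared in a requirements.txt like this (illustrative; pin versions according to your own policy):

```
aws-lambda-powertools[validation]
```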
```python
from dataclasses import dataclass, field
from uuid import uuid4

import getting_started_validator_decorator_schema as schemas

from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
from aws_lambda_powertools.utilities.validation import validator

# we can get list of allowed IPs from AWS Parameter Store using Parameters Utility
# See: https://awslabs.github.io/aws-lambda-powertools-python/latest/utilities/parameters/
ALLOWED_IPS = parameters.get_parameter("/lambda-powertools/allowed_ips")


class UserPermissionsError(Exception):
    ...


@dataclass
class User:
    ip: str
    permissions: list
    user_id: str = field(default_factory=lambda: f"{uuid4()}")
    name: str = "Project Lambda Powertools"


# using a decorator to validate input and output data
@validator(inbound_schema=schemas.INPUT, outbound_schema=schemas.OUTPUT)
def lambda_handler(event, context: LambdaContext) -> dict:
    try:
        user_details: dict = {}

        # get permissions by user_id and project
        if (
            event.get("user_id") == "0d44b083-8206-4a3a-aa95-5d392a99be4a"
            and event.get("project") == "powertools"
            and event.get("ip") in ALLOWED_IPS
        ):
            user_details = User(ip=event.get("ip"), permissions=["read", "write"]).__dict__

        # the body must be an object because it must match the OUTPUT schema, otherwise validation fails
        return {"body": user_details or None, "statusCode": 200 if user_details else 204}
    except Exception as e:
        raise UserPermissionsError(str(e))
```
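The decorator above pulls its schemas from a separate module. A minimal sketch of what such a module could look like is shown below; the exact properties are illustrative assumptions rather than the schemas from the sample project:

```python
# getting_started_validator_decorator_schema.py -- illustrative sketch only
# INPUT and OUTPUT are plain dictionaries holding JSON Schema Draft 7 definitions.
INPUT = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {
        "user_id": {"type": "string"},
        "project": {"type": "string"},
        "ip": {"type": "string", "format": "ipv4"},
    },
    "required": ["user_id", "project", "ip"],
}

OUTPUT = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {
        "statusCode": {"type": "integer"},
        "body": {"type": ["object", "null"]},
    },
    "required": ["statusCode", "body"],
}
```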
```python
import getting_started_validator_standalone_schema as schemas

from aws_lambda_powertools.utilities import parameters
from aws_lambda_powertools.utilities.typing import LambdaContext
from aws_lambda_powertools.utilities.validation import SchemaValidationError, validate

# we can get list of allowed IPs from AWS Parameter Store using Parameters Utility
# See: https://awslabs.github.io/aws-lambda-powertools-python/latest/utilities/parameters/
ALLOWED_IPS = parameters.get_parameter("/lambda-powertools/allowed_ips")


def lambda_handler(event, context: LambdaContext) -> dict:
    try:
        user_authenticated: str = ""

        # using standalone function to validate input data only
        validate(event=event, schema=schemas.INPUT)

        if (
            event.get("user_id") == "0d44b083-8206-4a3a-aa95-5d392a99be4a"
            and event.get("project") == "powertools"
            and event.get("ip") in ALLOWED_IPS
        ):
            user_authenticated = "Allowed"

        # in this example the body can be of any type because we are not validating the OUTPUT
        return {"body": user_authenticated, "statusCode": 200 if user_authenticated else 204}
    except SchemaValidationError as exception:
        # SchemaValidationError indicates where a data mismatch is
        return {"body": str(exception), "statusCode": 400}
```
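Since validation runs inside the handler, a schema mismatch surfaces as a 400 response instead of an unhandled exception. Below is a hypothetical local invocation; the module name, event payloads, and the assumption that the Parameter Store lookup resolves are all illustrative:

```python
# hypothetical local invocation of the standalone handler above
from getting_started_validator_standalone import lambda_handler  # assumed module name

valid_event = {
    "user_id": "0d44b083-8206-4a3a-aa95-5d392a99be4a",
    "project": "powertools",
    "ip": "192.168.0.1",  # assumed to be present in ALLOWED_IPS
}
invalid_event = {"project": 42}  # wrong type and missing keys

print(lambda_handler(valid_event, None))    # e.g. {"body": "Allowed", "statusCode": 200}
print(lambda_handler(invalid_event, None))  # e.g. {"body": "<validation error message>", "statusCode": 400}
```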
```python
import boto3

import getting_started_validator_unwrapping_schema as schemas

from aws_lambda_powertools.utilities.data_classes.event_bridge_event import (
    EventBridgeEvent,
)
from aws_lambda_powertools.utilities.typing import LambdaContext
from aws_lambda_powertools.utilities.validation import validator

s3_client = boto3.resource("s3")


# we use the 'envelope' parameter to extract the payload inside the 'detail' key before validating
@validator(inbound_schema=schemas.INPUT, envelope="detail")
def lambda_handler(event: dict, context: LambdaContext) -> dict:
    my_event = EventBridgeEvent(event)
    data = my_event.detail.get("data", {})
    s3_bucket, s3_key = data.get("s3_bucket"), data.get("s3_key")

    try:
        s3_object = s3_client.Object(bucket_name=s3_bucket, key=s3_key)
        payload = s3_object.get()["Body"]
        content = payload.read().decode("utf-8")

        return {"message": process_data_object(content), "success": True}
    except s3_client.meta.client.exceptions.NoSuchBucket as exception:
        return return_error_message(str(exception))
    except s3_client.meta.client.exceptions.NoSuchKey as exception:
        return return_error_message(str(exception))


def return_error_message(message: str) -> dict:
    return {"message": message, "success": False}


def process_data_object(content: str) -> str:
    # insert logic here
    return "Data OK"
```
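To make the unwrapping concrete, here is a trimmed, illustrative EventBridge payload: because of envelope="detail", only the object under the detail key is validated against schemas.INPUT, while the handler still receives the full event. Source, bucket, and key values below are placeholders.

```python
# illustrative EventBridge event; only the "detail" object is validated
sample_event = {
    "version": "0",
    "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718",
    "detail-type": "data-ingestion",        # placeholder detail-type
    "source": "my.custom.app",              # placeholder source
    "account": "123456789012",
    "time": "2024-01-01T00:00:00Z",
    "region": "us-east-1",
    "resources": [],
    "detail": {
        "data": {
            "s3_bucket": "example-bucket",  # placeholder bucket
            "s3_key": "incoming/data.txt",  # placeholder key
        }
    },
}
```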
JSON Schema Draft 7 adds many built-in formats, such as date, time, and notably a regex format, which might be a better replacement for a custom format if you have control over the schema.
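For instance, when you control the schema, a built-in Draft 7 format (or the pattern keyword) can remove the need for a custom format altogether; the properties below are only a sketch:

```python
# sketch: using Draft 7 built-ins (and the pattern keyword) instead of a custom format
SCHEMA_SNIPPET = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {
        "created_at": {"type": "string", "format": "date-time"},  # built-in format
        "accountid": {"type": "string", "pattern": "^\\d{12}$"},  # plain regex via the pattern keyword
    },
}
```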
JSON Schemas with custom formats like awsaccountid will fail validation. If you have these, you can pass them using the formats parameter:
For each format defined as a dictionary key, you must supply either a regex or a function that returns a boolean, so the validator knows how to handle values of that type.
```python
import json
import re

import boto3

import custom_format_schema as schemas

from aws_lambda_powertools.utilities.typing import LambdaContext
from aws_lambda_powertools.utilities.validation import SchemaValidationError, validate

# awsaccountid must have 12 digits
custom_format = {"awsaccountid": lambda value: re.match(r"^(\d{12})$", value)}


def lambda_handler(event, context: LambdaContext) -> dict:
    try:
        # validate input using custom json format
        validate(event=event, schema=schemas.INPUT, formats=custom_format)

        client_organization = boto3.client("organizations", region_name=event.get("region"))
        account_data = client_organization.describe_account(AccountId=event.get("accountid"))

        return {
            "account": json.dumps(account_data.get("Account"), default=str),
            "message": "Success",
            "statusCode": 200,
        }
    except SchemaValidationError as exception:
        return return_error_message(str(exception))
    except Exception as exception:
        return return_error_message(str(exception))


def return_error_message(message: str) -> dict:
    return {"account": None, "message": message, "statusCode": 400}
```
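As noted above, the values in the formats dictionary don't have to be callables; a plain regex string should also be accepted. A minimal sketch showing both forms (the positiveinteger format and the example event are purely illustrative):

```python
# sketch: the two accepted forms for custom format definitions
import custom_format_schema as schemas

from aws_lambda_powertools.utilities.validation import validate

custom_formats = {
    "awsaccountid": r"^\d{12}$",                                           # regex string
    "positiveinteger": lambda value: value.isdigit() and int(value) > 0,   # boolean-returning function
}

# illustrative event; the real schema may require additional fields such as "region"
validate(event={"accountid": "123456789012", "region": "us-east-1"}, schema=schemas.INPUT, formats=custom_formats)
```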