Module aws_lambda_powertools.utilities.parser

Advanced event_parser utility

Source code
"""Advanced event_parser utility
"""
from . import envelopes
from .envelopes import BaseEnvelope
from .parser import event_parser, parse
from .pydantic import BaseModel, Field, ValidationError, root_validator, validator

__all__ = [
    "event_parser",
    "parse",
    "envelopes",
    "BaseEnvelope",
    "BaseModel",
    "Field",
    "validator",
    "root_validator",
    "ValidationError",
]
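
A quick usage sketch of the public surface re-exported above; the Order model and the sample payloads are illustrative:

from aws_lambda_powertools.utilities.parser import BaseModel, ValidationError, parse

class Order(BaseModel):
    id: int
    description: str

# parse & validate a plain dict against the model
order = parse(event={"id": 1, "description": "Hello"}, model=Order)
assert order.id == 1

try:
    parse(event={"id": "not-a-number"}, model=Order)
except ValidationError as exc:
    print(exc.errors())  # structured list of validation errors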

Sub-modules

aws_lambda_powertools.utilities.parser.compat
aws_lambda_powertools.utilities.parser.envelopes
aws_lambda_powertools.utilities.parser.exceptions
aws_lambda_powertools.utilities.parser.models
aws_lambda_powertools.utilities.parser.parser
aws_lambda_powertools.utilities.parser.pydantic
aws_lambda_powertools.utilities.parser.types
    Generics and other shared types used across parser

Functions

def Field(default: Any = PydanticUndefined, *, default_factory: Optional[Callable[[], Any]] = None, alias: Optional[str] = None, title: Optional[str] = None, description: Optional[str] = None, exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None, include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None, const: Optional[bool] = None, gt: Optional[float] = None, ge: Optional[float] = None, lt: Optional[float] = None, le: Optional[float] = None, multiple_of: Optional[float] = None, allow_inf_nan: Optional[bool] = None, max_digits: Optional[int] = None, decimal_places: Optional[int] = None, min_items: Optional[int] = None, max_items: Optional[int] = None, unique_items: Optional[bool] = None, min_length: Optional[int] = None, max_length: Optional[int] = None, allow_mutation: bool = True, regex: Optional[str] = None, discriminator: Optional[str] = None, repr: bool = True, **extra: Any) ‑> Any

Used to provide extra information about a field, either for the model schema or complex validation. Some arguments apply only to number fields (int, float, Decimal) and some apply only to str.

:param default: since this is replacing the field's default, its first argument is used to set the default; use ellipsis (...) to indicate the field is required
:param default_factory: callable that will be called when a default value is needed for this field. If both default and default_factory are set, an error is raised.
:param alias: the public name of the field
:param title: can be any string, used in the schema
:param description: can be any string, used in the schema
:param exclude: exclude this field while dumping. Takes the same values as the include and exclude arguments on the .dict method.
:param include: include this field while dumping. Takes the same values as the include and exclude arguments on the .dict method.
:param const: this field is required and must take its default value
:param gt: only applies to numbers, requires the field to be "greater than". The schema will have an exclusiveMinimum validation keyword
:param ge: only applies to numbers, requires the field to be "greater than or equal to". The schema will have a minimum validation keyword
:param lt: only applies to numbers, requires the field to be "less than". The schema will have an exclusiveMaximum validation keyword
:param le: only applies to numbers, requires the field to be "less than or equal to". The schema will have a maximum validation keyword
:param multiple_of: only applies to numbers, requires the field to be "a multiple of". The schema will have a multipleOf validation keyword
:param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf), which is a valid Python float. Defaults to True; set to False for compatibility with JSON.
:param max_digits: only applies to Decimals, requires the field to have a maximum number of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes.
:param decimal_places: only applies to Decimals, requires the field to have at most a number of decimal places allowed. It does not include trailing decimal zeroes.
:param min_items: only applies to lists, requires the field to have a minimum number of elements. The schema will have a minItems validation keyword
:param max_items: only applies to lists, requires the field to have a maximum number of elements. The schema will have a maxItems validation keyword
:param unique_items: only applies to lists, requires the field not to have duplicated elements. The schema will have a uniqueItems validation keyword
:param min_length: only applies to strings, requires the field to have a minimum length. The schema will have a minLength validation keyword
:param max_length: only applies to strings, requires the field to have a maximum length. The schema will have a maxLength validation keyword
:param allow_mutation: a boolean which defaults to True. When False, the field raises a TypeError if the field is assigned on an instance. The BaseModel Config must set validate_assignment to True
:param regex: only applies to strings, requires the field to match against a regular expression pattern string. The schema will have a pattern validation keyword
:param discriminator: only useful with a (discriminated, a.k.a. tagged) Union of sub models with a common field. The discriminator is the name of this common field, used to shorten validation and improve the generated schema
:param repr: show this field in the representation
:param **extra: any additional keyword arguments will be added as-is to the schema

Source code
def Field(
    default: Any = Undefined,
    *,
    default_factory: Optional[NoArgAnyCallable] = None,
    alias: Optional[str] = None,
    title: Optional[str] = None,
    description: Optional[str] = None,
    exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
    include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
    const: Optional[bool] = None,
    gt: Optional[float] = None,
    ge: Optional[float] = None,
    lt: Optional[float] = None,
    le: Optional[float] = None,
    multiple_of: Optional[float] = None,
    allow_inf_nan: Optional[bool] = None,
    max_digits: Optional[int] = None,
    decimal_places: Optional[int] = None,
    min_items: Optional[int] = None,
    max_items: Optional[int] = None,
    unique_items: Optional[bool] = None,
    min_length: Optional[int] = None,
    max_length: Optional[int] = None,
    allow_mutation: bool = True,
    regex: Optional[str] = None,
    discriminator: Optional[str] = None,
    repr: bool = True,
    **extra: Any,
) -> Any:
    """
    Used to provide extra information about a field, either for the model schema or complex validation. Some arguments
    apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``.

    :param default: since this is replacing the field’s default, its first argument is used
      to set the default, use ellipsis (``...``) to indicate the field is required
    :param default_factory: callable that will be called when a default value is needed for this field
      If both `default` and `default_factory` are set, an error is raised.
    :param alias: the public name of the field
    :param title: can be any string, used in the schema
    :param description: can be any string, used in the schema
    :param exclude: exclude this field while dumping.
      Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
    :param include: include this field while dumping.
      Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
    :param const: this field is required and *must* take its default value
    :param gt: only applies to numbers, requires the field to be "greater than". The schema
      will have an ``exclusiveMinimum`` validation keyword
    :param ge: only applies to numbers, requires the field to be "greater than or equal to". The
      schema will have a ``minimum`` validation keyword
    :param lt: only applies to numbers, requires the field to be "less than". The schema
      will have an ``exclusiveMaximum`` validation keyword
    :param le: only applies to numbers, requires the field to be "less than or equal to". The
      schema will have a ``maximum`` validation keyword
    :param multiple_of: only applies to numbers, requires the field to be "a multiple of". The
      schema will have a ``multipleOf`` validation keyword
    :param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf),
        which is a valid Python float. Default True, set to False for compatibility with JSON.
    :param max_digits: only applies to Decimals, requires the field to have a maximum number
      of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes.
    :param decimal_places: only applies to Decimals, requires the field to have at most a number of decimal places
      allowed. It does not include trailing decimal zeroes.
    :param min_items: only applies to lists, requires the field to have a minimum number of
      elements. The schema will have a ``minItems`` validation keyword
    :param max_items: only applies to lists, requires the field to have a maximum number of
      elements. The schema will have a ``maxItems`` validation keyword
    :param unique_items: only applies to lists, requires the field not to have duplicated
      elements. The schema will have a ``uniqueItems`` validation keyword
    :param min_length: only applies to strings, requires the field to have a minimum length. The
      schema will have a ``minLength`` validation keyword
    :param max_length: only applies to strings, requires the field to have a maximum length. The
      schema will have a ``maxLength`` validation keyword
    :param allow_mutation: a boolean which defaults to True. When False, the field raises a TypeError if the field is
      assigned on an instance.  The BaseModel Config must set validate_assignment to True
    :param regex: only applies to strings, requires the field to match against a regular expression
      pattern string. The schema will have a ``pattern`` validation keyword
    :param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field.
      The `discriminator` is the name of this common field to shorten validation and improve generated schema
    :param repr: show this field in the representation
    :param **extra: any additional keyword arguments will be added as is to the schema
    """
    field_info = FieldInfo(
        default,
        default_factory=default_factory,
        alias=alias,
        title=title,
        description=description,
        exclude=exclude,
        include=include,
        const=const,
        gt=gt,
        ge=ge,
        lt=lt,
        le=le,
        multiple_of=multiple_of,
        allow_inf_nan=allow_inf_nan,
        max_digits=max_digits,
        decimal_places=decimal_places,
        min_items=min_items,
        max_items=max_items,
        unique_items=unique_items,
        min_length=min_length,
        max_length=max_length,
        allow_mutation=allow_mutation,
        regex=regex,
        discriminator=discriminator,
        repr=repr,
        **extra,
    )
    field_info._validate()
    return field_info
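
A short sketch of how a few of these keyword arguments behave at runtime; the Item model and its constraints are illustrative:

from aws_lambda_powertools.utilities.parser import BaseModel, Field, ValidationError

class Item(BaseModel):
    # `...` marks the field as required; gt=0 adds an exclusiveMinimum check
    price: float = Field(..., gt=0)
    # string constraints surface as minLength/maxLength in the generated schema
    sku: str = Field(..., min_length=3, max_length=12)

Item(price=9.99, sku="ABC-123")  # passes validation
try:
    Item(price=-1, sku="AB")     # violates both gt and min_length
except ValidationError as exc:
    print(exc.errors())
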
def event_parser(handler: Callable[..., ~EventParserReturnType], event: Dict[str, Any], context: LambdaContext, model: Optional[Type[~Model]] = None, envelope: Optional[Type[~Envelope]] = None, **kwargs: Any) ‑> ~EventParserReturnType

Lambda handler decorator to parse & validate events using Pydantic models

It requires a model that implements Pydantic BaseModel to parse & validate the event.

When an envelope is given, it'll use the following logic:

  1. Parse the event against the envelope model first e.g. EnvelopeModel(**event)
  2. Envelope will extract a given key to be parsed against the model e.g. event.detail

This is useful when you need to a) confirm the event wrapper structure, and b) selectively extract a portion of your payload for parsing & validation.

NOTE: If envelope is omitted, the complete event is parsed to match the model parameter BaseModel definition.

Example

Lambda handler decorator to parse & validate event

class Order(BaseModel):
    id: int
    description: str
    ...

@event_parser(model=Order)
def handler(event: Order, context: LambdaContext):
    ...

Lambda handler decorator to parse & validate event - using built-in envelope

class Order(BaseModel):
    id: int
    description: str
    ...

@event_parser(model=Order, envelope=envelopes.EVENTBRIDGE)
def handler(event: Order, context: LambdaContext):
    ...

Parameters

handler :  Callable
Lambda handler function to decorate
event :  Dict
Lambda event to be parsed & validated
context :  LambdaContext
Lambda context object
model :  Model
Your data model that will replace the event.
envelope : Envelope
Optional envelope to extract the model from

Raises

ValidationError
When input event does not conform with model provided
InvalidModelTypeError
When model given does not implement BaseModel or is not provided
InvalidEnvelopeError
When envelope given does not implement BaseEnvelope
Source code
@lambda_handler_decorator
def event_parser(
    handler: Callable[..., EventParserReturnType],
    event: Dict[str, Any],
    context: LambdaContext,
    model: Optional[Type[Model]] = None,
    envelope: Optional[Type[Envelope]] = None,
    **kwargs: Any,
) -> EventParserReturnType:
    """Lambda handler decorator to parse & validate events using Pydantic models

    It requires a model that implements Pydantic BaseModel to parse & validate the event.

    When an envelope is given, it'll use the following logic:

    1. Parse the event against the envelope model first e.g. EnvelopeModel(**event)
    2. Envelope will extract a given key to be parsed against the model e.g. event.detail

    This is useful when you need to a) confirm the event wrapper structure, and
    b) selectively extract a portion of your payload for parsing & validation.

    NOTE: If envelope is omitted, the complete event is parsed to match the model parameter BaseModel definition.

    Example
    -------
    **Lambda handler decorator to parse & validate event**

        class Order(BaseModel):
            id: int
            description: str
            ...

        @event_parser(model=Order)
        def handler(event: Order, context: LambdaContext):
            ...

    **Lambda handler decorator to parse & validate event - using built-in envelope**

        class Order(BaseModel):
            id: int
            description: str
            ...

        @event_parser(model=Order, envelope=envelopes.EVENTBRIDGE)
        def handler(event: Order, context: LambdaContext):
            ...

    Parameters
    ----------
    handler:  Callable
        Lambda handler function to decorate
    event:    Dict
        Lambda event to be parsed & validated
    context:  LambdaContext
        Lambda context object
    model:   Model
        Your data model that will replace the event.
    envelope: Envelope
        Optional envelope to extract the model from

    Raises
    ------
    ValidationError
        When input event does not conform with model provided
    InvalidModelTypeError
        When model given does not implement BaseModel or is not provided
    InvalidEnvelopeError
        When envelope given does not implement BaseEnvelope
    """

    # The first parameter of a Lambda function is always the event
    # These lines get the model passed to the event_parser function, or infer it
    # from the type hint of the handler's first parameter via typing.get_type_hints
    type_hints = typing.get_type_hints(handler)
    model = model or (list(type_hints.values())[0] if type_hints else None)
    if model is None:
        raise InvalidModelTypeError(
            "The model must be provided either as the `model` argument to `event_parser`"
            "or as the type hint of `event` in the handler that it wraps",
        )

    if envelope:
        parsed_event = parse(event=event, model=model, envelope=envelope)
    else:
        parsed_event = parse(event=event, model=model)

    logger.debug(f"Calling handler {handler.__name__}")
    return handler(parsed_event, context, **kwargs)
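
Because the source above falls back to typing.get_type_hints(handler) when model is omitted, the decorator can also infer the model from the type hint of the handler's first parameter. A minimal sketch; the Order model is illustrative:

from aws_lambda_powertools.utilities.parser import BaseModel, event_parser
from aws_lambda_powertools.utilities.typing import LambdaContext

class Order(BaseModel):
    id: int
    description: str

@event_parser  # no model argument: inferred from the `event: Order` annotation
def handler(event: Order, context: LambdaContext):
    return event.id
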
def parse(event: Dict[str, Any], model: Type[~Model], envelope: Optional[Type[~Envelope]] = None)

Standalone function to parse & validate events using Pydantic models

Typically used when you need fine-grained control over error handling compared to the event_parser decorator.

Example

Standalone function to parse & validate event

from aws_lambda_powertools.utilities.parser import ValidationError

class Order(BaseModel):
    id: int
    description: str
    ...

def handler(event: dict, context: LambdaContext):
    try:
        order = parse(event=event, model=Order)
    except ValidationError:
        ...

Standalone function to parse & validate event - using built-in envelope

class Order(BaseModel):
    id: int
    description: str
    ...

def handler(event: dict, context: LambdaContext):
    try:
        order = parse(event=event, model=Order, envelope=envelopes.EVENTBRIDGE)
    except ValidationError:
        ...

Parameters

event :  Dict
Lambda event to be parsed & validated
model :  Model
Your data model that will replace the event
envelope : Envelope
Optional envelope to extract the model from

Raises

ValidationError
When input event does not conform with model provided
InvalidModelTypeError
When model given does not implement BaseModel
InvalidEnvelopeError
When envelope given does not implement BaseEnvelope
Source code
def parse(event: Dict[str, Any], model: Type[Model], envelope: Optional[Type[Envelope]] = None):
    """Standalone function to parse & validate events using Pydantic models

    Typically used when you need fine-grained control over error handling compared to the event_parser decorator.

    Example
    -------

    **Standalone function to parse & validate event**

        from aws_lambda_powertools.utilities.parser import ValidationError

        class Order(BaseModel):
            id: int
            description: str
            ...

        def handler(event: dict, context: LambdaContext):
            try:
                order = parse(event=event, model=Order)
            except ValidationError:
                ...

    **Standalone function to parse & validate event - using built-in envelope**

        class Order(BaseModel):
            id: int
            description: str
            ...

        def handler(event: dict, context: LambdaContext):
            try:
                order = parse(event=event, model=Order, envelope=envelopes.EVENTBRIDGE)
            except ValidationError:
                ...

    Parameters
    ----------
    event:    Dict
        Lambda event to be parsed & validated
    model:   Model
        Your data model that will replace the event
    envelope: Envelope
        Optional envelope to extract the model from

    Raises
    ------
    ValidationError
        When input event does not conform with model provided
    InvalidModelTypeError
        When model given does not implement BaseModel
    InvalidEnvelopeError
        When envelope given does not implement BaseEnvelope
    """
    if envelope and callable(envelope):
        try:
            logger.debug(f"Parsing and validating event model with envelope={envelope}")
            return envelope().parse(data=event, model=model)
        except AttributeError as exc:
            raise InvalidEnvelopeError(
                f"Error: {str(exc)}. Please ensure that both the Input model and the Envelope inherits from BaseModel,\n"  # noqa E501
                "and your payload adheres to the specified Input model structure.\n"
                f"Envelope={envelope}\nModel={model}",
            )

    try:
        disable_pydantic_v2_warning()
        logger.debug("Parsing and validating event model; no envelope used")
        if isinstance(event, str):
            return model.parse_raw(event)

        return model.parse_obj(event)
    except AttributeError as exc:
        raise InvalidModelTypeError(
            f"Error: {str(exc)}. Please ensure the Input model inherits from BaseModel,\n"
            "and your payload adheres to the specified Input model structure.\n"
            f"Model={model}",
        )
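
A sketch of the fine-grained error handling this function enables, turning a validation failure into an HTTP-style client error instead of letting the exception crash the invocation; the response shape is illustrative:

from aws_lambda_powertools.utilities.parser import BaseModel, ValidationError, parse

class Order(BaseModel):
    id: int
    description: str

def handler(event: dict, context) -> dict:
    try:
        order = parse(event=event, model=Order)
    except ValidationError as exc:
        # surface a client error rather than a 500
        return {"statusCode": 400, "body": str(exc)}
    return {"statusCode": 200, "body": order.json()}
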
def root_validator(_func: Optional[Callable[..., Any]] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False) ‑> Union[AnyClassMethod, Callable[[Callable[..., Any]], AnyClassMethod]]

Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either before or after standard model parsing/validation is performed.

Source code
def root_validator(
    _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]:
    """
    Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either
    before or after standard model parsing/validation is performed.
    """
    if _func:
        f_cls = _prepare_validator(_func, allow_reuse)
        setattr(
            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
        )
        return f_cls

    def dec(f: AnyCallable) -> 'AnyClassMethod':
        f_cls = _prepare_validator(f, allow_reuse)
        setattr(
            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
        )
        return f_cls

    return dec
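
A minimal usage sketch: a root validator sees all parsed values at once, so it can enforce cross-field invariants; the Window model is illustrative:

from aws_lambda_powertools.utilities.parser import BaseModel, root_validator

class Window(BaseModel):
    start: int
    end: int

    @root_validator
    def check_order(cls, values):
        # runs after per-field validation; `values` holds all parsed fields
        if values.get("start", 0) > values.get("end", 0):
            raise ValueError("start must not be greater than end")
        return values
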
def validator(*fields: str, pre: bool = False, each_item: bool = False, always: bool = False, check_fields: bool = True, whole: Optional[bool] = None, allow_reuse: bool = False) ‑> Callable[[Callable[..., Any]], AnyClassMethod]

Decorate methods on the class indicating that they should be used to validate fields

:param fields: which field(s) the method should be called on
:param pre: whether or not this validator should be called before the standard validators (else after)
:param each_item: for complex objects (sets, lists etc.) whether to validate individual elements rather than the whole object
:param always: whether this method and other validators should be called even if the value is missing
:param check_fields: whether to check that the fields actually exist on the model
:param allow_reuse: whether to track and raise an error if another validator refers to the decorated function

Source code
def validator(
    *fields: str,
    pre: bool = False,
    each_item: bool = False,
    always: bool = False,
    check_fields: bool = True,
    whole: Optional[bool] = None,
    allow_reuse: bool = False,
) -> Callable[[AnyCallable], 'AnyClassMethod']:
    """
    Decorate methods on the class indicating that they should be used to validate fields
    :param fields: which field(s) the method should be called on
    :param pre: whether or not this validator should be called before the standard validators (else after)
    :param each_item: for complex objects (sets, lists etc.) whether to validate individual elements rather than the
      whole object
    :param always: whether this method and other validators should be called even if the value is missing
    :param check_fields: whether to check that the fields actually exist on the model
    :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function
    """
    if not fields:
        raise ConfigError('validator with no fields specified')
    elif isinstance(fields[0], FunctionType):
        raise ConfigError(
            "validators should be used with fields and keyword arguments, not bare. "  # noqa: Q000
            "E.g. usage should be `@validator('<field_name>', ...)`"
        )
    elif not all(isinstance(field, str) for field in fields):
        raise ConfigError(
            "validator fields should be passed as separate string args. "  # noqa: Q000
            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`"
        )

    if whole is not None:
        warnings.warn(
            'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead',
            DeprecationWarning,
        )
        assert each_item is False, '"each_item" and "whole" conflict, remove "whole"'
        each_item = not whole

    def dec(f: AnyCallable) -> 'AnyClassMethod':
        f_cls = _prepare_validator(f, allow_reuse)
        setattr(
            f_cls,
            VALIDATOR_CONFIG_KEY,
            (
                fields,
                Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields),
            ),
        )
        return f_cls

    return dec
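
A minimal usage sketch of the two most common knobs, the default post-validation behaviour and each_item; the User model is illustrative:

from typing import List

from aws_lambda_powertools.utilities.parser import BaseModel, validator

class User(BaseModel):
    name: str
    tags: List[str]

    @validator("name")
    def strip_name(cls, v):
        # pre=False (default): runs after the standard str validation
        return v.strip()

    @validator("tags", each_item=True)
    def tag_not_empty(cls, v):
        # each_item=True validates each list element instead of the whole list
        assert v, "tag must not be empty"
        return v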

Classes

class BaseEnvelope

ABC implementation for creating a supported Envelope

Source code
class BaseEnvelope(ABC):
    """ABC implementation for creating a supported Envelope"""

    @staticmethod
    def _parse(data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Union[Model, None]:
        """Parses envelope data against model provided

        Parameters
        ----------
        data : Dict
            Data to be parsed and validated
        model : Type[Model]
            Data model to parse and validate data against

        Returns
        -------
        Any
            Parsed data
        """
        disable_pydantic_v2_warning()

        if data is None:
            logger.debug("Skipping parsing as event is None")
            return data

        logger.debug("parsing event against model")
        if isinstance(data, str):
            logger.debug("parsing event as string")
            return model.parse_raw(data)

        return model.parse_obj(data)

    @abstractmethod
    def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]):
        """Implementation to parse data against envelope model, then against the data model

        NOTE: Call `_parse` method to fully parse data with model provided.

        Example
        -------

        **EventBridge envelope implementation example**

        def parse(...):
            # 1. parses data against envelope model
            parsed_envelope = EventBridgeModel(**data)

            # 2. parses portion of data within the envelope against model
            return self._parse(data=parsed_envelope.detail, model=data_model)
        """
        return NotImplemented  # pragma: no cover

Ancestors

  • abc.ABC

Methods

def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[~Model])

Implementation to parse data against envelope model, then against the data model

NOTE: Call _parse method to fully parse data with model provided.

Example

EventBridge envelope implementation example

def parse(...):
    # 1. parses data against envelope model
    parsed_envelope = EventBridgeModel(**data)

    # 2. parses portion of data within the envelope against model
    return self._parse(data=parsed_envelope.detail, model=data_model)
Source code
@abstractmethod
def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]):
    """Implementation to parse data against envelope model, then against the data model

    NOTE: Call `_parse` method to fully parse data with model provided.

    Example
    -------

    **EventBridge envelope implementation example**

    def parse(...):
        # 1. parses data against envelope model
        parsed_envelope = EventBridgeModel(**data)

        # 2. parses portion of data within the envelope against model
        return self._parse(data=parsed_envelope.detail, model=data_model)
    """
    return NotImplemented  # pragma: no cover
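
A minimal custom envelope sketch following the pattern above; the WrapperModel and its payload key are hypothetical stand-ins for your own event wrapper (the Model TypeVar comes from the types sub-module listed earlier):

from typing import Any, Dict, Optional, Type, Union

from aws_lambda_powertools.utilities.parser import BaseEnvelope, BaseModel
from aws_lambda_powertools.utilities.parser.types import Model

class WrapperModel(BaseModel):
    # hypothetical wrapper: the real payload lives under `payload`
    version: str
    payload: Dict[str, Any]

class PayloadEnvelope(BaseEnvelope):
    def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]):
        # 1. parse data against the envelope model
        parsed_envelope = WrapperModel(**data)
        # 2. parse the inner payload against the data model via `_parse`
        return self._parse(data=parsed_envelope.payload, model=model)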
class BaseModel (**data: Any)

Create a new model by parsing and validating input data from keyword arguments.

Raises ValidationError if the input data cannot be parsed to form a valid model.
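
A brief instantiation sketch of the behaviour described above; the Point model is illustrative:

from aws_lambda_powertools.utilities.parser import BaseModel, ValidationError

class Point(BaseModel):
    x: int
    y: int

p = Point(x=1, y="2")  # "2" is coerced to int 2 during validation
try:
    Point(x=1)         # missing required field `y`
except ValidationError as exc:
    print(exc.errors())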

Source code
class BaseModel(Representation, metaclass=ModelMetaclass):
    if TYPE_CHECKING:
        # populated by the metaclass, defined here to help IDEs only
        __fields__: ClassVar[Dict[str, ModelField]] = {}
        __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None
        __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None
        __validators__: ClassVar[Dict[str, AnyCallable]] = {}
        __pre_root_validators__: ClassVar[List[AnyCallable]]
        __post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]]
        __config__: ClassVar[Type[BaseConfig]] = BaseConfig
        __json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x
        __schema_cache__: ClassVar['DictAny'] = {}
        __custom_root_type__: ClassVar[bool] = False
        __signature__: ClassVar['Signature']
        __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]]
        __class_vars__: ClassVar[SetStr]
        __fields_set__: ClassVar[SetStr] = set()

    Config = BaseConfig
    __slots__ = ('__dict__', '__fields_set__')
    __doc__ = ''  # Null out the Representation docstring

    def __init__(__pydantic_self__, **data: Any) -> None:
        """
        Create a new model by parsing and validating input data from keyword arguments.

        Raises ValidationError if the input data cannot be parsed to form a valid model.
        """
        # Uses something other than `self` the first arg to allow "self" as a settable attribute
        values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data)
        if validation_error:
            raise validation_error
        try:
            object_setattr(__pydantic_self__, '__dict__', values)
        except TypeError as e:
            raise TypeError(
                'Model values must be a dict; you may not have returned a dictionary from a root validator'
            ) from e
        object_setattr(__pydantic_self__, '__fields_set__', fields_set)
        __pydantic_self__._init_private_attributes()

    @no_type_check
    def __setattr__(self, name, value):  # noqa: C901 (ignore complexity)
        if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES:
            return object_setattr(self, name, value)

        if self.__config__.extra is not Extra.allow and name not in self.__fields__:
            raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"')
        elif not self.__config__.allow_mutation or self.__config__.frozen:
            raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment')
        elif name in self.__fields__ and self.__fields__[name].final:
            raise TypeError(
                f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment'
            )
        elif self.__config__.validate_assignment:
            new_values = {**self.__dict__, name: value}

            for validator in self.__pre_root_validators__:
                try:
                    new_values = validator(self.__class__, new_values)
                except (ValueError, TypeError, AssertionError) as exc:
                    raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__)

            known_field = self.__fields__.get(name, None)
            if known_field:
                # We want to
                # - make sure validators are called without the current value for this field inside `values`
                # - keep other values (e.g. submodels) untouched (using `BaseModel.dict()` will change them into dicts)
                # - keep the order of the fields
                if not known_field.field_info.allow_mutation:
                    raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned')
                dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name}
                value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__)
                if error_:
                    raise ValidationError([error_], self.__class__)
                else:
                    new_values[name] = value

            errors = []
            for skip_on_failure, validator in self.__post_root_validators__:
                if skip_on_failure and errors:
                    continue
                try:
                    new_values = validator(self.__class__, new_values)
                except (ValueError, TypeError, AssertionError) as exc:
                    errors.append(ErrorWrapper(exc, loc=ROOT_KEY))
            if errors:
                raise ValidationError(errors, self.__class__)

            # update the whole __dict__ as other values than just `value`
            # may be changed (e.g. with `root_validator`)
            object_setattr(self, '__dict__', new_values)
        else:
            self.__dict__[name] = value

        self.__fields_set__.add(name)

    def __getstate__(self) -> 'DictAny':
        private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__)
        return {
            '__dict__': self.__dict__,
            '__fields_set__': self.__fields_set__,
            '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined},
        }

    def __setstate__(self, state: 'DictAny') -> None:
        object_setattr(self, '__dict__', state['__dict__'])
        object_setattr(self, '__fields_set__', state['__fields_set__'])
        for name, value in state.get('__private_attribute_values__', {}).items():
            object_setattr(self, name, value)

    def _init_private_attributes(self) -> None:
        for name, private_attr in self.__private_attributes__.items():
            default = private_attr.get_default()
            if default is not Undefined:
                object_setattr(self, name, default)

    def dict(
        self,
        *,
        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
        by_alias: bool = False,
        skip_defaults: Optional[bool] = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
    ) -> 'DictStrAny':
        """
        Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

        """
        if skip_defaults is not None:
            warnings.warn(
                f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
                DeprecationWarning,
            )
            exclude_unset = skip_defaults

        return dict(
            self._iter(
                to_dict=True,
                by_alias=by_alias,
                include=include,
                exclude=exclude,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )
        )

    def json(
        self,
        *,
        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
        by_alias: bool = False,
        skip_defaults: Optional[bool] = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        encoder: Optional[Callable[[Any], Any]] = None,
        models_as_dict: bool = True,
        **dumps_kwargs: Any,
    ) -> str:
        """
        Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.

        `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
        """
        if skip_defaults is not None:
            warnings.warn(
                f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
                DeprecationWarning,
            )
            exclude_unset = skip_defaults
        encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__)

        # We don't directly call `self.dict()`, which does exactly this with `to_dict=True`
        # because we want to be able to keep raw `BaseModel` instances and not as `dict`.
        # This allows users to write custom JSON encoders for given `BaseModel` classes.
        data = dict(
            self._iter(
                to_dict=models_as_dict,
                by_alias=by_alias,
                include=include,
                exclude=exclude,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )
        )
        if self.__custom_root_type__:
            data = data[ROOT_KEY]
        return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs)

    @classmethod
    def _enforce_dict_if_root(cls, obj: Any) -> Any:
        if cls.__custom_root_type__ and (
            not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY})
            and not (isinstance(obj, BaseModel) and obj.__fields__.keys() == {ROOT_KEY})
            or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES
        ):
            return {ROOT_KEY: obj}
        else:
            return obj

    @classmethod
    def parse_obj(cls: Type['Model'], obj: Any) -> 'Model':
        obj = cls._enforce_dict_if_root(obj)
        if not isinstance(obj, dict):
            try:
                obj = dict(obj)
            except (TypeError, ValueError) as e:
                exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}')
                raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e
        return cls(**obj)

    @classmethod
    def parse_raw(
        cls: Type['Model'],
        b: StrBytes,
        *,
        content_type: str = None,
        encoding: str = 'utf8',
        proto: Protocol = None,
        allow_pickle: bool = False,
    ) -> 'Model':
        try:
            obj = load_str_bytes(
                b,
                proto=proto,
                content_type=content_type,
                encoding=encoding,
                allow_pickle=allow_pickle,
                json_loads=cls.__config__.json_loads,
            )
        except (ValueError, TypeError, UnicodeDecodeError) as e:
            raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls)
        return cls.parse_obj(obj)

    @classmethod
    def parse_file(
        cls: Type['Model'],
        path: Union[str, Path],
        *,
        content_type: str = None,
        encoding: str = 'utf8',
        proto: Protocol = None,
        allow_pickle: bool = False,
    ) -> 'Model':
        obj = load_file(
            path,
            proto=proto,
            content_type=content_type,
            encoding=encoding,
            allow_pickle=allow_pickle,
            json_loads=cls.__config__.json_loads,
        )
        return cls.parse_obj(obj)

    @classmethod
    def from_orm(cls: Type['Model'], obj: Any) -> 'Model':
        if not cls.__config__.orm_mode:
            raise ConfigError('You must have the config attribute orm_mode=True to use from_orm')
        obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj)
        m = cls.__new__(cls)
        values, fields_set, validation_error = validate_model(cls, obj)
        if validation_error:
            raise validation_error
        object_setattr(m, '__dict__', values)
        object_setattr(m, '__fields_set__', fields_set)
        m._init_private_attributes()
        return m

    @classmethod
    def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model':
        """
        Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data.
        Default values are respected, but no other validation is performed.
        Behaves as if `Config.extra = 'allow'` was set since it adds all passed values
        """
        m = cls.__new__(cls)
        fields_values: Dict[str, Any] = {}
        for name, field in cls.__fields__.items():
            if field.alt_alias and field.alias in values:
                fields_values[name] = values[field.alias]
            elif name in values:
                fields_values[name] = values[name]
            elif not field.required:
                fields_values[name] = field.get_default()
        fields_values.update(values)
        object_setattr(m, '__dict__', fields_values)
        if _fields_set is None:
            _fields_set = set(values.keys())
        object_setattr(m, '__fields_set__', _fields_set)
        m._init_private_attributes()
        return m

    def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model':
        if deep:
            # chances of having empty dict here are quite low for using smart_deepcopy
            values = deepcopy(values)

        cls = self.__class__
        m = cls.__new__(cls)
        object_setattr(m, '__dict__', values)
        object_setattr(m, '__fields_set__', fields_set)
        for name in self.__private_attributes__:
            value = getattr(self, name, Undefined)
            if value is not Undefined:
                if deep:
                    value = deepcopy(value)
                object_setattr(m, name, value)

        return m

    def copy(
        self: 'Model',
        *,
        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
        update: Optional['DictStrAny'] = None,
        deep: bool = False,
    ) -> 'Model':
        """
        Duplicate a model, optionally choose which fields to include, exclude and change.

        :param include: fields to include in new model
        :param exclude: fields to exclude from new model, as with values this takes precedence over include
        :param update: values to change/add in the new model. Note: the data is not validated before creating
            the new model: you should trust this data
        :param deep: set to `True` to make a deep copy of the model
        :return: new model instance
        """

        values = dict(
            self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False),
            **(update or {}),
        )

        # new `__fields_set__` can have unset optional fields with a set value in `update` kwarg
        if update:
            fields_set = self.__fields_set__ | update.keys()
        else:
            fields_set = set(self.__fields_set__)

        return self._copy_and_set_values(values, fields_set, deep=deep)

    @classmethod
    def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny':
        cached = cls.__schema_cache__.get((by_alias, ref_template))
        if cached is not None:
            return cached
        s = model_schema(cls, by_alias=by_alias, ref_template=ref_template)
        cls.__schema_cache__[(by_alias, ref_template)] = s
        return s

    @classmethod
    def schema_json(
        cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any
    ) -> str:
        from .json import pydantic_encoder

        return cls.__config__.json_dumps(
            cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs
        )

    @classmethod
    def __get_validators__(cls) -> 'CallableGenerator':
        yield cls.validate

    @classmethod
    def validate(cls: Type['Model'], value: Any) -> 'Model':
        if isinstance(value, cls):
            copy_on_model_validation = cls.__config__.copy_on_model_validation
            # whether to deep or shallow copy the model on validation, None means do not copy
            deep_copy: Optional[bool] = None
            if copy_on_model_validation not in {'deep', 'shallow', 'none'}:
                # Warn about deprecated behavior
                warnings.warn(
                    "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning
                )
                if copy_on_model_validation:
                    deep_copy = False

            if copy_on_model_validation == 'shallow':
                # shallow copy
                deep_copy = False
            elif copy_on_model_validation == 'deep':
                # deep copy
                deep_copy = True

            if deep_copy is None:
                return value
            else:
                return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy)

        value = cls._enforce_dict_if_root(value)

        if isinstance(value, dict):
            return cls(**value)
        elif cls.__config__.orm_mode:
            return cls.from_orm(value)
        else:
            try:
                value_as_dict = dict(value)
            except (TypeError, ValueError) as e:
                raise DictError() from e
            return cls(**value_as_dict)

    @classmethod
    def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict:
        if isinstance(obj, GetterDict):
            return obj
        return cls.__config__.getter_dict(obj)

    @classmethod
    @no_type_check
    def _get_value(
        cls,
        v: Any,
        to_dict: bool,
        by_alias: bool,
        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']],
        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']],
        exclude_unset: bool,
        exclude_defaults: bool,
        exclude_none: bool,
    ) -> Any:
        if isinstance(v, BaseModel):
            if to_dict:
                v_dict = v.dict(
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    include=include,
                    exclude=exclude,
                    exclude_none=exclude_none,
                )
                if ROOT_KEY in v_dict:
                    return v_dict[ROOT_KEY]
                return v_dict
            else:
                return v.copy(include=include, exclude=exclude)

        value_exclude = ValueItems(v, exclude) if exclude else None
        value_include = ValueItems(v, include) if include else None

        if isinstance(v, dict):
            return {
                k_: cls._get_value(
                    v_,
                    to_dict=to_dict,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    include=value_include and value_include.for_element(k_),
                    exclude=value_exclude and value_exclude.for_element(k_),
                    exclude_none=exclude_none,
                )
                for k_, v_ in v.items()
                if (not value_exclude or not value_exclude.is_excluded(k_))
                and (not value_include or value_include.is_included(k_))
            }

        elif sequence_like(v):
            seq_args = (
                cls._get_value(
                    v_,
                    to_dict=to_dict,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    include=value_include and value_include.for_element(i),
                    exclude=value_exclude and value_exclude.for_element(i),
                    exclude_none=exclude_none,
                )
                for i, v_ in enumerate(v)
                if (not value_exclude or not value_exclude.is_excluded(i))
                and (not value_include or value_include.is_included(i))
            )

            return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args)

        elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False):
            return v.value

        else:
            return v

    @classmethod
    def __try_update_forward_refs__(cls, **localns: Any) -> None:
        """
        Same as update_forward_refs but will not raise exception
        when forward references are not defined.
        """
        update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,))

    @classmethod
    def update_forward_refs(cls, **localns: Any) -> None:
        """
        Try to update ForwardRefs on fields based on this Model, globalns and localns.
        """
        update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns)

    def __iter__(self) -> 'TupleGenerator':
        """
        so `dict(model)` works
        """
        yield from self.__dict__.items()

    def _iter(
        self,
        to_dict: bool = False,
        by_alias: bool = False,
        include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
        exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
    ) -> 'TupleGenerator':
        # Merge field set excludes with explicit exclude parameter with explicit overriding field set options.
        # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case.
        if exclude is not None or self.__exclude_fields__ is not None:
            exclude = ValueItems.merge(self.__exclude_fields__, exclude)

        if include is not None or self.__include_fields__ is not None:
            include = ValueItems.merge(self.__include_fields__, include, intersect=True)

        allowed_keys = self._calculate_keys(
            include=include, exclude=exclude, exclude_unset=exclude_unset  # type: ignore
        )
        if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none):
            # huge boost for plain _iter()
            yield from self.__dict__.items()
            return

        value_exclude = ValueItems(self, exclude) if exclude is not None else None
        value_include = ValueItems(self, include) if include is not None else None

        for field_key, v in self.__dict__.items():
            if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None):
                continue

            if exclude_defaults:
                model_field = self.__fields__.get(field_key)
                if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v:
                    continue

            if by_alias and field_key in self.__fields__:
                dict_key = self.__fields__[field_key].alias
            else:
                dict_key = field_key

            if to_dict or value_include or value_exclude:
                v = self._get_value(
                    v,
                    to_dict=to_dict,
                    by_alias=by_alias,
                    include=value_include and value_include.for_element(field_key),
                    exclude=value_exclude and value_exclude.for_element(field_key),
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    exclude_none=exclude_none,
                )
            yield dict_key, v

    def _calculate_keys(
        self,
        include: Optional['MappingIntStrAny'],
        exclude: Optional['MappingIntStrAny'],
        exclude_unset: bool,
        update: Optional['DictStrAny'] = None,
    ) -> Optional[AbstractSet[str]]:
        if include is None and exclude is None and exclude_unset is False:
            return None

        keys: AbstractSet[str]
        if exclude_unset:
            keys = self.__fields_set__.copy()
        else:
            keys = self.__dict__.keys()

        if include is not None:
            keys &= include.keys()

        if update:
            keys -= update.keys()

        if exclude:
            keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)}

        return keys

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, BaseModel):
            return self.dict() == other.dict()
        else:
            return self.dict() == other

    def __repr_args__(self) -> 'ReprArgs':
        return [
            (k, v)
            for k, v in self.__dict__.items()
            if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr)
        ]

Ancestors

  • pydantic.utils.Representation

Class variables

var Config

Static methods

def construct(_fields_set: Optional['SetStr'] = None, **values: Any) ‑> Model

Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set since it adds all passed values

Source code
@classmethod
def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model':
    """
    Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data.
    Default values are respected, but no other validation is performed.
    Behaves as if `Config.extra = 'allow'` was set since it adds all passed values
    """
    m = cls.__new__(cls)
    fields_values: Dict[str, Any] = {}
    for name, field in cls.__fields__.items():
        if field.alt_alias and field.alias in values:
            fields_values[name] = values[field.alias]
        elif name in values:
            fields_values[name] = values[name]
        elif not field.required:
            fields_values[name] = field.get_default()
    fields_values.update(values)
    object_setattr(m, '__dict__', fields_values)
    if _fields_set is None:
        _fields_set = set(values.keys())
    object_setattr(m, '__fields_set__', _fields_set)
    m._init_private_attributes()
    return m
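
A sketch contrasting construct with normal instantiation; values are illustrative:

from aws_lambda_powertools.utilities.parser import BaseModel

class Order(BaseModel):
    id: int
    description: str = "n/a"

Order(id="1")                  # normal path: "1" is coerced to int 1
o = Order.construct(id="1")    # trusted path: no validation, stays the str "1"
assert o.description == "n/a"  # defaults are still applied
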
def from_orm(obj: Any) ‑> Model
Source code
@classmethod
def from_orm(cls: Type['Model'], obj: Any) -> 'Model':
    if not cls.__config__.orm_mode:
        raise ConfigError('You must have the config attribute orm_mode=True to use from_orm')
    obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj)
    m = cls.__new__(cls)
    values, fields_set, validation_error = validate_model(cls, obj)
    if validation_error:
        raise validation_error
    object_setattr(m, '__dict__', values)
    object_setattr(m, '__fields_set__', fields_set)
    m._init_private_attributes()
    return m
def parse_file(path: Union[str, pathlib.Path], *, content_type: str = None, encoding: str = 'utf8', proto: pydantic.parse.Protocol = None, allow_pickle: bool = False) ‑> Model
Source code
@classmethod
def parse_file(
    cls: Type['Model'],
    path: Union[str, Path],
    *,
    content_type: str = None,
    encoding: str = 'utf8',
    proto: Protocol = None,
    allow_pickle: bool = False,
) -> 'Model':
    obj = load_file(
        path,
        proto=proto,
        content_type=content_type,
        encoding=encoding,
        allow_pickle=allow_pickle,
        json_loads=cls.__config__.json_loads,
    )
    return cls.parse_obj(obj)
def parse_obj(obj: Any) ‑> Model
Source code
@classmethod
def parse_obj(cls: Type['Model'], obj: Any) -> 'Model':
    obj = cls._enforce_dict_if_root(obj)
    if not isinstance(obj, dict):
        try:
            obj = dict(obj)
        except (TypeError, ValueError) as e:
            exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}')
            raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e
    return cls(**obj)
def parse_raw(b: Union[str, bytes], *, content_type: str = None, encoding: str = 'utf8', proto: pydantic.parse.Protocol = None, allow_pickle: bool = False) ‑> Model
Source code
@classmethod
def parse_raw(
    cls: Type['Model'],
    b: StrBytes,
    *,
    content_type: str = None,
    encoding: str = 'utf8',
    proto: Protocol = None,
    allow_pickle: bool = False,
) -> 'Model':
    try:
        obj = load_str_bytes(
            b,
            proto=proto,
            content_type=content_type,
            encoding=encoding,
            allow_pickle=allow_pickle,
            json_loads=cls.__config__.json_loads,
        )
    except (ValueError, TypeError, UnicodeDecodeError) as e:
        raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls)
    return cls.parse_obj(obj)
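
A short sketch of the two entry points: parse_obj for already-deserialized data and parse_raw for JSON strings or bytes; the Order model is illustrative:

from aws_lambda_powertools.utilities.parser import BaseModel

class Order(BaseModel):
    id: int
    description: str

Order.parse_obj({"id": 1, "description": "Hello"})   # dict input
Order.parse_raw('{"id": 1, "description": "Hello"}') # raw JSON input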
def schema(by_alias: bool = True, ref_template: str = '#/definitions/{model}') ‑> DictStrAny
Expand source code
@classmethod
def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny':
    cached = cls.__schema_cache__.get((by_alias, ref_template))
    if cached is not None:
        return cached
    s = model_schema(cls, by_alias=by_alias, ref_template=ref_template)
    cls.__schema_cache__[(by_alias, ref_template)] = s
    return s
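A short sketch showing the generated schema dict and the per-class cache keyed on (by_alias, ref_template):

from aws_lambda_powertools.utilities.parser import BaseModel, Field

class Item(BaseModel):
    name: str = Field(..., description="Item name")

s = Item.schema()
assert s["properties"]["name"]["description"] == "Item name"
assert Item.schema() is s  # second call returns the cached dict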
def schema_json(*, by_alias: bool = True, ref_template: str = '#/definitions/{model}', **dumps_kwargs: Any) ‑> str
Expand source code
@classmethod
def schema_json(
    cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any
) -> str:
    from .json import pydantic_encoder

    return cls.__config__.json_dumps(
        cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs
    )
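schema_json() is a thin wrapper: it serializes the schema() result with Config.json_dumps, forwarding extra keyword arguments to the dump function. A minimal sketch:

from aws_lambda_powertools.utilities.parser import BaseModel

class Item(BaseModel):
    name: str

print(Item.schema_json(indent=2))  # indent is forwarded to json.dumps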
def update_forward_refs(**localns: Any) ‑> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

Expand source code
@classmethod
def update_forward_refs(cls, **localns: Any) -> None:
    """
    Try to update ForwardRefs on fields based on this Model, globalns and localns.
    """
    update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns)
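A typical use is resolving a self-referencing (string) annotation after the class body has finished executing; a minimal sketch:

from typing import List
from aws_lambda_powertools.utilities.parser import BaseModel

class Node(BaseModel):
    value: int
    children: List["Node"] = []  # "Node" does not exist yet at class-body time

Node.update_forward_refs()  # resolve the forward reference now that it does

tree = Node(value=1, children=[{"value": 2}])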
def validate(value: Any) ‑> Model
Expand source code
@classmethod
def validate(cls: Type['Model'], value: Any) -> 'Model':
    if isinstance(value, cls):
        copy_on_model_validation = cls.__config__.copy_on_model_validation
        # whether to deep or shallow copy the model on validation, None means do not copy
        deep_copy: Optional[bool] = None
        if copy_on_model_validation not in {'deep', 'shallow', 'none'}:
            # Warn about deprecated behavior
            warnings.warn(
                "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning
            )
            if copy_on_model_validation:
                deep_copy = False

        if copy_on_model_validation == 'shallow':
            # shallow copy
            deep_copy = False
        elif copy_on_model_validation == 'deep':
            # deep copy
            deep_copy = True

        if deep_copy is None:
            return value
        else:
            return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy)

    value = cls._enforce_dict_if_root(value)

    if isinstance(value, dict):
        return cls(**value)
    elif cls.__config__.orm_mode:
        return cls.from_orm(value)
    else:
        try:
            value_as_dict = dict(value)
        except (TypeError, ValueError) as e:
            raise DictError() from e
        return cls(**value_as_dict)
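validate() is the hook pydantic invokes when a model is used as a field of another model. A sketch of the two main branches: full validation for dict input, and copy-or-return for an existing instance, governed by Config.copy_on_model_validation:

from aws_lambda_powertools.utilities.parser import BaseModel

class Point(BaseModel):
    x: int
    y: int

p = Point.validate({"x": 1, "y": 2})  # dict input: full validation
q = Point.validate(p)                 # instance input: copied (or returned
                                      # as-is) per Config.copy_on_model_validation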

Methods

def copy(self: Model, *, include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), ForwardRef(None)] = None, exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), ForwardRef(None)] = None, update: Optional[ForwardRef('DictStrAny')] = None, deep: bool = False) ‑> Model

Duplicate a model, optionally choose which fields to include, exclude and change.

:param include: fields to include in new model
:param exclude: fields to exclude from new model, as with values this takes precedence over include
:param update: values to change/add in the new model. Note: the data is not validated before creating the new model: you should trust this data
:param deep: set to True to make a deep copy of the model
:return: new model instance

Expand source code
def copy(
    self: 'Model',
    *,
    include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
    exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
    update: Optional['DictStrAny'] = None,
    deep: bool = False,
) -> 'Model':
    """
    Duplicate a model, optionally choose which fields to include, exclude and change.

    :param include: fields to include in new model
    :param exclude: fields to exclude from new model, as with values this takes precedence over include
    :param update: values to change/add in the new model. Note: the data is not validated before creating
        the new model: you should trust this data
    :param deep: set to `True` to make a deep copy of the model
    :return: new model instance
    """

    values = dict(
        self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False),
        **(update or {}),
    )

    # new `__fields_set__` can have unset optional fields with a set value in `update` kwarg
    if update:
        fields_set = self.__fields_set__ | update.keys()
    else:
        fields_set = set(self.__fields_set__)

    return self._copy_and_set_values(values, fields_set, deep=deep)
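A small sketch; note that update values bypass validation entirely, so they must already be trusted:

from aws_lambda_powertools.utilities.parser import BaseModel

class User(BaseModel):
    id: int
    name: str

u = User(id=1, name="Jane")
u2 = u.copy(update={"name": "Joe"})     # not validated: any value is accepted
u3 = u.copy(include={"id"}, deep=True)  # deep-copies only the "id" field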
def dict(self, *, include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), ForwardRef(None)] = None, exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), ForwardRef(None)] = None, by_alias: bool = False, skip_defaults: Optional[bool] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False) ‑> DictStrAny

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Expand source code
def dict(
    self,
    *,
    include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
    exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
) -> 'DictStrAny':
    """
    Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

    """
    if skip_defaults is not None:
        warnings.warn(
            f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
            DeprecationWarning,
        )
        exclude_unset = skip_defaults

    return dict(
        self._iter(
            to_dict=True,
            by_alias=by_alias,
            include=include,
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )
    )
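A sketch of the most common switches (the model and field names are illustrative):

from typing import Optional
from aws_lambda_powertools.utilities.parser import BaseModel, Field

class User(BaseModel):
    id: int
    name: Optional[str] = None
    nick: str = Field("n/a", alias="nickName")

u = User(id=1, nickName="jdoe")
u.dict()                    # {'id': 1, 'name': None, 'nick': 'jdoe'}
u.dict(by_alias=True)       # {'id': 1, 'name': None, 'nickName': 'jdoe'}
u.dict(exclude_none=True)   # {'id': 1, 'nick': 'jdoe'}
u.dict(exclude_unset=True)  # {'id': 1, 'nick': 'jdoe'} - only explicitly-set fields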
def json(self, *, include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), ForwardRef(None)] = None, exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), ForwardRef(None)] = None, by_alias: bool = False, skip_defaults: Optional[bool] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, encoder: Optional[Callable[[Any], Any]] = None, models_as_dict: bool = True, **dumps_kwargs: Any) ‑> str

Generate a JSON representation of the model, include and exclude arguments as per dict().

encoder is an optional function to supply as default to json.dumps(), other arguments as per json.dumps().

Expand source code
def json(
    self,
    *,
    include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
    exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any,
) -> str:
    """
    Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.

    `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
    """
    if skip_defaults is not None:
        warnings.warn(
            f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
            DeprecationWarning,
        )
        exclude_unset = skip_defaults
    encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__)

    # We don't directly call `self.dict()`, which does exactly this with `to_dict=True`,
    # because we want to keep raw `BaseModel` instances rather than coercing them to `dict`.
    # This allows users to write custom JSON encoders for given `BaseModel` classes.
    data = dict(
        self._iter(
            to_dict=models_as_dict,
            by_alias=by_alias,
            include=include,
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )
    )
    if self.__custom_root_type__:
        data = data[ROOT_KEY]
    return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs)
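A sketch showing the built-in encoder handling non-JSON-native types, and the encoder argument acting as the default= fallback passed to json.dumps:

import datetime
from aws_lambda_powertools.utilities.parser import BaseModel

class Event(BaseModel):
    at: datetime.datetime

e = Event(at=datetime.datetime(2024, 1, 1))
e.json()                              # datetime serialized by the default encoder
e.json(encoder=lambda o: "redacted")  # custom fallback for non-JSON-native values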
class ValidationError (errors: Sequence[Union[Sequence[Any], pydantic.error_wrappers.ErrorWrapper]], model: ModelOrDc)

Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details.

__pretty__ is used by devtools to provide human-readable representations of objects.

Expand source code
class ValidationError(Representation, ValueError):
    __slots__ = 'raw_errors', 'model', '_error_cache'

    def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None:
        self.raw_errors = errors
        self.model = model
        self._error_cache: Optional[List['ErrorDict']] = None

    def errors(self) -> List['ErrorDict']:
        if self._error_cache is None:
            try:
                config = self.model.__config__  # type: ignore
            except AttributeError:
                config = self.model.__pydantic_model__.__config__  # type: ignore
            self._error_cache = list(flatten_errors(self.raw_errors, config))
        return self._error_cache

    def json(self, *, indent: Union[None, int, str] = 2) -> str:
        return json.dumps(self.errors(), indent=indent, default=pydantic_encoder)

    def __str__(self) -> str:
        errors = self.errors()
        no_errors = len(errors)
        return (
            f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n'
            f'{display_errors(errors)}'
        )

    def __repr_args__(self) -> 'ReprArgs':
        return [('model', self.model.__name__), ('errors', self.errors())]

Ancestors

  • pydantic.utils.Representation
  • builtins.ValueError
  • builtins.Exception
  • builtins.BaseException

Instance variables

var model

The model (or dataclass) the validation error was raised for.

var raw_errors

The raw error wrappers collected during validation, flattened lazily by errors().

Methods

def errors(self) ‑> List[ErrorDict]
Expand source code
def errors(self) -> List['ErrorDict']:
    if self._error_cache is None:
        try:
            config = self.model.__config__  # type: ignore
        except AttributeError:
            config = self.model.__pydantic_model__.__config__  # type: ignore
        self._error_cache = list(flatten_errors(self.raw_errors, config))
    return self._error_cache
def json(self, *, indent: Union[ForwardRef(None), int, str] = 2) ‑> str
Expand source code
def json(self, *, indent: Union[None, int, str] = 2) -> str:
    return json.dumps(self.errors(), indent=indent, default=pydantic_encoder)
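A closing sketch tying the pieces together: catching a ValidationError and inspecting it through errors() and json():

from aws_lambda_powertools.utilities.parser import BaseModel, ValidationError

class User(BaseModel):
    id: int

try:
    User(id="not-a-number")
except ValidationError as exc:
    details = exc.errors()    # e.g. [{'loc': ('id',), 'msg': '...', 'type': 'type_error.integer'}]
    print(exc.json(indent=2)) # same list, serialized as JSON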