From d9950aa7cce94313b15c39af68c30024aa4c8c2b Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 22 Oct 2024 12:19:34 +0200 Subject: [PATCH 01/27] naming is not working --- tortoise/contrib/pydantic/creator.py | 884 ++++++++++++++++----------- tortoise/fields/base.py | 40 ++ tortoise/fields/relational.py | 38 +- tortoise/models.py | 68 ++- 4 files changed, 678 insertions(+), 352 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index f94418dde..cabad4984 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -1,21 +1,131 @@ +import dataclasses import inspect from base64 import b32encode +from collections.abc import MutableMapping +from dataclasses import dataclass, field from hashlib import sha3_224 -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Callable, Union from pydantic import ConfigDict, Field, computed_field, create_model from pydantic._internal._decorators import PydanticDescriptorProxy +from pydantic.fields import PropertyT from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel from tortoise.contrib.pydantic.utils import get_annotations from tortoise.fields import IntField, JSONField, TextField, relational +from tortoise.fields.base import FieldDescriptionBase +from tortoise.fields.relational import ForeignKeyFieldInstanceDescription if TYPE_CHECKING: # pragma: nocoverage - from tortoise.models import Model + from tortoise.models import Model, ModelDescription _MODEL_INDEX: Dict[str, Type[PydanticModel]] = {} +@dataclass +class MyPydanticMeta: + #: If not empty, only fields this property contains will be in the pydantic model + include: Tuple[str, ...] = () + + #: Fields listed in this property will be excluded from pydantic model + exclude: Tuple[str, ...] = field(default_factory=lambda: ("Meta",)) + + #: Computed fields can be listed here to use in pydantic model + computed: Tuple[str, ...] = field(default_factory=tuple) + + #: Use backward relations without annotations - not recommended, it can be huge data + #: without control + backward_relations: bool = True + + #: Maximum recursion level allowed + max_recursion: int = 3 + + #: Allow cycles in recursion - This can result in HUGE data - Be careful! + #: Please use this with ``exclude``/``include`` and sane ``max_recursion`` + allow_cycles: bool = False + + #: If we should exclude raw fields (the ones have _id suffixes) of relations + exclude_raw_fields: bool = True + + #: Sort fields alphabetically. + #: If not set (or ``False``) then leave fields in declaration order + sort_alphabetically: bool = False + + #: Allows user to specify custom config for generated model + model_config: Optional[ConfigDict] = None + + +def construct_pydantic_meta( + meta_default: MyPydanticMeta, + meta_override: Type +) -> MyPydanticMeta: + def get_param_from_meta_override(attr: str) -> Any: + return getattr(meta_override, attr, getattr(meta_default, attr)) + + default_include: Tuple[str, ...] = tuple(get_param_from_meta_override("include")) + default_exclude: Tuple[str, ...] = tuple(get_param_from_meta_override("exclude")) + default_computed: Tuple[str, ...] 
= tuple(get_param_from_meta_override("computed")) + default_config: Optional[ConfigDict] = meta_default.model_config + + backward_relations: bool = bool(get_param_from_meta_override("backward_relations")) + + max_recursion: int = int(get_param_from_meta_override("max_recursion")) + exclude_raw_fields: bool = bool(get_param_from_meta_override("exclude_raw_fields")) + sort_alphabetically: bool = bool(get_param_from_meta_override("sort_alphabetically")) + allow_cycles: bool = bool(get_param_from_meta_override("allow_cycles")) + + return MyPydanticMeta( + include=default_include, + exclude=default_exclude, + computed=default_computed, + model_config=default_config, + backward_relations=backward_relations, + max_recursion=max_recursion, + exclude_raw_fields=exclude_raw_fields, + sort_alphabetically=sort_alphabetically, + allow_cycles=allow_cycles + ) + + +def finalize_meta( + pydantic_meta: MyPydanticMeta, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, + model_config: Optional[ConfigDict] = None, +) -> MyPydanticMeta: + _sort_fields: bool = ( + pydantic_meta.sort_alphabetically + if sort_alphabetically is None + else sort_alphabetically + ) + _allow_cycles: bool = ( + pydantic_meta.allow_cycles + if allow_cycles is None + else allow_cycles + ) + + include = tuple(include) + pydantic_meta.include + exclude = tuple(exclude) + pydantic_meta.exclude + computed = tuple(computed) + pydantic_meta.computed + + _model_config = model_config if model_config else pydantic_meta.model_config + + return MyPydanticMeta( + include=include, + exclude=exclude, + computed=computed, + backward_relations=pydantic_meta.backward_relations, + max_recursion=pydantic_meta.max_recursion, + exclude_raw_fields=pydantic_meta.exclude_raw_fields, + sort_alphabetically=_sort_fields, + allow_cycles=_allow_cycles, + model_config=_model_config + ) + + class PydanticMeta: """ The ``PydanticMeta`` class is used to configure metadata for generating the pydantic Model. @@ -72,15 +182,15 @@ def _cleandoc(obj: Any) -> str: def _pydantic_recursion_protector( - cls: "Type[Model]", - *, - stack: tuple, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] = (), - name=None, - allow_cycles: bool = False, - sort_alphabetically: Optional[bool] = None, + cls: "Type[Model]", + *, + stack: tuple, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + name=None, + allow_cycles: bool = False, + sort_alphabetically: Optional[bool] = None, ) -> Optional[Type[PydanticModel]]: """ It is an inner function to protect pydantic model creator against cyclic recursion @@ -105,7 +215,7 @@ def _pydantic_recursion_protector( level += 1 - return pydantic_model_creator( + return PydanticModelCreator( cls, exclude=exclude, include=include, @@ -114,362 +224,83 @@ def _pydantic_recursion_protector( _stack=stack, allow_cycles=allow_cycles, sort_alphabetically=sort_alphabetically, - ) - - -def pydantic_model_creator( - cls: "Type[Model]", - *, - name=None, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] = (), - optional: Tuple[str, ...] 
= (), - allow_cycles: Optional[bool] = None, - sort_alphabetically: Optional[bool] = None, - _stack: tuple = (), - exclude_readonly: bool = False, - meta_override: Optional[Type] = None, - model_config: Optional[ConfigDict] = None, - validators: Optional[Dict[str, Any]] = None, - module: str = __name__, -) -> Type[PydanticModel]: - """ - Function to build `Pydantic Model `__ off Tortoise Model. - - :param _stack: Internal parameter to track recursion - :param cls: The Tortoise Model - :param name: Specify a custom name explicitly, instead of a generated name. - :param exclude: Extra fields to exclude from the provided model. - :param include: Extra fields to include from the provided model. - :param computed: Extra computed fields to include from the provided model. - :param optional: Extra optional fields for the provided model. - :param allow_cycles: Do we allow any cycles in the generated model? - This is only useful for recursive/self-referential models. + ).create_pydantic_model() - A value of ``False`` (the default) will prevent any and all backtracking. - :param sort_alphabetically: Sort the parameters alphabetically instead of Field-definition order. - The default order would be: +@dataclasses.dataclass +class ComputedFieldDescription: + field_type: Any + function: Callable[[], Any] + description: Optional[str] - * Field definition order + - * order of reverse relations (as discovered) + - * order of computed functions (as provided). - :param exclude_readonly: Build a subset model that excludes any readonly fields - :param meta_override: A PydanticMeta class to override model's values. - :param model_config: A custom config to use as pydantic config. - :param validators: A dictionary of methods that validate fields. - :param module: The name of the module that the model belongs to. - - Note: Created pydantic model uses config_class parameter and PydanticMeta's - config_class as its Config class's bases(Only if provided!), but it - ignores ``fields`` config. pydantic_model_creator will generate fields by - include/exclude/computed parameters automatically. - """ - # Fully qualified class name - fqname = cls.__module__ + "." + cls.__qualname__ - postfix = "" +class FieldMap(MutableMapping[str, Union[FieldDescriptionBase | ComputedFieldDescription]]): + def __init__(self, meta: MyPydanticMeta, pk_field_description: Optional[FieldDescriptionBase] = None): + self._field_map: dict[str, Union[FieldDescriptionBase | ComputedFieldDescription]] = {} + self.pk_raw_field = pk_field_description.name if pk_field_description is not None else "" + if pk_field_description: + self.pk_raw_field = pk_field_description.name + self.field_map_update([pk_field_description], meta) + self.computed_fields: dict[str, ComputedFieldDescription] = {} - def get_name() -> str: - # If arguments are specified (different from the defaults), we append a hash to the - # class name, to make it unique - # We don't check by stack, as cycles get explicitly renamed. - # When called later, include is explicitly set, so fence passes. 
- nonlocal postfix - is_default = ( - exclude == () - and include == () - and computed == () - and sort_alphabetically is None - and allow_cycles is None - ) - hashval = ( - f"{fqname};{exclude};{include};{computed};{_stack}:{sort_alphabetically}:{allow_cycles}" - ) - postfix = ( - ":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] - if not is_default - else "" - ) - return fqname + postfix + def __delitem__(self, __key): + self._field_map.__delitem__(__key) - # We need separate model class for different exclude, include and computed parameters - _name = name or get_name() - has_submodel = False + def __getitem__(self, __key): + return self._field_map.__getitem__(__key) - # Get settings and defaults - meta = getattr(cls, "PydanticMeta", PydanticMeta) + def __len__(self): + return self._field_map.__len__() - def get_param(attr: str) -> Any: - if meta_override: - return getattr(meta_override, attr, getattr(meta, attr, getattr(PydanticMeta, attr))) - return getattr(meta, attr, getattr(PydanticMeta, attr)) + def __iter__(self): + return self._field_map.__iter__() - default_include: Tuple[str, ...] = tuple(get_param("include")) - default_exclude: Tuple[str, ...] = tuple(get_param("exclude")) - default_computed: Tuple[str, ...] = tuple(get_param("computed")) - default_config: Optional[ConfigDict] = get_param("model_config") + def __setitem__(self, __key, __value): + self._field_map.__setitem__(__key, __value) - backward_relations: bool = bool(get_param("backward_relations")) + def sort_alphabetically(self): + self._field_map = {k: self._field_map[k] for k in sorted(self._field_map)} - max_recursion: int = int(get_param("max_recursion")) - exclude_raw_fields: bool = bool(get_param("exclude_raw_fields")) - _sort_fields: bool = ( - bool(get_param("sort_alphabetically")) - if sort_alphabetically is None - else sort_alphabetically - ) - _allow_cycles: bool = bool(get_param("allow_cycles") if allow_cycles is None else allow_cycles) - - # Update parameters with defaults - include = tuple(include) + default_include - exclude = tuple(exclude) + default_exclude - computed = tuple(computed) + default_computed - - annotations = get_annotations(cls) - - pconfig = PydanticModel.model_config.copy() - if default_config: - pconfig.update(default_config) - if model_config: - pconfig.update(model_config) - if "title" not in pconfig: - pconfig["title"] = name or cls.__name__ - if "extra" not in pconfig: - pconfig["extra"] = "forbid" - - properties: Dict[str, Any] = {} - - # Get model description - model_description = cls.describe(serializable=False) - - # Field map we use - field_map: Dict[str, dict] = {} - pk_raw_field: str = "" - - def field_map_update(keys: tuple, is_relation=True) -> None: - nonlocal pk_raw_field - - for key in keys: - fds = model_description[key] - if isinstance(fds, dict): - fds = [fds] - for fd in fds: - n = fd["name"] - if key == "pk_field": - pk_raw_field = n - # Include or exclude field - if (include and n not in include) or n in exclude: - continue - # Remove raw fields - raw_field = fd.get("raw_field", None) - if raw_field is not None and exclude_raw_fields and raw_field != pk_raw_field: - field_map.pop(raw_field, None) - field_map[n] = fd - - # Update field definitions from description - if not exclude_readonly: - field_map_update(("pk_field",), is_relation=False) - field_map_update(("data_fields",), is_relation=False) - if not exclude_readonly: - included_fields: tuple = ( - "fk_fields", - "o2o_fields", - "m2m_fields", - ) - if backward_relations: 
- included_fields = ( - *included_fields, - "backward_fk_fields", - "backward_o2o_fields", - ) + def sort_definition_order(self, cls: "Type[Model]", computed: tuple[str, ...]): + self._field_map = { + k: self._field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in self._field_map + } - field_map_update(included_fields) - # Add possible computed fields - field_map.update( + def field_map_update(self, field_descriptions: list[FieldDescriptionBase], meta: MyPydanticMeta) -> None: + for field_description in field_descriptions: + name = field_description.name + # Include or exclude field + if (meta.include and name not in meta.include) or name in meta.exclude: + continue + # Remove raw fields + if isinstance(field_description, ForeignKeyFieldInstanceDescription): + raw_field = field_description.raw_field + if meta.exclude_raw_fields and raw_field != self.pk_raw_field: + self.pop(raw_field, None) + self[name] = field_description + + def computed_field_map_update(self, computed: tuple[str, ...], cls: "Type[Model]"): + self._field_map.update( { - k: { - "field_type": callable, - "function": getattr(cls, k), - "description": None, - } + k: ComputedFieldDescription( + field_type=callable, + function=getattr(cls, k), + description=None, + ) for k in computed } ) - # Sort field map (Python 3.7+ has guaranteed ordered dictionary keys) - if _sort_fields: - # Sort Alphabetically - field_map = {k: field_map[k] for k in sorted(field_map)} - else: - # Sort to definition order - field_map = { - k: field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in field_map - } - # Process fields - for fname, fdesc in field_map.items(): - comment = "" - json_schema_extra: Dict[str, Any] = {} - fconfig: Dict[str, Any] = { - "json_schema_extra": json_schema_extra, - } - field_type = fdesc["field_type"] - field_default = fdesc.get("default") - is_optional_field = fname in optional - - def get_submodel(_model: "Type[Model]") -> Optional[Type[PydanticModel]]: - """Get Pydantic model for the submodel""" - nonlocal exclude, _name, has_submodel - - if _model: - new_stack = _stack + ((cls, fname, max_recursion),) - - # Get pydantic schema for the submodel - prefix_len = len(fname) + 1 - pmodel = _pydantic_recursion_protector( - _model, - exclude=tuple( - str(v[prefix_len:]) for v in exclude if v.startswith(fname + ".") - ), - include=tuple( - str(v[prefix_len:]) for v in include if v.startswith(fname + ".") - ), - computed=tuple( - str(v[prefix_len:]) for v in computed if v.startswith(fname + ".") - ), - stack=new_stack, - allow_cycles=_allow_cycles, - sort_alphabetically=sort_alphabetically, - ) - else: - pmodel = None - - # If the result is None it has been excluded and we need to exclude the field - if pmodel is None: - exclude += (fname,) - else: - has_submodel = True - # We need to rename if there are duplicate instances of this model - if cls in (c[0] for c in _stack): - _name = name or get_name() - - return pmodel - - # Foreign keys and OneToOne fields are embedded schemas - is_to_one_relation = False - if ( - field_type is relational.ForeignKeyFieldInstance - or field_type is relational.OneToOneFieldInstance - or field_type is relational.BackwardOneToOneRelation - ): - is_to_one_relation = True - model = get_submodel(fdesc["python_type"]) - if model: - if fdesc.get("nullable"): - json_schema_extra["nullable"] = True - if fdesc.get("nullable") or field_default is not None: - model = Optional[model] # type: ignore - - properties[fname] = model - - # Backward FK and ManyToMany fields 
are list of embedded schemas - elif ( - field_type is relational.BackwardFKRelation - or field_type is relational.ManyToManyFieldInstance - ): - model = get_submodel(fdesc["python_type"]) - if model: - properties[fname] = List[model] # type: ignore - - # Computed fields as methods - elif field_type is callable: - func = fdesc["function"] - annotation = get_annotations(cls, func).get("return", None) - comment = _cleandoc(func) - if annotation is not None: - properties[fname] = computed_field(return_type=annotation, description=comment)( - func - ) - - # Json fields - elif field_type is JSONField: - properties[fname] = Any - # Any other tortoise fields - else: - annotation = annotations.get(fname, None) - if "readOnly" in fdesc["constraints"]: - json_schema_extra["readOnly"] = fdesc["constraints"]["readOnly"] - del fdesc["constraints"]["readOnly"] - fconfig.update(fdesc["constraints"]) - ptype = fdesc["python_type"] - if fdesc.get("nullable"): - json_schema_extra["nullable"] = True - if is_optional_field or field_default is not None or fdesc.get("nullable"): - ptype = Optional[ptype] - if not (exclude_readonly and json_schema_extra.get("readOnly") is True): - properties[fname] = annotation or ptype - - if fname in properties and not isinstance(properties[fname], tuple): - fconfig["title"] = fname.replace("_", " ").title() - description = comment or _br_it(fdesc.get("docstring") or fdesc["description"] or "") - if description: - fconfig["description"] = description - ftype = properties[fname] - if isinstance(ftype, PydanticDescriptorProxy): - continue - if is_optional_field or (field_default is not None and not callable(field_default)): - properties[fname] = (ftype, Field(default=field_default, **fconfig)) - else: - if (j := fconfig.get("json_schema_extra")) and ( - ( - j.get("nullable") - and not is_to_one_relation - and field_type not in (IntField, TextField) - ) - or (exclude_readonly and j.get("readOnly")) - ): - fconfig["default_factory"] = lambda: None - properties[fname] = (ftype, Field(**fconfig)) - - # Here we endure that the name is unique, but complete objects are still labeled verbatim - if not has_submodel: - _name = name or f"{fqname}.leaf" - elif has_submodel: - _name = name or get_name() - - # Here we de-dup to ensure that a uniquely named object is a unique object - # This fixes some Pydantic constraints. - if _name in _MODEL_INDEX: - return _MODEL_INDEX[_name] - - # Creating Pydantic class for the properties generated before - properties["model_config"] = pconfig - model = create_model( - _name, - __base__=PydanticModel, - __module__=module, - __validators__=validators, - **properties, - ) - # Copy the Model docstring over - model.__doc__ = _cleandoc(cls) - # Store the base class - model.model_config["orig_model"] = cls # type: ignore - # Store model reference so we can de-dup it later on if needed. - _MODEL_INDEX[_name] = model - return model - def pydantic_queryset_creator( - cls: "Type[Model]", - *, - name=None, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] = (), - allow_cycles: Optional[bool] = None, - sort_alphabetically: Optional[bool] = None, + cls: "Type[Model]", + *, + name=None, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, ) -> Type[PydanticListModel]: """ Function to build a `Pydantic Model `__ list off Tortoise Model. 
@@ -518,3 +349,358 @@ def pydantic_queryset_creator( model.model_config["title"] = name or f"{submodel.model_config['title']}_list" model.model_config["submodel"] = submodel # type: ignore return model + + +PropertyT = Any + + +class PydanticModelCreator: + def __init__( + self, + cls: "Type[Model]", + name: Optional[str] = None, + exclude: Optional[Tuple[str, ...]] = (), + include: Optional[Tuple[str, ...]] = (), + computed: Optional[Tuple[str, ...]] = (), + optional: Optional[Tuple[str, ...]] = (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, + exclude_readonly: bool = False, + meta_override: Optional[Type] = None, + model_config: Optional[ConfigDict] = None, + validators: Optional[Dict[str, Any]] = None, + module: str = __name__, + _stack: tuple = () + ): + self._cls: "Type[Model]" = cls + self._stack: tuple[tuple["Type[Model]", str, int]] = tuple() # ((Type[Model], field_name, max_recursion),) + self._is_default: bool = ( + exclude is None + and include is None + and computed is None + and optional is None + and sort_alphabetically is None + and allow_cycles is None + and meta_override is None + ) + + meta_from_class = cls.my_pydantic_meta \ + if not meta_override \ + else construct_pydantic_meta(cls.my_pydantic_meta, meta_override) + self.meta = finalize_meta( + meta_from_class, exclude, include, computed, allow_cycles, sort_alphabetically, model_config + ) + print(f"Meta: {self.meta}") + + self._fqname = cls.__module__ + "." + cls.__qualname__ + self._name: str + self._title: str + self._name, self._title = self.get_name(name) + self.given_name = name + + self._has_submodel = False + + self._annotations = get_annotations(cls) + + self._pconfig: ConfigDict = self.initialize_pconfig() + + self._properties: Dict[str, Any] = dict() + + self._model_description: ModelDescription = cls.describe_by_dataclass() + + self._exclude_read_only: bool = exclude_readonly + + self._field_map: FieldMap = self.initialize_field_map() + self.construct_field_map() + + self._optional = optional + + self._validators = validators + self._module = module + + self._stack = _stack + + def get_name(self, name: Optional[str] = None) -> tuple[str, str]: + """ + return + @rtype: tuple + name, title + """ + # If arguments are specified (different from the defaults), we append a hash to the + # class name, to make it unique + # We don't check by stack, as cycles get explicitly renamed. + # When called later, include is explicitly set, so fence passes. 
+ if name is not None: + return name, name + hashval = ( + f"{self._fqname};{self.meta.exclude};{self.meta.include};{self.meta.computed};" + f"{self._stack}:{self.meta.sort_alphabetically}:{self.meta.allow_cycles}" + ) + postfix = ( + ":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] + if not self._is_default + else "" + ) + return self._fqname + postfix, self._cls.__name__ + + def initialize_pconfig(self): + pconfig: ConfigDict = PydanticModel.model_config.copy() + if self.meta.model_config: + pconfig.update(self.meta.model_config) + if "title" not in pconfig: + pconfig["title"] = self._title + if "extra" not in pconfig: + pconfig["extra"] = 'forbid' + return pconfig + + def initialize_field_map(self): + return FieldMap(self.meta) \ + if self._exclude_read_only \ + else FieldMap(self.meta, pk_field_description=self._model_description.pk_field) + + def construct_field_map(self): + self._field_map.field_map_update(field_descriptions=self._model_description.data_fields, meta=self.meta) + if not self._exclude_read_only: + for field_descriptions in ( + self._model_description.fk_fields, + self._model_description.o2o_fields, + self._model_description.m2m_fields + ): + self._field_map.field_map_update(field_descriptions, self.meta) + if self.meta.backward_relations: + for field_descriptions in ( + self._model_description.backward_fk_fields, + self._model_description.backward_o2o_fields + ): + self._field_map.field_map_update(field_descriptions, self.meta) + self._field_map.computed_field_map_update(self.meta.computed, self._cls) + if self.meta.sort_alphabetically: + self._field_map.sort_alphabetically() + else: + self._field_map.sort_definition_order(self._cls, self.meta.computed) + + def create_pydantic_model(self): + print(f"ModelDescription: {self._model_description}") + print(f"FieldMap: {self._field_map._field_map}") + for field_name, field_description in self._field_map.items(): + self.process_field(field_name, field_description) + + print(f"FieldMap: {self._field_map._field_map}") + print(f"Properties: {self._properties}") + if not self._has_submodel: + self._name = self.given_name or f"{self._fqname}.leaf" + + if self._name in _MODEL_INDEX: + print("not new generated") + return _MODEL_INDEX[self._name] + + self._properties["model_config"] = self._pconfig + print(f"Properties: {self._properties}") + print(f"FieldMap: {self._field_map._field_map}") + model = create_model( + self._name, + __base__=PydanticModel, + __module__=self._module, + __validators__=self._validators, + **self._properties, + ) + # Copy the Model docstring over + model.__doc__ = _cleandoc(self._cls) + # Store the base class + model.model_config["orig_model"] = self._cls # type: ignore + # Store model reference so we can de-dup it later on if needed. 
+ _MODEL_INDEX[self._name] = model + return model + + def process_field( + self, + field_name: str, + field_description: Union[FieldDescriptionBase, ComputedFieldDescription], + ): + json_schema_extra: Dict[str, Any] = {} + fconfig: Dict[str, Any] = { + "json_schema_extra": json_schema_extra, + } + field_property: PropertyT = None + is_to_one_relation: bool = False + if isinstance(field_description, FieldDescriptionBase): + field_property, is_to_one_relation = self.process_normal_field_description(field_name, field_description, json_schema_extra, fconfig) + elif isinstance(field_description, ComputedFieldDescription): + field_property, is_to_one_relation = self.process_computed_field_description(field_description), False + + if field_property: + self._properties[field_name] = field_property + if field_name in self._properties and not isinstance(self._properties[field_name], tuple): + fconfig["title"] = field_name.replace("_", " ").title() + # description = "" or _br_it(field_description.docstring or field_description.description or "") + description = None + if description: + fconfig["description"] = description + ftype = self._properties[field_name] + if not isinstance(ftype, PydanticDescriptorProxy): + if field_name in self._optional or (field_description.default is not None and not callable(field_description.default)): + self._properties[field_name] = (ftype, Field(default=field_description.default, **fconfig)) + else: + if ( + ( + json_schema_extra.get("nullable") + and not is_to_one_relation + and field_description.field_type not in (IntField, TextField) + ) + or (self._exclude_read_only and json_schema_extra.get("readOnly")) + ): + fconfig["default_factory"] = lambda: None + self._properties[field_name] = (ftype, Field(**fconfig)) + + def process_normal_field_description( + self, + field_name: str, + field_description: FieldDescriptionBase, + json_schema_extra: Dict[str, Any], + fconfig: Dict[str, Any], + ) -> tuple[Optional[PropertyT], bool]: + print(field_description) + if ( + field_description.field_type is relational.ForeignKeyFieldInstance + or field_description.field_type is relational.OneToOneFieldInstance + or field_description.field_type is relational.BackwardOneToOneRelation + ): + return self.process_single_field_relation(field_name, field_description, json_schema_extra), True + elif field_description.field_type in (relational.BackwardFKRelation, relational.ManyToManyFieldInstance): + return self.process_many_field_relation(field_name, field_description), False + elif field_description.field_type is JSONField: + return self.process_json_field_description(), False + return self.process_data_field_description(field_name, field_description, json_schema_extra, fconfig), False + + def process_single_field_relation( + self, + field_name: str, + field_description: FieldDescriptionBase, + json_schema_extra: Dict[str, Any], + ) -> Optional[PropertyT]: + print(field_description) + model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) + if model: + if field_description.nullable: + json_schema_extra["nullable"] = True + if field_description.nullable or field_description.default is not None: + model = Optional[model] # type: ignore + + return model + + def process_many_field_relation( + self, + field_name: str, + field_description: FieldDescriptionBase, + ) -> Optional[Type[List[Type[PydanticModel]]]]: + model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) + if model: + return List[model] + + 
def process_json_field_description(self): + is_to_one_relation = False + return Any, is_to_one_relation + + def process_data_field_description( + self, + field_name: str, + field_description: FieldDescriptionBase, + json_schema_extra: Dict[str, Any], + fconfig: Dict[str, Any], + ) -> Optional[PropertyT]: + annotation = self._annotations.get(field_name, None) + if "readOnly" in field_description.constraints: + json_schema_extra["readOnly"] = field_description.constraints["readOnly"] + del field_description.constraints["readOnly"] + fconfig.update(field_description.constraints) + ptype = field_description.python_type + print(f"ptype: {ptype}") + if field_description.nullable: + json_schema_extra["nullable"] = True + print(field_description) + if field_name in self._optional or field_description.default is not None or field_description.nullable: + ptype = Optional[ptype] + if not (self._exclude_read_only and json_schema_extra.get("readOnly") is True): + return annotation or ptype + + def process_computed_field_description( + self, + field_description: ComputedFieldDescription, + ): + func = field_description.function + annotation = get_annotations(self._cls, func).get("return", None) + print(f"anno: {annotation}") + comment = _cleandoc(func) + if annotation is not None: + c_f = computed_field(return_type=annotation, description=comment) + ret = c_f(func) + return ret + + def get_submodel(self, _model: "Type[Model]", field_name: str) -> Optional[Type[PydanticModel]]: + """Get Pydantic model for the submodel""" + + if _model: + new_stack = self._stack + ((self._cls, field_name, self.meta.max_recursion),) + + # Get pydantic schema for the submodel + prefix_len = len(field_name) + 1 + pmodel = _pydantic_recursion_protector( + _model, + exclude=tuple( + str(v[prefix_len:]) for v in self.meta.exclude if v.startswith(field_name + ".") + ), + include=tuple( + str(v[prefix_len:]) for v in self.meta.include if v.startswith(field_name + ".") + ), + computed=tuple( + str(v[prefix_len:]) for v in self.meta.computed if v.startswith(field_name + ".") + ), + stack=new_stack, + allow_cycles=self.meta.allow_cycles, + sort_alphabetically=self.meta.sort_alphabetically, + ) + else: + pmodel = None + + # If the result is None it has been excluded and we need to exclude the field + if pmodel is None: + self.meta.exclude += (field_name,) + else: + self._has_submodel = True + + return pmodel + + +def pydantic_model_creator( + cls: "Type[Model]", + *, + name=None, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + optional: Tuple[str, ...] 
= (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, + _stack: tuple = (), + exclude_readonly: bool = False, + meta_override: Optional[Type] = None, + model_config: Optional[ConfigDict] = None, + validators: Optional[Dict[str, Any]] = None, + module: str = __name__, +) -> Type[PydanticModel]: + pmc = PydanticModelCreator( + cls=cls, + name=name, + exclude=exclude, + include=include, + computed=computed, + optional=optional, + allow_cycles=allow_cycles, + sort_alphabetically=sort_alphabetically, + exclude_readonly=exclude_readonly, + meta_override=meta_override, + model_config=model_config, + validators=validators, + module=module + ) + return pmc.create_pydantic_model() diff --git a/tortoise/fields/base.py b/tortoise/fields/base.py index 1887e63ae..c8529d8a9 100644 --- a/tortoise/fields/base.py +++ b/tortoise/fields/base.py @@ -1,3 +1,4 @@ +import dataclasses import sys import warnings from enum import Enum @@ -62,6 +63,27 @@ def __new__(mcs, name: str, bases: Tuple[Type, ...], attrs: dict): return type.__new__(mcs, name, bases, attrs) +@dataclasses.dataclass +class FieldDescriptionBase: + name: str + field_type: Type["Field"] + generated: bool + nullable: bool + unique: bool + indexed: bool + constraints: dict + python_type: Optional[type] = None + default: Optional[Any] = None + description: Optional[str] = None + docstring: Optional[str] = None + db_field_types: Optional[dict[str, str]] = None + + +@dataclasses.dataclass +class FieldDescription(FieldDescriptionBase): + db_column: str = "" + + class Field(Generic[VALUE], metaclass=_FieldMeta): """ Base Field type. @@ -441,3 +463,21 @@ def default_name(default: Any) -> Optional[Union[int, float, str, bool]]: desc["db_field_types"] = self.get_db_field_types() return desc + + def describe_by_dataclass(self): + field_type = getattr(self, "related_model", self.field_type) + return FieldDescription( + name=self.model_field_name, + field_type=self.__class__, + db_column=self.source_field or self.model_field_name, + python_type=field_type, + generated=self.generated, + nullable=self.null, + unique=self.unique, + indexed=self.index or self.unique, + default=self.default, + description=self.description, + docstring=self.docstring, + constraints=self.constraints, + db_field_types=self.get_db_field_types() if self.has_db_field else None + ) diff --git a/tortoise/fields/relational.py b/tortoise/fields/relational.py index a95e5103f..eb01037fd 100644 --- a/tortoise/fields/relational.py +++ b/tortoise/fields/relational.py @@ -1,3 +1,4 @@ +import dataclasses from typing import ( TYPE_CHECKING, Any, @@ -18,7 +19,7 @@ from typing_extensions import Literal from tortoise.exceptions import ConfigurationError, NoValuesFetched, OperationalError -from tortoise.fields.base import CASCADE, SET_NULL, Field, OnDelete +from tortoise.fields.base import CASCADE, SET_NULL, Field, OnDelete, FieldDescription, FieldDescriptionBase if TYPE_CHECKING: # pragma: nocoverage from tortoise.backends.base.client import BaseDBAsyncClient @@ -48,7 +49,7 @@ class ReverseRelation(Generic[MODEL]): def __init__( self, - remote_model: Type[MODEL], + remote_model: "Type[MODEL]", relation_field: str, instance: "Model", from_field: str, @@ -240,6 +241,11 @@ async def _remove_or_clear( await db.execute_query(str(query)) +@dataclasses.dataclass +class RelationalFieldDescription(FieldDescriptionBase): + db_constraint: bool = False + + class RelationalField(Field[MODEL]): has_db_field = False @@ -276,6 +282,11 @@ def describe(self, serializable: bool) -> 
dict: del desc["db_column"] return desc + def describe_by_dataclass(self): + return RelationalFieldDescription( + **self.describe(False) + ) + @classmethod def validate_model_name(cls, model_name: str) -> None: if len(model_name.split(".")) != 2: @@ -283,6 +294,12 @@ def validate_model_name(cls, model_name: str) -> None: raise ConfigurationError(f'{field_type} accepts model name in format "app.Model"') +@dataclasses.dataclass +class ForeignKeyFieldInstanceDescription(RelationalFieldDescription): + raw_field: str = "" + on_delete: str = "" + + class ForeignKeyFieldInstance(RelationalField[MODEL]): def __init__( self, @@ -309,6 +326,9 @@ def describe(self, serializable: bool) -> dict: desc["on_delete"] = str(self.on_delete) return desc + def describe_by_dataclass(self): + return ForeignKeyFieldInstanceDescription(**self.describe(False)) + class BackwardFKRelation(RelationalField[MODEL]): def __init__( @@ -342,6 +362,17 @@ class BackwardOneToOneRelation(BackwardFKRelation[MODEL]): pass +@dataclasses.dataclass +class ManyToManyFieldInstanceDescription(RelationalFieldDescription): + model_name: str = "" + related_name: str = "" + forward_key: str = "" + backward_key: str = "" + through: str = "" + on_delete: str = "" + _generated: bool = False + + class ManyToManyFieldInstance(RelationalField[MODEL]): field_type = ManyToManyRelation @@ -381,6 +412,9 @@ def describe(self, serializable: bool) -> dict: desc["_generated"] = self._generated return desc + def describe_by_dataclass(self): + return ManyToManyFieldInstanceDescription(**self.describe(False)) + @overload def OneToOneField( diff --git a/tortoise/models.py b/tortoise/models.py index 4fd513d56..c1923af09 100644 --- a/tortoise/models.py +++ b/tortoise/models.py @@ -1,4 +1,5 @@ import asyncio +import dataclasses import inspect import re from copy import copy, deepcopy @@ -27,6 +28,7 @@ from tortoise import connections from tortoise.backends.base.client import BaseDBAsyncClient +from tortoise.contrib.pydantic.creator import MyPydanticMeta from tortoise.exceptions import ( ConfigurationError, DoesNotExist, @@ -36,7 +38,7 @@ OperationalError, ParamsError, ) -from tortoise.fields.base import Field +from tortoise.fields.base import Field, FieldDescription from tortoise.fields.data import IntField from tortoise.fields.relational import ( BackwardFKRelation, @@ -643,10 +645,30 @@ def __getitem__(cls: Type[MODEL], key: Any) -> QuerySetSingle[MODEL]: # type: i return cls._getbypk(key) # type: ignore +@dataclasses.dataclass +class ModelDescription: + name: str + table: str + abstract: bool + description: str + pk_field: FieldDescription + app: Optional[str] = None + docstring: Optional[str] = None + unique_together: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=list) + indexes: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=list) + data_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + fk_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + backward_fk_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + o2o_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + backward_o2o_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + m2m_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + + class Model(metaclass=ModelMeta): """ Base class for all Tortoise ORM Models. 
""" + my_pydantic_meta: MyPydanticMeta = MyPydanticMeta() # I don' like this here, but it makes auto completion and static analysis much happier _meta = MetaInfo(None) # type: ignore @@ -1490,6 +1512,50 @@ def describe(cls, serializable: bool = True) -> dict: ], } + @classmethod + def describe_by_dataclass(cls): + return ModelDescription( + name=cls._meta.full_name, + app=cls._meta.app, + table=cls._meta.db_table, + abstract=cls._meta.abstract, + description=cls._meta.table_description or None, + docstring=inspect.cleandoc(cls.__doc__ or "") or None, + unique_together=cls._meta.unique_together or [], + indexes=cls._meta.indexes or [], + pk_field=cls._meta.fields_map[cls._meta.pk_attr].describe_by_dataclass(), + data_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name != cls._meta.pk_attr and name in (cls._meta.fields - cls._meta.fetch_fields) + ], + fk_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.fk_fields + ], + backward_fk_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.backward_fk_fields + ], + o2o_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.o2o_fields + ], + backward_o2o_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.backward_o2o_fields + ], + m2m_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.m2m_fields + ], + ) + def __await__(self: MODEL) -> Generator[Any, None, MODEL]: async def _self() -> MODEL: return self From f6239840c1044a910dcef5a3322bc93c8f37c660 Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 22 Oct 2024 16:14:51 +0200 Subject: [PATCH 02/27] naming and description --- tortoise/contrib/pydantic/creator.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index cabad4984..c739831b2 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -224,6 +224,7 @@ def _pydantic_recursion_protector( _stack=stack, allow_cycles=allow_cycles, sort_alphabetically=sort_alphabetically, + _as_submodel=True, ).create_pydantic_model() @@ -370,7 +371,8 @@ def __init__( model_config: Optional[ConfigDict] = None, validators: Optional[Dict[str, Any]] = None, module: str = __name__, - _stack: tuple = () + _stack: tuple = (), + _as_submodel: bool = False ): self._cls: "Type[Model]" = cls self._stack: tuple[tuple["Type[Model]", str, int]] = tuple() # ((Type[Model], field_name, max_recursion),) @@ -396,9 +398,10 @@ def __init__( self._name: str self._title: str self._name, self._title = self.get_name(name) + print(self._name, self._title) self.given_name = name - self._has_submodel = False + self._as_submodel = _as_submodel self._annotations = get_annotations(cls) @@ -487,8 +490,8 @@ def create_pydantic_model(self): print(f"FieldMap: {self._field_map._field_map}") print(f"Properties: {self._properties}") - if not self._has_submodel: - self._name = self.given_name or f"{self._fqname}.leaf" + if self._as_submodel: + self._name = f"{self._name}:leaf" if self._name in _MODEL_INDEX: print("not new generated") @@ -523,17 +526,18 @@ def process_field( } field_property: PropertyT = None is_to_one_relation: bool = False + comment = "" if isinstance(field_description, 
FieldDescriptionBase): field_property, is_to_one_relation = self.process_normal_field_description(field_name, field_description, json_schema_extra, fconfig) elif isinstance(field_description, ComputedFieldDescription): field_property, is_to_one_relation = self.process_computed_field_description(field_description), False + comment = _cleandoc(field_description.function) if field_property: self._properties[field_name] = field_property if field_name in self._properties and not isinstance(self._properties[field_name], tuple): fconfig["title"] = field_name.replace("_", " ").title() - # description = "" or _br_it(field_description.docstring or field_description.description or "") - description = None + description = comment or _br_it(field_description.docstring or field_description.description or "") if description: fconfig["description"] = description ftype = self._properties[field_name] @@ -665,8 +669,6 @@ def get_submodel(self, _model: "Type[Model]", field_name: str) -> Optional[Type[ # If the result is None it has been excluded and we need to exclude the field if pmodel is None: self.meta.exclude += (field_name,) - else: - self._has_submodel = True return pmodel From 7fcf914d2184661ab768a054a0e7408a4eb7242d Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 22 Oct 2024 12:19:34 +0200 Subject: [PATCH 03/27] naming is not working --- tortoise/contrib/pydantic/creator.py | 883 ++++++++++++++++----------- tortoise/fields/base.py | 40 ++ tortoise/fields/relational.py | 38 +- tortoise/models.py | 68 ++- 4 files changed, 678 insertions(+), 351 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index f73d5e4fc..cabad4984 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -1,21 +1,131 @@ +import dataclasses import inspect from base64 import b32encode +from collections.abc import MutableMapping +from dataclasses import dataclass, field from hashlib import sha3_224 -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Callable, Union from pydantic import ConfigDict, Field, computed_field, create_model from pydantic._internal._decorators import PydanticDescriptorProxy +from pydantic.fields import PropertyT from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel from tortoise.contrib.pydantic.utils import get_annotations from tortoise.fields import IntField, JSONField, TextField, relational +from tortoise.fields.base import FieldDescriptionBase +from tortoise.fields.relational import ForeignKeyFieldInstanceDescription if TYPE_CHECKING: # pragma: nocoverage - from tortoise.models import Model + from tortoise.models import Model, ModelDescription _MODEL_INDEX: Dict[str, Type[PydanticModel]] = {} +@dataclass +class MyPydanticMeta: + #: If not empty, only fields this property contains will be in the pydantic model + include: Tuple[str, ...] = () + + #: Fields listed in this property will be excluded from pydantic model + exclude: Tuple[str, ...] = field(default_factory=lambda: ("Meta",)) + + #: Computed fields can be listed here to use in pydantic model + computed: Tuple[str, ...] = field(default_factory=tuple) + + #: Use backward relations without annotations - not recommended, it can be huge data + #: without control + backward_relations: bool = True + + #: Maximum recursion level allowed + max_recursion: int = 3 + + #: Allow cycles in recursion - This can result in HUGE data - Be careful! 
+ #: Please use this with ``exclude``/``include`` and sane ``max_recursion`` + allow_cycles: bool = False + + #: If we should exclude raw fields (the ones have _id suffixes) of relations + exclude_raw_fields: bool = True + + #: Sort fields alphabetically. + #: If not set (or ``False``) then leave fields in declaration order + sort_alphabetically: bool = False + + #: Allows user to specify custom config for generated model + model_config: Optional[ConfigDict] = None + + +def construct_pydantic_meta( + meta_default: MyPydanticMeta, + meta_override: Type +) -> MyPydanticMeta: + def get_param_from_meta_override(attr: str) -> Any: + return getattr(meta_override, attr, getattr(meta_default, attr)) + + default_include: Tuple[str, ...] = tuple(get_param_from_meta_override("include")) + default_exclude: Tuple[str, ...] = tuple(get_param_from_meta_override("exclude")) + default_computed: Tuple[str, ...] = tuple(get_param_from_meta_override("computed")) + default_config: Optional[ConfigDict] = meta_default.model_config + + backward_relations: bool = bool(get_param_from_meta_override("backward_relations")) + + max_recursion: int = int(get_param_from_meta_override("max_recursion")) + exclude_raw_fields: bool = bool(get_param_from_meta_override("exclude_raw_fields")) + sort_alphabetically: bool = bool(get_param_from_meta_override("sort_alphabetically")) + allow_cycles: bool = bool(get_param_from_meta_override("allow_cycles")) + + return MyPydanticMeta( + include=default_include, + exclude=default_exclude, + computed=default_computed, + model_config=default_config, + backward_relations=backward_relations, + max_recursion=max_recursion, + exclude_raw_fields=exclude_raw_fields, + sort_alphabetically=sort_alphabetically, + allow_cycles=allow_cycles + ) + + +def finalize_meta( + pydantic_meta: MyPydanticMeta, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, + model_config: Optional[ConfigDict] = None, +) -> MyPydanticMeta: + _sort_fields: bool = ( + pydantic_meta.sort_alphabetically + if sort_alphabetically is None + else sort_alphabetically + ) + _allow_cycles: bool = ( + pydantic_meta.allow_cycles + if allow_cycles is None + else allow_cycles + ) + + include = tuple(include) + pydantic_meta.include + exclude = tuple(exclude) + pydantic_meta.exclude + computed = tuple(computed) + pydantic_meta.computed + + _model_config = model_config if model_config else pydantic_meta.model_config + + return MyPydanticMeta( + include=include, + exclude=exclude, + computed=computed, + backward_relations=pydantic_meta.backward_relations, + max_recursion=pydantic_meta.max_recursion, + exclude_raw_fields=pydantic_meta.exclude_raw_fields, + sort_alphabetically=_sort_fields, + allow_cycles=_allow_cycles, + model_config=_model_config + ) + + class PydanticMeta: """ The ``PydanticMeta`` class is used to configure metadata for generating the pydantic Model. @@ -72,15 +182,15 @@ def _cleandoc(obj: Any) -> str: def _pydantic_recursion_protector( - cls: "Type[Model]", - *, - stack: tuple, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] = (), - name=None, - allow_cycles: bool = False, - sort_alphabetically: Optional[bool] = None, + cls: "Type[Model]", + *, + stack: tuple, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] 
= (), + name=None, + allow_cycles: bool = False, + sort_alphabetically: Optional[bool] = None, ) -> Optional[Type[PydanticModel]]: """ It is an inner function to protect pydantic model creator against cyclic recursion @@ -105,7 +215,7 @@ def _pydantic_recursion_protector( level += 1 - return pydantic_model_creator( + return PydanticModelCreator( cls, exclude=exclude, include=include, @@ -114,361 +224,83 @@ def _pydantic_recursion_protector( _stack=stack, allow_cycles=allow_cycles, sort_alphabetically=sort_alphabetically, - ) + ).create_pydantic_model() -def pydantic_model_creator( - cls: "Type[Model]", - *, - name=None, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] = (), - optional: Tuple[str, ...] = (), - allow_cycles: Optional[bool] = None, - sort_alphabetically: Optional[bool] = None, - _stack: tuple = (), - exclude_readonly: bool = False, - meta_override: Optional[Type] = None, - model_config: Optional[ConfigDict] = None, - validators: Optional[Dict[str, Any]] = None, - module: str = __name__, -) -> Type[PydanticModel]: - """ - Function to build `Pydantic Model `__ off Tortoise Model. +@dataclasses.dataclass +class ComputedFieldDescription: + field_type: Any + function: Callable[[], Any] + description: Optional[str] - :param _stack: Internal parameter to track recursion - :param cls: The Tortoise Model - :param name: Specify a custom name explicitly, instead of a generated name. - :param exclude: Extra fields to exclude from the provided model. - :param include: Extra fields to include from the provided model. - :param computed: Extra computed fields to include from the provided model. - :param optional: Extra optional fields for the provided model. - :param allow_cycles: Do we allow any cycles in the generated model? - This is only useful for recursive/self-referential models. - A value of ``False`` (the default) will prevent any and all backtracking. - :param sort_alphabetically: Sort the parameters alphabetically instead of Field-definition order. +class FieldMap(MutableMapping[str, Union[FieldDescriptionBase | ComputedFieldDescription]]): + def __init__(self, meta: MyPydanticMeta, pk_field_description: Optional[FieldDescriptionBase] = None): + self._field_map: dict[str, Union[FieldDescriptionBase | ComputedFieldDescription]] = {} + self.pk_raw_field = pk_field_description.name if pk_field_description is not None else "" + if pk_field_description: + self.pk_raw_field = pk_field_description.name + self.field_map_update([pk_field_description], meta) + self.computed_fields: dict[str, ComputedFieldDescription] = {} - The default order would be: + def __delitem__(self, __key): + self._field_map.__delitem__(__key) - * Field definition order + - * order of reverse relations (as discovered) + - * order of computed functions (as provided). - :param exclude_readonly: Build a subset model that excludes any readonly fields - :param meta_override: A PydanticMeta class to override model's values. - :param model_config: A custom config to use as pydantic config. - :param validators: A dictionary of methods that validate fields. - :param module: The name of the module that the model belongs to. - - Note: Created pydantic model uses config_class parameter and PydanticMeta's - config_class as its Config class's bases(Only if provided!), but it - ignores ``fields`` config. pydantic_model_creator will generate fields by - include/exclude/computed parameters automatically. 
- """ + def __getitem__(self, __key): + return self._field_map.__getitem__(__key) - # Fully qualified class name - fqname = cls.__module__ + "." + cls.__qualname__ - postfix = "" + def __len__(self): + return self._field_map.__len__() - def get_name() -> str: - # If arguments are specified (different from the defaults), we append a hash to the - # class name, to make it unique - # We don't check by stack, as cycles get explicitly renamed. - # When called later, include is explicitly set, so fence passes. - nonlocal postfix - is_default = ( - exclude == () - and include == () - and computed == () - and sort_alphabetically is None - and allow_cycles is None - and not exclude_readonly - ) - hashval = f"{fqname};{exclude};{include};{computed};{_stack}:{sort_alphabetically}:{allow_cycles}:{exclude_readonly}" - postfix = ( - ":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] - if not is_default - else "" - ) - return fqname + postfix - - # We need separate model class for different exclude, include and computed parameters - _name = name or get_name() - has_submodel = False - - # Get settings and defaults - meta = getattr(cls, "PydanticMeta", PydanticMeta) + def __iter__(self): + return self._field_map.__iter__() - def get_param(attr: str) -> Any: - if meta_override: - return getattr(meta_override, attr, getattr(meta, attr, getattr(PydanticMeta, attr))) - return getattr(meta, attr, getattr(PydanticMeta, attr)) + def __setitem__(self, __key, __value): + self._field_map.__setitem__(__key, __value) - default_include: Tuple[str, ...] = tuple(get_param("include")) - default_exclude: Tuple[str, ...] = tuple(get_param("exclude")) - default_computed: Tuple[str, ...] = tuple(get_param("computed")) - default_config: Optional[ConfigDict] = get_param("model_config") + def sort_alphabetically(self): + self._field_map = {k: self._field_map[k] for k in sorted(self._field_map)} - backward_relations: bool = bool(get_param("backward_relations")) - - max_recursion: int = int(get_param("max_recursion")) - exclude_raw_fields: bool = bool(get_param("exclude_raw_fields")) - _sort_fields: bool = ( - bool(get_param("sort_alphabetically")) - if sort_alphabetically is None - else sort_alphabetically - ) - _allow_cycles: bool = bool(get_param("allow_cycles") if allow_cycles is None else allow_cycles) - - # Update parameters with defaults - include = tuple(include) + default_include - exclude = tuple(exclude) + default_exclude - computed = tuple(computed) + default_computed - - annotations = get_annotations(cls) - - pconfig = PydanticModel.model_config.copy() - if default_config: - pconfig.update(default_config) - if model_config: - pconfig.update(model_config) - if "title" not in pconfig: - pconfig["title"] = name or cls.__name__ - if "extra" not in pconfig: - pconfig["extra"] = "forbid" - - properties: Dict[str, Any] = {} - - # Get model description - model_description = cls.describe(serializable=False) - - # Field map we use - field_map: Dict[str, dict] = {} - pk_raw_field: str = "" - - def field_map_update(keys: tuple, is_relation=True) -> None: - nonlocal pk_raw_field - - for key in keys: - fds = model_description[key] - if isinstance(fds, dict): - fds = [fds] - for fd in fds: - n = fd["name"] - if key == "pk_field": - pk_raw_field = n - # Include or exclude field - if (include and n not in include) or n in exclude: - continue - # Remove raw fields - raw_field = fd.get("raw_field", None) - if raw_field is not None and exclude_raw_fields and raw_field != pk_raw_field: - 
field_map.pop(raw_field, None) - field_map[n] = fd - - # Update field definitions from description - if not exclude_readonly: - field_map_update(("pk_field",), is_relation=False) - field_map_update(("data_fields",), is_relation=False) - if not exclude_readonly: - included_fields: tuple = ( - "fk_fields", - "o2o_fields", - "m2m_fields", - ) - if backward_relations: - included_fields = ( - *included_fields, - "backward_fk_fields", - "backward_o2o_fields", - ) + def sort_definition_order(self, cls: "Type[Model]", computed: tuple[str, ...]): + self._field_map = { + k: self._field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in self._field_map + } - field_map_update(included_fields) - # Add possible computed fields - field_map.update( + def field_map_update(self, field_descriptions: list[FieldDescriptionBase], meta: MyPydanticMeta) -> None: + for field_description in field_descriptions: + name = field_description.name + # Include or exclude field + if (meta.include and name not in meta.include) or name in meta.exclude: + continue + # Remove raw fields + if isinstance(field_description, ForeignKeyFieldInstanceDescription): + raw_field = field_description.raw_field + if meta.exclude_raw_fields and raw_field != self.pk_raw_field: + self.pop(raw_field, None) + self[name] = field_description + + def computed_field_map_update(self, computed: tuple[str, ...], cls: "Type[Model]"): + self._field_map.update( { - k: { - "field_type": callable, - "function": getattr(cls, k), - "description": None, - } + k: ComputedFieldDescription( + field_type=callable, + function=getattr(cls, k), + description=None, + ) for k in computed } ) - # Sort field map (Python 3.7+ has guaranteed ordered dictionary keys) - if _sort_fields: - # Sort Alphabetically - field_map = {k: field_map[k] for k in sorted(field_map)} - else: - # Sort to definition order - field_map = { - k: field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in field_map - } - # Process fields - for fname, fdesc in field_map.items(): - comment = "" - json_schema_extra: Dict[str, Any] = {} - fconfig: Dict[str, Any] = { - "json_schema_extra": json_schema_extra, - } - field_type = fdesc["field_type"] - field_default = fdesc.get("default") - is_optional_field = fname in optional - - def get_submodel(_model: "Type[Model]") -> Optional[Type[PydanticModel]]: - """Get Pydantic model for the submodel""" - nonlocal exclude, _name, has_submodel - - if _model: - new_stack = _stack + ((cls, fname, max_recursion),) - - # Get pydantic schema for the submodel - prefix_len = len(fname) + 1 - pmodel = _pydantic_recursion_protector( - _model, - exclude=tuple( - str(v[prefix_len:]) for v in exclude if v.startswith(fname + ".") - ), - include=tuple( - str(v[prefix_len:]) for v in include if v.startswith(fname + ".") - ), - computed=tuple( - str(v[prefix_len:]) for v in computed if v.startswith(fname + ".") - ), - stack=new_stack, - allow_cycles=_allow_cycles, - sort_alphabetically=sort_alphabetically, - ) - else: - pmodel = None - - # If the result is None it has been excluded and we need to exclude the field - if pmodel is None: - exclude += (fname,) - else: - has_submodel = True - # We need to rename if there are duplicate instances of this model - if cls in (c[0] for c in _stack): - _name = name or get_name() - - return pmodel - - # Foreign keys and OneToOne fields are embedded schemas - is_to_one_relation = False - if ( - field_type is relational.ForeignKeyFieldInstance - or field_type is relational.OneToOneFieldInstance - or 
field_type is relational.BackwardOneToOneRelation - ): - is_to_one_relation = True - model = get_submodel(fdesc["python_type"]) - if model: - if fdesc.get("nullable"): - json_schema_extra["nullable"] = True - if fdesc.get("nullable") or field_default is not None: - model = Optional[model] # type: ignore - - properties[fname] = model - - # Backward FK and ManyToMany fields are list of embedded schemas - elif ( - field_type is relational.BackwardFKRelation - or field_type is relational.ManyToManyFieldInstance - ): - model = get_submodel(fdesc["python_type"]) - if model: - properties[fname] = List[model] # type: ignore - - # Computed fields as methods - elif field_type is callable: - func = fdesc["function"] - annotation = get_annotations(cls, func).get("return", None) - comment = _cleandoc(func) - if annotation is not None: - properties[fname] = computed_field(return_type=annotation, description=comment)( - func - ) - - # Json fields - elif field_type is JSONField: - properties[fname] = Any - # Any other tortoise fields - else: - annotation = annotations.get(fname, None) - if "readOnly" in fdesc["constraints"]: - json_schema_extra["readOnly"] = fdesc["constraints"]["readOnly"] - del fdesc["constraints"]["readOnly"] - fconfig.update(fdesc["constraints"]) - ptype = fdesc["python_type"] - if fdesc.get("nullable"): - json_schema_extra["nullable"] = True - if is_optional_field or field_default is not None or fdesc.get("nullable"): - ptype = Optional[ptype] - if not (exclude_readonly and json_schema_extra.get("readOnly") is True): - properties[fname] = annotation or ptype - - if fname in properties and not isinstance(properties[fname], tuple): - fconfig["title"] = fname.replace("_", " ").title() - description = comment or _br_it(fdesc.get("docstring") or fdesc["description"] or "") - if description: - fconfig["description"] = description - ftype = properties[fname] - if isinstance(ftype, PydanticDescriptorProxy): - continue - if is_optional_field or (field_default is not None and not callable(field_default)): - properties[fname] = (ftype, Field(default=field_default, **fconfig)) - else: - if (j := fconfig.get("json_schema_extra")) and ( - ( - j.get("nullable") - and not is_to_one_relation - and field_type not in (IntField, TextField) - ) - or (exclude_readonly and j.get("readOnly")) - ): - fconfig["default_factory"] = lambda: None - properties[fname] = (ftype, Field(**fconfig)) - - # Here we endure that the name is unique, but complete objects are still labeled verbatim - if not has_submodel and _stack: - _name = name or f"{fqname}.leaf" - else: - _name = name or get_name() - - # Here we de-dup to ensure that a uniquely named object is a unique object - # This fixes some Pydantic constraints. - if _name in _MODEL_INDEX: - return _MODEL_INDEX[_name] - - # Creating Pydantic class for the properties generated before - properties["model_config"] = pconfig - model = create_model( - _name, - __base__=PydanticModel, - __module__=module, - __validators__=validators, - **properties, - ) - # Copy the Model docstring over - model.__doc__ = _cleandoc(cls) - # Store the base class - model.model_config["orig_model"] = cls # type: ignore - # Store model reference so we can de-dup it later on if needed. - _MODEL_INDEX[_name] = model - return model - def pydantic_queryset_creator( - cls: "Type[Model]", - *, - name=None, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] 
= (), - allow_cycles: Optional[bool] = None, - sort_alphabetically: Optional[bool] = None, + cls: "Type[Model]", + *, + name=None, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, ) -> Type[PydanticListModel]: """ Function to build a `Pydantic Model `__ list off Tortoise Model. @@ -517,3 +349,358 @@ def pydantic_queryset_creator( model.model_config["title"] = name or f"{submodel.model_config['title']}_list" model.model_config["submodel"] = submodel # type: ignore return model + + +PropertyT = Any + + +class PydanticModelCreator: + def __init__( + self, + cls: "Type[Model]", + name: Optional[str] = None, + exclude: Optional[Tuple[str, ...]] = (), + include: Optional[Tuple[str, ...]] = (), + computed: Optional[Tuple[str, ...]] = (), + optional: Optional[Tuple[str, ...]] = (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, + exclude_readonly: bool = False, + meta_override: Optional[Type] = None, + model_config: Optional[ConfigDict] = None, + validators: Optional[Dict[str, Any]] = None, + module: str = __name__, + _stack: tuple = () + ): + self._cls: "Type[Model]" = cls + self._stack: tuple[tuple["Type[Model]", str, int]] = tuple() # ((Type[Model], field_name, max_recursion),) + self._is_default: bool = ( + exclude is None + and include is None + and computed is None + and optional is None + and sort_alphabetically is None + and allow_cycles is None + and meta_override is None + ) + + meta_from_class = cls.my_pydantic_meta \ + if not meta_override \ + else construct_pydantic_meta(cls.my_pydantic_meta, meta_override) + self.meta = finalize_meta( + meta_from_class, exclude, include, computed, allow_cycles, sort_alphabetically, model_config + ) + print(f"Meta: {self.meta}") + + self._fqname = cls.__module__ + "." + cls.__qualname__ + self._name: str + self._title: str + self._name, self._title = self.get_name(name) + self.given_name = name + + self._has_submodel = False + + self._annotations = get_annotations(cls) + + self._pconfig: ConfigDict = self.initialize_pconfig() + + self._properties: Dict[str, Any] = dict() + + self._model_description: ModelDescription = cls.describe_by_dataclass() + + self._exclude_read_only: bool = exclude_readonly + + self._field_map: FieldMap = self.initialize_field_map() + self.construct_field_map() + + self._optional = optional + + self._validators = validators + self._module = module + + self._stack = _stack + + def get_name(self, name: Optional[str] = None) -> tuple[str, str]: + """ + return + @rtype: tuple + name, title + """ + # If arguments are specified (different from the defaults), we append a hash to the + # class name, to make it unique + # We don't check by stack, as cycles get explicitly renamed. + # When called later, include is explicitly set, so fence passes. 
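+        # Without an explicit name this yields the fully qualified class name plus an
+        # optional short hash of the options, e.g. ("app.models.Event:ab3k2f", "Event")
+        # for a hypothetical app.models.Event (the hash shown is only illustrative).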
+ if name is not None: + return name, name + hashval = ( + f"{self._fqname};{self.meta.exclude};{self.meta.include};{self.meta.computed};" + f"{self._stack}:{self.meta.sort_alphabetically}:{self.meta.allow_cycles}" + ) + postfix = ( + ":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] + if not self._is_default + else "" + ) + return self._fqname + postfix, self._cls.__name__ + + def initialize_pconfig(self): + pconfig: ConfigDict = PydanticModel.model_config.copy() + if self.meta.model_config: + pconfig.update(self.meta.model_config) + if "title" not in pconfig: + pconfig["title"] = self._title + if "extra" not in pconfig: + pconfig["extra"] = 'forbid' + return pconfig + + def initialize_field_map(self): + return FieldMap(self.meta) \ + if self._exclude_read_only \ + else FieldMap(self.meta, pk_field_description=self._model_description.pk_field) + + def construct_field_map(self): + self._field_map.field_map_update(field_descriptions=self._model_description.data_fields, meta=self.meta) + if not self._exclude_read_only: + for field_descriptions in ( + self._model_description.fk_fields, + self._model_description.o2o_fields, + self._model_description.m2m_fields + ): + self._field_map.field_map_update(field_descriptions, self.meta) + if self.meta.backward_relations: + for field_descriptions in ( + self._model_description.backward_fk_fields, + self._model_description.backward_o2o_fields + ): + self._field_map.field_map_update(field_descriptions, self.meta) + self._field_map.computed_field_map_update(self.meta.computed, self._cls) + if self.meta.sort_alphabetically: + self._field_map.sort_alphabetically() + else: + self._field_map.sort_definition_order(self._cls, self.meta.computed) + + def create_pydantic_model(self): + print(f"ModelDescription: {self._model_description}") + print(f"FieldMap: {self._field_map._field_map}") + for field_name, field_description in self._field_map.items(): + self.process_field(field_name, field_description) + + print(f"FieldMap: {self._field_map._field_map}") + print(f"Properties: {self._properties}") + if not self._has_submodel: + self._name = self.given_name or f"{self._fqname}.leaf" + + if self._name in _MODEL_INDEX: + print("not new generated") + return _MODEL_INDEX[self._name] + + self._properties["model_config"] = self._pconfig + print(f"Properties: {self._properties}") + print(f"FieldMap: {self._field_map._field_map}") + model = create_model( + self._name, + __base__=PydanticModel, + __module__=self._module, + __validators__=self._validators, + **self._properties, + ) + # Copy the Model docstring over + model.__doc__ = _cleandoc(self._cls) + # Store the base class + model.model_config["orig_model"] = self._cls # type: ignore + # Store model reference so we can de-dup it later on if needed. 
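+        # (the index is keyed by the generated name, so repeated calls with the same
+        # model and options reuse the same pydantic class)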
+ _MODEL_INDEX[self._name] = model + return model + + def process_field( + self, + field_name: str, + field_description: Union[FieldDescriptionBase, ComputedFieldDescription], + ): + json_schema_extra: Dict[str, Any] = {} + fconfig: Dict[str, Any] = { + "json_schema_extra": json_schema_extra, + } + field_property: PropertyT = None + is_to_one_relation: bool = False + if isinstance(field_description, FieldDescriptionBase): + field_property, is_to_one_relation = self.process_normal_field_description(field_name, field_description, json_schema_extra, fconfig) + elif isinstance(field_description, ComputedFieldDescription): + field_property, is_to_one_relation = self.process_computed_field_description(field_description), False + + if field_property: + self._properties[field_name] = field_property + if field_name in self._properties and not isinstance(self._properties[field_name], tuple): + fconfig["title"] = field_name.replace("_", " ").title() + # description = "" or _br_it(field_description.docstring or field_description.description or "") + description = None + if description: + fconfig["description"] = description + ftype = self._properties[field_name] + if not isinstance(ftype, PydanticDescriptorProxy): + if field_name in self._optional or (field_description.default is not None and not callable(field_description.default)): + self._properties[field_name] = (ftype, Field(default=field_description.default, **fconfig)) + else: + if ( + ( + json_schema_extra.get("nullable") + and not is_to_one_relation + and field_description.field_type not in (IntField, TextField) + ) + or (self._exclude_read_only and json_schema_extra.get("readOnly")) + ): + fconfig["default_factory"] = lambda: None + self._properties[field_name] = (ftype, Field(**fconfig)) + + def process_normal_field_description( + self, + field_name: str, + field_description: FieldDescriptionBase, + json_schema_extra: Dict[str, Any], + fconfig: Dict[str, Any], + ) -> tuple[Optional[PropertyT], bool]: + print(field_description) + if ( + field_description.field_type is relational.ForeignKeyFieldInstance + or field_description.field_type is relational.OneToOneFieldInstance + or field_description.field_type is relational.BackwardOneToOneRelation + ): + return self.process_single_field_relation(field_name, field_description, json_schema_extra), True + elif field_description.field_type in (relational.BackwardFKRelation, relational.ManyToManyFieldInstance): + return self.process_many_field_relation(field_name, field_description), False + elif field_description.field_type is JSONField: + return self.process_json_field_description(), False + return self.process_data_field_description(field_name, field_description, json_schema_extra, fconfig), False + + def process_single_field_relation( + self, + field_name: str, + field_description: FieldDescriptionBase, + json_schema_extra: Dict[str, Any], + ) -> Optional[PropertyT]: + print(field_description) + model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) + if model: + if field_description.nullable: + json_schema_extra["nullable"] = True + if field_description.nullable or field_description.default is not None: + model = Optional[model] # type: ignore + + return model + + def process_many_field_relation( + self, + field_name: str, + field_description: FieldDescriptionBase, + ) -> Optional[Type[List[Type[PydanticModel]]]]: + model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) + if model: + return List[model] + + 
def process_json_field_description(self): + is_to_one_relation = False + return Any, is_to_one_relation + + def process_data_field_description( + self, + field_name: str, + field_description: FieldDescriptionBase, + json_schema_extra: Dict[str, Any], + fconfig: Dict[str, Any], + ) -> Optional[PropertyT]: + annotation = self._annotations.get(field_name, None) + if "readOnly" in field_description.constraints: + json_schema_extra["readOnly"] = field_description.constraints["readOnly"] + del field_description.constraints["readOnly"] + fconfig.update(field_description.constraints) + ptype = field_description.python_type + print(f"ptype: {ptype}") + if field_description.nullable: + json_schema_extra["nullable"] = True + print(field_description) + if field_name in self._optional or field_description.default is not None or field_description.nullable: + ptype = Optional[ptype] + if not (self._exclude_read_only and json_schema_extra.get("readOnly") is True): + return annotation or ptype + + def process_computed_field_description( + self, + field_description: ComputedFieldDescription, + ): + func = field_description.function + annotation = get_annotations(self._cls, func).get("return", None) + print(f"anno: {annotation}") + comment = _cleandoc(func) + if annotation is not None: + c_f = computed_field(return_type=annotation, description=comment) + ret = c_f(func) + return ret + + def get_submodel(self, _model: "Type[Model]", field_name: str) -> Optional[Type[PydanticModel]]: + """Get Pydantic model for the submodel""" + + if _model: + new_stack = self._stack + ((self._cls, field_name, self.meta.max_recursion),) + + # Get pydantic schema for the submodel + prefix_len = len(field_name) + 1 + pmodel = _pydantic_recursion_protector( + _model, + exclude=tuple( + str(v[prefix_len:]) for v in self.meta.exclude if v.startswith(field_name + ".") + ), + include=tuple( + str(v[prefix_len:]) for v in self.meta.include if v.startswith(field_name + ".") + ), + computed=tuple( + str(v[prefix_len:]) for v in self.meta.computed if v.startswith(field_name + ".") + ), + stack=new_stack, + allow_cycles=self.meta.allow_cycles, + sort_alphabetically=self.meta.sort_alphabetically, + ) + else: + pmodel = None + + # If the result is None it has been excluded and we need to exclude the field + if pmodel is None: + self.meta.exclude += (field_name,) + else: + self._has_submodel = True + + return pmodel + + +def pydantic_model_creator( + cls: "Type[Model]", + *, + name=None, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + optional: Tuple[str, ...] 
= (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, + _stack: tuple = (), + exclude_readonly: bool = False, + meta_override: Optional[Type] = None, + model_config: Optional[ConfigDict] = None, + validators: Optional[Dict[str, Any]] = None, + module: str = __name__, +) -> Type[PydanticModel]: + pmc = PydanticModelCreator( + cls=cls, + name=name, + exclude=exclude, + include=include, + computed=computed, + optional=optional, + allow_cycles=allow_cycles, + sort_alphabetically=sort_alphabetically, + exclude_readonly=exclude_readonly, + meta_override=meta_override, + model_config=model_config, + validators=validators, + module=module + ) + return pmc.create_pydantic_model() diff --git a/tortoise/fields/base.py b/tortoise/fields/base.py index 1887e63ae..c8529d8a9 100644 --- a/tortoise/fields/base.py +++ b/tortoise/fields/base.py @@ -1,3 +1,4 @@ +import dataclasses import sys import warnings from enum import Enum @@ -62,6 +63,27 @@ def __new__(mcs, name: str, bases: Tuple[Type, ...], attrs: dict): return type.__new__(mcs, name, bases, attrs) +@dataclasses.dataclass +class FieldDescriptionBase: + name: str + field_type: Type["Field"] + generated: bool + nullable: bool + unique: bool + indexed: bool + constraints: dict + python_type: Optional[type] = None + default: Optional[Any] = None + description: Optional[str] = None + docstring: Optional[str] = None + db_field_types: Optional[dict[str, str]] = None + + +@dataclasses.dataclass +class FieldDescription(FieldDescriptionBase): + db_column: str = "" + + class Field(Generic[VALUE], metaclass=_FieldMeta): """ Base Field type. @@ -441,3 +463,21 @@ def default_name(default: Any) -> Optional[Union[int, float, str, bool]]: desc["db_field_types"] = self.get_db_field_types() return desc + + def describe_by_dataclass(self): + field_type = getattr(self, "related_model", self.field_type) + return FieldDescription( + name=self.model_field_name, + field_type=self.__class__, + db_column=self.source_field or self.model_field_name, + python_type=field_type, + generated=self.generated, + nullable=self.null, + unique=self.unique, + indexed=self.index or self.unique, + default=self.default, + description=self.description, + docstring=self.docstring, + constraints=self.constraints, + db_field_types=self.get_db_field_types() if self.has_db_field else None + ) diff --git a/tortoise/fields/relational.py b/tortoise/fields/relational.py index a95e5103f..eb01037fd 100644 --- a/tortoise/fields/relational.py +++ b/tortoise/fields/relational.py @@ -1,3 +1,4 @@ +import dataclasses from typing import ( TYPE_CHECKING, Any, @@ -18,7 +19,7 @@ from typing_extensions import Literal from tortoise.exceptions import ConfigurationError, NoValuesFetched, OperationalError -from tortoise.fields.base import CASCADE, SET_NULL, Field, OnDelete +from tortoise.fields.base import CASCADE, SET_NULL, Field, OnDelete, FieldDescription, FieldDescriptionBase if TYPE_CHECKING: # pragma: nocoverage from tortoise.backends.base.client import BaseDBAsyncClient @@ -48,7 +49,7 @@ class ReverseRelation(Generic[MODEL]): def __init__( self, - remote_model: Type[MODEL], + remote_model: "Type[MODEL]", relation_field: str, instance: "Model", from_field: str, @@ -240,6 +241,11 @@ async def _remove_or_clear( await db.execute_query(str(query)) +@dataclasses.dataclass +class RelationalFieldDescription(FieldDescriptionBase): + db_constraint: bool = False + + class RelationalField(Field[MODEL]): has_db_field = False @@ -276,6 +282,11 @@ def describe(self, serializable: bool) -> 
dict: del desc["db_column"] return desc + def describe_by_dataclass(self): + return RelationalFieldDescription( + **self.describe(False) + ) + @classmethod def validate_model_name(cls, model_name: str) -> None: if len(model_name.split(".")) != 2: @@ -283,6 +294,12 @@ def validate_model_name(cls, model_name: str) -> None: raise ConfigurationError(f'{field_type} accepts model name in format "app.Model"') +@dataclasses.dataclass +class ForeignKeyFieldInstanceDescription(RelationalFieldDescription): + raw_field: str = "" + on_delete: str = "" + + class ForeignKeyFieldInstance(RelationalField[MODEL]): def __init__( self, @@ -309,6 +326,9 @@ def describe(self, serializable: bool) -> dict: desc["on_delete"] = str(self.on_delete) return desc + def describe_by_dataclass(self): + return ForeignKeyFieldInstanceDescription(**self.describe(False)) + class BackwardFKRelation(RelationalField[MODEL]): def __init__( @@ -342,6 +362,17 @@ class BackwardOneToOneRelation(BackwardFKRelation[MODEL]): pass +@dataclasses.dataclass +class ManyToManyFieldInstanceDescription(RelationalFieldDescription): + model_name: str = "" + related_name: str = "" + forward_key: str = "" + backward_key: str = "" + through: str = "" + on_delete: str = "" + _generated: bool = False + + class ManyToManyFieldInstance(RelationalField[MODEL]): field_type = ManyToManyRelation @@ -381,6 +412,9 @@ def describe(self, serializable: bool) -> dict: desc["_generated"] = self._generated return desc + def describe_by_dataclass(self): + return ManyToManyFieldInstanceDescription(**self.describe(False)) + @overload def OneToOneField( diff --git a/tortoise/models.py b/tortoise/models.py index 4fd513d56..c1923af09 100644 --- a/tortoise/models.py +++ b/tortoise/models.py @@ -1,4 +1,5 @@ import asyncio +import dataclasses import inspect import re from copy import copy, deepcopy @@ -27,6 +28,7 @@ from tortoise import connections from tortoise.backends.base.client import BaseDBAsyncClient +from tortoise.contrib.pydantic.creator import MyPydanticMeta from tortoise.exceptions import ( ConfigurationError, DoesNotExist, @@ -36,7 +38,7 @@ OperationalError, ParamsError, ) -from tortoise.fields.base import Field +from tortoise.fields.base import Field, FieldDescription from tortoise.fields.data import IntField from tortoise.fields.relational import ( BackwardFKRelation, @@ -643,10 +645,30 @@ def __getitem__(cls: Type[MODEL], key: Any) -> QuerySetSingle[MODEL]: # type: i return cls._getbypk(key) # type: ignore +@dataclasses.dataclass +class ModelDescription: + name: str + table: str + abstract: bool + description: str + pk_field: FieldDescription + app: Optional[str] = None + docstring: Optional[str] = None + unique_together: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=list) + indexes: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=list) + data_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + fk_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + backward_fk_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + o2o_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + backward_o2o_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + m2m_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + + class Model(metaclass=ModelMeta): """ Base class for all Tortoise ORM Models. 
""" + my_pydantic_meta: MyPydanticMeta = MyPydanticMeta() # I don' like this here, but it makes auto completion and static analysis much happier _meta = MetaInfo(None) # type: ignore @@ -1490,6 +1512,50 @@ def describe(cls, serializable: bool = True) -> dict: ], } + @classmethod + def describe_by_dataclass(cls): + return ModelDescription( + name=cls._meta.full_name, + app=cls._meta.app, + table=cls._meta.db_table, + abstract=cls._meta.abstract, + description=cls._meta.table_description or None, + docstring=inspect.cleandoc(cls.__doc__ or "") or None, + unique_together=cls._meta.unique_together or [], + indexes=cls._meta.indexes or [], + pk_field=cls._meta.fields_map[cls._meta.pk_attr].describe_by_dataclass(), + data_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name != cls._meta.pk_attr and name in (cls._meta.fields - cls._meta.fetch_fields) + ], + fk_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.fk_fields + ], + backward_fk_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.backward_fk_fields + ], + o2o_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.o2o_fields + ], + backward_o2o_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.backward_o2o_fields + ], + m2m_fields=[ + field.describe_by_dataclass() + for name, field in cls._meta.fields_map.items() + if name in cls._meta.m2m_fields + ], + ) + def __await__(self: MODEL) -> Generator[Any, None, MODEL]: async def _self() -> MODEL: return self From fee01e1f3fb34e31ba6855042be588b1e09f9da7 Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 22 Oct 2024 16:14:51 +0200 Subject: [PATCH 04/27] naming and description --- tortoise/contrib/pydantic/creator.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index cabad4984..c739831b2 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -224,6 +224,7 @@ def _pydantic_recursion_protector( _stack=stack, allow_cycles=allow_cycles, sort_alphabetically=sort_alphabetically, + _as_submodel=True, ).create_pydantic_model() @@ -370,7 +371,8 @@ def __init__( model_config: Optional[ConfigDict] = None, validators: Optional[Dict[str, Any]] = None, module: str = __name__, - _stack: tuple = () + _stack: tuple = (), + _as_submodel: bool = False ): self._cls: "Type[Model]" = cls self._stack: tuple[tuple["Type[Model]", str, int]] = tuple() # ((Type[Model], field_name, max_recursion),) @@ -396,9 +398,10 @@ def __init__( self._name: str self._title: str self._name, self._title = self.get_name(name) + print(self._name, self._title) self.given_name = name - self._has_submodel = False + self._as_submodel = _as_submodel self._annotations = get_annotations(cls) @@ -487,8 +490,8 @@ def create_pydantic_model(self): print(f"FieldMap: {self._field_map._field_map}") print(f"Properties: {self._properties}") - if not self._has_submodel: - self._name = self.given_name or f"{self._fqname}.leaf" + if self._as_submodel: + self._name = f"{self._name}:leaf" if self._name in _MODEL_INDEX: print("not new generated") @@ -523,17 +526,18 @@ def process_field( } field_property: PropertyT = None is_to_one_relation: bool = False + comment = "" if isinstance(field_description, 
FieldDescriptionBase): field_property, is_to_one_relation = self.process_normal_field_description(field_name, field_description, json_schema_extra, fconfig) elif isinstance(field_description, ComputedFieldDescription): field_property, is_to_one_relation = self.process_computed_field_description(field_description), False + comment = _cleandoc(field_description.function) if field_property: self._properties[field_name] = field_property if field_name in self._properties and not isinstance(self._properties[field_name], tuple): fconfig["title"] = field_name.replace("_", " ").title() - # description = "" or _br_it(field_description.docstring or field_description.description or "") - description = None + description = comment or _br_it(field_description.docstring or field_description.description or "") if description: fconfig["description"] = description ftype = self._properties[field_name] @@ -665,8 +669,6 @@ def get_submodel(self, _model: "Type[Model]", field_name: str) -> Optional[Type[ # If the result is None it has been excluded and we need to exclude the field if pmodel is None: self.meta.exclude += (field_name,) - else: - self._has_submodel = True return pmodel From 0b557d92cb70874cfe13606add3926dd1cb3b9ac Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Wed, 23 Oct 2024 10:26:10 +0200 Subject: [PATCH 05/27] include improvements from #1741 --- tortoise/contrib/pydantic/creator.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index c739831b2..ab23ec3e5 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -384,6 +384,7 @@ def __init__( and sort_alphabetically is None and allow_cycles is None and meta_override is None + and not exclude_readonly ) meta_from_class = cls.my_pydantic_meta \ @@ -394,6 +395,8 @@ def __init__( ) print(f"Meta: {self.meta}") + self._exclude_read_only: bool = exclude_readonly + self._fqname = cls.__module__ + "." 
+ cls.__qualname__ self._name: str self._title: str @@ -411,8 +414,6 @@ def __init__( self._model_description: ModelDescription = cls.describe_by_dataclass() - self._exclude_read_only: bool = exclude_readonly - self._field_map: FieldMap = self.initialize_field_map() self.construct_field_map() @@ -437,7 +438,7 @@ def get_name(self, name: Optional[str] = None) -> tuple[str, str]: return name, name hashval = ( f"{self._fqname};{self.meta.exclude};{self.meta.include};{self.meta.computed};" - f"{self._stack}:{self.meta.sort_alphabetically}:{self.meta.allow_cycles}" + f"{self._stack}:{self.meta.sort_alphabetically}:{self.meta.allow_cycles}:{self._exclude_read_only}" ) postfix = ( ":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] @@ -490,7 +491,7 @@ def create_pydantic_model(self): print(f"FieldMap: {self._field_map._field_map}") print(f"Properties: {self._properties}") - if self._as_submodel: + if self._as_submodel and self._stack: self._name = f"{self._name}:leaf" if self._name in _MODEL_INDEX: From 24003e3042325e408a3c23b26d9a1423b48839fe Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Wed, 23 Oct 2024 10:26:34 +0200 Subject: [PATCH 06/27] remove print statements --- tortoise/contrib/pydantic/creator.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index ab23ec3e5..357322cb5 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -393,7 +393,6 @@ def __init__( self.meta = finalize_meta( meta_from_class, exclude, include, computed, allow_cycles, sort_alphabetically, model_config ) - print(f"Meta: {self.meta}") self._exclude_read_only: bool = exclude_readonly @@ -401,7 +400,6 @@ def __init__( self._name: str self._title: str self._name, self._title = self.get_name(name) - print(self._name, self._title) self.given_name = name self._as_submodel = _as_submodel @@ -484,23 +482,16 @@ def construct_field_map(self): self._field_map.sort_definition_order(self._cls, self.meta.computed) def create_pydantic_model(self): - print(f"ModelDescription: {self._model_description}") - print(f"FieldMap: {self._field_map._field_map}") for field_name, field_description in self._field_map.items(): self.process_field(field_name, field_description) - print(f"FieldMap: {self._field_map._field_map}") - print(f"Properties: {self._properties}") if self._as_submodel and self._stack: self._name = f"{self._name}:leaf" if self._name in _MODEL_INDEX: - print("not new generated") return _MODEL_INDEX[self._name] self._properties["model_config"] = self._pconfig - print(f"Properties: {self._properties}") - print(f"FieldMap: {self._field_map._field_map}") model = create_model( self._name, __base__=PydanticModel, @@ -564,7 +555,6 @@ def process_normal_field_description( json_schema_extra: Dict[str, Any], fconfig: Dict[str, Any], ) -> tuple[Optional[PropertyT], bool]: - print(field_description) if ( field_description.field_type is relational.ForeignKeyFieldInstance or field_description.field_type is relational.OneToOneFieldInstance @@ -583,7 +573,6 @@ def process_single_field_relation( field_description: FieldDescriptionBase, json_schema_extra: Dict[str, Any], ) -> Optional[PropertyT]: - print(field_description) model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) if model: if field_description.nullable: @@ -619,10 +608,8 @@ def process_data_field_description( del field_description.constraints["readOnly"] 
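+        # readOnly is reported via json_schema_extra; the remaining constraints
+        # (e.g. max_length, ge, le) are passed on as pydantic Field keyword arguments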
fconfig.update(field_description.constraints) ptype = field_description.python_type - print(f"ptype: {ptype}") if field_description.nullable: json_schema_extra["nullable"] = True - print(field_description) if field_name in self._optional or field_description.default is not None or field_description.nullable: ptype = Optional[ptype] if not (self._exclude_read_only and json_schema_extra.get("readOnly") is True): @@ -634,7 +621,6 @@ def process_computed_field_description( ): func = field_description.function annotation = get_annotations(self._cls, func).get("return", None) - print(f"anno: {annotation}") comment = _cleandoc(func) if annotation is not None: c_f = computed_field(return_type=annotation, description=comment) From 2ec883689f3d21a8019d4fea2bb95c5e651f140f Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Wed, 23 Oct 2024 10:37:11 +0200 Subject: [PATCH 07/27] i should learn git better... --- tortoise/contrib/pydantic/creator.py | 59 ---------------------------- 1 file changed, 59 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 87e462051..357322cb5 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -384,10 +384,7 @@ def __init__( and sort_alphabetically is None and allow_cycles is None and meta_override is None -<<<<<<< HEAD and not exclude_readonly -======= ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 ) meta_from_class = cls.my_pydantic_meta \ @@ -396,21 +393,13 @@ def __init__( self.meta = finalize_meta( meta_from_class, exclude, include, computed, allow_cycles, sort_alphabetically, model_config ) -<<<<<<< HEAD self._exclude_read_only: bool = exclude_readonly -======= - print(f"Meta: {self.meta}") ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 self._fqname = cls.__module__ + "." 
+ cls.__qualname__ self._name: str self._title: str self._name, self._title = self.get_name(name) -<<<<<<< HEAD -======= - print(self._name, self._title) ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 self.given_name = name self._as_submodel = _as_submodel @@ -423,11 +412,6 @@ def __init__( self._model_description: ModelDescription = cls.describe_by_dataclass() -<<<<<<< HEAD -======= - self._exclude_read_only: bool = exclude_readonly - ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 self._field_map: FieldMap = self.initialize_field_map() self.construct_field_map() @@ -452,11 +436,7 @@ def get_name(self, name: Optional[str] = None) -> tuple[str, str]: return name, name hashval = ( f"{self._fqname};{self.meta.exclude};{self.meta.include};{self.meta.computed};" -<<<<<<< HEAD f"{self._stack}:{self.meta.sort_alphabetically}:{self.meta.allow_cycles}:{self._exclude_read_only}" -======= - f"{self._stack}:{self.meta.sort_alphabetically}:{self.meta.allow_cycles}" ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 ) postfix = ( ":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] @@ -502,7 +482,6 @@ def construct_field_map(self): self._field_map.sort_definition_order(self._cls, self.meta.computed) def create_pydantic_model(self): -<<<<<<< HEAD for field_name, field_description in self._field_map.items(): self.process_field(field_name, field_description) @@ -513,25 +492,6 @@ def create_pydantic_model(self): return _MODEL_INDEX[self._name] self._properties["model_config"] = self._pconfig -======= - print(f"ModelDescription: {self._model_description}") - print(f"FieldMap: {self._field_map._field_map}") - for field_name, field_description in self._field_map.items(): - self.process_field(field_name, field_description) - - print(f"FieldMap: {self._field_map._field_map}") - print(f"Properties: {self._properties}") - if self._as_submodel: - self._name = f"{self._name}:leaf" - - if self._name in _MODEL_INDEX: - print("not new generated") - return _MODEL_INDEX[self._name] - - self._properties["model_config"] = self._pconfig - print(f"Properties: {self._properties}") - print(f"FieldMap: {self._field_map._field_map}") ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 model = create_model( self._name, __base__=PydanticModel, @@ -595,10 +555,6 @@ def process_normal_field_description( json_schema_extra: Dict[str, Any], fconfig: Dict[str, Any], ) -> tuple[Optional[PropertyT], bool]: -<<<<<<< HEAD -======= - print(field_description) ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 if ( field_description.field_type is relational.ForeignKeyFieldInstance or field_description.field_type is relational.OneToOneFieldInstance @@ -617,10 +573,6 @@ def process_single_field_relation( field_description: FieldDescriptionBase, json_schema_extra: Dict[str, Any], ) -> Optional[PropertyT]: -<<<<<<< HEAD -======= - print(field_description) ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) if model: if field_description.nullable: @@ -656,15 +608,8 @@ def process_data_field_description( del field_description.constraints["readOnly"] fconfig.update(field_description.constraints) ptype = field_description.python_type -<<<<<<< HEAD - if field_description.nullable: - json_schema_extra["nullable"] = True -======= - print(f"ptype: {ptype}") if field_description.nullable: json_schema_extra["nullable"] = True - print(field_description) ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 if field_name in 
self._optional or field_description.default is not None or field_description.nullable: ptype = Optional[ptype] if not (self._exclude_read_only and json_schema_extra.get("readOnly") is True): @@ -676,10 +621,6 @@ def process_computed_field_description( ): func = field_description.function annotation = get_annotations(self._cls, func).get("return", None) -<<<<<<< HEAD -======= - print(f"anno: {annotation}") ->>>>>>> f6239840c1044a910dcef5a3322bc93c8f37c660 comment = _cleandoc(func) if annotation is not None: c_f = computed_field(return_type=annotation, description=comment) From 298a916d007183ab1e542d357237a096cf2bf5dd Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Fri, 25 Oct 2024 13:48:49 +0200 Subject: [PATCH 08/27] type hints, recursion protector, naming, .... should be ready for review --- tests/contrib/test_pydantic.py | 137 ++++++++------------- tortoise/contrib/pydantic/creator.py | 178 +++++++++++++++++---------- tortoise/fields/base.py | 2 +- tortoise/fields/relational.py | 130 ++++++++++++++++++- tortoise/models.py | 28 ++--- 5 files changed, 307 insertions(+), 168 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 5b894f62a..b8b3573a3 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1,4 +1,5 @@ import copy +import json import pytest from pydantic import ConfigDict, ValidationError @@ -67,7 +68,7 @@ def test_event_schema(self): self.Event_Pydantic.model_json_schema(), { "$defs": { - "tortoise__contrib__pydantic__creator__tests__testmodels__Address__leaf": { + "Address_diegkp_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -83,7 +84,7 @@ def test_event_schema(self): "title": "Address", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf": { + "Reporter_flpkb7_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -99,7 +100,7 @@ def test_event_schema(self): "title": "Reporter", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Team__leaf": { + "Team_uqjfiz_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -127,7 +128,7 @@ def test_event_schema(self): "title": "Team", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Tournament__leaf": { + "Tournament_lloz5q_leaf": { "additionalProperties": False, "properties": { "id": { @@ -165,13 +166,13 @@ def test_event_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Tournament__leaf", + "$ref": "#/$defs/Tournament_lloz5q_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf" + "$ref": "#/$defs/Reporter_flpkb7_leaf" }, {"type": "null"}, ], @@ -180,7 +181,7 @@ def test_event_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Team__leaf" + "$ref": "#/$defs/Team_uqjfiz_leaf" }, "title": "Participants", "type": "array", @@ -203,7 +204,7 @@ def test_event_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Address__leaf" + "$ref": "#/$defs/Address_diegkp_leaf" }, {"type": "null"}, ], @@ -232,7 +233,7 
@@ def test_eventlist_schema(self): self.Event_Pydantic_List.model_json_schema(), { "$defs": { - "Event_bliobj": { + "Event_vh77nq": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -244,13 +245,13 @@ def test_eventlist_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Tournament__leaf", + "$ref": "#/$defs/Tournament_lloz5q_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf" + "$ref": "#/$defs/Reporter_flpkb7_leaf" }, {"type": "null"}, ], @@ -259,7 +260,7 @@ def test_eventlist_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Team__leaf" + "$ref": "#/$defs/Team_uqjfiz_leaf" }, "title": "Participants", "type": "array", @@ -289,7 +290,7 @@ def test_eventlist_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Address__leaf" + "$ref": "#/$defs/Address_diegkp_leaf" }, {"type": "null"}, ], @@ -311,7 +312,7 @@ def test_eventlist_schema(self): "title": "Event", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Address__leaf": { + "Address_diegkp_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -327,7 +328,7 @@ def test_eventlist_schema(self): "title": "Address", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf": { + "Reporter_flpkb7_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -343,7 +344,7 @@ def test_eventlist_schema(self): "title": "Reporter", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Team__leaf": { + "Team_uqjfiz_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -371,7 +372,7 @@ def test_eventlist_schema(self): "title": "Team", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Tournament__leaf": { + "Tournament_lloz5q_leaf": { "additionalProperties": False, "properties": { "id": { @@ -399,18 +400,19 @@ def test_eventlist_schema(self): }, }, "description": "Events on the calendar", - "items": {"$ref": "#/$defs/Event_bliobj"}, + "items": {"$ref": "#/$defs/Event_vh77nq"}, "title": "Event_list", "type": "array", }, ) def test_address_schema(self): + # print(json.dumps(self.Address_Pydantic.model_json_schema(), indent=2)) self.assertEqual( self.Address_Pydantic.model_json_schema(), { "$defs": { - "Event_jim4na": { + "Event_az5m74_leaf": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -422,13 +424,13 @@ def test_address_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Tournament__leaf", + "$ref": "#/$defs/Tournament_lloz5q_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf" + "$ref": "#/$defs/Reporter_flpkb7_leaf" }, {"type": "null"}, ], @@ -437,7 +439,7 @@ def test_address_schema(self): }, "participants": { "items": { - "$ref": 
"#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Team__leaf" + "$ref": "#/$defs/Team_uqjfiz_leaf" }, "title": "Participants", "type": "array", @@ -478,7 +480,7 @@ def test_address_schema(self): "title": "Event", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf": { + "Reporter_flpkb7_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -494,7 +496,7 @@ def test_address_schema(self): "title": "Reporter", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Team__leaf": { + "Team_uqjfiz_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -522,7 +524,7 @@ def test_address_schema(self): "title": "Team", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Tournament__leaf": { + "Tournament_lloz5q_leaf": { "additionalProperties": False, "properties": { "id": { @@ -553,7 +555,7 @@ def test_address_schema(self): "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, "street": {"maxLength": 128, "title": "Street", "type": "string"}, - "event": {"$ref": "#/$defs/Event_jim4na"}, + "event": {"$ref": "#/$defs/Event_az5m74_leaf"}, "event_id": { "maximum": 9223372036854775807, "minimum": -9223372036854775808, @@ -572,7 +574,7 @@ def test_tournament_schema(self): self.Tournament_Pydantic.model_json_schema(), { "$defs": { - "Event_ml4ytz": { + "Event_atwjj7_leaf": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -586,7 +588,7 @@ def test_tournament_schema(self): "reporter": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf" + "$ref": "#/$defs/Reporter_flpkb7_leaf" }, {"type": "null"}, ], @@ -595,7 +597,7 @@ def test_tournament_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Team__leaf" + "$ref": "#/$defs/Team_uqjfiz_leaf" }, "title": "Participants", "type": "array", @@ -625,7 +627,7 @@ def test_tournament_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Address__leaf" + "$ref": "#/$defs/Address_diegkp_leaf" }, {"type": "null"}, ], @@ -646,7 +648,7 @@ def test_tournament_schema(self): "title": "Event", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Address__leaf": { + "Address_diegkp_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -662,7 +664,7 @@ def test_tournament_schema(self): "title": "Address", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf": { + "Reporter_flpkb7_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -678,7 +680,7 @@ def test_tournament_schema(self): "title": "Reporter", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Team__leaf": { + "Team_uqjfiz_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -724,7 +726,7 @@ def test_tournament_schema(self): }, "events": { "description": "What tournaments is a happenin'", - "items": {"$ref": "#/$defs/Event_ml4ytz"}, + "items": {"$ref": "#/$defs/Event_atwjj7_leaf"}, "title": "Events", "type": "array", }, @@ -740,7 +742,7 @@ def test_team_schema(self): 
self.Team_Pydantic.model_json_schema(), { "$defs": { - "Event_vrm2bi": { + "Event_37gjqu_leaf": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -752,13 +754,13 @@ def test_team_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Tournament__leaf", + "$ref": "#/$defs/Tournament_lloz5q_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf" + "$ref": "#/$defs/Reporter_flpkb7_leaf" }, {"type": "null"}, ], @@ -790,7 +792,7 @@ def test_team_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/tortoise__contrib__pydantic__creator__tests__testmodels__Address__leaf" + "$ref": "#/$defs/Address_diegkp_leaf" }, {"type": "null"}, ], @@ -811,7 +813,7 @@ def test_team_schema(self): "title": "Event", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Address__leaf": { + "Address_diegkp_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -827,7 +829,7 @@ def test_team_schema(self): "title": "Address", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Reporter__leaf": { + "Reporter_flpkb7_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -843,7 +845,7 @@ def test_team_schema(self): "title": "Reporter", "type": "object", }, - "tortoise__contrib__pydantic__creator__tests__testmodels__Tournament__leaf": { + "Tournament_lloz5q_leaf": { "additionalProperties": False, "properties": { "id": { @@ -889,7 +891,7 @@ def test_team_schema(self): "title": "Alias", }, "events": { - "items": {"$ref": "#/$defs/Event_vrm2bi"}, + "items": {"$ref": "#/$defs/Event_37gjqu_leaf"}, "title": "Events", "type": "array", }, @@ -1298,7 +1300,7 @@ def test_schema(self): self.Employee_Pydantic.model_json_schema(), { "$defs": { - "Employee_ibbaiu": { + "Employee_dkvdqq_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1309,7 +1311,7 @@ def test_schema(self): }, "name": {"maxLength": 50, "title": "Name", "type": "string"}, "talks_to": { - "items": {"$ref": "#/$defs/leaf"}, + "items": {"$ref": "#/$defs/Employee_pb36jg_leaf"}, "title": "Talks To", "type": "array", }, @@ -1326,7 +1328,7 @@ def test_schema(self): "title": "Manager Id", }, "team_members": { - "items": {"$ref": "#/$defs/leaf"}, + "items": {"$ref": "#/$defs/Employee_pb36jg_leaf"}, "title": "Team Members", "type": "array", }, @@ -1335,44 +1337,7 @@ def test_schema(self): "title": "Employee", "type": "object", }, - "Employee_obdn4z": { - "additionalProperties": False, - "properties": { - "id": { - "maximum": 2147483647, - "minimum": -2147483648, - "title": "Id", - "type": "integer", - }, - "name": {"maxLength": 50, "title": "Name", "type": "string"}, - "talks_to": { - "items": {"$ref": "#/$defs/leaf"}, - "title": "Talks To", - "type": "array", - }, - "manager_id": { - "anyOf": [ - { - "maximum": 2147483647, - "minimum": -2147483648, - "type": "integer", - }, - {"type": "null"}, - ], - "nullable": True, - "title": "Manager Id", - }, - "team_members": { - "items": {"$ref": "#/$defs/leaf"}, - "title": "Team Members", - "type": "array", - }, - }, - "required": ["id", "name", "talks_to", "manager_id", "team_members"], - "title": "Employee", - "type": "object", - }, - "leaf": { + 
"Employee_pb36jg_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1410,7 +1375,7 @@ def test_schema(self): }, "name": {"maxLength": 50, "title": "Name", "type": "string"}, "talks_to": { - "items": {"$ref": "#/$defs/Employee_obdn4z"}, + "items": {"$ref": "#/$defs/Employee_dkvdqq_leaf"}, "title": "Talks To", "type": "array", }, @@ -1423,7 +1388,7 @@ def test_schema(self): "title": "Manager Id", }, "team_members": { - "items": {"$ref": "#/$defs/Employee_ibbaiu"}, + "items": {"$ref": "#/$defs/Employee_dkvdqq_leaf"}, "title": "Team Members", "type": "array", }, @@ -1436,7 +1401,7 @@ def test_schema(self): async def test_serialisation(self): empp = await self.Employee_Pydantic.from_tortoise_orm(await Employee.get(name="Root")) - # print(empp.json(indent=4)) + # print(empp.model_dump_json(indent=4)) empdict = empp.model_dump() self.assertEqual( diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 357322cb5..19e49488f 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -4,7 +4,7 @@ from collections.abc import MutableMapping from dataclasses import dataclass, field from hashlib import sha3_224 -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Callable, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Callable, Union, TypeVar from pydantic import ConfigDict, Field, computed_field, create_model from pydantic._internal._decorators import PydanticDescriptorProxy @@ -12,9 +12,10 @@ from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel from tortoise.contrib.pydantic.utils import get_annotations -from tortoise.fields import IntField, JSONField, TextField, relational +from tortoise.fields import IntField, JSONField, TextField from tortoise.fields.base import FieldDescriptionBase -from tortoise.fields.relational import ForeignKeyFieldInstanceDescription +from tortoise.fields.relational import ForeignKeyFieldInstanceDescription, OneToOneFieldInstanceDescription, \ + BackwardOneToOneRelationDescription, BackwardFKRelationDescription, ManyToManyFieldInstanceDescription if TYPE_CHECKING: # pragma: nocoverage from tortoise.models import Model, ModelDescription @@ -54,6 +55,39 @@ class MyPydanticMeta: #: Allows user to specify custom config for generated model model_config: Optional[ConfigDict] = None + @classmethod + def from_pydantic_meta(cls, pydantic_meta: "PydanticMeta"): + default_meta = cls() + + def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: + return getattr(pydantic_meta, attr, default) + include = tuple(get_param_from_pydantic_meta("include", default_meta.include)) + exclude = tuple(get_param_from_pydantic_meta("exclude", default_meta.exclude)) + computed = tuple(get_param_from_pydantic_meta("computed", default_meta.computed)) + backward_relations = bool( + get_param_from_pydantic_meta("backward_relations_raw", default_meta.backward_relations) + ) + max_recursion = int(get_param_from_pydantic_meta("max_recursion", default_meta.max_recursion)) + allow_cycles = bool(get_param_from_pydantic_meta("allow_cycles", default_meta.allow_cycles)) + exclude_raw_fields = bool( + get_param_from_pydantic_meta("exclude_raw_fields", default_meta.exclude_raw_fields) + ) + sort_alphabetically = bool( + get_param_from_pydantic_meta("sort_alphabetically", default_meta.sort_alphabetically) + ) + model_config = get_param_from_pydantic_meta("model_config", default_meta.model_config) + return MyPydanticMeta( + include=include, + 
exclude=exclude, + computed=computed, + backward_relations=backward_relations, + max_recursion=max_recursion, + allow_cycles=allow_cycles, + exclude_raw_fields=exclude_raw_fields, + sort_alphabetically=sort_alphabetically, + model_config=model_config + ) + def construct_pydantic_meta( meta_default: MyPydanticMeta, @@ -111,7 +145,11 @@ def finalize_meta( exclude = tuple(exclude) + pydantic_meta.exclude computed = tuple(computed) + pydantic_meta.computed - _model_config = model_config if model_config else pydantic_meta.model_config + _model_config = ConfigDict() + if pydantic_meta.model_config: + _model_config.update(pydantic_meta.model_config) + if model_config: + _model_config.update(model_config) return MyPydanticMeta( include=include, @@ -214,8 +252,7 @@ def _pydantic_recursion_protector( return None level += 1 - - return PydanticModelCreator( + pmc = PydanticModelCreator( cls, exclude=exclude, include=include, @@ -225,7 +262,8 @@ def _pydantic_recursion_protector( allow_cycles=allow_cycles, sort_alphabetically=sort_alphabetically, _as_submodel=True, - ).create_pydantic_model() + ) + return pmc.create_pydantic_model() @dataclasses.dataclass @@ -235,6 +273,9 @@ class ComputedFieldDescription: description: Optional[str] +FieldDescriptionT = TypeVar('FieldDescriptionT', bound=FieldDescriptionBase) + + class FieldMap(MutableMapping[str, Union[FieldDescriptionBase | ComputedFieldDescription]]): def __init__(self, meta: MyPydanticMeta, pk_field_description: Optional[FieldDescriptionBase] = None): self._field_map: dict[str, Union[FieldDescriptionBase | ComputedFieldDescription]] = {} @@ -259,15 +300,15 @@ def __iter__(self): def __setitem__(self, __key, __value): self._field_map.__setitem__(__key, __value) - def sort_alphabetically(self): + def sort_alphabetically(self) -> None: self._field_map = {k: self._field_map[k] for k in sorted(self._field_map)} - def sort_definition_order(self, cls: "Type[Model]", computed: tuple[str, ...]): + def sort_definition_order(self, cls: "Type[Model]", computed: tuple[str, ...]) -> None: self._field_map = { k: self._field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in self._field_map } - def field_map_update(self, field_descriptions: list[FieldDescriptionBase], meta: MyPydanticMeta) -> None: + def field_map_update(self, field_descriptions: list[FieldDescriptionT], meta: MyPydanticMeta) -> None: for field_description in field_descriptions: name = field_description.name # Include or exclude field @@ -276,7 +317,7 @@ def field_map_update(self, field_descriptions: list[FieldDescriptionBase], meta: # Remove raw fields if isinstance(field_description, ForeignKeyFieldInstanceDescription): raw_field = field_description.raw_field - if meta.exclude_raw_fields and raw_field != self.pk_raw_field: + if raw_field is not None and meta.exclude_raw_fields and raw_field != self.pk_raw_field: self.pop(raw_field, None) self[name] = field_description @@ -352,18 +393,15 @@ def pydantic_queryset_creator( return model -PropertyT = Any - - class PydanticModelCreator: def __init__( self, cls: "Type[Model]", name: Optional[str] = None, - exclude: Optional[Tuple[str, ...]] = (), - include: Optional[Tuple[str, ...]] = (), - computed: Optional[Tuple[str, ...]] = (), - optional: Optional[Tuple[str, ...]] = (), + exclude: Optional[Tuple[str, ...]] = None, + include: Optional[Tuple[str, ...]] = None, + computed: Optional[Tuple[str, ...]] = None, + optional: Optional[Tuple[str, ...]] = None, allow_cycles: Optional[bool] = None, sort_alphabetically: Optional[bool] = None, 
exclude_readonly: bool = False, @@ -373,9 +411,9 @@ def __init__( module: str = __name__, _stack: tuple = (), _as_submodel: bool = False - ): + ) -> None: self._cls: "Type[Model]" = cls - self._stack: tuple[tuple["Type[Model]", str, int]] = tuple() # ((Type[Model], field_name, max_recursion),) + self._stack: tuple[tuple["Type[Model]", str, int], ...] = tuple[tuple["Type[Model]", str, int], ...]() # ((Type[Model], field_name, max_recursion),) self._is_default: bool = ( exclude is None and include is None @@ -386,10 +424,22 @@ def __init__( and meta_override is None and not exclude_readonly ) - - meta_from_class = cls.my_pydantic_meta \ - if not meta_override \ - else construct_pydantic_meta(cls.my_pydantic_meta, meta_override) + if exclude is None: + exclude = () + if include is None: + include = () + if computed is None: + computed = () + if optional is None: + optional = () + + old_meta = getattr(cls, "PydanticMeta", None) + if old_meta: + meta_from_class = MyPydanticMeta.from_pydantic_meta(old_meta) + else: + meta_from_class = cls.my_pydantic_meta + if meta_override: + meta_from_class = construct_pydantic_meta(meta_from_class, meta_override) self.meta = finalize_meta( meta_from_class, exclude, include, computed, allow_cycles, sort_alphabetically, model_config ) @@ -399,14 +449,13 @@ def __init__( self._fqname = cls.__module__ + "." + cls.__qualname__ self._name: str self._title: str - self._name, self._title = self.get_name(name) self.given_name = name self._as_submodel = _as_submodel self._annotations = get_annotations(cls) - self._pconfig: ConfigDict = self.initialize_pconfig() + self._pconfig: ConfigDict self._properties: Dict[str, Any] = dict() @@ -422,21 +471,16 @@ def __init__( self._stack = _stack - def get_name(self, name: Optional[str] = None) -> tuple[str, str]: - """ - return - @rtype: tuple - name, title - """ + def get_name(self) -> tuple[str, str]: # If arguments are specified (different from the defaults), we append a hash to the # class name, to make it unique # We don't check by stack, as cycles get explicitly renamed. # When called later, include is explicitly set, so fence passes. 
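        # When any non-default option is set, the generated model name is
        # "<module>.<qualname>:<hash>", where <hash> is the first 6 characters of a
        # b32-encoded sha3_224 over the creator options hashed below, so identical
        # configurations resolve to the same _MODEL_INDEX entry.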
- if name is not None: - return name, name + if self.given_name is not None: + return self.given_name, self.given_name hashval = ( f"{self._fqname};{self.meta.exclude};{self.meta.include};{self.meta.computed};" - f"{self._stack}:{self.meta.sort_alphabetically}:{self.meta.allow_cycles}:{self._exclude_read_only}" + f"{self.meta.sort_alphabetically}:{self.meta.allow_cycles}:{self._exclude_read_only}" ) postfix = ( ":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] @@ -445,7 +489,7 @@ def get_name(self, name: Optional[str] = None) -> tuple[str, str]: ) return self._fqname + postfix, self._cls.__name__ - def initialize_pconfig(self): + def initialize_pconfig(self) -> ConfigDict: pconfig: ConfigDict = PydanticModel.model_config.copy() if self.meta.model_config: pconfig.update(self.meta.model_config) @@ -455,12 +499,12 @@ def initialize_pconfig(self): pconfig["extra"] = 'forbid' return pconfig - def initialize_field_map(self): + def initialize_field_map(self) -> FieldMap: return FieldMap(self.meta) \ if self._exclude_read_only \ else FieldMap(self.meta, pk_field_description=self._model_description.pk_field) - def construct_field_map(self): + def construct_field_map(self) -> None: self._field_map.field_map_update(field_descriptions=self._model_description.data_fields, meta=self.meta) if not self._exclude_read_only: for field_descriptions in ( @@ -481,16 +525,18 @@ def construct_field_map(self): else: self._field_map.sort_definition_order(self._cls, self.meta.computed) - def create_pydantic_model(self): + def create_pydantic_model(self) -> Type[PydanticModel]: for field_name, field_description in self._field_map.items(): self.process_field(field_name, field_description) + self._name, self._title = self.get_name() if self._as_submodel and self._stack: self._name = f"{self._name}:leaf" if self._name in _MODEL_INDEX: return _MODEL_INDEX[self._name] + self._pconfig = self.initialize_pconfig() self._properties["model_config"] = self._pconfig model = create_model( self._name, @@ -511,12 +557,12 @@ def process_field( self, field_name: str, field_description: Union[FieldDescriptionBase, ComputedFieldDescription], - ): + ) -> None: json_schema_extra: Dict[str, Any] = {} fconfig: Dict[str, Any] = { "json_schema_extra": json_schema_extra, } - field_property: PropertyT = None + field_property: Optional[Any] = None is_to_one_relation: bool = False comment = "" if isinstance(field_description, FieldDescriptionBase): @@ -529,11 +575,13 @@ def process_field( self._properties[field_name] = field_property if field_name in self._properties and not isinstance(self._properties[field_name], tuple): fconfig["title"] = field_name.replace("_", " ").title() - description = comment or _br_it(field_description.docstring or field_description.description or "") + description = comment or _br_it(field_description.docstring or field_description.description or "") \ + if isinstance(field_description, FieldDescriptionBase) \ + else (comment or _br_it(field_description.description or "")) if description: fconfig["description"] = description ftype = self._properties[field_name] - if not isinstance(ftype, PydanticDescriptorProxy): + if not isinstance(ftype, PydanticDescriptorProxy) and isinstance(field_description, FieldDescriptionBase): if field_name in self._optional or (field_description.default is not None and not callable(field_description.default)): self._properties[field_name] = (ftype, Field(default=field_description.default, **fconfig)) else: @@ -554,15 +602,18 @@ def 
process_normal_field_description( field_description: FieldDescriptionBase, json_schema_extra: Dict[str, Any], fconfig: Dict[str, Any], - ) -> tuple[Optional[PropertyT], bool]: - if ( - field_description.field_type is relational.ForeignKeyFieldInstance - or field_description.field_type is relational.OneToOneFieldInstance - or field_description.field_type is relational.BackwardOneToOneRelation + ) -> tuple[Optional[Any], bool]: + if isinstance(field_description, (BackwardFKRelationDescription, ManyToManyFieldInstanceDescription)): + return self.process_many_field_relation(field_name, field_description), False + elif isinstance( + field_description, + ( + ForeignKeyFieldInstanceDescription, + OneToOneFieldInstanceDescription, + BackwardOneToOneRelationDescription + ) ): return self.process_single_field_relation(field_name, field_description, json_schema_extra), True - elif field_description.field_type in (relational.BackwardFKRelation, relational.ManyToManyFieldInstance): - return self.process_many_field_relation(field_name, field_description), False elif field_description.field_type is JSONField: return self.process_json_field_description(), False return self.process_data_field_description(field_name, field_description, json_schema_extra, fconfig), False @@ -570,9 +621,9 @@ def process_normal_field_description( def process_single_field_relation( self, field_name: str, - field_description: FieldDescriptionBase, + field_description: ForeignKeyFieldInstanceDescription | OneToOneFieldInstanceDescription | BackwardOneToOneRelationDescription, json_schema_extra: Dict[str, Any], - ) -> Optional[PropertyT]: + ) -> Optional[Type[PydanticModel]]: model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) if model: if field_description.nullable: @@ -581,19 +632,20 @@ def process_single_field_relation( model = Optional[model] # type: ignore return model + return None def process_many_field_relation( self, field_name: str, - field_description: FieldDescriptionBase, + field_description: BackwardFKRelationDescription | ManyToManyFieldInstanceDescription, ) -> Optional[Type[List[Type[PydanticModel]]]]: - model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) + model = self.get_submodel(field_description.python_type, field_name) if model: - return List[model] + return List[model] # type: ignore + return None - def process_json_field_description(self): - is_to_one_relation = False - return Any, is_to_one_relation + def process_json_field_description(self) -> Any: + return Any def process_data_field_description( self, @@ -601,7 +653,7 @@ def process_data_field_description( field_description: FieldDescriptionBase, json_schema_extra: Dict[str, Any], fconfig: Dict[str, Any], - ) -> Optional[PropertyT]: + ) -> Optional[Any]: annotation = self._annotations.get(field_name, None) if "readOnly" in field_description.constraints: json_schema_extra["readOnly"] = field_description.constraints["readOnly"] @@ -611,14 +663,15 @@ def process_data_field_description( if field_description.nullable: json_schema_extra["nullable"] = True if field_name in self._optional or field_description.default is not None or field_description.nullable: - ptype = Optional[ptype] + ptype = Optional[ptype] # type: ignore if not (self._exclude_read_only and json_schema_extra.get("readOnly") is True): return annotation or ptype + return None def process_computed_field_description( self, field_description: ComputedFieldDescription, - ): + ) -> Optional[Any]: func = 
field_description.function annotation = get_annotations(self._cls, func).get("return", None) comment = _cleandoc(func) @@ -626,8 +679,9 @@ def process_computed_field_description( c_f = computed_field(return_type=annotation, description=comment) ret = c_f(func) return ret + return None - def get_submodel(self, _model: "Type[Model]", field_name: str) -> Optional[Type[PydanticModel]]: + def get_submodel(self, _model: Optional["Type[Model]"], field_name: str) -> Optional[Type[PydanticModel]]: """Get Pydantic model for the submodel""" if _model: diff --git a/tortoise/fields/base.py b/tortoise/fields/base.py index c8529d8a9..a49158055 100644 --- a/tortoise/fields/base.py +++ b/tortoise/fields/base.py @@ -464,7 +464,7 @@ def default_name(default: Any) -> Optional[Union[int, float, str, bool]]: return desc - def describe_by_dataclass(self): + def describe_by_dataclass(self) -> FieldDescriptionBase: field_type = getattr(self, "related_model", self.field_type) return FieldDescription( name=self.model_field_name, diff --git a/tortoise/fields/relational.py b/tortoise/fields/relational.py index eb01037fd..5c4d3fc39 100644 --- a/tortoise/fields/relational.py +++ b/tortoise/fields/relational.py @@ -244,6 +244,7 @@ async def _remove_or_clear( @dataclasses.dataclass class RelationalFieldDescription(FieldDescriptionBase): db_constraint: bool = False + python_type: Optional["Type[Model]"] = None class RelationalField(Field[MODEL]): @@ -284,7 +285,19 @@ def describe(self, serializable: bool) -> dict: def describe_by_dataclass(self): return RelationalFieldDescription( - **self.describe(False) + name=self.model_field_name, + field_type=self.__class__, + python_type=self.related_model, + generated=self.generated, + nullable=self.null, + unique=self.unique, + indexed=self.index or self.unique, + default=self.default, + description=self.description, + docstring=self.docstring, + constraints=self.constraints, + db_field_types=self.get_db_field_types() if self.has_db_field else None, + db_constraint=self.db_constraint ) @classmethod @@ -296,7 +309,7 @@ def validate_model_name(cls, model_name: str) -> None: @dataclasses.dataclass class ForeignKeyFieldInstanceDescription(RelationalFieldDescription): - raw_field: str = "" + raw_field: str | None = "" on_delete: str = "" @@ -327,7 +340,29 @@ def describe(self, serializable: bool) -> dict: return desc def describe_by_dataclass(self): - return ForeignKeyFieldInstanceDescription(**self.describe(False)) + field_type = getattr(self, "related_model", self.field_type) + return ForeignKeyFieldInstanceDescription( + name=self.model_field_name, + field_type=self.__class__, + python_type=field_type, + generated=self.generated, + nullable=self.null, + unique=self.unique, + indexed=self.index or self.unique, + default=self.default, + description=self.description, + docstring=self.docstring, + constraints=self.constraints, + db_field_types=self.get_db_field_types() if self.has_db_field else None, + db_constraint=self.db_constraint, + raw_field=self.source_field, + on_delete=str(self.on_delete), + ) + + +@dataclasses.dataclass +class BackwardFKRelationDescription(ForeignKeyFieldInstanceDescription): + ... 
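The description objects above are plain dataclasses, so callers can inspect relational metadata without going through the ``describe(serializable=...)`` dict; a minimal usage sketch (the ``Event``/``tournament`` names are hypothetical, not part of this patch):

# Sketch only: assumes a hypothetical Event model with a ForeignKeyField named "tournament".
desc = Event._meta.fields_map["tournament"].describe_by_dataclass()
assert isinstance(desc, ForeignKeyFieldInstanceDescription)
desc.python_type   # the related model class itself, not a stringified "app.Model" path
desc.raw_field     # the raw source column, e.g. "tournament_id"
desc.on_delete     # stringified on_delete action, e.g. "CASCADE"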
class BackwardFKRelation(RelationalField[MODEL]): @@ -345,6 +380,28 @@ def __init__( self.relation_source_field: str = relation_source_field self.description: Optional[str] = description + def describe_by_dataclass(self): + return BackwardFKRelationDescription( + name=self.model_field_name, + field_type=self.__class__, + python_type=self.related_model, + generated=self.generated, + nullable=self.null, + unique=self.unique, + indexed=self.index or self.unique, + default=self.default, + description=self.description, + docstring=self.docstring, + constraints=self.constraints, + db_field_types=self.get_db_field_types() if self.has_db_field else None, + db_constraint=self.db_constraint + ) + + +@dataclasses.dataclass +class OneToOneFieldInstanceDescription(ForeignKeyFieldInstanceDescription): + ... + class OneToOneFieldInstance(ForeignKeyFieldInstance[MODEL]): def __init__( @@ -357,9 +414,51 @@ def __init__( self.validate_model_name(model_name) super().__init__(model_name, related_name, on_delete, unique=True, **kwargs) + def describe_by_dataclass(self): + field_type = getattr(self, "related_model", self.field_type) + return OneToOneFieldInstanceDescription( + name=self.model_field_name, + field_type=self.__class__, + python_type=field_type, + generated=self.generated, + nullable=self.null, + unique=self.unique, + indexed=self.index or self.unique, + default=self.default, + description=self.description, + docstring=self.docstring, + constraints=self.constraints, + db_field_types=self.get_db_field_types() if self.has_db_field else None, + db_constraint=self.db_constraint, + raw_field=self.source_field, + on_delete=str(self.on_delete), + ) + + +@dataclasses.dataclass +class BackwardOneToOneRelationDescription(ForeignKeyFieldInstanceDescription): + ... + class BackwardOneToOneRelation(BackwardFKRelation[MODEL]): - pass + def describe_by_dataclass(self): + field_type = getattr(self, "related_model", self.field_type) + return BackwardOneToOneRelationDescription( + name=self.model_field_name, + field_type=self.__class__, + python_type=field_type, + generated=self.generated, + nullable=self.null, + unique=self.unique, + indexed=self.index or self.unique, + default=self.default, + description=self.description, + docstring=self.docstring, + constraints=self.constraints, + db_field_types=self.get_db_field_types() if self.has_db_field else None, + db_constraint=self.db_constraint, + raw_field=self.source_field, + ) @dataclasses.dataclass @@ -413,7 +512,28 @@ def describe(self, serializable: bool) -> dict: return desc def describe_by_dataclass(self): - return ManyToManyFieldInstanceDescription(**self.describe(False)) + return ManyToManyFieldInstanceDescription( + name=self.model_field_name, + field_type=self.__class__, + python_type=self.related_model, + generated=self.generated, + nullable=self.null, + unique=self.unique, + indexed=self.index or self.unique, + default=self.default, + description=self.description, + docstring=self.docstring, + constraints=self.constraints, + db_field_types=self.get_db_field_types() if self.has_db_field else None, + db_constraint=self.db_constraint, + model_name=self.model_name, + related_name=self.related_name, + forward_key=self.forward_key, + backward_key=self.backward_key, + through=self.through, + on_delete=str(self.on_delete), + _generated=self._generated, + ) @overload diff --git a/tortoise/models.py b/tortoise/models.py index c1923af09..6072d6d61 100644 --- a/tortoise/models.py +++ b/tortoise/models.py @@ -38,7 +38,7 @@ OperationalError, ParamsError, ) -from 
tortoise.fields.base import Field, FieldDescription +from tortoise.fields.base import Field, FieldDescription, FieldDescriptionBase from tortoise.fields.data import IntField from tortoise.fields.relational import ( BackwardFKRelation, @@ -650,18 +650,18 @@ class ModelDescription: name: str table: str abstract: bool - description: str - pk_field: FieldDescription + description: Optional[str] + pk_field: FieldDescriptionBase app: Optional[str] = None docstring: Optional[str] = None - unique_together: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=list) - indexes: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=list) - data_fields: list[FieldDescription] = dataclasses.field(default_factory=list) - fk_fields: list[FieldDescription] = dataclasses.field(default_factory=list) - backward_fk_fields: list[FieldDescription] = dataclasses.field(default_factory=list) - o2o_fields: list[FieldDescription] = dataclasses.field(default_factory=list) - backward_o2o_fields: list[FieldDescription] = dataclasses.field(default_factory=list) - m2m_fields: list[FieldDescription] = dataclasses.field(default_factory=list) + unique_together: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) + indexes: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) + data_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + fk_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + backward_fk_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + o2o_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + backward_o2o_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + m2m_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) class Model(metaclass=ModelMeta): @@ -1513,7 +1513,7 @@ def describe(cls, serializable: bool = True) -> dict: } @classmethod - def describe_by_dataclass(cls): + def describe_by_dataclass(cls) -> ModelDescription: return ModelDescription( name=cls._meta.full_name, app=cls._meta.app, @@ -1521,8 +1521,8 @@ def describe_by_dataclass(cls): abstract=cls._meta.abstract, description=cls._meta.table_description or None, docstring=inspect.cleandoc(cls.__doc__ or "") or None, - unique_together=cls._meta.unique_together or [], - indexes=cls._meta.indexes or [], + unique_together=cls._meta.unique_together or (), + indexes=cls._meta.indexes or (), pk_field=cls._meta.fields_map[cls._meta.pk_attr].describe_by_dataclass(), data_fields=[ field.describe_by_dataclass() From 81a61243f674d2aab738e34b75ba4c1d85f268f5 Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Fri, 25 Oct 2024 14:14:12 +0200 Subject: [PATCH 09/27] unused import statements... 
--- tests/contrib/test_pydantic.py | 1 - tortoise/contrib/pydantic/creator.py | 1 - tortoise/fields/relational.py | 2 +- tortoise/models.py | 2 +- 4 files changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index b8b3573a3..2a6199da6 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1,5 +1,4 @@ import copy -import json import pytest from pydantic import ConfigDict, ValidationError diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 19e49488f..b38d3fd14 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -8,7 +8,6 @@ from pydantic import ConfigDict, Field, computed_field, create_model from pydantic._internal._decorators import PydanticDescriptorProxy -from pydantic.fields import PropertyT from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel from tortoise.contrib.pydantic.utils import get_annotations diff --git a/tortoise/fields/relational.py b/tortoise/fields/relational.py index 5c4d3fc39..4d7fd77dd 100644 --- a/tortoise/fields/relational.py +++ b/tortoise/fields/relational.py @@ -19,7 +19,7 @@ from typing_extensions import Literal from tortoise.exceptions import ConfigurationError, NoValuesFetched, OperationalError -from tortoise.fields.base import CASCADE, SET_NULL, Field, OnDelete, FieldDescription, FieldDescriptionBase +from tortoise.fields.base import CASCADE, SET_NULL, Field, OnDelete, FieldDescriptionBase if TYPE_CHECKING: # pragma: nocoverage from tortoise.backends.base.client import BaseDBAsyncClient diff --git a/tortoise/models.py b/tortoise/models.py index 6072d6d61..07618aa1a 100644 --- a/tortoise/models.py +++ b/tortoise/models.py @@ -38,7 +38,7 @@ OperationalError, ParamsError, ) -from tortoise.fields.base import Field, FieldDescription, FieldDescriptionBase +from tortoise.fields.base import Field, FieldDescriptionBase from tortoise.fields.data import IntField from tortoise.fields.relational import ( BackwardFKRelation, From 24c27d81c5b96f6aaf9754b3c3546421b36ddbc2 Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Fri, 25 Oct 2024 14:31:43 +0200 Subject: [PATCH 10/27] test_early_init.py: naming of $defs changed --- tests/test_early_init.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_early_init.py b/tests/test_early_init.py index 72d2e4314..7068ac9c0 100644 --- a/tests/test_early_init.py +++ b/tests/test_early_init.py @@ -167,7 +167,7 @@ def test_early_init(self): Event_Pydantic.model_json_schema(), { "$defs": { - "leaf": { + "Tournament_pf77nd_leaf": { "additionalProperties": False, "properties": { "id": { @@ -211,7 +211,7 @@ def test_early_init(self): "type": "string", }, "tournament": { - "anyOf": [{"$ref": "#/$defs/leaf"}, {"type": "null"}], + "anyOf": [{"$ref": "#/$defs/Tournament_pf77nd_leaf"}, {"type": "null"}], "nullable": True, "title": "Tournament", }, From 702e45d2e23ba1e361e73efa3ee43486e23b572c Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Mon, 28 Oct 2024 11:19:38 +0100 Subject: [PATCH 11/27] move dataclasses to a dedicated place --- tortoise/contrib/pydantic/creator.py | 10 +- tortoise/contrib/pydantic/dataclasses.py | 264 +++++++++++++++++++++++ tortoise/fields/base.py | 40 ---- tortoise/fields/relational.py | 158 +------------- tortoise/models.py | 66 +----- 5 files changed, 272 insertions(+), 266 deletions(-) create mode 100644 tortoise/contrib/pydantic/dataclasses.py diff --git 
a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index b38d3fd14..4c8199797 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -12,12 +12,12 @@ from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel from tortoise.contrib.pydantic.utils import get_annotations from tortoise.fields import IntField, JSONField, TextField -from tortoise.fields.base import FieldDescriptionBase -from tortoise.fields.relational import ForeignKeyFieldInstanceDescription, OneToOneFieldInstanceDescription, \ - BackwardOneToOneRelationDescription, BackwardFKRelationDescription, ManyToManyFieldInstanceDescription +from tortoise.contrib.pydantic.dataclasses import FieldDescriptionBase, ForeignKeyFieldInstanceDescription, \ + OneToOneFieldInstanceDescription, BackwardOneToOneRelationDescription, BackwardFKRelationDescription, \ + ManyToManyFieldInstanceDescription, describe_model_by_dataclass, ModelDescription if TYPE_CHECKING: # pragma: nocoverage - from tortoise.models import Model, ModelDescription + from tortoise.models import Model _MODEL_INDEX: Dict[str, Type[PydanticModel]] = {} @@ -458,7 +458,7 @@ def __init__( self._properties: Dict[str, Any] = dict() - self._model_description: ModelDescription = cls.describe_by_dataclass() + self._model_description: ModelDescription = describe_model_by_dataclass(cls) self._field_map: FieldMap = self.initialize_field_map() self.construct_field_map() diff --git a/tortoise/contrib/pydantic/dataclasses.py b/tortoise/contrib/pydantic/dataclasses.py new file mode 100644 index 000000000..b00c624f7 --- /dev/null +++ b/tortoise/contrib/pydantic/dataclasses.py @@ -0,0 +1,264 @@ +import dataclasses +import inspect +from typing import Type, Optional, Any, TYPE_CHECKING + +from tortoise.fields import Field +from tortoise.fields.relational import RelationalField, ForeignKeyFieldInstance, ManyToManyFieldInstance, \ + BackwardOneToOneRelation, BackwardFKRelation, OneToOneFieldInstance, MODEL + +if TYPE_CHECKING: # pragma: nocoverage + from tortoise.models import Model + + +@dataclasses.dataclass +class FieldDescriptionBase: + name: str + field_type: Type[Field] + generated: bool + nullable: bool + unique: bool + indexed: bool + constraints: dict + python_type: Optional[type] = None + default: Optional[Any] = None + description: Optional[str] = None + docstring: Optional[str] = None + db_field_types: Optional[dict[str, str]] = None + + +@dataclasses.dataclass +class FieldDescription(FieldDescriptionBase): + db_column: str = "" + + +@dataclasses.dataclass +class RelationalFieldDescription(FieldDescriptionBase): + db_constraint: bool = False + python_type: Optional[Type["Model"]] = None + + +@dataclasses.dataclass +class ForeignKeyFieldInstanceDescription(RelationalFieldDescription): + raw_field: Optional[str] = "" + on_delete: str = "" + + +@dataclasses.dataclass +class BackwardFKRelationDescription(ForeignKeyFieldInstanceDescription): + ... + + +@dataclasses.dataclass +class OneToOneFieldInstanceDescription(ForeignKeyFieldInstanceDescription): + ... + + +@dataclasses.dataclass +class BackwardOneToOneRelationDescription(ForeignKeyFieldInstanceDescription): + ... 
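Because the one-to-one and backward one-to-one descriptions subclass ForeignKeyFieldInstanceDescription, a single isinstance check covers every to-one relation; a rough sketch of the dispatch the creator relies on (illustration only, not code from this patch):

def is_single_relation(desc: FieldDescriptionBase) -> bool:
    # To-one relations become a nested submodel rather than a list of submodels.
    return isinstance(
        desc,
        (
            ForeignKeyFieldInstanceDescription,
            OneToOneFieldInstanceDescription,
            BackwardOneToOneRelationDescription,
        ),
    )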
+ + +@dataclasses.dataclass +class ManyToManyFieldInstanceDescription(RelationalFieldDescription): + model_name: str = "" + related_name: str = "" + forward_key: str = "" + backward_key: str = "" + through: str = "" + on_delete: str = "" + _generated: bool = False + + +@dataclasses.dataclass +class ModelDescription: + name: str + table: str + abstract: bool + description: Optional[str] + pk_field: FieldDescriptionBase + app: Optional[str] = None + docstring: Optional[str] = None + unique_together: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) + indexes: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) + data_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + fk_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + backward_fk_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + o2o_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + backward_o2o_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + m2m_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + + +def describe_model_by_dataclass(cls: Type[MODEL]) -> ModelDescription: + return ModelDescription( + name=cls._meta.full_name, + app=cls._meta.app, + table=cls._meta.db_table, + abstract=cls._meta.abstract, + description=cls._meta.table_description or None, + docstring=inspect.cleandoc(cls.__doc__ or "") or None, + unique_together=cls._meta.unique_together or (), + indexes=cls._meta.indexes or (), + pk_field=describe_field_by_dataclass(cls._meta.fields_map[cls._meta.pk_attr]), + data_fields=[ + describe_field_by_dataclass(field) + for name, field in cls._meta.fields_map.items() + if name != cls._meta.pk_attr and name in (cls._meta.fields - cls._meta.fetch_fields) + ], + fk_fields=[ + describe_field_by_dataclass(field) + for name, field in cls._meta.fields_map.items() + if name in cls._meta.fk_fields + ], + backward_fk_fields=[ + describe_field_by_dataclass(field) + for name, field in cls._meta.fields_map.items() + if name in cls._meta.backward_fk_fields + ], + o2o_fields=[ + describe_field_by_dataclass(field) + for name, field in cls._meta.fields_map.items() + if name in cls._meta.o2o_fields + ], + backward_o2o_fields=[ + describe_field_by_dataclass(field) + for name, field in cls._meta.fields_map.items() + if name in cls._meta.backward_o2o_fields + ], + m2m_fields=[ + describe_field_by_dataclass(field) + for name, field in cls._meta.fields_map.items() + if name in cls._meta.m2m_fields + ], + ) + + +def describe_field_by_dataclass(field: Field) -> FieldDescriptionBase: + field_type = getattr(field, "related_model", field.field_type) + if isinstance(field, RelationalField): + if isinstance(field, ForeignKeyFieldInstance): + # ForeignKeyFieldInstance -> RelationalField + if isinstance(field, OneToOneFieldInstance): + # OneToOneFieldInstance -> ForeignKeyFieldInstance -> RelationalField + return OneToOneFieldInstanceDescription( + name=field.model_field_name, + field_type=field.__class__, + python_type=field_type, + generated=field.generated, + nullable=field.null, + unique=field.unique, + indexed=field.index or field.unique, + default=field.default, + description=field.description, + docstring=field.docstring, + constraints=field.constraints, + db_field_types=field.get_db_field_types() if field.has_db_field else None, + db_constraint=field.db_constraint, + raw_field=field.source_field, + on_delete=str(field.on_delete), + ) + return 
ForeignKeyFieldInstanceDescription( + name=field.model_field_name, + field_type=field.__class__, + python_type=field_type, + generated=field.generated, + nullable=field.null, + unique=field.unique, + indexed=field.index or field.unique, + default=field.default, + description=field.description, + docstring=field.docstring, + constraints=field.constraints, + db_field_types=field.get_db_field_types() if field.has_db_field else None, + db_constraint=field.db_constraint, + raw_field=field.source_field, + on_delete=str(field.on_delete), + ) + if isinstance(field, BackwardFKRelation): + # BackwardFKRelation -> RelationalField + if isinstance(field, BackwardOneToOneRelation): + # BackwardOneToOneRelation -> BackwardFKRelation -> RelationalField + return BackwardOneToOneRelationDescription( + name=field.model_field_name, + field_type=field.__class__, + python_type=field_type, + generated=field.generated, + nullable=field.null, + unique=field.unique, + indexed=field.index or field.unique, + default=field.default, + description=field.description, + docstring=field.docstring, + constraints=field.constraints, + db_field_types=field.get_db_field_types() if field.has_db_field else None, + db_constraint=field.db_constraint, + raw_field=field.source_field, + ) + return BackwardFKRelationDescription( + name=field.model_field_name, + field_type=field.__class__, + python_type=field.related_model, + generated=field.generated, + nullable=field.null, + unique=field.unique, + indexed=field.index or field.unique, + default=field.default, + description=field.description, + docstring=field.docstring, + constraints=field.constraints, + db_field_types=field.get_db_field_types() if field.has_db_field else None, + db_constraint=field.db_constraint + ) + if isinstance(field, ManyToManyFieldInstance): + # ManyToManyFieldInstance -> RelationalField + return ManyToManyFieldInstanceDescription( + name=field.model_field_name, + field_type=field.__class__, + python_type=field.related_model, + generated=field.generated, + nullable=field.null, + unique=field.unique, + indexed=field.index or field.unique, + default=field.default, + description=field.description, + docstring=field.docstring, + constraints=field.constraints, + db_field_types=field.get_db_field_types() if field.has_db_field else None, + db_constraint=field.db_constraint, + model_name=field.model_name, + related_name=field.related_name, + forward_key=field.forward_key, + backward_key=field.backward_key, + through=field.through, + on_delete=str(field.on_delete), + _generated=field._generated, + ) + return RelationalFieldDescription( + name=field.model_field_name, + field_type=field.__class__, + python_type=field.related_model, + generated=field.generated, + nullable=field.null, + unique=field.unique, + indexed=field.index or field.unique, + default=field.default, + description=field.description, + docstring=field.docstring, + constraints=field.constraints, + db_field_types=field.get_db_field_types() if field.has_db_field else None, + db_constraint=field.db_constraint + ) + return FieldDescription( + name=field.model_field_name, + field_type=field.__class__, + db_column=field.source_field or field.model_field_name, + python_type=field.field_type, + generated=field.generated, + nullable=field.null, + unique=field.unique, + indexed=field.index or field.unique, + default=field.default, + description=field.description, + docstring=field.docstring, + constraints=field.constraints, + db_field_types=field.get_db_field_types() if field.has_db_field else None + ) diff --git 
a/tortoise/fields/base.py b/tortoise/fields/base.py index a49158055..1887e63ae 100644 --- a/tortoise/fields/base.py +++ b/tortoise/fields/base.py @@ -1,4 +1,3 @@ -import dataclasses import sys import warnings from enum import Enum @@ -63,27 +62,6 @@ def __new__(mcs, name: str, bases: Tuple[Type, ...], attrs: dict): return type.__new__(mcs, name, bases, attrs) -@dataclasses.dataclass -class FieldDescriptionBase: - name: str - field_type: Type["Field"] - generated: bool - nullable: bool - unique: bool - indexed: bool - constraints: dict - python_type: Optional[type] = None - default: Optional[Any] = None - description: Optional[str] = None - docstring: Optional[str] = None - db_field_types: Optional[dict[str, str]] = None - - -@dataclasses.dataclass -class FieldDescription(FieldDescriptionBase): - db_column: str = "" - - class Field(Generic[VALUE], metaclass=_FieldMeta): """ Base Field type. @@ -463,21 +441,3 @@ def default_name(default: Any) -> Optional[Union[int, float, str, bool]]: desc["db_field_types"] = self.get_db_field_types() return desc - - def describe_by_dataclass(self) -> FieldDescriptionBase: - field_type = getattr(self, "related_model", self.field_type) - return FieldDescription( - name=self.model_field_name, - field_type=self.__class__, - db_column=self.source_field or self.model_field_name, - python_type=field_type, - generated=self.generated, - nullable=self.null, - unique=self.unique, - indexed=self.index or self.unique, - default=self.default, - description=self.description, - docstring=self.docstring, - constraints=self.constraints, - db_field_types=self.get_db_field_types() if self.has_db_field else None - ) diff --git a/tortoise/fields/relational.py b/tortoise/fields/relational.py index 4d7fd77dd..5d28e55fe 100644 --- a/tortoise/fields/relational.py +++ b/tortoise/fields/relational.py @@ -1,4 +1,3 @@ -import dataclasses from typing import ( TYPE_CHECKING, Any, @@ -19,7 +18,7 @@ from typing_extensions import Literal from tortoise.exceptions import ConfigurationError, NoValuesFetched, OperationalError -from tortoise.fields.base import CASCADE, SET_NULL, Field, OnDelete, FieldDescriptionBase +from tortoise.fields.base import CASCADE, SET_NULL, Field, OnDelete if TYPE_CHECKING: # pragma: nocoverage from tortoise.backends.base.client import BaseDBAsyncClient @@ -241,12 +240,6 @@ async def _remove_or_clear( await db.execute_query(str(query)) -@dataclasses.dataclass -class RelationalFieldDescription(FieldDescriptionBase): - db_constraint: bool = False - python_type: Optional["Type[Model]"] = None - - class RelationalField(Field[MODEL]): has_db_field = False @@ -283,23 +276,6 @@ def describe(self, serializable: bool) -> dict: del desc["db_column"] return desc - def describe_by_dataclass(self): - return RelationalFieldDescription( - name=self.model_field_name, - field_type=self.__class__, - python_type=self.related_model, - generated=self.generated, - nullable=self.null, - unique=self.unique, - indexed=self.index or self.unique, - default=self.default, - description=self.description, - docstring=self.docstring, - constraints=self.constraints, - db_field_types=self.get_db_field_types() if self.has_db_field else None, - db_constraint=self.db_constraint - ) - @classmethod def validate_model_name(cls, model_name: str) -> None: if len(model_name.split(".")) != 2: @@ -307,12 +283,6 @@ def validate_model_name(cls, model_name: str) -> None: raise ConfigurationError(f'{field_type} accepts model name in format "app.Model"') -@dataclasses.dataclass -class 
ForeignKeyFieldInstanceDescription(RelationalFieldDescription): - raw_field: str | None = "" - on_delete: str = "" - - class ForeignKeyFieldInstance(RelationalField[MODEL]): def __init__( self, @@ -339,31 +309,6 @@ def describe(self, serializable: bool) -> dict: desc["on_delete"] = str(self.on_delete) return desc - def describe_by_dataclass(self): - field_type = getattr(self, "related_model", self.field_type) - return ForeignKeyFieldInstanceDescription( - name=self.model_field_name, - field_type=self.__class__, - python_type=field_type, - generated=self.generated, - nullable=self.null, - unique=self.unique, - indexed=self.index or self.unique, - default=self.default, - description=self.description, - docstring=self.docstring, - constraints=self.constraints, - db_field_types=self.get_db_field_types() if self.has_db_field else None, - db_constraint=self.db_constraint, - raw_field=self.source_field, - on_delete=str(self.on_delete), - ) - - -@dataclasses.dataclass -class BackwardFKRelationDescription(ForeignKeyFieldInstanceDescription): - ... - class BackwardFKRelation(RelationalField[MODEL]): def __init__( @@ -380,28 +325,6 @@ def __init__( self.relation_source_field: str = relation_source_field self.description: Optional[str] = description - def describe_by_dataclass(self): - return BackwardFKRelationDescription( - name=self.model_field_name, - field_type=self.__class__, - python_type=self.related_model, - generated=self.generated, - nullable=self.null, - unique=self.unique, - indexed=self.index or self.unique, - default=self.default, - description=self.description, - docstring=self.docstring, - constraints=self.constraints, - db_field_types=self.get_db_field_types() if self.has_db_field else None, - db_constraint=self.db_constraint - ) - - -@dataclasses.dataclass -class OneToOneFieldInstanceDescription(ForeignKeyFieldInstanceDescription): - ... - class OneToOneFieldInstance(ForeignKeyFieldInstance[MODEL]): def __init__( @@ -414,62 +337,9 @@ def __init__( self.validate_model_name(model_name) super().__init__(model_name, related_name, on_delete, unique=True, **kwargs) - def describe_by_dataclass(self): - field_type = getattr(self, "related_model", self.field_type) - return OneToOneFieldInstanceDescription( - name=self.model_field_name, - field_type=self.__class__, - python_type=field_type, - generated=self.generated, - nullable=self.null, - unique=self.unique, - indexed=self.index or self.unique, - default=self.default, - description=self.description, - docstring=self.docstring, - constraints=self.constraints, - db_field_types=self.get_db_field_types() if self.has_db_field else None, - db_constraint=self.db_constraint, - raw_field=self.source_field, - on_delete=str(self.on_delete), - ) - - -@dataclasses.dataclass -class BackwardOneToOneRelationDescription(ForeignKeyFieldInstanceDescription): - ... 
- class BackwardOneToOneRelation(BackwardFKRelation[MODEL]): - def describe_by_dataclass(self): - field_type = getattr(self, "related_model", self.field_type) - return BackwardOneToOneRelationDescription( - name=self.model_field_name, - field_type=self.__class__, - python_type=field_type, - generated=self.generated, - nullable=self.null, - unique=self.unique, - indexed=self.index or self.unique, - default=self.default, - description=self.description, - docstring=self.docstring, - constraints=self.constraints, - db_field_types=self.get_db_field_types() if self.has_db_field else None, - db_constraint=self.db_constraint, - raw_field=self.source_field, - ) - - -@dataclasses.dataclass -class ManyToManyFieldInstanceDescription(RelationalFieldDescription): - model_name: str = "" - related_name: str = "" - forward_key: str = "" - backward_key: str = "" - through: str = "" - on_delete: str = "" - _generated: bool = False + pass class ManyToManyFieldInstance(RelationalField[MODEL]): @@ -511,30 +381,6 @@ def describe(self, serializable: bool) -> dict: desc["_generated"] = self._generated return desc - def describe_by_dataclass(self): - return ManyToManyFieldInstanceDescription( - name=self.model_field_name, - field_type=self.__class__, - python_type=self.related_model, - generated=self.generated, - nullable=self.null, - unique=self.unique, - indexed=self.index or self.unique, - default=self.default, - description=self.description, - docstring=self.docstring, - constraints=self.constraints, - db_field_types=self.get_db_field_types() if self.has_db_field else None, - db_constraint=self.db_constraint, - model_name=self.model_name, - related_name=self.related_name, - forward_key=self.forward_key, - backward_key=self.backward_key, - through=self.through, - on_delete=str(self.on_delete), - _generated=self._generated, - ) - @overload def OneToOneField( diff --git a/tortoise/models.py b/tortoise/models.py index 07618aa1a..f0c32fd00 100644 --- a/tortoise/models.py +++ b/tortoise/models.py @@ -1,5 +1,4 @@ import asyncio -import dataclasses import inspect import re from copy import copy, deepcopy @@ -38,7 +37,7 @@ OperationalError, ParamsError, ) -from tortoise.fields.base import Field, FieldDescriptionBase +from tortoise.fields.base import Field from tortoise.fields.data import IntField from tortoise.fields.relational import ( BackwardFKRelation, @@ -645,25 +644,6 @@ def __getitem__(cls: Type[MODEL], key: Any) -> QuerySetSingle[MODEL]: # type: i return cls._getbypk(key) # type: ignore -@dataclasses.dataclass -class ModelDescription: - name: str - table: str - abstract: bool - description: Optional[str] - pk_field: FieldDescriptionBase - app: Optional[str] = None - docstring: Optional[str] = None - unique_together: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) - indexes: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) - data_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - fk_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - backward_fk_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - o2o_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - backward_o2o_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - m2m_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - - class Model(metaclass=ModelMeta): """ Base class for all Tortoise ORM Models. 
@@ -1512,50 +1492,6 @@ def describe(cls, serializable: bool = True) -> dict: ], } - @classmethod - def describe_by_dataclass(cls) -> ModelDescription: - return ModelDescription( - name=cls._meta.full_name, - app=cls._meta.app, - table=cls._meta.db_table, - abstract=cls._meta.abstract, - description=cls._meta.table_description or None, - docstring=inspect.cleandoc(cls.__doc__ or "") or None, - unique_together=cls._meta.unique_together or (), - indexes=cls._meta.indexes or (), - pk_field=cls._meta.fields_map[cls._meta.pk_attr].describe_by_dataclass(), - data_fields=[ - field.describe_by_dataclass() - for name, field in cls._meta.fields_map.items() - if name != cls._meta.pk_attr and name in (cls._meta.fields - cls._meta.fetch_fields) - ], - fk_fields=[ - field.describe_by_dataclass() - for name, field in cls._meta.fields_map.items() - if name in cls._meta.fk_fields - ], - backward_fk_fields=[ - field.describe_by_dataclass() - for name, field in cls._meta.fields_map.items() - if name in cls._meta.backward_fk_fields - ], - o2o_fields=[ - field.describe_by_dataclass() - for name, field in cls._meta.fields_map.items() - if name in cls._meta.o2o_fields - ], - backward_o2o_fields=[ - field.describe_by_dataclass() - for name, field in cls._meta.fields_map.items() - if name in cls._meta.backward_o2o_fields - ], - m2m_fields=[ - field.describe_by_dataclass() - for name, field in cls._meta.fields_map.items() - if name in cls._meta.m2m_fields - ], - ) - def __await__(self: MODEL) -> Generator[Any, None, MODEL]: async def _self() -> MODEL: return self From 01698a394e5332bfc8f09c773f568073b0509e1e Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Mon, 28 Oct 2024 11:42:00 +0100 Subject: [PATCH 12/27] python 3.8 and 3.9: typing --- tortoise/contrib/pydantic/creator.py | 26 ++++++++++++------------ tortoise/contrib/pydantic/dataclasses.py | 22 ++++++++++---------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 4c8199797..3e742fa97 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -1,7 +1,7 @@ import dataclasses import inspect from base64 import b32encode -from collections.abc import MutableMapping +from typing import MutableMapping from dataclasses import dataclass, field from hashlib import sha3_224 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Callable, Union, TypeVar @@ -221,7 +221,7 @@ def _cleandoc(obj: Any) -> str: def _pydantic_recursion_protector( cls: "Type[Model]", *, - stack: tuple, + stack: Tuple, exclude: Tuple[str, ...] = (), include: Tuple[str, ...] = (), computed: Tuple[str, ...] 
= (), @@ -275,14 +275,14 @@ class ComputedFieldDescription: FieldDescriptionT = TypeVar('FieldDescriptionT', bound=FieldDescriptionBase) -class FieldMap(MutableMapping[str, Union[FieldDescriptionBase | ComputedFieldDescription]]): +class FieldMap(MutableMapping[str, Union[FieldDescriptionBase, ComputedFieldDescription]]): def __init__(self, meta: MyPydanticMeta, pk_field_description: Optional[FieldDescriptionBase] = None): - self._field_map: dict[str, Union[FieldDescriptionBase | ComputedFieldDescription]] = {} + self._field_map: Dict[str, Union[FieldDescriptionBase, ComputedFieldDescription]] = {} self.pk_raw_field = pk_field_description.name if pk_field_description is not None else "" if pk_field_description: self.pk_raw_field = pk_field_description.name self.field_map_update([pk_field_description], meta) - self.computed_fields: dict[str, ComputedFieldDescription] = {} + self.computed_fields: Dict[str, ComputedFieldDescription] = {} def __delitem__(self, __key): self._field_map.__delitem__(__key) @@ -302,12 +302,12 @@ def __setitem__(self, __key, __value): def sort_alphabetically(self) -> None: self._field_map = {k: self._field_map[k] for k in sorted(self._field_map)} - def sort_definition_order(self, cls: "Type[Model]", computed: tuple[str, ...]) -> None: + def sort_definition_order(self, cls: "Type[Model]", computed: Tuple[str, ...]) -> None: self._field_map = { k: self._field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in self._field_map } - def field_map_update(self, field_descriptions: list[FieldDescriptionT], meta: MyPydanticMeta) -> None: + def field_map_update(self, field_descriptions: List[FieldDescriptionT], meta: MyPydanticMeta) -> None: for field_description in field_descriptions: name = field_description.name # Include or exclude field @@ -320,7 +320,7 @@ def field_map_update(self, field_descriptions: list[FieldDescriptionT], meta: My self.pop(raw_field, None) self[name] = field_description - def computed_field_map_update(self, computed: tuple[str, ...], cls: "Type[Model]"): + def computed_field_map_update(self, computed: Tuple[str, ...], cls: "Type[Model]"): self._field_map.update( { k: ComputedFieldDescription( @@ -412,7 +412,7 @@ def __init__( _as_submodel: bool = False ) -> None: self._cls: "Type[Model]" = cls - self._stack: tuple[tuple["Type[Model]", str, int], ...] = tuple[tuple["Type[Model]", str, int], ...]() # ((Type[Model], field_name, max_recursion),) + self._stack: Tuple[Tuple["Type[Model]", str, int], ...] = tuple() # ((Type[Model], field_name, max_recursion),) self._is_default: bool = ( exclude is None and include is None @@ -470,7 +470,7 @@ def __init__( self._stack = _stack - def get_name(self) -> tuple[str, str]: + def get_name(self) -> Tuple[str, str]: # If arguments are specified (different from the defaults), we append a hash to the # class name, to make it unique # We don't check by stack, as cycles get explicitly renamed. 
@@ -601,7 +601,7 @@ def process_normal_field_description( field_description: FieldDescriptionBase, json_schema_extra: Dict[str, Any], fconfig: Dict[str, Any], - ) -> tuple[Optional[Any], bool]: + ) -> Tuple[Optional[Any], bool]: if isinstance(field_description, (BackwardFKRelationDescription, ManyToManyFieldInstanceDescription)): return self.process_many_field_relation(field_name, field_description), False elif isinstance( @@ -620,7 +620,7 @@ def process_normal_field_description( def process_single_field_relation( self, field_name: str, - field_description: ForeignKeyFieldInstanceDescription | OneToOneFieldInstanceDescription | BackwardOneToOneRelationDescription, + field_description: Union[ForeignKeyFieldInstanceDescription, OneToOneFieldInstanceDescription, BackwardOneToOneRelationDescription], json_schema_extra: Dict[str, Any], ) -> Optional[Type[PydanticModel]]: model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) @@ -636,7 +636,7 @@ def process_single_field_relation( def process_many_field_relation( self, field_name: str, - field_description: BackwardFKRelationDescription | ManyToManyFieldInstanceDescription, + field_description: Union[BackwardFKRelationDescription, ManyToManyFieldInstanceDescription], ) -> Optional[Type[List[Type[PydanticModel]]]]: model = self.get_submodel(field_description.python_type, field_name) if model: diff --git a/tortoise/contrib/pydantic/dataclasses.py b/tortoise/contrib/pydantic/dataclasses.py index b00c624f7..1ea0a0e91 100644 --- a/tortoise/contrib/pydantic/dataclasses.py +++ b/tortoise/contrib/pydantic/dataclasses.py @@ -1,6 +1,6 @@ import dataclasses import inspect -from typing import Type, Optional, Any, TYPE_CHECKING +from typing import Type, Optional, Any, TYPE_CHECKING, Dict, Tuple, List from tortoise.fields import Field from tortoise.fields.relational import RelationalField, ForeignKeyFieldInstance, ManyToManyFieldInstance, \ @@ -18,12 +18,12 @@ class FieldDescriptionBase: nullable: bool unique: bool indexed: bool - constraints: dict + constraints: Dict python_type: Optional[type] = None default: Optional[Any] = None description: Optional[str] = None docstring: Optional[str] = None - db_field_types: Optional[dict[str, str]] = None + db_field_types: Optional[Dict[str, str]] = None @dataclasses.dataclass @@ -78,14 +78,14 @@ class ModelDescription: pk_field: FieldDescriptionBase app: Optional[str] = None docstring: Optional[str] = None - unique_together: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) - indexes: tuple[tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) - data_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - fk_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - backward_fk_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - o2o_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - backward_o2o_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) - m2m_fields: list[FieldDescriptionBase] = dataclasses.field(default_factory=list) + unique_together: Tuple[Tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) + indexes: Tuple[Tuple[str, ...], ...] 
= dataclasses.field(default_factory=tuple) + data_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) + fk_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) + backward_fk_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) + o2o_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) + backward_o2o_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) + m2m_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) def describe_model_by_dataclass(cls: Type[MODEL]) -> ModelDescription: From fbb8d3c720061780769dc277cbcf9412e9da7da4 Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Mon, 28 Oct 2024 14:21:07 +0100 Subject: [PATCH 13/27] test computed fields, remove own PydanticMeta class --- tests/contrib/test_pydantic.py | 205 ++++++++++++++++++++++ tortoise/contrib/pydantic/creator.py | 244 ++++++++++++--------------- tortoise/models.py | 2 - 3 files changed, 310 insertions(+), 141 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 2a6199da6..171990123 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1481,6 +1481,211 @@ async def test_serialisation(self): ) +class TestPydanticComputed(test.TestCase): + async def asyncSetUp(self) -> None: + await super(TestPydanticComputed, self).asyncSetUp() + self.Employee_Pydantic = pydantic_model_creator(Employee) + self.employee = await Employee.create(name="Some Employee") + self.maxDiff = None + + async def test_computed_field(self): + employee_pyd = await self.Employee_Pydantic.from_tortoise_orm(await Employee.get(name="Some Employee")) + employee_serialised = employee_pyd.model_dump() + self.assertEqual(employee_serialised.get("name_length"), self.employee.name_length()) + + async def test_computed_field_schema(self): + self.assertEqual( + self.Employee_Pydantic.model_json_schema(mode="serialization"), + { + "$defs": { + "Employee_dkvdqq_leaf": { + "additionalProperties": False, + "properties": { + "id": { + "maximum": 2147483647, + "minimum": -2147483648, + "title": "Id", + "type": "integer" + }, + "name": { + "maxLength": 50, + "title": "Name", + "type": "string" + }, + "talks_to": { + "items": { + "$ref": "#/$defs/Employee_pb36jg_leaf" + }, + "title": "Talks To", + "type": "array" + }, + "manager_id": { + "anyOf": [ + { + "maximum": 2147483647, + "minimum": -2147483648, + "type": "integer" + }, + { + "type": "null" + } + ], + "nullable": True, + "title": "Manager Id" + }, + "team_members": { + "items": { + "$ref": "#/$defs/Employee_pb36jg_leaf" + }, + "title": "Team Members", + "type": "array" + }, + "name_length": { + "description": "", + "readOnly": True, + "title": "Name Length", + "type": "integer" + }, + "team_size": { + "description": "Computes team size.

Note that this function needs to be annotated with a return type so that pydantic can
generate a valid schema.

Note that the pydantic serializer can't call async methods, but the tortoise helpers
pre-fetch relational data, so that it is available before serialization. So we don't
need to await the relation. We do however have to protect against the case where no
prefetching was done, hence catching and handling the
``tortoise.exceptions.NoValuesFetched`` exception.", + "readOnly": True, + "title": "Team Size", + "type": "integer" + } + }, + "required": [ + "id", + "name", + "talks_to", + "manager_id", + "team_members", + "name_length", + "team_size" + ], + "title": "Employee", + "type": "object" + }, + "Employee_pb36jg_leaf": { + "additionalProperties": False, + "properties": { + "id": { + "maximum": 2147483647, + "minimum": -2147483648, + "title": "Id", + "type": "integer" + }, + "name": { + "maxLength": 50, + "title": "Name", + "type": "string" + }, + "manager_id": { + "anyOf": [ + { + "maximum": 2147483647, + "minimum": -2147483648, + "type": "integer" + }, + { + "type": "null" + } + ], + "nullable": True, + "title": "Manager Id" + }, + "name_length": { + "description": "", + "readOnly": True, + "title": "Name Length", + "type": "integer" + }, + "team_size": { + "description": "Computes team size.

Note that this function needs to be annotated with a return type so that pydantic can
generate a valid schema.

Note that the pydantic serializer can't call async methods, but the tortoise helpers
pre-fetch relational data, so that it is available before serialization. So we don't
need to await the relation. We do however have to protect against the case where no
prefetching was done, hence catching and handling the
``tortoise.exceptions.NoValuesFetched`` exception.", + "readOnly": True, + "title": "Team Size", + "type": "integer" + } + }, + "required": [ + "id", + "name", + "manager_id", + "name_length", + "team_size" + ], + "title": "Employee", + "type": "object" + } + }, + "additionalProperties": False, + "properties": { + "id": { + "maximum": 2147483647, + "minimum": -2147483648, + "title": "Id", + "type": "integer" + }, + "name": { + "maxLength": 50, + "title": "Name", + "type": "string" + }, + "talks_to": { + "items": { + "$ref": "#/$defs/Employee_dkvdqq_leaf" + }, + "title": "Talks To", + "type": "array" + }, + "manager_id": { + "anyOf": [ + { + "maximum": 2147483647, + "minimum": -2147483648, + "type": "integer" + }, + { + "type": "null" + } + ], + "nullable": True, + "title": "Manager Id" + }, + "team_members": { + "items": { + "$ref": "#/$defs/Employee_dkvdqq_leaf" + }, + "title": "Team Members", + "type": "array" + }, + "name_length": { + "description": "", + "readOnly": True, + "title": "Name Length", + "type": "integer" + }, + "team_size": { + "description": "Computes team size.

Note that this function needs to be annotated with a return type so that pydantic can
generate a valid schema.

Note that the pydantic serializer can't call async methods, but the tortoise helpers
pre-fetch relational data, so that it is available before serialization. So we don't
need to await the relation. We do however have to protect against the case where no
prefetching was done, hence catching and handling the
``tortoise.exceptions.NoValuesFetched`` exception.", + "readOnly": True, + "title": "Team Size", + "type": "integer" + } + }, + "required": [ + "id", + "name", + "talks_to", + "manager_id", + "team_members", + "name_length", + "team_size" + ], + "title": "Employee", + "type": "object" + } + ) + + class TestPydanticUpdate(test.TestCase): def setUp(self) -> None: self.UserCreate_Pydantic = pydantic_model_creator( diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 3e742fa97..dcd7d0901 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -1,7 +1,14 @@ import dataclasses import inspect +import sys from base64 import b32encode from typing import MutableMapping + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + from dataclasses import dataclass, field from hashlib import sha3_224 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Callable, Union, TypeVar @@ -23,7 +30,7 @@ @dataclass -class MyPydanticMeta: +class PydanticMetaData: #: If not empty, only fields this property contains will be in the pydantic model include: Tuple[str, ...] = () @@ -55,11 +62,11 @@ class MyPydanticMeta: model_config: Optional[ConfigDict] = None @classmethod - def from_pydantic_meta(cls, pydantic_meta: "PydanticMeta"): + def from_pydantic_meta(cls, old_pydantic_meta: Any): default_meta = cls() def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: - return getattr(pydantic_meta, attr, default) + return getattr(old_pydantic_meta, attr, default) include = tuple(get_param_from_pydantic_meta("include", default_meta.include)) exclude = tuple(get_param_from_pydantic_meta("exclude", default_meta.exclude)) computed = tuple(get_param_from_pydantic_meta("computed", default_meta.computed)) @@ -75,7 +82,7 @@ def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: get_param_from_pydantic_meta("sort_alphabetically", default_meta.sort_alphabetically) ) model_config = get_param_from_pydantic_meta("model_config", default_meta.model_config) - return MyPydanticMeta( + return PydanticMetaData( include=include, exclude=exclude, computed=computed, @@ -87,127 +94,78 @@ def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: model_config=model_config ) + def construct_pydantic_meta( + self, + meta_override: Type + ) -> Self: + def get_param_from_meta_override(attr: str) -> Any: + return getattr(meta_override, attr, getattr(self, attr)) + + default_include: Tuple[str, ...] = tuple(get_param_from_meta_override("include")) + default_exclude: Tuple[str, ...] = tuple(get_param_from_meta_override("exclude")) + default_computed: Tuple[str, ...] 
= tuple(get_param_from_meta_override("computed")) + default_config: Optional[ConfigDict] = self.model_config + + backward_relations: bool = bool(get_param_from_meta_override("backward_relations")) + + max_recursion: int = int(get_param_from_meta_override("max_recursion")) + exclude_raw_fields: bool = bool(get_param_from_meta_override("exclude_raw_fields")) + sort_alphabetically: bool = bool(get_param_from_meta_override("sort_alphabetically")) + allow_cycles: bool = bool(get_param_from_meta_override("allow_cycles")) + + return PydanticMetaData( + include=default_include, + exclude=default_exclude, + computed=default_computed, + model_config=default_config, + backward_relations=backward_relations, + max_recursion=max_recursion, + exclude_raw_fields=exclude_raw_fields, + sort_alphabetically=sort_alphabetically, + allow_cycles=allow_cycles + ) -def construct_pydantic_meta( - meta_default: MyPydanticMeta, - meta_override: Type -) -> MyPydanticMeta: - def get_param_from_meta_override(attr: str) -> Any: - return getattr(meta_override, attr, getattr(meta_default, attr)) - - default_include: Tuple[str, ...] = tuple(get_param_from_meta_override("include")) - default_exclude: Tuple[str, ...] = tuple(get_param_from_meta_override("exclude")) - default_computed: Tuple[str, ...] = tuple(get_param_from_meta_override("computed")) - default_config: Optional[ConfigDict] = meta_default.model_config - - backward_relations: bool = bool(get_param_from_meta_override("backward_relations")) - - max_recursion: int = int(get_param_from_meta_override("max_recursion")) - exclude_raw_fields: bool = bool(get_param_from_meta_override("exclude_raw_fields")) - sort_alphabetically: bool = bool(get_param_from_meta_override("sort_alphabetically")) - allow_cycles: bool = bool(get_param_from_meta_override("allow_cycles")) - - return MyPydanticMeta( - include=default_include, - exclude=default_exclude, - computed=default_computed, - model_config=default_config, - backward_relations=backward_relations, - max_recursion=max_recursion, - exclude_raw_fields=exclude_raw_fields, - sort_alphabetically=sort_alphabetically, - allow_cycles=allow_cycles - ) - - -def finalize_meta( - pydantic_meta: MyPydanticMeta, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] = (), - allow_cycles: Optional[bool] = None, - sort_alphabetically: Optional[bool] = None, - model_config: Optional[ConfigDict] = None, -) -> MyPydanticMeta: - _sort_fields: bool = ( - pydantic_meta.sort_alphabetically - if sort_alphabetically is None - else sort_alphabetically - ) - _allow_cycles: bool = ( - pydantic_meta.allow_cycles - if allow_cycles is None - else allow_cycles - ) - - include = tuple(include) + pydantic_meta.include - exclude = tuple(exclude) + pydantic_meta.exclude - computed = tuple(computed) + pydantic_meta.computed - - _model_config = ConfigDict() - if pydantic_meta.model_config: - _model_config.update(pydantic_meta.model_config) - if model_config: - _model_config.update(model_config) - - return MyPydanticMeta( - include=include, - exclude=exclude, - computed=computed, - backward_relations=pydantic_meta.backward_relations, - max_recursion=pydantic_meta.max_recursion, - exclude_raw_fields=pydantic_meta.exclude_raw_fields, - sort_alphabetically=_sort_fields, - allow_cycles=_allow_cycles, - model_config=_model_config - ) - - -class PydanticMeta: - """ - The ``PydanticMeta`` class is used to configure metadata for generating the pydantic Model. - - Usage: - - .. code-block:: python3 - - class Foo(Model): - ... 
- - class PydanticMeta: - exclude = ("foo", "baa") - computed = ("count_peanuts", ) - """ - - #: If not empty, only fields this property contains will be in the pydantic model - include: Tuple[str, ...] = () - - #: Fields listed in this property will be excluded from pydantic model - exclude: Tuple[str, ...] = ("Meta",) - - #: Computed fields can be listed here to use in pydantic model - computed: Tuple[str, ...] = () - - #: Use backward relations without annotations - not recommended, it can be huge data - #: without control - backward_relations: bool = True - - #: Maximum recursion level allowed - max_recursion: int = 3 - - #: Allow cycles in recursion - This can result in HUGE data - Be careful! - #: Please use this with ``exclude``/``include`` and sane ``max_recursion`` - allow_cycles: bool = False + def finalize_meta( + self, + exclude: Tuple[str, ...] = (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, + model_config: Optional[ConfigDict] = None, + ) -> Self: + _sort_fields: bool = ( + self.sort_alphabetically + if sort_alphabetically is None + else sort_alphabetically + ) + _allow_cycles: bool = ( + self.allow_cycles + if allow_cycles is None + else allow_cycles + ) - #: If we should exclude raw fields (the ones have _id suffixes) of relations - exclude_raw_fields: bool = True + include = tuple(include) + self.include + exclude = tuple(exclude) + self.exclude + computed = tuple(computed) + self.computed - #: Sort fields alphabetically. - #: If not set (or ``False``) then leave fields in declaration order - sort_alphabetically: bool = False + _model_config = ConfigDict() + if self.model_config: + _model_config.update(self.model_config) + if model_config: + _model_config.update(model_config) - #: Allows user to specify custom config for generated model - model_config: Optional[ConfigDict] = None + return PydanticMetaData( + include=include, + exclude=exclude, + computed=computed, + backward_relations=self.backward_relations, + max_recursion=self.max_recursion, + exclude_raw_fields=self.exclude_raw_fields, + sort_alphabetically=_sort_fields, + allow_cycles=_allow_cycles, + model_config=_model_config + ) def _br_it(val: str) -> str: @@ -276,7 +234,7 @@ class ComputedFieldDescription: class FieldMap(MutableMapping[str, Union[FieldDescriptionBase, ComputedFieldDescription]]): - def __init__(self, meta: MyPydanticMeta, pk_field_description: Optional[FieldDescriptionBase] = None): + def __init__(self, meta: PydanticMetaData, pk_field_description: Optional[FieldDescriptionBase] = None): self._field_map: Dict[str, Union[FieldDescriptionBase, ComputedFieldDescription]] = {} self.pk_raw_field = pk_field_description.name if pk_field_description is not None else "" if pk_field_description: @@ -290,7 +248,7 @@ def __delitem__(self, __key): def __getitem__(self, __key): return self._field_map.__getitem__(__key) - def __len__(self): + def __len__(self): # pragma: no-coverage return self._field_map.__len__() def __iter__(self): @@ -307,7 +265,7 @@ def sort_definition_order(self, cls: "Type[Model]", computed: Tuple[str, ...]) - k: self._field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in self._field_map } - def field_map_update(self, field_descriptions: List[FieldDescriptionT], meta: MyPydanticMeta) -> None: + def field_map_update(self, field_descriptions: List[FieldDescriptionT], meta: PydanticMetaData) -> None: for field_description in field_descriptions: name = 
field_description.name # Include or exclude field @@ -432,15 +390,14 @@ def __init__( if optional is None: optional = () - old_meta = getattr(cls, "PydanticMeta", None) - if old_meta: - meta_from_class = MyPydanticMeta.from_pydantic_meta(old_meta) - else: - meta_from_class = cls.my_pydantic_meta + if meta := getattr(cls, "PydanticMeta", None): + meta_from_class = PydanticMetaData.from_pydantic_meta(meta) + else: # default + meta_from_class = PydanticMetaData() if meta_override: - meta_from_class = construct_pydantic_meta(meta_from_class, meta_override) - self.meta = finalize_meta( - meta_from_class, exclude, include, computed, allow_cycles, sort_alphabetically, model_config + meta_from_class = meta_from_class.construct_pydantic_meta(meta_override) + self.meta = meta_from_class.finalize_meta( + exclude, include, computed, allow_cycles, sort_alphabetically, model_config ) self._exclude_read_only: bool = exclude_readonly @@ -565,7 +522,9 @@ def process_field( is_to_one_relation: bool = False comment = "" if isinstance(field_description, FieldDescriptionBase): - field_property, is_to_one_relation = self.process_normal_field_description(field_name, field_description, json_schema_extra, fconfig) + field_property, is_to_one_relation = self.process_normal_field_description( + field_name, field_description, json_schema_extra, fconfig + ) elif isinstance(field_description, ComputedFieldDescription): field_property, is_to_one_relation = self.process_computed_field_description(field_description), False comment = _cleandoc(field_description.function) @@ -581,7 +540,10 @@ def process_field( fconfig["description"] = description ftype = self._properties[field_name] if not isinstance(ftype, PydanticDescriptorProxy) and isinstance(field_description, FieldDescriptionBase): - if field_name in self._optional or (field_description.default is not None and not callable(field_description.default)): + if ( + field_name in self._optional + or (field_description.default is not None and not callable(field_description.default)) + ): self._properties[field_name] = (ftype, Field(default=field_description.default, **fconfig)) else: if ( @@ -620,7 +582,11 @@ def process_normal_field_description( def process_single_field_relation( self, field_name: str, - field_description: Union[ForeignKeyFieldInstanceDescription, OneToOneFieldInstanceDescription, BackwardOneToOneRelationDescription], + field_description: Union[ + ForeignKeyFieldInstanceDescription, + OneToOneFieldInstanceDescription, + BackwardOneToOneRelationDescription + ], json_schema_extra: Dict[str, Any], ) -> Optional[Type[PydanticModel]]: model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) @@ -717,10 +683,10 @@ def pydantic_model_creator( cls: "Type[Model]", *, name=None, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] = (), - optional: Tuple[str, ...] 
= (), + exclude: Optional[Tuple[str, ...]] = None, + include: Optional[Tuple[str, ...]] = None, + computed: Optional[Tuple[str, ...]] = None, + optional: Optional[Tuple[str, ...]] = None, allow_cycles: Optional[bool] = None, sort_alphabetically: Optional[bool] = None, _stack: tuple = (), diff --git a/tortoise/models.py b/tortoise/models.py index f0c32fd00..4fd513d56 100644 --- a/tortoise/models.py +++ b/tortoise/models.py @@ -27,7 +27,6 @@ from tortoise import connections from tortoise.backends.base.client import BaseDBAsyncClient -from tortoise.contrib.pydantic.creator import MyPydanticMeta from tortoise.exceptions import ( ConfigurationError, DoesNotExist, @@ -648,7 +647,6 @@ class Model(metaclass=ModelMeta): """ Base class for all Tortoise ORM Models. """ - my_pydantic_meta: MyPydanticMeta = MyPydanticMeta() # I don' like this here, but it makes auto completion and static analysis much happier _meta = MetaInfo(None) # type: ignore From cda58e3632c6fd7f8cd34981431821494acdeca1 Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 29 Oct 2024 12:58:31 +0100 Subject: [PATCH 14/27] remove print statements from tests --- tests/contrib/test_pydantic.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 171990123..3beb81a3b 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -406,7 +406,6 @@ def test_eventlist_schema(self): ) def test_address_schema(self): - # print(json.dumps(self.Address_Pydantic.model_json_schema(), indent=2)) self.assertEqual( self.Address_Pydantic.model_json_schema(), { @@ -903,7 +902,6 @@ def test_team_schema(self): async def test_eventlist(self): eventlp = await self.Event_Pydantic_List.from_queryset(Event.all()) - # print(eventlp.json(indent=4)) eventldict = eventlp.model_dump() # Remove timestamps @@ -962,7 +960,6 @@ async def test_eventlist(self): async def test_event(self): eventp = await self.Event_Pydantic.from_tortoise_orm(await Event.get(name="Test")) - # print(eventp.json(indent=4)) eventdict = eventp.model_dump() # Remove timestamps @@ -994,7 +991,6 @@ async def test_event(self): async def test_address(self): addressp = await self.Address_Pydantic.from_tortoise_orm(await Address.get(street="Ocean")) - # print(addressp.json(indent=4)) addressdict = addressp.model_dump() # Remove timestamps @@ -1030,7 +1026,6 @@ async def test_tournament(self): tournamentp = await self.Tournament_Pydantic.from_tortoise_orm( await Tournament.all().first() ) - # print(tournamentp.json(indent=4)) tournamentdict = tournamentp.model_dump() # Remove timestamps @@ -1082,7 +1077,6 @@ async def test_tournament(self): async def test_team(self): teamp = await self.Team_Pydantic.from_tortoise_orm(await Team.get(id=self.team1.id)) - # print(teamp.json(indent=4)) teamdict = teamp.model_dump() # Remove timestamps @@ -1400,7 +1394,6 @@ def test_schema(self): async def test_serialisation(self): empp = await self.Employee_Pydantic.from_tortoise_orm(await Employee.get(name="Root")) - # print(empp.model_dump_json(indent=4)) empdict = empp.model_dump() self.assertEqual( From 11754d604b867eb8f06dc0a07626944a83ad53af Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 29 Oct 2024 13:03:44 +0100 Subject: [PATCH 15/27] re-add pydantic_model_creator docstring it got lost during refactoring --- tortoise/contrib/pydantic/creator.py | 31 ++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py 
index dcd7d0901..0cddf0762 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -696,6 +696,37 @@ def pydantic_model_creator( validators: Optional[Dict[str, Any]] = None, module: str = __name__, ) -> Type[PydanticModel]: + """ + Function to build `Pydantic Model `__ off Tortoise Model. + + :param cls: The Tortoise Model + :param name: Specify a custom name explicitly, instead of a generated name. + :param exclude: Extra fields to exclude from the provided model. + :param include: Extra fields to include from the provided model. + :param computed: Extra computed fields to include from the provided model. + :param optional: Extra optional fields for the provided model. + :param allow_cycles: Do we allow any cycles in the generated model? + This is only useful for recursive/self-referential models. + + A value of ``False`` (the default) will prevent any and all backtracking. + :param sort_alphabetically: Sort the parameters alphabetically instead of Field-definition order. + + The default order would be: + + * Field definition order + + * order of reverse relations (as discovered) + + * order of computed functions (as provided). + :param exclude_readonly: Build a subset model that excludes any readonly fields + :param meta_override: A PydanticMeta class to override model's values. + :param model_config: A custom config to use as pydantic config. + :param validators: A dictionary of methods that validate fields. + :param module: The name of the module that the model belongs to. + + Note: Created pydantic model uses config_class parameter and PydanticMeta's + config_class as its Config class's bases(Only if provided!), but it + ignores ``fields`` config. pydantic_model_creator will generate fields by + include/exclude/computed parameters automatically. 
+ """ pmc = PydanticModelCreator( cls=cls, name=name, From 9e10c01706989078f28a4054f405eff4bed72f14 Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 29 Oct 2024 13:08:50 +0100 Subject: [PATCH 16/27] remove _stack from pydantic_model_creator this is now handled by PydanticModelCreator --- tortoise/contrib/pydantic/creator.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 0cddf0762..1d2578530 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -689,7 +689,6 @@ def pydantic_model_creator( optional: Optional[Tuple[str, ...]] = None, allow_cycles: Optional[bool] = None, sort_alphabetically: Optional[bool] = None, - _stack: tuple = (), exclude_readonly: bool = False, meta_override: Optional[Type] = None, model_config: Optional[ConfigDict] = None, From 579e6c529d09d7b72e12be14d98a3ca62ddaf31c Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 29 Oct 2024 13:13:43 +0100 Subject: [PATCH 17/27] make some methods private --- tortoise/contrib/pydantic/creator.py | 46 ++++++++++++++-------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 1d2578530..99087492e 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -417,8 +417,8 @@ def __init__( self._model_description: ModelDescription = describe_model_by_dataclass(cls) - self._field_map: FieldMap = self.initialize_field_map() - self.construct_field_map() + self._field_map: FieldMap = self._initialize_field_map() + self._construct_field_map() self._optional = optional @@ -445,7 +445,7 @@ def get_name(self) -> Tuple[str, str]: ) return self._fqname + postfix, self._cls.__name__ - def initialize_pconfig(self) -> ConfigDict: + def _initialize_pconfig(self) -> ConfigDict: pconfig: ConfigDict = PydanticModel.model_config.copy() if self.meta.model_config: pconfig.update(self.meta.model_config) @@ -455,12 +455,12 @@ def initialize_pconfig(self) -> ConfigDict: pconfig["extra"] = 'forbid' return pconfig - def initialize_field_map(self) -> FieldMap: + def _initialize_field_map(self) -> FieldMap: return FieldMap(self.meta) \ if self._exclude_read_only \ else FieldMap(self.meta, pk_field_description=self._model_description.pk_field) - def construct_field_map(self) -> None: + def _construct_field_map(self) -> None: self._field_map.field_map_update(field_descriptions=self._model_description.data_fields, meta=self.meta) if not self._exclude_read_only: for field_descriptions in ( @@ -483,7 +483,7 @@ def construct_field_map(self) -> None: def create_pydantic_model(self) -> Type[PydanticModel]: for field_name, field_description in self._field_map.items(): - self.process_field(field_name, field_description) + self._process_field(field_name, field_description) self._name, self._title = self.get_name() if self._as_submodel and self._stack: @@ -492,7 +492,7 @@ def create_pydantic_model(self) -> Type[PydanticModel]: if self._name in _MODEL_INDEX: return _MODEL_INDEX[self._name] - self._pconfig = self.initialize_pconfig() + self._pconfig = self._initialize_pconfig() self._properties["model_config"] = self._pconfig model = create_model( self._name, @@ -509,7 +509,7 @@ def create_pydantic_model(self) -> Type[PydanticModel]: _MODEL_INDEX[self._name] = model return model - def process_field( + def _process_field( self, field_name: str, field_description: Union[FieldDescriptionBase, 
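The restored docstring above documents the keyword arguments of ``pydantic_model_creator``; a short usage sketch of those arguments follows (the ``Event`` model, its ``secret_token``/``duration`` members and the ``"app.models"`` module path are illustrative assumptions, not taken from this patch):

.. code-block:: python3

    from tortoise import Tortoise
    from tortoise.contrib.pydantic import pydantic_model_creator

    # Resolve relations first so relational fields appear in the generated model.
    Tortoise.init_models(["app.models"], "models")

    # Serialisation model: drop one column, pull in a computed method of the model.
    Event_Pydantic = pydantic_model_creator(
        Event,                      # hypothetical Tortoise model
        name="EventOut",
        exclude=("secret_token",),  # hypothetical column to hide
        computed=("duration",),     # hypothetical computed method on Event
    )

    # Input model: readonly fields (e.g. a generated primary key) are left out.
    EventIn_Pydantic = pydantic_model_creator(Event, name="EventIn", exclude_readonly=True)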
ComputedFieldDescription], @@ -522,11 +522,11 @@ def process_field( is_to_one_relation: bool = False comment = "" if isinstance(field_description, FieldDescriptionBase): - field_property, is_to_one_relation = self.process_normal_field_description( + field_property, is_to_one_relation = self._process_normal_field_description( field_name, field_description, json_schema_extra, fconfig ) elif isinstance(field_description, ComputedFieldDescription): - field_property, is_to_one_relation = self.process_computed_field_description(field_description), False + field_property, is_to_one_relation = self._process_computed_field_description(field_description), False comment = _cleandoc(field_description.function) if field_property: @@ -557,7 +557,7 @@ def process_field( fconfig["default_factory"] = lambda: None self._properties[field_name] = (ftype, Field(**fconfig)) - def process_normal_field_description( + def _process_normal_field_description( self, field_name: str, field_description: FieldDescriptionBase, @@ -565,7 +565,7 @@ def process_normal_field_description( fconfig: Dict[str, Any], ) -> Tuple[Optional[Any], bool]: if isinstance(field_description, (BackwardFKRelationDescription, ManyToManyFieldInstanceDescription)): - return self.process_many_field_relation(field_name, field_description), False + return self._process_many_field_relation(field_name, field_description), False elif isinstance( field_description, ( @@ -574,12 +574,12 @@ def process_normal_field_description( BackwardOneToOneRelationDescription ) ): - return self.process_single_field_relation(field_name, field_description, json_schema_extra), True + return self._process_single_field_relation(field_name, field_description, json_schema_extra), True elif field_description.field_type is JSONField: - return self.process_json_field_description(), False - return self.process_data_field_description(field_name, field_description, json_schema_extra, fconfig), False + return self._process_json_field_description(), False + return self._process_data_field_description(field_name, field_description, json_schema_extra, fconfig), False - def process_single_field_relation( + def _process_single_field_relation( self, field_name: str, field_description: Union[ @@ -589,7 +589,7 @@ def process_single_field_relation( ], json_schema_extra: Dict[str, Any], ) -> Optional[Type[PydanticModel]]: - model: Optional[Type[PydanticModel]] = self.get_submodel(field_description.python_type, field_name) + model: Optional[Type[PydanticModel]] = self._get_submodel(field_description.python_type, field_name) if model: if field_description.nullable: json_schema_extra["nullable"] = True @@ -599,20 +599,20 @@ def process_single_field_relation( return model return None - def process_many_field_relation( + def _process_many_field_relation( self, field_name: str, field_description: Union[BackwardFKRelationDescription, ManyToManyFieldInstanceDescription], ) -> Optional[Type[List[Type[PydanticModel]]]]: - model = self.get_submodel(field_description.python_type, field_name) + model = self._get_submodel(field_description.python_type, field_name) if model: return List[model] # type: ignore return None - def process_json_field_description(self) -> Any: + def _process_json_field_description(self) -> Any: return Any - def process_data_field_description( + def _process_data_field_description( self, field_name: str, field_description: FieldDescriptionBase, @@ -633,7 +633,7 @@ def process_data_field_description( return annotation or ptype return None - def process_computed_field_description( 
+ def _process_computed_field_description( self, field_description: ComputedFieldDescription, ) -> Optional[Any]: @@ -646,7 +646,7 @@ def process_computed_field_description( return ret return None - def get_submodel(self, _model: Optional["Type[Model]"], field_name: str) -> Optional[Type[PydanticModel]]: + def _get_submodel(self, _model: Optional["Type[Model]"], field_name: str) -> Optional[Type[PydanticModel]]: """Get Pydantic model for the submodel""" if _model: From b6d9737b3904dbe0a7456d355be0c481725c3c1a Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 29 Oct 2024 15:48:32 +0100 Subject: [PATCH 18/27] slim down dataclasses to only include necessary information --- tortoise/contrib/pydantic/dataclasses.py | 76 +----------------------- 1 file changed, 2 insertions(+), 74 deletions(-) diff --git a/tortoise/contrib/pydantic/dataclasses.py b/tortoise/contrib/pydantic/dataclasses.py index 1ea0a0e91..dbd91b0d3 100644 --- a/tortoise/contrib/pydantic/dataclasses.py +++ b/tortoise/contrib/pydantic/dataclasses.py @@ -14,33 +14,27 @@ class FieldDescriptionBase: name: str field_type: Type[Field] - generated: bool nullable: bool - unique: bool - indexed: bool constraints: Dict python_type: Optional[type] = None default: Optional[Any] = None description: Optional[str] = None docstring: Optional[str] = None - db_field_types: Optional[Dict[str, str]] = None @dataclasses.dataclass class FieldDescription(FieldDescriptionBase): - db_column: str = "" + ... @dataclasses.dataclass class RelationalFieldDescription(FieldDescriptionBase): - db_constraint: bool = False python_type: Optional[Type["Model"]] = None @dataclasses.dataclass class ForeignKeyFieldInstanceDescription(RelationalFieldDescription): raw_field: Optional[str] = "" - on_delete: str = "" @dataclasses.dataclass @@ -60,26 +54,12 @@ class BackwardOneToOneRelationDescription(ForeignKeyFieldInstanceDescription): @dataclasses.dataclass class ManyToManyFieldInstanceDescription(RelationalFieldDescription): - model_name: str = "" - related_name: str = "" - forward_key: str = "" - backward_key: str = "" - through: str = "" - on_delete: str = "" - _generated: bool = False + ... @dataclasses.dataclass class ModelDescription: - name: str - table: str - abstract: bool - description: Optional[str] pk_field: FieldDescriptionBase - app: Optional[str] = None - docstring: Optional[str] = None - unique_together: Tuple[Tuple[str, ...], ...] = dataclasses.field(default_factory=tuple) - indexes: Tuple[Tuple[str, ...], ...] 
= dataclasses.field(default_factory=tuple) data_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) fk_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) backward_fk_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) @@ -90,14 +70,6 @@ class ModelDescription: def describe_model_by_dataclass(cls: Type[MODEL]) -> ModelDescription: return ModelDescription( - name=cls._meta.full_name, - app=cls._meta.app, - table=cls._meta.db_table, - abstract=cls._meta.abstract, - description=cls._meta.table_description or None, - docstring=inspect.cleandoc(cls.__doc__ or "") or None, - unique_together=cls._meta.unique_together or (), - indexes=cls._meta.indexes or (), pk_field=describe_field_by_dataclass(cls._meta.fields_map[cls._meta.pk_attr]), data_fields=[ describe_field_by_dataclass(field) @@ -143,35 +115,23 @@ def describe_field_by_dataclass(field: Field) -> FieldDescriptionBase: name=field.model_field_name, field_type=field.__class__, python_type=field_type, - generated=field.generated, nullable=field.null, - unique=field.unique, - indexed=field.index or field.unique, default=field.default, description=field.description, docstring=field.docstring, constraints=field.constraints, - db_field_types=field.get_db_field_types() if field.has_db_field else None, - db_constraint=field.db_constraint, raw_field=field.source_field, - on_delete=str(field.on_delete), ) return ForeignKeyFieldInstanceDescription( name=field.model_field_name, field_type=field.__class__, python_type=field_type, - generated=field.generated, nullable=field.null, - unique=field.unique, - indexed=field.index or field.unique, default=field.default, description=field.description, docstring=field.docstring, constraints=field.constraints, - db_field_types=field.get_db_field_types() if field.has_db_field else None, - db_constraint=field.db_constraint, raw_field=field.source_field, - on_delete=str(field.on_delete), ) if isinstance(field, BackwardFKRelation): # BackwardFKRelation -> RelationalField @@ -181,32 +141,22 @@ def describe_field_by_dataclass(field: Field) -> FieldDescriptionBase: name=field.model_field_name, field_type=field.__class__, python_type=field_type, - generated=field.generated, nullable=field.null, - unique=field.unique, - indexed=field.index or field.unique, default=field.default, description=field.description, docstring=field.docstring, constraints=field.constraints, - db_field_types=field.get_db_field_types() if field.has_db_field else None, - db_constraint=field.db_constraint, raw_field=field.source_field, ) return BackwardFKRelationDescription( name=field.model_field_name, field_type=field.__class__, python_type=field.related_model, - generated=field.generated, nullable=field.null, - unique=field.unique, - indexed=field.index or field.unique, default=field.default, description=field.description, docstring=field.docstring, constraints=field.constraints, - db_field_types=field.get_db_field_types() if field.has_db_field else None, - db_constraint=field.db_constraint ) if isinstance(field, ManyToManyFieldInstance): # ManyToManyFieldInstance -> RelationalField @@ -214,51 +164,29 @@ def describe_field_by_dataclass(field: Field) -> FieldDescriptionBase: name=field.model_field_name, field_type=field.__class__, python_type=field.related_model, - generated=field.generated, nullable=field.null, - unique=field.unique, - indexed=field.index or field.unique, default=field.default, description=field.description, docstring=field.docstring, 
constraints=field.constraints, - db_field_types=field.get_db_field_types() if field.has_db_field else None, - db_constraint=field.db_constraint, - model_name=field.model_name, - related_name=field.related_name, - forward_key=field.forward_key, - backward_key=field.backward_key, - through=field.through, - on_delete=str(field.on_delete), - _generated=field._generated, ) return RelationalFieldDescription( name=field.model_field_name, field_type=field.__class__, python_type=field.related_model, - generated=field.generated, nullable=field.null, - unique=field.unique, - indexed=field.index or field.unique, default=field.default, description=field.description, docstring=field.docstring, constraints=field.constraints, - db_field_types=field.get_db_field_types() if field.has_db_field else None, - db_constraint=field.db_constraint ) return FieldDescription( name=field.model_field_name, field_type=field.__class__, - db_column=field.source_field or field.model_field_name, python_type=field.field_type, - generated=field.generated, nullable=field.null, - unique=field.unique, - indexed=field.index or field.unique, default=field.default, description=field.description, docstring=field.docstring, constraints=field.constraints, - db_field_types=field.get_db_field_types() if field.has_db_field else None ) From 3e78fd16bab6c91a325ee57867d56d401ded07f3 Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Tue, 29 Oct 2024 16:04:42 +0100 Subject: [PATCH 19/27] remove unused imports, satisfy mypy --- tortoise/contrib/pydantic/creator.py | 10 ++++++---- tortoise/contrib/pydantic/dataclasses.py | 3 +-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 99087492e..38c8a46c9 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -82,7 +82,7 @@ def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: get_param_from_pydantic_meta("sort_alphabetically", default_meta.sort_alphabetically) ) model_config = get_param_from_pydantic_meta("model_config", default_meta.model_config) - return PydanticMetaData( + pmd = cls( include=include, exclude=exclude, computed=computed, @@ -93,11 +93,12 @@ def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: sort_alphabetically=sort_alphabetically, model_config=model_config ) + return pmd def construct_pydantic_meta( self, meta_override: Type - ) -> Self: + ) -> "PydanticMetaData": def get_param_from_meta_override(attr: str) -> Any: return getattr(meta_override, attr, getattr(self, attr)) @@ -113,7 +114,7 @@ def get_param_from_meta_override(attr: str) -> Any: sort_alphabetically: bool = bool(get_param_from_meta_override("sort_alphabetically")) allow_cycles: bool = bool(get_param_from_meta_override("allow_cycles")) - return PydanticMetaData( + pmd = PydanticMetaData( include=default_include, exclude=default_exclude, computed=default_computed, @@ -124,6 +125,7 @@ def get_param_from_meta_override(attr: str) -> Any: sort_alphabetically=sort_alphabetically, allow_cycles=allow_cycles ) + return pmd def finalize_meta( self, @@ -133,7 +135,7 @@ def finalize_meta( allow_cycles: Optional[bool] = None, sort_alphabetically: Optional[bool] = None, model_config: Optional[ConfigDict] = None, - ) -> Self: + ) -> "PydanticMetaData": _sort_fields: bool = ( self.sort_alphabetically if sort_alphabetically is None diff --git a/tortoise/contrib/pydantic/dataclasses.py b/tortoise/contrib/pydantic/dataclasses.py index dbd91b0d3..1b9511393 100644 --- 
a/tortoise/contrib/pydantic/dataclasses.py +++ b/tortoise/contrib/pydantic/dataclasses.py @@ -1,6 +1,5 @@ import dataclasses -import inspect -from typing import Type, Optional, Any, TYPE_CHECKING, Dict, Tuple, List +from typing import Type, Optional, Any, TYPE_CHECKING, Dict, List from tortoise.fields import Field from tortoise.fields.relational import RelationalField, ForeignKeyFieldInstance, ManyToManyFieldInstance, \ From 8d46894b326ad48d58dcb3a68f8569eadad4873a Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Wed, 30 Oct 2024 09:38:01 +0100 Subject: [PATCH 20/27] add type annotation for ``from_pydantic_meta`` --- tortoise/contrib/pydantic/creator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 38c8a46c9..cbaa9b3cd 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -62,7 +62,7 @@ class PydanticMetaData: model_config: Optional[ConfigDict] = None @classmethod - def from_pydantic_meta(cls, old_pydantic_meta: Any): + def from_pydantic_meta(cls, old_pydantic_meta: Any) -> Self: default_meta = cls() def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: From 41a7f7bc37fb66d3163ccd8ac7c9ee3b0eae515c Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Mon, 4 Nov 2024 14:10:29 +0100 Subject: [PATCH 21/27] better indexing of pydantic models --- tests/contrib/test_pydantic.py | 136 +++++----- tests/test_early_init.py | 4 +- tortoise/contrib/pydantic/creator.py | 212 +++------------ tortoise/contrib/pydantic/dataclasses.py | 325 +++++++++++++++-------- 4 files changed, 330 insertions(+), 347 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 3beb81a3b..a732d1654 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -67,7 +67,7 @@ def test_event_schema(self): self.Event_Pydantic.model_json_schema(), { "$defs": { - "Address_diegkp_leaf": { + "Address_4smkxs_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -83,7 +83,7 @@ def test_event_schema(self): "title": "Address", "type": "object", }, - "Reporter_flpkb7_leaf": { + "Reporter_4tvqui_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -99,7 +99,7 @@ def test_event_schema(self): "title": "Reporter", "type": "object", }, - "Team_uqjfiz_leaf": { + "Team_bnxjyv_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -127,7 +127,7 @@ def test_event_schema(self): "title": "Team", "type": "object", }, - "Tournament_lloz5q_leaf": { + "Tournament_djsht2_leaf": { "additionalProperties": False, "properties": { "id": { @@ -165,13 +165,13 @@ def test_event_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/Tournament_lloz5q_leaf", + "$ref": "#/$defs/Tournament_djsht2_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_flpkb7_leaf" + "$ref": "#/$defs/Reporter_4tvqui_leaf" }, {"type": "null"}, ], @@ -180,7 +180,7 @@ def test_event_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/Team_uqjfiz_leaf" + "$ref": "#/$defs/Team_bnxjyv_leaf" }, "title": "Participants", "type": "array", @@ -203,7 +203,7 @@ def test_event_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/Address_diegkp_leaf" + "$ref": 
"#/$defs/Address_4smkxs_leaf" }, {"type": "null"}, ], @@ -232,7 +232,7 @@ def test_eventlist_schema(self): self.Event_Pydantic_List.model_json_schema(), { "$defs": { - "Event_vh77nq": { + "Event_c5pgat": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -244,13 +244,13 @@ def test_eventlist_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/Tournament_lloz5q_leaf", + "$ref": "#/$defs/Tournament_djsht2_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_flpkb7_leaf" + "$ref": "#/$defs/Reporter_4tvqui_leaf" }, {"type": "null"}, ], @@ -259,7 +259,7 @@ def test_eventlist_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/Team_uqjfiz_leaf" + "$ref": "#/$defs/Team_bnxjyv_leaf" }, "title": "Participants", "type": "array", @@ -289,7 +289,7 @@ def test_eventlist_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/Address_diegkp_leaf" + "$ref": "#/$defs/Address_4smkxs_leaf" }, {"type": "null"}, ], @@ -311,7 +311,7 @@ def test_eventlist_schema(self): "title": "Event", "type": "object", }, - "Address_diegkp_leaf": { + "Address_4smkxs_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -327,7 +327,7 @@ def test_eventlist_schema(self): "title": "Address", "type": "object", }, - "Reporter_flpkb7_leaf": { + "Reporter_4tvqui_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -343,7 +343,7 @@ def test_eventlist_schema(self): "title": "Reporter", "type": "object", }, - "Team_uqjfiz_leaf": { + "Team_bnxjyv_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -371,7 +371,7 @@ def test_eventlist_schema(self): "title": "Team", "type": "object", }, - "Tournament_lloz5q_leaf": { + "Tournament_djsht2_leaf": { "additionalProperties": False, "properties": { "id": { @@ -399,7 +399,7 @@ def test_eventlist_schema(self): }, }, "description": "Events on the calendar", - "items": {"$ref": "#/$defs/Event_vh77nq"}, + "items": {"$ref": "#/$defs/Event_c5pgat"}, "title": "Event_list", "type": "array", }, @@ -410,7 +410,7 @@ def test_address_schema(self): self.Address_Pydantic.model_json_schema(), { "$defs": { - "Event_az5m74_leaf": { + "Event_jz7ivr_leaf": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -422,13 +422,13 @@ def test_address_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/Tournament_lloz5q_leaf", + "$ref": "#/$defs/Tournament_djsht2_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_flpkb7_leaf" + "$ref": "#/$defs/Reporter_4tvqui_leaf" }, {"type": "null"}, ], @@ -437,7 +437,7 @@ def test_address_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/Team_uqjfiz_leaf" + "$ref": "#/$defs/Team_bnxjyv_leaf" }, "title": "Participants", "type": "array", @@ -478,7 +478,7 @@ def test_address_schema(self): "title": "Event", "type": "object", }, - "Reporter_flpkb7_leaf": { + "Reporter_4tvqui_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -494,7 +494,7 @@ def test_address_schema(self): "title": "Reporter", "type": "object", }, - "Team_uqjfiz_leaf": { + "Team_bnxjyv_leaf": { "additionalProperties": False, 
"description": "Team that is a playing", "properties": { @@ -522,7 +522,7 @@ def test_address_schema(self): "title": "Team", "type": "object", }, - "Tournament_lloz5q_leaf": { + "Tournament_djsht2_leaf": { "additionalProperties": False, "properties": { "id": { @@ -553,7 +553,7 @@ def test_address_schema(self): "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, "street": {"maxLength": 128, "title": "Street", "type": "string"}, - "event": {"$ref": "#/$defs/Event_az5m74_leaf"}, + "event": {"$ref": "#/$defs/Event_jz7ivr_leaf"}, "event_id": { "maximum": 9223372036854775807, "minimum": -9223372036854775808, @@ -572,7 +572,7 @@ def test_tournament_schema(self): self.Tournament_Pydantic.model_json_schema(), { "$defs": { - "Event_atwjj7_leaf": { + "Event_yu5egy_leaf": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -586,7 +586,7 @@ def test_tournament_schema(self): "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_flpkb7_leaf" + "$ref": "#/$defs/Reporter_4tvqui_leaf" }, {"type": "null"}, ], @@ -595,7 +595,7 @@ def test_tournament_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/Team_uqjfiz_leaf" + "$ref": "#/$defs/Team_bnxjyv_leaf" }, "title": "Participants", "type": "array", @@ -625,7 +625,7 @@ def test_tournament_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/Address_diegkp_leaf" + "$ref": "#/$defs/Address_4smkxs_leaf" }, {"type": "null"}, ], @@ -646,7 +646,7 @@ def test_tournament_schema(self): "title": "Event", "type": "object", }, - "Address_diegkp_leaf": { + "Address_4smkxs_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -662,7 +662,7 @@ def test_tournament_schema(self): "title": "Address", "type": "object", }, - "Reporter_flpkb7_leaf": { + "Reporter_4tvqui_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -678,7 +678,7 @@ def test_tournament_schema(self): "title": "Reporter", "type": "object", }, - "Team_uqjfiz_leaf": { + "Team_bnxjyv_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -724,7 +724,7 @@ def test_tournament_schema(self): }, "events": { "description": "What tournaments is a happenin'", - "items": {"$ref": "#/$defs/Event_atwjj7_leaf"}, + "items": {"$ref": "#/$defs/Event_yu5egy_leaf"}, "title": "Events", "type": "array", }, @@ -740,7 +740,7 @@ def test_team_schema(self): self.Team_Pydantic.model_json_schema(), { "$defs": { - "Event_37gjqu_leaf": { + "Event_7fv6fv_leaf": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -752,13 +752,13 @@ def test_team_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/Tournament_lloz5q_leaf", + "$ref": "#/$defs/Tournament_djsht2_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_flpkb7_leaf" + "$ref": "#/$defs/Reporter_4tvqui_leaf" }, {"type": "null"}, ], @@ -790,7 +790,7 @@ def test_team_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/Address_diegkp_leaf" + "$ref": "#/$defs/Address_4smkxs_leaf" }, {"type": "null"}, ], @@ -811,7 +811,7 @@ def test_team_schema(self): "title": "Event", "type": "object", }, - "Address_diegkp_leaf": { + "Address_4smkxs_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -827,7 
+827,7 @@ def test_team_schema(self): "title": "Address", "type": "object", }, - "Reporter_flpkb7_leaf": { + "Reporter_4tvqui_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -843,7 +843,7 @@ def test_team_schema(self): "title": "Reporter", "type": "object", }, - "Tournament_lloz5q_leaf": { + "Tournament_djsht2_leaf": { "additionalProperties": False, "properties": { "id": { @@ -889,7 +889,7 @@ def test_team_schema(self): "title": "Alias", }, "events": { - "items": {"$ref": "#/$defs/Event_37gjqu_leaf"}, + "items": {"$ref": "#/$defs/Event_7fv6fv_leaf"}, "title": "Events", "type": "array", }, @@ -1293,7 +1293,7 @@ def test_schema(self): self.Employee_Pydantic.model_json_schema(), { "$defs": { - "Employee_dkvdqq_leaf": { + "Employee_lqnwvu_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1304,7 +1304,7 @@ def test_schema(self): }, "name": {"maxLength": 50, "title": "Name", "type": "string"}, "talks_to": { - "items": {"$ref": "#/$defs/Employee_pb36jg_leaf"}, + "items": {"$ref": "#/$defs/Employee_okqh4k_leaf"}, "title": "Talks To", "type": "array", }, @@ -1321,7 +1321,7 @@ def test_schema(self): "title": "Manager Id", }, "team_members": { - "items": {"$ref": "#/$defs/Employee_pb36jg_leaf"}, + "items": {"$ref": "#/$defs/Employee_okqh4k_leaf"}, "title": "Team Members", "type": "array", }, @@ -1330,7 +1330,7 @@ def test_schema(self): "title": "Employee", "type": "object", }, - "Employee_pb36jg_leaf": { + "Employee_okqh4k_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1368,7 +1368,7 @@ def test_schema(self): }, "name": {"maxLength": 50, "title": "Name", "type": "string"}, "talks_to": { - "items": {"$ref": "#/$defs/Employee_dkvdqq_leaf"}, + "items": {"$ref": "#/$defs/Employee_lqnwvu_leaf"}, "title": "Talks To", "type": "array", }, @@ -1381,7 +1381,7 @@ def test_schema(self): "title": "Manager Id", }, "team_members": { - "items": {"$ref": "#/$defs/Employee_dkvdqq_leaf"}, + "items": {"$ref": "#/$defs/Employee_lqnwvu_leaf"}, "title": "Team Members", "type": "array", }, @@ -1491,7 +1491,7 @@ async def test_computed_field_schema(self): self.Employee_Pydantic.model_json_schema(mode="serialization"), { "$defs": { - "Employee_dkvdqq_leaf": { + "Employee_okqh4k_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1505,13 +1505,6 @@ async def test_computed_field_schema(self): "title": "Name", "type": "string" }, - "talks_to": { - "items": { - "$ref": "#/$defs/Employee_pb36jg_leaf" - }, - "title": "Talks To", - "type": "array" - }, "manager_id": { "anyOf": [ { @@ -1526,13 +1519,6 @@ async def test_computed_field_schema(self): "nullable": True, "title": "Manager Id" }, - "team_members": { - "items": { - "$ref": "#/$defs/Employee_pb36jg_leaf" - }, - "title": "Team Members", - "type": "array" - }, "name_length": { "description": "", "readOnly": True, @@ -1549,16 +1535,14 @@ async def test_computed_field_schema(self): "required": [ "id", "name", - "talks_to", "manager_id", - "team_members", "name_length", "team_size" ], "title": "Employee", "type": "object" }, - "Employee_pb36jg_leaf": { + "Employee_lqnwvu_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1572,6 +1556,13 @@ async def test_computed_field_schema(self): "title": "Name", "type": "string" }, + "talks_to": { + "items": { + "$ref": "#/$defs/Employee_okqh4k_leaf" + }, + "title": "Talks To", + "type": "array" + }, "manager_id": { "anyOf": [ { @@ -1586,6 +1577,13 @@ async def test_computed_field_schema(self): "nullable": True, 
"title": "Manager Id" }, + "team_members": { + "items": { + "$ref": "#/$defs/Employee_okqh4k_leaf" + }, + "title": "Team Members", + "type": "array" + }, "name_length": { "description": "", "readOnly": True, @@ -1602,7 +1600,9 @@ async def test_computed_field_schema(self): "required": [ "id", "name", + "talks_to", "manager_id", + "team_members", "name_length", "team_size" ], @@ -1625,7 +1625,7 @@ async def test_computed_field_schema(self): }, "talks_to": { "items": { - "$ref": "#/$defs/Employee_dkvdqq_leaf" + "$ref": "#/$defs/Employee_lqnwvu_leaf" }, "title": "Talks To", "type": "array" @@ -1646,7 +1646,7 @@ async def test_computed_field_schema(self): }, "team_members": { "items": { - "$ref": "#/$defs/Employee_dkvdqq_leaf" + "$ref": "#/$defs/Employee_lqnwvu_leaf" }, "title": "Team Members", "type": "array" diff --git a/tests/test_early_init.py b/tests/test_early_init.py index 7068ac9c0..14dd7ce13 100644 --- a/tests/test_early_init.py +++ b/tests/test_early_init.py @@ -167,7 +167,7 @@ def test_early_init(self): Event_Pydantic.model_json_schema(), { "$defs": { - "Tournament_pf77nd_leaf": { + "Tournament_z7k3ew_leaf": { "additionalProperties": False, "properties": { "id": { @@ -211,7 +211,7 @@ def test_early_init(self): "type": "string", }, "tournament": { - "anyOf": [{"$ref": "#/$defs/Tournament_pf77nd_leaf"}, {"type": "null"}], + "anyOf": [{"$ref": "#/$defs/Tournament_z7k3ew_leaf"}, {"type": "null"}], "nullable": True, "title": "Tournament", }, diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index cbaa9b3cd..199ddffd0 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -1,15 +1,8 @@ import dataclasses import inspect -import sys from base64 import b32encode from typing import MutableMapping -if sys.version_info >= (3, 11): - from typing import Self -else: - from typing_extensions import Self - -from dataclasses import dataclass, field from hashlib import sha3_224 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Callable, Union, TypeVar @@ -21,153 +14,22 @@ from tortoise.fields import IntField, JSONField, TextField from tortoise.contrib.pydantic.dataclasses import FieldDescriptionBase, ForeignKeyFieldInstanceDescription, \ OneToOneFieldInstanceDescription, BackwardOneToOneRelationDescription, BackwardFKRelationDescription, \ - ManyToManyFieldInstanceDescription, describe_model_by_dataclass, ModelDescription + ManyToManyFieldInstanceDescription, ModelDescription, PydanticMetaData if TYPE_CHECKING: # pragma: nocoverage from tortoise.models import Model _MODEL_INDEX: Dict[str, Type[PydanticModel]] = {} - - -@dataclass -class PydanticMetaData: - #: If not empty, only fields this property contains will be in the pydantic model - include: Tuple[str, ...] = () - - #: Fields listed in this property will be excluded from pydantic model - exclude: Tuple[str, ...] = field(default_factory=lambda: ("Meta",)) - - #: Computed fields can be listed here to use in pydantic model - computed: Tuple[str, ...] = field(default_factory=tuple) - - #: Use backward relations without annotations - not recommended, it can be huge data - #: without control - backward_relations: bool = True - - #: Maximum recursion level allowed - max_recursion: int = 3 - - #: Allow cycles in recursion - This can result in HUGE data - Be careful! 
- #: Please use this with ``exclude``/``include`` and sane ``max_recursion`` - allow_cycles: bool = False - - #: If we should exclude raw fields (the ones have _id suffixes) of relations - exclude_raw_fields: bool = True - - #: Sort fields alphabetically. - #: If not set (or ``False``) then leave fields in declaration order - sort_alphabetically: bool = False - - #: Allows user to specify custom config for generated model - model_config: Optional[ConfigDict] = None - - @classmethod - def from_pydantic_meta(cls, old_pydantic_meta: Any) -> Self: - default_meta = cls() - - def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: - return getattr(old_pydantic_meta, attr, default) - include = tuple(get_param_from_pydantic_meta("include", default_meta.include)) - exclude = tuple(get_param_from_pydantic_meta("exclude", default_meta.exclude)) - computed = tuple(get_param_from_pydantic_meta("computed", default_meta.computed)) - backward_relations = bool( - get_param_from_pydantic_meta("backward_relations_raw", default_meta.backward_relations) - ) - max_recursion = int(get_param_from_pydantic_meta("max_recursion", default_meta.max_recursion)) - allow_cycles = bool(get_param_from_pydantic_meta("allow_cycles", default_meta.allow_cycles)) - exclude_raw_fields = bool( - get_param_from_pydantic_meta("exclude_raw_fields", default_meta.exclude_raw_fields) - ) - sort_alphabetically = bool( - get_param_from_pydantic_meta("sort_alphabetically", default_meta.sort_alphabetically) - ) - model_config = get_param_from_pydantic_meta("model_config", default_meta.model_config) - pmd = cls( - include=include, - exclude=exclude, - computed=computed, - backward_relations=backward_relations, - max_recursion=max_recursion, - allow_cycles=allow_cycles, - exclude_raw_fields=exclude_raw_fields, - sort_alphabetically=sort_alphabetically, - model_config=model_config - ) - return pmd - - def construct_pydantic_meta( - self, - meta_override: Type - ) -> "PydanticMetaData": - def get_param_from_meta_override(attr: str) -> Any: - return getattr(meta_override, attr, getattr(self, attr)) - - default_include: Tuple[str, ...] = tuple(get_param_from_meta_override("include")) - default_exclude: Tuple[str, ...] = tuple(get_param_from_meta_override("exclude")) - default_computed: Tuple[str, ...] = tuple(get_param_from_meta_override("computed")) - default_config: Optional[ConfigDict] = self.model_config - - backward_relations: bool = bool(get_param_from_meta_override("backward_relations")) - - max_recursion: int = int(get_param_from_meta_override("max_recursion")) - exclude_raw_fields: bool = bool(get_param_from_meta_override("exclude_raw_fields")) - sort_alphabetically: bool = bool(get_param_from_meta_override("sort_alphabetically")) - allow_cycles: bool = bool(get_param_from_meta_override("allow_cycles")) - - pmd = PydanticMetaData( - include=default_include, - exclude=default_exclude, - computed=default_computed, - model_config=default_config, - backward_relations=backward_relations, - max_recursion=max_recursion, - exclude_raw_fields=exclude_raw_fields, - sort_alphabetically=sort_alphabetically, - allow_cycles=allow_cycles - ) - return pmd - - def finalize_meta( - self, - exclude: Tuple[str, ...] = (), - include: Tuple[str, ...] = (), - computed: Tuple[str, ...] 
= (), - allow_cycles: Optional[bool] = None, - sort_alphabetically: Optional[bool] = None, - model_config: Optional[ConfigDict] = None, - ) -> "PydanticMetaData": - _sort_fields: bool = ( - self.sort_alphabetically - if sort_alphabetically is None - else sort_alphabetically - ) - _allow_cycles: bool = ( - self.allow_cycles - if allow_cycles is None - else allow_cycles - ) - - include = tuple(include) + self.include - exclude = tuple(exclude) + self.exclude - computed = tuple(computed) + self.computed - - _model_config = ConfigDict() - if self.model_config: - _model_config.update(self.model_config) - if model_config: - _model_config.update(model_config) - - return PydanticMetaData( - include=include, - exclude=exclude, - computed=computed, - backward_relations=self.backward_relations, - max_recursion=self.max_recursion, - exclude_raw_fields=self.exclude_raw_fields, - sort_alphabetically=_sort_fields, - allow_cycles=_allow_cycles, - model_config=_model_config - ) +""" +The index works as follows: +1. the hash is calculated from the following: + - the fully qualified name of the model + - the names of the contained fields + - the names of all relational fields and the corresponding names of the pydantic model. + This is because if the model is not yet fully initialized, the relational fields are not yet present. +2. the hash does not take into account the resulting name of the model; this must be checked separately. +3. the hash can only be calculated after a complete analysis of the given model. +""" def _br_it(val: str) -> str: @@ -372,7 +234,7 @@ def __init__( _as_submodel: bool = False ) -> None: self._cls: "Type[Model]" = cls - self._stack: Tuple[Tuple["Type[Model]", str, int], ...] = tuple() # ((Type[Model], field_name, max_recursion),) + self._stack: Tuple[Tuple["Type[Model]", str, int], ...] = _stack # ((Type[Model], field_name, max_recursion),) self._is_default: bool = ( exclude is None and include is None @@ -408,6 +270,7 @@ def __init__( self._name: str self._title: str self.given_name = name + self.__hash: str = "" self._as_submodel = _as_submodel @@ -416,8 +279,9 @@ def __init__( self._pconfig: ConfigDict self._properties: Dict[str, Any] = dict() + self._relational_fields_index: List[Tuple[str, str]] = list() - self._model_description: ModelDescription = describe_model_by_dataclass(cls) + self._model_description: ModelDescription = ModelDescription.from_model(cls) self._field_map: FieldMap = self._initialize_field_map() self._construct_field_map() @@ -429,6 +293,16 @@ def __init__( self._stack = _stack + @property + def _hash(self): + if self.__hash == "": + hashval = ( + f"{self._fqname};{self._properties.keys()};{self._relational_fields_index};" + f"{self.meta.allow_cycles}" + ) + self.__hash = b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] + return self.__hash + def get_name(self) -> Tuple[str, str]: # If arguments are specified (different from the defaults), we append a hash to the # class name, to make it unique @@ -436,16 +310,17 @@ def get_name(self) -> Tuple[str, str]: # When called later, include is explicitly set, so fence passes. 
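To make the caching rules described above concrete, here is a minimal standalone sketch of the digest construction used for the ``_MODEL_INDEX`` key (the model name, field names and relational index passed in below are made-up placeholders):

.. code-block:: python3

    from base64 import b32encode
    from hashlib import sha3_224

    def creator_hash(fqname, property_names, relational_index, allow_cycles):
        # Six lowercase base32 characters of a SHA3-224 digest, mirroring PydanticModelCreator._hash.
        hashval = f"{fqname};{property_names};{relational_index};{allow_cycles}"
        return b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6]

    # Two creators that describe the same fields produce the same cache key ...
    a = creator_hash("models.Employee", ["id", "name"], [("manager", "Employee:leaf")], False)
    b = creator_hash("models.Employee", ["id", "name"], [("manager", "Employee:leaf")], False)
    assert a == b
    # ... while the generated class name is not part of the key, so it is compared separately
    # before a cached model is reused.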
if self.given_name is not None: return self.given_name, self.given_name - hashval = ( - f"{self._fqname};{self.meta.exclude};{self.meta.include};{self.meta.computed};" - f"{self.meta.sort_alphabetically}:{self.meta.allow_cycles}:{self._exclude_read_only}" - ) - postfix = ( - ":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] + name = ( + f"{self._fqname}:{self._hash}" if not self._is_default - else "" + else self._fqname ) - return self._fqname + postfix, self._cls.__name__ + name = ( + f"{name}:leaf" + if self._as_submodel + else name + ) + return name, self._cls.__name__ def _initialize_pconfig(self) -> ConfigDict: pconfig: ConfigDict = PydanticModel.model_config.copy() @@ -488,11 +363,13 @@ def create_pydantic_model(self) -> Type[PydanticModel]: self._process_field(field_name, field_description) self._name, self._title = self.get_name() - if self._as_submodel and self._stack: - self._name = f"{self._name}:leaf" - if self._name in _MODEL_INDEX: - return _MODEL_INDEX[self._name] + if self._hash in _MODEL_INDEX: + # there is a model exactly the same, but the name could be different + hashed_model = _MODEL_INDEX[self._hash] + if hashed_model.__name__ == self._name: + # also the same name + return _MODEL_INDEX[self._hash] self._pconfig = self._initialize_pconfig() self._properties["model_config"] = self._pconfig @@ -508,7 +385,7 @@ def create_pydantic_model(self) -> Type[PydanticModel]: # Store the base class model.model_config["orig_model"] = self._cls # type: ignore # Store model reference so we can de-dup it later on if needed. - _MODEL_INDEX[self._name] = model + _MODEL_INDEX[self._hash] = model return model def _process_field( @@ -578,7 +455,7 @@ def _process_normal_field_description( ): return self._process_single_field_relation(field_name, field_description, json_schema_extra), True elif field_description.field_type is JSONField: - return self._process_json_field_description(), False + return Any, False return self._process_data_field_description(field_name, field_description, json_schema_extra, fconfig), False def _process_single_field_relation( @@ -593,6 +470,7 @@ def _process_single_field_relation( ) -> Optional[Type[PydanticModel]]: model: Optional[Type[PydanticModel]] = self._get_submodel(field_description.python_type, field_name) if model: + self._relational_fields_index.append((field_name, model.__name__)) if field_description.nullable: json_schema_extra["nullable"] = True if field_description.nullable or field_description.default is not None: @@ -608,12 +486,10 @@ def _process_many_field_relation( ) -> Optional[Type[List[Type[PydanticModel]]]]: model = self._get_submodel(field_description.python_type, field_name) if model: + self._relational_fields_index.append((field_name, model.__name__)) return List[model] # type: ignore return None - def _process_json_field_description(self) -> Any: - return Any - def _process_data_field_description( self, field_name: str, diff --git a/tortoise/contrib/pydantic/dataclasses.py b/tortoise/contrib/pydantic/dataclasses.py index 1b9511393..6fd9e7b24 100644 --- a/tortoise/contrib/pydantic/dataclasses.py +++ b/tortoise/contrib/pydantic/dataclasses.py @@ -1,5 +1,14 @@ import dataclasses -from typing import Type, Optional, Any, TYPE_CHECKING, Dict, List +import sys +from copy import copy +from typing import Type, Optional, Any, TYPE_CHECKING, Dict, List, Tuple + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + +from pydantic import ConfigDict from 
tortoise.fields import Field from tortoise.fields.relational import RelationalField, ForeignKeyFieldInstance, ManyToManyFieldInstance, \ @@ -20,6 +29,24 @@ class FieldDescriptionBase: description: Optional[str] = None docstring: Optional[str] = None + @classmethod + def _from_field(cls, field: Field): + field_type = getattr(field, "related_model", field.field_type) + return cls( + name=field.model_field_name, + field_type=field.__class__, + python_type=field_type, + nullable=field.null, + default=field.default, + description=field.description, + docstring=field.docstring, + constraints=copy(field.constraints), + ) + + @classmethod + def from_field(cls, field: Field): + return cls._from_field(field) + @dataclasses.dataclass class FieldDescription(FieldDescriptionBase): @@ -35,6 +62,12 @@ class RelationalFieldDescription(FieldDescriptionBase): class ForeignKeyFieldInstanceDescription(RelationalFieldDescription): raw_field: Optional[str] = "" + @classmethod + def from_field(cls, field: Field): + fd = cls._from_field(field) + fd.raw_field = field.source_field + return fd + @dataclasses.dataclass class BackwardFKRelationDescription(ForeignKeyFieldInstanceDescription): @@ -66,126 +99,200 @@ class ModelDescription: backward_o2o_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) m2m_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) - -def describe_model_by_dataclass(cls: Type[MODEL]) -> ModelDescription: - return ModelDescription( - pk_field=describe_field_by_dataclass(cls._meta.fields_map[cls._meta.pk_attr]), - data_fields=[ - describe_field_by_dataclass(field) - for name, field in cls._meta.fields_map.items() - if name != cls._meta.pk_attr and name in (cls._meta.fields - cls._meta.fetch_fields) - ], - fk_fields=[ - describe_field_by_dataclass(field) - for name, field in cls._meta.fields_map.items() - if name in cls._meta.fk_fields - ], - backward_fk_fields=[ - describe_field_by_dataclass(field) - for name, field in cls._meta.fields_map.items() - if name in cls._meta.backward_fk_fields - ], - o2o_fields=[ - describe_field_by_dataclass(field) - for name, field in cls._meta.fields_map.items() - if name in cls._meta.o2o_fields - ], - backward_o2o_fields=[ - describe_field_by_dataclass(field) - for name, field in cls._meta.fields_map.items() - if name in cls._meta.backward_o2o_fields - ], - m2m_fields=[ - describe_field_by_dataclass(field) - for name, field in cls._meta.fields_map.items() - if name in cls._meta.m2m_fields - ], - ) + @classmethod + def from_model(cls, model: Type[MODEL]) -> Self: + return cls( + pk_field=describe_field_by_dataclass(model._meta.fields_map[model._meta.pk_attr]), + data_fields=[ + describe_field_by_dataclass(field) + for name, field in model._meta.fields_map.items() + if name != model._meta.pk_attr and name in (model._meta.fields - model._meta.fetch_fields) + ], + fk_fields=[ + describe_field_by_dataclass(field) + for name, field in model._meta.fields_map.items() + if name in model._meta.fk_fields + ], + backward_fk_fields=[ + describe_field_by_dataclass(field) + for name, field in model._meta.fields_map.items() + if name in model._meta.backward_fk_fields + ], + o2o_fields=[ + describe_field_by_dataclass(field) + for name, field in model._meta.fields_map.items() + if name in model._meta.o2o_fields + ], + backward_o2o_fields=[ + describe_field_by_dataclass(field) + for name, field in model._meta.fields_map.items() + if name in model._meta.backward_o2o_fields + ], + m2m_fields=[ + describe_field_by_dataclass(field) + for 
name, field in model._meta.fields_map.items() + if name in model._meta.m2m_fields + ], + ) def describe_field_by_dataclass(field: Field) -> FieldDescriptionBase: - field_type = getattr(field, "related_model", field.field_type) if isinstance(field, RelationalField): if isinstance(field, ForeignKeyFieldInstance): # ForeignKeyFieldInstance -> RelationalField if isinstance(field, OneToOneFieldInstance): # OneToOneFieldInstance -> ForeignKeyFieldInstance -> RelationalField - return OneToOneFieldInstanceDescription( - name=field.model_field_name, - field_type=field.__class__, - python_type=field_type, - nullable=field.null, - default=field.default, - description=field.description, - docstring=field.docstring, - constraints=field.constraints, - raw_field=field.source_field, - ) - return ForeignKeyFieldInstanceDescription( - name=field.model_field_name, - field_type=field.__class__, - python_type=field_type, - nullable=field.null, - default=field.default, - description=field.description, - docstring=field.docstring, - constraints=field.constraints, - raw_field=field.source_field, - ) + return OneToOneFieldInstanceDescription.from_field(field) + return ForeignKeyFieldInstanceDescription.from_field(field) if isinstance(field, BackwardFKRelation): # BackwardFKRelation -> RelationalField if isinstance(field, BackwardOneToOneRelation): # BackwardOneToOneRelation -> BackwardFKRelation -> RelationalField - return BackwardOneToOneRelationDescription( - name=field.model_field_name, - field_type=field.__class__, - python_type=field_type, - nullable=field.null, - default=field.default, - description=field.description, - docstring=field.docstring, - constraints=field.constraints, - raw_field=field.source_field, - ) - return BackwardFKRelationDescription( - name=field.model_field_name, - field_type=field.__class__, - python_type=field.related_model, - nullable=field.null, - default=field.default, - description=field.description, - docstring=field.docstring, - constraints=field.constraints, - ) + return BackwardOneToOneRelationDescription.from_field(field) + return BackwardFKRelationDescription.from_field(field) if isinstance(field, ManyToManyFieldInstance): # ManyToManyFieldInstance -> RelationalField - return ManyToManyFieldInstanceDescription( - name=field.model_field_name, - field_type=field.__class__, - python_type=field.related_model, - nullable=field.null, - default=field.default, - description=field.description, - docstring=field.docstring, - constraints=field.constraints, - ) - return RelationalFieldDescription( - name=field.model_field_name, - field_type=field.__class__, - python_type=field.related_model, - nullable=field.null, - default=field.default, - description=field.description, - docstring=field.docstring, - constraints=field.constraints, + return ManyToManyFieldInstanceDescription.from_field(field) + return RelationalFieldDescription.from_field(field) + return FieldDescription.from_field(field) + + +@dataclasses.dataclass +class PydanticMetaData: + #: If not empty, only fields this property contains will be in the pydantic model + include: Tuple[str, ...] = () + + #: Fields listed in this property will be excluded from pydantic model + exclude: Tuple[str, ...] = dataclasses.field(default_factory=lambda: ("Meta",)) + + #: Computed fields can be listed here to use in pydantic model + computed: Tuple[str, ...] 
= dataclasses.field(default_factory=tuple) + + #: Use backward relations without annotations - not recommended, it can be huge data + #: without control + backward_relations: bool = True + + #: Maximum recursion level allowed + max_recursion: int = 3 + + #: Allow cycles in recursion - This can result in HUGE data - Be careful! + #: Please use this with ``exclude``/``include`` and sane ``max_recursion`` + allow_cycles: bool = False + + #: If we should exclude raw fields (the ones have _id suffixes) of relations + exclude_raw_fields: bool = True + + #: Sort fields alphabetically. + #: If not set (or ``False``) then leave fields in declaration order + sort_alphabetically: bool = False + + #: Allows user to specify custom config for generated model + model_config: Optional[ConfigDict] = None + + @classmethod + def from_pydantic_meta(cls, old_pydantic_meta: Any) -> Self: + default_meta = cls() + + def get_param_from_pydantic_meta(attr: str, default: Any) -> Any: + return getattr(old_pydantic_meta, attr, default) + include = tuple(get_param_from_pydantic_meta("include", default_meta.include)) + exclude = tuple(get_param_from_pydantic_meta("exclude", default_meta.exclude)) + computed = tuple(get_param_from_pydantic_meta("computed", default_meta.computed)) + backward_relations = bool( + get_param_from_pydantic_meta("backward_relations_raw", default_meta.backward_relations) + ) + max_recursion = int(get_param_from_pydantic_meta("max_recursion", default_meta.max_recursion)) + allow_cycles = bool(get_param_from_pydantic_meta("allow_cycles", default_meta.allow_cycles)) + exclude_raw_fields = bool( + get_param_from_pydantic_meta("exclude_raw_fields", default_meta.exclude_raw_fields) + ) + sort_alphabetically = bool( + get_param_from_pydantic_meta("sort_alphabetically", default_meta.sort_alphabetically) + ) + model_config = get_param_from_pydantic_meta("model_config", default_meta.model_config) + pmd = cls( + include=include, + exclude=exclude, + computed=computed, + backward_relations=backward_relations, + max_recursion=max_recursion, + allow_cycles=allow_cycles, + exclude_raw_fields=exclude_raw_fields, + sort_alphabetically=sort_alphabetically, + model_config=model_config + ) + return pmd + + def construct_pydantic_meta( + self, + meta_override: Type + ) -> "PydanticMetaData": + def get_param_from_meta_override(attr: str) -> Any: + return getattr(meta_override, attr, getattr(self, attr)) + + default_include: Tuple[str, ...] = tuple(get_param_from_meta_override("include")) + default_exclude: Tuple[str, ...] = tuple(get_param_from_meta_override("exclude")) + default_computed: Tuple[str, ...] = tuple(get_param_from_meta_override("computed")) + default_config: Optional[ConfigDict] = self.model_config + + backward_relations: bool = bool(get_param_from_meta_override("backward_relations")) + + max_recursion: int = int(get_param_from_meta_override("max_recursion")) + exclude_raw_fields: bool = bool(get_param_from_meta_override("exclude_raw_fields")) + sort_alphabetically: bool = bool(get_param_from_meta_override("sort_alphabetically")) + allow_cycles: bool = bool(get_param_from_meta_override("allow_cycles")) + + pmd = PydanticMetaData( + include=default_include, + exclude=default_exclude, + computed=default_computed, + model_config=default_config, + backward_relations=backward_relations, + max_recursion=max_recursion, + exclude_raw_fields=exclude_raw_fields, + sort_alphabetically=sort_alphabetically, + allow_cycles=allow_cycles + ) + return pmd + + def finalize_meta( + self, + exclude: Tuple[str, ...] 
= (), + include: Tuple[str, ...] = (), + computed: Tuple[str, ...] = (), + allow_cycles: Optional[bool] = None, + sort_alphabetically: Optional[bool] = None, + model_config: Optional[ConfigDict] = None, + ) -> "PydanticMetaData": + _sort_fields: bool = ( + self.sort_alphabetically + if sort_alphabetically is None + else sort_alphabetically + ) + _allow_cycles: bool = ( + self.allow_cycles + if allow_cycles is None + else allow_cycles + ) + + include = tuple(include) + self.include + exclude = tuple(exclude) + self.exclude + computed = tuple(computed) + self.computed + + _model_config = ConfigDict() + if self.model_config: + _model_config.update(self.model_config) + if model_config: + _model_config.update(model_config) + + return PydanticMetaData( + include=include, + exclude=exclude, + computed=computed, + backward_relations=self.backward_relations, + max_recursion=self.max_recursion, + exclude_raw_fields=self.exclude_raw_fields, + sort_alphabetically=_sort_fields, + allow_cycles=_allow_cycles, + model_config=_model_config ) - return FieldDescription( - name=field.model_field_name, - field_type=field.__class__, - python_type=field.field_type, - nullable=field.null, - default=field.default, - description=field.description, - docstring=field.docstring, - constraints=field.constraints, - ) From 3ce7516119255504781a4f6559b9a9627af3f6bd Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Fri, 8 Nov 2024 10:20:19 +0100 Subject: [PATCH 22/27] remove dataclasses for field descriptions pydantic_model_creator now accesses the fields directly --- tortoise/contrib/pydantic/creator.py | 190 +++++++++++------------ tortoise/contrib/pydantic/dataclasses.py | 130 +++------------- tortoise/fields/relational.py | 2 +- 3 files changed, 115 insertions(+), 207 deletions(-) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 199ddffd0..0ed464ffc 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -1,20 +1,20 @@ -import dataclasses import inspect from base64 import b32encode +from copy import copy from typing import MutableMapping from hashlib import sha3_224 -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Callable, Union, TypeVar +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union -from pydantic import ConfigDict, Field, computed_field, create_model -from pydantic._internal._decorators import PydanticDescriptorProxy +from pydantic import ConfigDict, computed_field, create_model +from pydantic import Field as PydanticField +from tortoise import ForeignKeyFieldInstance, BackwardFKRelation, ManyToManyFieldInstance, OneToOneFieldInstance, \ + BackwardOneToOneRelation from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel from tortoise.contrib.pydantic.utils import get_annotations -from tortoise.fields import IntField, JSONField, TextField -from tortoise.contrib.pydantic.dataclasses import FieldDescriptionBase, ForeignKeyFieldInstanceDescription, \ - OneToOneFieldInstanceDescription, BackwardOneToOneRelationDescription, BackwardFKRelationDescription, \ - ManyToManyFieldInstanceDescription, ModelDescription, PydanticMetaData +from tortoise.fields import JSONField, Field, IntField, TextField +from tortoise.contrib.pydantic.dataclasses import ModelDescription, PydanticMetaData, ComputedFieldDescription if TYPE_CHECKING: # pragma: nocoverage from tortoise.models import Model @@ -87,23 +87,16 @@ def _pydantic_recursion_protector( return 
pmc.create_pydantic_model() -@dataclasses.dataclass -class ComputedFieldDescription: - field_type: Any - function: Callable[[], Any] - description: Optional[str] +# FieldDescriptionT = TypeVar('FieldDescriptionT', bound=FieldDescriptionBase) -FieldDescriptionT = TypeVar('FieldDescriptionT', bound=FieldDescriptionBase) - - -class FieldMap(MutableMapping[str, Union[FieldDescriptionBase, ComputedFieldDescription]]): - def __init__(self, meta: PydanticMetaData, pk_field_description: Optional[FieldDescriptionBase] = None): - self._field_map: Dict[str, Union[FieldDescriptionBase, ComputedFieldDescription]] = {} - self.pk_raw_field = pk_field_description.name if pk_field_description is not None else "" - if pk_field_description: - self.pk_raw_field = pk_field_description.name - self.field_map_update([pk_field_description], meta) +class FieldMap(MutableMapping[str, Union[Field, ComputedFieldDescription]]): + def __init__(self, meta: PydanticMetaData, pk_field: Optional[Field] = None): + self._field_map: Dict[str, Union[Field, ComputedFieldDescription]] = {} + self.pk_raw_field = pk_field.model_field_name if pk_field is not None else "" + if pk_field: + self.pk_raw_field = pk_field.model_field_name + self.field_map_update([pk_field], meta) self.computed_fields: Dict[str, ComputedFieldDescription] = {} def __delitem__(self, __key): @@ -129,18 +122,18 @@ def sort_definition_order(self, cls: "Type[Model]", computed: Tuple[str, ...]) - k: self._field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in self._field_map } - def field_map_update(self, field_descriptions: List[FieldDescriptionT], meta: PydanticMetaData) -> None: - for field_description in field_descriptions: - name = field_description.name + def field_map_update(self, fields: List[Field], meta: PydanticMetaData) -> None: + for field in fields: + name = field.model_field_name # Include or exclude field if (meta.include and name not in meta.include) or name in meta.exclude: continue # Remove raw fields - if isinstance(field_description, ForeignKeyFieldInstanceDescription): - raw_field = field_description.raw_field + if isinstance(field, ForeignKeyFieldInstance): + raw_field = field.source_field if raw_field is not None and meta.exclude_raw_fields and raw_field != self.pk_raw_field: self.pop(raw_field, None) - self[name] = field_description + self[name] = field def computed_field_map_update(self, computed: Tuple[str, ...], cls: "Type[Model]"): self._field_map.update( @@ -335,23 +328,23 @@ def _initialize_pconfig(self) -> ConfigDict: def _initialize_field_map(self) -> FieldMap: return FieldMap(self.meta) \ if self._exclude_read_only \ - else FieldMap(self.meta, pk_field_description=self._model_description.pk_field) + else FieldMap(self.meta, pk_field=self._model_description.pk_field) def _construct_field_map(self) -> None: - self._field_map.field_map_update(field_descriptions=self._model_description.data_fields, meta=self.meta) + self._field_map.field_map_update(fields=self._model_description.data_fields, meta=self.meta) if not self._exclude_read_only: - for field_descriptions in ( + for fields in ( self._model_description.fk_fields, self._model_description.o2o_fields, self._model_description.m2m_fields ): - self._field_map.field_map_update(field_descriptions, self.meta) + self._field_map.field_map_update(fields, self.meta) if self.meta.backward_relations: - for field_descriptions in ( + for fields in ( self._model_description.backward_fk_fields, self._model_description.backward_o2o_fields ): - 
self._field_map.field_map_update(field_descriptions, self.meta) + self._field_map.field_map_update(fields, self.meta) self._field_map.computed_field_map_update(self.meta.computed, self._cls) if self.meta.sort_alphabetically: self._field_map.sort_alphabetically() @@ -359,8 +352,8 @@ def _construct_field_map(self) -> None: self._field_map.sort_definition_order(self._cls, self.meta.computed) def create_pydantic_model(self) -> Type[PydanticModel]: - for field_name, field_description in self._field_map.items(): - self._process_field(field_name, field_description) + for field_name, field in self._field_map.items(): + self._process_field(field_name, field) self._name, self._title = self.get_name() @@ -391,7 +384,7 @@ def create_pydantic_model(self) -> Type[PydanticModel]: def _process_field( self, field_name: str, - field_description: Union[FieldDescriptionBase, ComputedFieldDescription], + field: Union[Field, ComputedFieldDescription], ) -> None: json_schema_extra: Dict[str, Any] = {} fconfig: Dict[str, Any] = { @@ -399,81 +392,81 @@ def _process_field( } field_property: Optional[Any] = None is_to_one_relation: bool = False - comment = "" - if isinstance(field_description, FieldDescriptionBase): - field_property, is_to_one_relation = self._process_normal_field_description( - field_name, field_description, json_schema_extra, fconfig + if isinstance(field, Field): + field_property, is_to_one_relation = self._process_normal_field( + field_name, field, json_schema_extra, fconfig ) - elif isinstance(field_description, ComputedFieldDescription): - field_property, is_to_one_relation = self._process_computed_field_description(field_description), False - comment = _cleandoc(field_description.function) - - if field_property: - self._properties[field_name] = field_property - if field_name in self._properties and not isinstance(self._properties[field_name], tuple): - fconfig["title"] = field_name.replace("_", " ").title() - description = comment or _br_it(field_description.docstring or field_description.description or "") \ - if isinstance(field_description, FieldDescriptionBase) \ - else (comment or _br_it(field_description.description or "")) - if description: - fconfig["description"] = description - ftype = self._properties[field_name] - if not isinstance(ftype, PydanticDescriptorProxy) and isinstance(field_description, FieldDescriptionBase): + if field_property: + fconfig["title"] = field_name.replace("_", " ").title() + description = _br_it(field.docstring or field.description or "") + if description: + fconfig["description"] = description if ( field_name in self._optional - or (field_description.default is not None and not callable(field_description.default)) + or (field.default is not None and not callable(field.default)) ): - self._properties[field_name] = (ftype, Field(default=field_description.default, **fconfig)) + self._properties[field_name] = (field_property, PydanticField(default=field.default, **fconfig)) else: if ( ( json_schema_extra.get("nullable") and not is_to_one_relation - and field_description.field_type not in (IntField, TextField) + and field.__class__ not in (IntField, TextField) ) or (self._exclude_read_only and json_schema_extra.get("readOnly")) ): - fconfig["default_factory"] = lambda: None - self._properties[field_name] = (ftype, Field(**fconfig)) - - def _process_normal_field_description( + # see: https://docs.pydantic.dev/latest/migration/#required-optional-and-nullable-fields + fconfig["default"] = None + self._properties[field_name] = (field_property, 
PydanticField(**fconfig)) + elif isinstance(field, ComputedFieldDescription): + field_property, is_to_one_relation = self._process_computed_field(field), False + if field_property: + comment = _cleandoc(field.function) + fconfig["title"] = field_name.replace("_", " ").title() + description = comment or _br_it(field.description or "") + if description: + fconfig["description"] = description + self._properties[field_name] = field_property + + def _process_normal_field( self, field_name: str, - field_description: FieldDescriptionBase, + field: Field, json_schema_extra: Dict[str, Any], fconfig: Dict[str, Any], ) -> Tuple[Optional[Any], bool]: - if isinstance(field_description, (BackwardFKRelationDescription, ManyToManyFieldInstanceDescription)): - return self._process_many_field_relation(field_name, field_description), False - elif isinstance( - field_description, + if isinstance( + field, ( - ForeignKeyFieldInstanceDescription, - OneToOneFieldInstanceDescription, - BackwardOneToOneRelationDescription + ForeignKeyFieldInstance, + OneToOneFieldInstance, + BackwardOneToOneRelation ) ): - return self._process_single_field_relation(field_name, field_description, json_schema_extra), True - elif field_description.field_type is JSONField: + return self._process_single_field_relation(field_name, field, json_schema_extra), True + elif isinstance(field, (BackwardFKRelation, ManyToManyFieldInstance)): + return self._process_many_field_relation(field_name, field), False + elif field.field_type is JSONField: return Any, False - return self._process_data_field_description(field_name, field_description, json_schema_extra, fconfig), False + return self._process_data_field(field_name, field, json_schema_extra, fconfig), False def _process_single_field_relation( self, field_name: str, - field_description: Union[ - ForeignKeyFieldInstanceDescription, - OneToOneFieldInstanceDescription, - BackwardOneToOneRelationDescription + field: Union[ + ForeignKeyFieldInstance, + OneToOneFieldInstance, + BackwardOneToOneRelation ], json_schema_extra: Dict[str, Any], ) -> Optional[Type[PydanticModel]]: - model: Optional[Type[PydanticModel]] = self._get_submodel(field_description.python_type, field_name) + python_type = getattr(field, "related_model", field.field_type) + model: Optional[Type[PydanticModel]] = self._get_submodel(python_type, field_name) if model: self._relational_fields_index.append((field_name, model.__name__)) - if field_description.nullable: + if field.null: json_schema_extra["nullable"] = True - if field_description.nullable or field_description.default is not None: + if field.null or field.default is not None: model = Optional[model] # type: ignore return model @@ -482,40 +475,43 @@ def _process_single_field_relation( def _process_many_field_relation( self, field_name: str, - field_description: Union[BackwardFKRelationDescription, ManyToManyFieldInstanceDescription], + field: Union[BackwardFKRelation, ManyToManyFieldInstance], ) -> Optional[Type[List[Type[PydanticModel]]]]: - model = self._get_submodel(field_description.python_type, field_name) + python_type = field.related_model + model = self._get_submodel(python_type, field_name) if model: self._relational_fields_index.append((field_name, model.__name__)) return List[model] # type: ignore return None - def _process_data_field_description( + def _process_data_field( self, field_name: str, - field_description: FieldDescriptionBase, + field: Field, json_schema_extra: Dict[str, Any], fconfig: Dict[str, Any], ) -> Optional[Any]: annotation = 
self._annotations.get(field_name, None) - if "readOnly" in field_description.constraints: - json_schema_extra["readOnly"] = field_description.constraints["readOnly"] - del field_description.constraints["readOnly"] - fconfig.update(field_description.constraints) - ptype = field_description.python_type - if field_description.nullable: + constraints = copy(field.constraints) + if "readOnly" in constraints: + json_schema_extra["readOnly"] = constraints["readOnly"] + del constraints["readOnly"] + fconfig.update(constraints) + python_type = getattr(field, "related_model", field.field_type) + ptype = python_type + if field.null: json_schema_extra["nullable"] = True - if field_name in self._optional or field_description.default is not None or field_description.nullable: - ptype = Optional[ptype] # type: ignore + if field_name in self._optional or field.default is not None or field.null: + ptype = Optional[ptype] if not (self._exclude_read_only and json_schema_extra.get("readOnly") is True): return annotation or ptype return None - def _process_computed_field_description( + def _process_computed_field( self, - field_description: ComputedFieldDescription, + field: ComputedFieldDescription, ) -> Optional[Any]: - func = field_description.function + func = field.function annotation = get_annotations(self._cls, func).get("return", None) comment = _cleandoc(func) if annotation is not None: diff --git a/tortoise/contrib/pydantic/dataclasses.py b/tortoise/contrib/pydantic/dataclasses.py index 6fd9e7b24..74c3b545b 100644 --- a/tortoise/contrib/pydantic/dataclasses.py +++ b/tortoise/contrib/pydantic/dataclasses.py @@ -1,7 +1,6 @@ import dataclasses import sys -from copy import copy -from typing import Type, Optional, Any, TYPE_CHECKING, Dict, List, Tuple +from typing import Type, Optional, Any, TYPE_CHECKING, Dict, List, Tuple, Callable if sys.version_info >= (3, 11): from typing import Self @@ -11,150 +10,63 @@ from pydantic import ConfigDict from tortoise.fields import Field -from tortoise.fields.relational import RelationalField, ForeignKeyFieldInstance, ManyToManyFieldInstance, \ - BackwardOneToOneRelation, BackwardFKRelation, OneToOneFieldInstance, MODEL if TYPE_CHECKING: # pragma: nocoverage from tortoise.models import Model -@dataclasses.dataclass -class FieldDescriptionBase: - name: str - field_type: Type[Field] - nullable: bool - constraints: Dict - python_type: Optional[type] = None - default: Optional[Any] = None - description: Optional[str] = None - docstring: Optional[str] = None - - @classmethod - def _from_field(cls, field: Field): - field_type = getattr(field, "related_model", field.field_type) - return cls( - name=field.model_field_name, - field_type=field.__class__, - python_type=field_type, - nullable=field.null, - default=field.default, - description=field.description, - docstring=field.docstring, - constraints=copy(field.constraints), - ) - - @classmethod - def from_field(cls, field: Field): - return cls._from_field(field) - - -@dataclasses.dataclass -class FieldDescription(FieldDescriptionBase): - ... 
- - -@dataclasses.dataclass -class RelationalFieldDescription(FieldDescriptionBase): - python_type: Optional[Type["Model"]] = None - - -@dataclasses.dataclass -class ForeignKeyFieldInstanceDescription(RelationalFieldDescription): - raw_field: Optional[str] = "" - - @classmethod - def from_field(cls, field: Field): - fd = cls._from_field(field) - fd.raw_field = field.source_field - return fd - - -@dataclasses.dataclass -class BackwardFKRelationDescription(ForeignKeyFieldInstanceDescription): - ... - - -@dataclasses.dataclass -class OneToOneFieldInstanceDescription(ForeignKeyFieldInstanceDescription): - ... - - -@dataclasses.dataclass -class BackwardOneToOneRelationDescription(ForeignKeyFieldInstanceDescription): - ... - - -@dataclasses.dataclass -class ManyToManyFieldInstanceDescription(RelationalFieldDescription): - ... - - @dataclasses.dataclass class ModelDescription: - pk_field: FieldDescriptionBase - data_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) - fk_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) - backward_fk_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) - o2o_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) - backward_o2o_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) - m2m_fields: List[FieldDescriptionBase] = dataclasses.field(default_factory=list) + pk_field: Field + data_fields: List[Field] = dataclasses.field(default_factory=list) + fk_fields: List[Field] = dataclasses.field(default_factory=list) + backward_fk_fields: List[Field] = dataclasses.field(default_factory=list) + o2o_fields: List[Field] = dataclasses.field(default_factory=list) + backward_o2o_fields: List[Field] = dataclasses.field(default_factory=list) + m2m_fields: List[Field] = dataclasses.field(default_factory=list) @classmethod - def from_model(cls, model: Type[MODEL]) -> Self: + def from_model(cls, model: Type["Model"]) -> Self: return cls( - pk_field=describe_field_by_dataclass(model._meta.fields_map[model._meta.pk_attr]), + pk_field=model._meta.fields_map[model._meta.pk_attr], data_fields=[ - describe_field_by_dataclass(field) + field for name, field in model._meta.fields_map.items() if name != model._meta.pk_attr and name in (model._meta.fields - model._meta.fetch_fields) ], fk_fields=[ - describe_field_by_dataclass(field) + field for name, field in model._meta.fields_map.items() if name in model._meta.fk_fields ], backward_fk_fields=[ - describe_field_by_dataclass(field) + field for name, field in model._meta.fields_map.items() if name in model._meta.backward_fk_fields ], o2o_fields=[ - describe_field_by_dataclass(field) + field for name, field in model._meta.fields_map.items() if name in model._meta.o2o_fields ], backward_o2o_fields=[ - describe_field_by_dataclass(field) + field for name, field in model._meta.fields_map.items() if name in model._meta.backward_o2o_fields ], m2m_fields=[ - describe_field_by_dataclass(field) + field for name, field in model._meta.fields_map.items() if name in model._meta.m2m_fields ], ) -def describe_field_by_dataclass(field: Field) -> FieldDescriptionBase: - if isinstance(field, RelationalField): - if isinstance(field, ForeignKeyFieldInstance): - # ForeignKeyFieldInstance -> RelationalField - if isinstance(field, OneToOneFieldInstance): - # OneToOneFieldInstance -> ForeignKeyFieldInstance -> RelationalField - return OneToOneFieldInstanceDescription.from_field(field) - return 
ForeignKeyFieldInstanceDescription.from_field(field) - if isinstance(field, BackwardFKRelation): - # BackwardFKRelation -> RelationalField - if isinstance(field, BackwardOneToOneRelation): - # BackwardOneToOneRelation -> BackwardFKRelation -> RelationalField - return BackwardOneToOneRelationDescription.from_field(field) - return BackwardFKRelationDescription.from_field(field) - if isinstance(field, ManyToManyFieldInstance): - # ManyToManyFieldInstance -> RelationalField - return ManyToManyFieldInstanceDescription.from_field(field) - return RelationalFieldDescription.from_field(field) - return FieldDescription.from_field(field) +@dataclasses.dataclass +class ComputedFieldDescription: + field_type: Any + function: Callable[[], Any] + description: Optional[str] @dataclasses.dataclass diff --git a/tortoise/fields/relational.py b/tortoise/fields/relational.py index 5d28e55fe..a95e5103f 100644 --- a/tortoise/fields/relational.py +++ b/tortoise/fields/relational.py @@ -48,7 +48,7 @@ class ReverseRelation(Generic[MODEL]): def __init__( self, - remote_model: "Type[MODEL]", + remote_model: Type[MODEL], relation_field: str, instance: "Model", from_field: str, From bf1d3f0e6b8aa363ce0e48312e30cce0a981c1cc Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Fri, 8 Nov 2024 10:58:06 +0100 Subject: [PATCH 23/27] remove forgotten line of #1465 --- tests/contrib/test_pydantic.py | 55 +++++++++++++++++----------- tortoise/contrib/pydantic/creator.py | 3 +- 2 files changed, 35 insertions(+), 23 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index a732d1654..5650447f5 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -119,11 +119,12 @@ def test_event_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, }, - "required": ["id", "name", "alias"], + "required": ["id", "name"], "title": "Team", "type": "object", }, @@ -139,6 +140,7 @@ def test_event_schema(self): "name": {"maxLength": 255, "title": "Name", "type": "string"}, "desc": { "anyOf": [{"type": "string"}, {"type": "null"}], + "default": None, "nullable": True, "title": "Desc", }, @@ -149,7 +151,7 @@ def test_event_schema(self): "type": "string", }, }, - "required": ["id", "name", "desc", "created"], + "required": ["id", "name", "created"], "title": "Tournament", "type": "object", }, @@ -197,6 +199,7 @@ def test_event_schema(self): {"maximum": 2147483647, "minimum": -2147483648, "type": "integer"}, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, @@ -219,7 +222,6 @@ def test_event_schema(self): "participants", "modified", "token", - "alias", "address", ], "title": "Event", @@ -283,6 +285,7 @@ def test_eventlist_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, @@ -305,7 +308,6 @@ def test_eventlist_schema(self): "participants", "modified", "token", - "alias", "address", ], "title": "Event", @@ -363,11 +365,12 @@ def test_eventlist_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, }, - "required": ["id", "name", "alias"], + "required": ["id", "name"], "title": "Team", "type": "object", }, @@ -383,6 +386,7 @@ def test_eventlist_schema(self): "name": {"maxLength": 255, "title": "Name", "type": "string"}, "desc": { "anyOf": [{"type": "string"}, {"type": "null"}], + "default": None, "nullable": True, "title": "Desc", }, @@ -393,7 +397,7 @@ def test_eventlist_schema(self): "type": "string", }, }, - "required": ["id", 
"name", "desc", "created"], + "required": ["id", "name", "created"], "title": "Tournament", "type": "object", }, @@ -461,6 +465,7 @@ def test_address_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, @@ -473,7 +478,6 @@ def test_address_schema(self): "participants", "modified", "token", - "alias", ], "title": "Event", "type": "object", @@ -514,11 +518,12 @@ def test_address_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, }, - "required": ["id", "name", "alias"], + "required": ["id", "name"], "title": "Team", "type": "object", }, @@ -534,6 +539,7 @@ def test_address_schema(self): "name": {"maxLength": 255, "title": "Name", "type": "string"}, "desc": { "anyOf": [{"type": "string"}, {"type": "null"}], + "default": None, "nullable": True, "title": "Desc", }, @@ -544,7 +550,7 @@ def test_address_schema(self): "type": "string", }, }, - "required": ["id", "name", "desc", "created"], + "required": ["id", "name", "created"], "title": "Tournament", "type": "object", }, @@ -619,6 +625,7 @@ def test_tournament_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, @@ -640,7 +647,6 @@ def test_tournament_schema(self): "participants", "modified", "token", - "alias", "address", ], "title": "Event", @@ -698,11 +704,12 @@ def test_tournament_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, }, - "required": ["id", "name", "alias"], + "required": ["id", "name"], "title": "Team", "type": "object", }, @@ -713,6 +720,7 @@ def test_tournament_schema(self): "name": {"maxLength": 255, "title": "Name", "type": "string"}, "desc": { "anyOf": [{"type": "string"}, {"type": "null"}], + "default": None, "nullable": True, "title": "Desc", }, @@ -729,7 +737,7 @@ def test_tournament_schema(self): "type": "array", }, }, - "required": ["id", "name", "desc", "created", "events"], + "required": ["id", "name", "created", "events"], "title": "Tournament", "type": "object", }, @@ -784,6 +792,7 @@ def test_team_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, @@ -805,7 +814,6 @@ def test_team_schema(self): "reporter", "modified", "token", - "alias", "address", ], "title": "Event", @@ -855,6 +863,7 @@ def test_team_schema(self): "name": {"maxLength": 255, "title": "Name", "type": "string"}, "desc": { "anyOf": [{"type": "string"}, {"type": "null"}], + "default": None, "nullable": True, "title": "Desc", }, @@ -865,7 +874,7 @@ def test_team_schema(self): "type": "string", }, }, - "required": ["id", "name", "desc", "created"], + "required": ["id", "name", "created"], "title": "Tournament", "type": "object", }, @@ -885,6 +894,7 @@ def test_team_schema(self): {"maximum": 2147483647, "minimum": -2147483648, "type": "integer"}, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Alias", }, @@ -894,7 +904,7 @@ def test_team_schema(self): "type": "array", }, }, - "required": ["id", "name", "alias", "events"], + "required": ["id", "name", "events"], "title": "Team", "type": "object", }, @@ -1317,6 +1327,7 @@ def test_schema(self): }, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Manager Id", }, @@ -1326,7 +1337,7 @@ def test_schema(self): "type": "array", }, }, - "required": ["id", "name", "talks_to", "manager_id", "team_members"], + "required": ["id", "name", "talks_to", "team_members"], "title": "Employee", "type": "object", }, @@ -1349,11 +1360,12 @@ def test_schema(self): }, 
{"type": "null"}, ], + "default": None, "nullable": True, "title": "Manager Id", }, }, - "required": ["id", "name", "manager_id"], + "required": ["id", "name"], "title": "Employee", "type": "object", }, @@ -1377,6 +1389,7 @@ def test_schema(self): {"maximum": 2147483647, "minimum": -2147483648, "type": "integer"}, {"type": "null"}, ], + "default": None, "nullable": True, "title": "Manager Id", }, @@ -1386,7 +1399,7 @@ def test_schema(self): "type": "array", }, }, - "required": ["id", "name", "talks_to", "manager_id", "team_members"], + "required": ["id", "name", "talks_to", "team_members"], "title": "Employee", "type": "object", }, @@ -1516,6 +1529,7 @@ async def test_computed_field_schema(self): "type": "null" } ], + "default": None, "nullable": True, "title": "Manager Id" }, @@ -1535,7 +1549,6 @@ async def test_computed_field_schema(self): "required": [ "id", "name", - "manager_id", "name_length", "team_size" ], @@ -1574,6 +1587,7 @@ async def test_computed_field_schema(self): "type": "null" } ], + "default": None, "nullable": True, "title": "Manager Id" }, @@ -1601,7 +1615,6 @@ async def test_computed_field_schema(self): "id", "name", "talks_to", - "manager_id", "team_members", "name_length", "team_size" @@ -1641,6 +1654,7 @@ async def test_computed_field_schema(self): "type": "null" } ], + "default": None, "nullable": True, "title": "Manager Id" }, @@ -1668,7 +1682,6 @@ async def test_computed_field_schema(self): "id", "name", "talks_to", - "manager_id", "team_members", "name_length", "team_size" diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 0ed464ffc..283533cd7 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -13,7 +13,7 @@ BackwardOneToOneRelation from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel from tortoise.contrib.pydantic.utils import get_annotations -from tortoise.fields import JSONField, Field, IntField, TextField +from tortoise.fields import JSONField, Field from tortoise.contrib.pydantic.dataclasses import ModelDescription, PydanticMetaData, ComputedFieldDescription if TYPE_CHECKING: # pragma: nocoverage @@ -411,7 +411,6 @@ def _process_field( ( json_schema_extra.get("nullable") and not is_to_one_relation - and field.__class__ not in (IntField, TextField) ) or (self._exclude_read_only and json_schema_extra.get("readOnly")) ): From b81b9a4713afa5b8c72d1199adf7842ac03ad4eb Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Fri, 8 Nov 2024 11:22:55 +0100 Subject: [PATCH 24/27] include optional in hashed value --- tests/contrib/test_pydantic.py | 108 +++++++++++++-------------- tests/test_early_init.py | 4 +- tortoise/contrib/pydantic/creator.py | 2 +- 3 files changed, 57 insertions(+), 57 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 5650447f5..feeec4f04 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -67,7 +67,7 @@ def test_event_schema(self): self.Event_Pydantic.model_json_schema(), { "$defs": { - "Address_4smkxs_leaf": { + "Address_coqnj7_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -83,7 +83,7 @@ def test_event_schema(self): "title": "Address", "type": "object", }, - "Reporter_4tvqui_leaf": { + "Reporter_fgnv33_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -99,7 +99,7 @@ def test_event_schema(self): "title": "Reporter", "type": 
"object", }, - "Team_bnxjyv_leaf": { + "Team_ip4pg6_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -128,7 +128,7 @@ def test_event_schema(self): "title": "Team", "type": "object", }, - "Tournament_djsht2_leaf": { + "Tournament_5y7e7j_leaf": { "additionalProperties": False, "properties": { "id": { @@ -167,13 +167,13 @@ def test_event_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/Tournament_djsht2_leaf", + "$ref": "#/$defs/Tournament_5y7e7j_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_4tvqui_leaf" + "$ref": "#/$defs/Reporter_fgnv33_leaf" }, {"type": "null"}, ], @@ -182,7 +182,7 @@ def test_event_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/Team_bnxjyv_leaf" + "$ref": "#/$defs/Team_ip4pg6_leaf" }, "title": "Participants", "type": "array", @@ -206,7 +206,7 @@ def test_event_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/Address_4smkxs_leaf" + "$ref": "#/$defs/Address_coqnj7_leaf" }, {"type": "null"}, ], @@ -234,7 +234,7 @@ def test_eventlist_schema(self): self.Event_Pydantic_List.model_json_schema(), { "$defs": { - "Event_c5pgat": { + "Event_padfez": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -246,13 +246,13 @@ def test_eventlist_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/Tournament_djsht2_leaf", + "$ref": "#/$defs/Tournament_5y7e7j_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_4tvqui_leaf" + "$ref": "#/$defs/Reporter_fgnv33_leaf" }, {"type": "null"}, ], @@ -261,7 +261,7 @@ def test_eventlist_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/Team_bnxjyv_leaf" + "$ref": "#/$defs/Team_ip4pg6_leaf" }, "title": "Participants", "type": "array", @@ -292,7 +292,7 @@ def test_eventlist_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/Address_4smkxs_leaf" + "$ref": "#/$defs/Address_coqnj7_leaf" }, {"type": "null"}, ], @@ -313,7 +313,7 @@ def test_eventlist_schema(self): "title": "Event", "type": "object", }, - "Address_4smkxs_leaf": { + "Address_coqnj7_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -329,7 +329,7 @@ def test_eventlist_schema(self): "title": "Address", "type": "object", }, - "Reporter_4tvqui_leaf": { + "Reporter_fgnv33_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -345,7 +345,7 @@ def test_eventlist_schema(self): "title": "Reporter", "type": "object", }, - "Team_bnxjyv_leaf": { + "Team_ip4pg6_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -374,7 +374,7 @@ def test_eventlist_schema(self): "title": "Team", "type": "object", }, - "Tournament_djsht2_leaf": { + "Tournament_5y7e7j_leaf": { "additionalProperties": False, "properties": { "id": { @@ -403,7 +403,7 @@ def test_eventlist_schema(self): }, }, "description": "Events on the calendar", - "items": {"$ref": "#/$defs/Event_c5pgat"}, + "items": {"$ref": "#/$defs/Event_padfez"}, "title": "Event_list", "type": "array", }, @@ -414,7 +414,7 @@ def test_address_schema(self): self.Address_Pydantic.model_json_schema(), { "$defs": { - "Event_jz7ivr_leaf": { + "Event_zvunzw_leaf": { "additionalProperties": False, 
"description": "Events on the calendar", "properties": { @@ -426,13 +426,13 @@ def test_address_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/Tournament_djsht2_leaf", + "$ref": "#/$defs/Tournament_5y7e7j_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_4tvqui_leaf" + "$ref": "#/$defs/Reporter_fgnv33_leaf" }, {"type": "null"}, ], @@ -441,7 +441,7 @@ def test_address_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/Team_bnxjyv_leaf" + "$ref": "#/$defs/Team_ip4pg6_leaf" }, "title": "Participants", "type": "array", @@ -482,7 +482,7 @@ def test_address_schema(self): "title": "Event", "type": "object", }, - "Reporter_4tvqui_leaf": { + "Reporter_fgnv33_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -498,7 +498,7 @@ def test_address_schema(self): "title": "Reporter", "type": "object", }, - "Team_bnxjyv_leaf": { + "Team_ip4pg6_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -527,7 +527,7 @@ def test_address_schema(self): "title": "Team", "type": "object", }, - "Tournament_djsht2_leaf": { + "Tournament_5y7e7j_leaf": { "additionalProperties": False, "properties": { "id": { @@ -559,7 +559,7 @@ def test_address_schema(self): "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, "street": {"maxLength": 128, "title": "Street", "type": "string"}, - "event": {"$ref": "#/$defs/Event_jz7ivr_leaf"}, + "event": {"$ref": "#/$defs/Event_zvunzw_leaf"}, "event_id": { "maximum": 9223372036854775807, "minimum": -9223372036854775808, @@ -578,7 +578,7 @@ def test_tournament_schema(self): self.Tournament_Pydantic.model_json_schema(), { "$defs": { - "Event_yu5egy_leaf": { + "Event_jgrv4c_leaf": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -592,7 +592,7 @@ def test_tournament_schema(self): "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_4tvqui_leaf" + "$ref": "#/$defs/Reporter_fgnv33_leaf" }, {"type": "null"}, ], @@ -601,7 +601,7 @@ def test_tournament_schema(self): }, "participants": { "items": { - "$ref": "#/$defs/Team_bnxjyv_leaf" + "$ref": "#/$defs/Team_ip4pg6_leaf" }, "title": "Participants", "type": "array", @@ -632,7 +632,7 @@ def test_tournament_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/Address_4smkxs_leaf" + "$ref": "#/$defs/Address_coqnj7_leaf" }, {"type": "null"}, ], @@ -652,7 +652,7 @@ def test_tournament_schema(self): "title": "Event", "type": "object", }, - "Address_4smkxs_leaf": { + "Address_coqnj7_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -668,7 +668,7 @@ def test_tournament_schema(self): "title": "Address", "type": "object", }, - "Reporter_4tvqui_leaf": { + "Reporter_fgnv33_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -684,7 +684,7 @@ def test_tournament_schema(self): "title": "Reporter", "type": "object", }, - "Team_bnxjyv_leaf": { + "Team_ip4pg6_leaf": { "additionalProperties": False, "description": "Team that is a playing", "properties": { @@ -732,7 +732,7 @@ def test_tournament_schema(self): }, "events": { "description": "What tournaments is a happenin'", - "items": {"$ref": "#/$defs/Event_yu5egy_leaf"}, + "items": {"$ref": "#/$defs/Event_jgrv4c_leaf"}, "title": "Events", "type": "array", }, @@ 
-748,7 +748,7 @@ def test_team_schema(self): self.Team_Pydantic.model_json_schema(), { "$defs": { - "Event_7fv6fv_leaf": { + "Event_n2kadx_leaf": { "additionalProperties": False, "description": "Events on the calendar", "properties": { @@ -760,13 +760,13 @@ def test_team_schema(self): }, "name": {"description": "The name", "title": "Name", "type": "string"}, "tournament": { - "$ref": "#/$defs/Tournament_djsht2_leaf", + "$ref": "#/$defs/Tournament_5y7e7j_leaf", "description": "What tournaments is a happenin'", }, "reporter": { "anyOf": [ { - "$ref": "#/$defs/Reporter_4tvqui_leaf" + "$ref": "#/$defs/Reporter_fgnv33_leaf" }, {"type": "null"}, ], @@ -799,7 +799,7 @@ def test_team_schema(self): "address": { "anyOf": [ { - "$ref": "#/$defs/Address_4smkxs_leaf" + "$ref": "#/$defs/Address_coqnj7_leaf" }, {"type": "null"}, ], @@ -819,7 +819,7 @@ def test_team_schema(self): "title": "Event", "type": "object", }, - "Address_4smkxs_leaf": { + "Address_coqnj7_leaf": { "additionalProperties": False, "properties": { "city": {"maxLength": 64, "title": "City", "type": "string"}, @@ -835,7 +835,7 @@ def test_team_schema(self): "title": "Address", "type": "object", }, - "Reporter_4tvqui_leaf": { + "Reporter_fgnv33_leaf": { "additionalProperties": False, "description": "Whom is assigned as the reporter", "properties": { @@ -851,7 +851,7 @@ def test_team_schema(self): "title": "Reporter", "type": "object", }, - "Tournament_djsht2_leaf": { + "Tournament_5y7e7j_leaf": { "additionalProperties": False, "properties": { "id": { @@ -899,7 +899,7 @@ def test_team_schema(self): "title": "Alias", }, "events": { - "items": {"$ref": "#/$defs/Event_7fv6fv_leaf"}, + "items": {"$ref": "#/$defs/Event_n2kadx_leaf"}, "title": "Events", "type": "array", }, @@ -1303,7 +1303,7 @@ def test_schema(self): self.Employee_Pydantic.model_json_schema(), { "$defs": { - "Employee_lqnwvu_leaf": { + "Employee_6tkbjb_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1314,7 +1314,7 @@ def test_schema(self): }, "name": {"maxLength": 50, "title": "Name", "type": "string"}, "talks_to": { - "items": {"$ref": "#/$defs/Employee_okqh4k_leaf"}, + "items": {"$ref": "#/$defs/Employee_fj2ly4_leaf"}, "title": "Talks To", "type": "array", }, @@ -1332,7 +1332,7 @@ def test_schema(self): "title": "Manager Id", }, "team_members": { - "items": {"$ref": "#/$defs/Employee_okqh4k_leaf"}, + "items": {"$ref": "#/$defs/Employee_fj2ly4_leaf"}, "title": "Team Members", "type": "array", }, @@ -1341,7 +1341,7 @@ def test_schema(self): "title": "Employee", "type": "object", }, - "Employee_okqh4k_leaf": { + "Employee_fj2ly4_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1380,7 +1380,7 @@ def test_schema(self): }, "name": {"maxLength": 50, "title": "Name", "type": "string"}, "talks_to": { - "items": {"$ref": "#/$defs/Employee_lqnwvu_leaf"}, + "items": {"$ref": "#/$defs/Employee_6tkbjb_leaf"}, "title": "Talks To", "type": "array", }, @@ -1394,7 +1394,7 @@ def test_schema(self): "title": "Manager Id", }, "team_members": { - "items": {"$ref": "#/$defs/Employee_lqnwvu_leaf"}, + "items": {"$ref": "#/$defs/Employee_6tkbjb_leaf"}, "title": "Team Members", "type": "array", }, @@ -1504,7 +1504,7 @@ async def test_computed_field_schema(self): self.Employee_Pydantic.model_json_schema(mode="serialization"), { "$defs": { - "Employee_okqh4k_leaf": { + "Employee_fj2ly4_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1555,7 +1555,7 @@ async def test_computed_field_schema(self): "title": "Employee", "type": "object" }, - 
"Employee_lqnwvu_leaf": { + "Employee_6tkbjb_leaf": { "additionalProperties": False, "properties": { "id": { @@ -1571,7 +1571,7 @@ async def test_computed_field_schema(self): }, "talks_to": { "items": { - "$ref": "#/$defs/Employee_okqh4k_leaf" + "$ref": "#/$defs/Employee_fj2ly4_leaf" }, "title": "Talks To", "type": "array" @@ -1593,7 +1593,7 @@ async def test_computed_field_schema(self): }, "team_members": { "items": { - "$ref": "#/$defs/Employee_okqh4k_leaf" + "$ref": "#/$defs/Employee_fj2ly4_leaf" }, "title": "Team Members", "type": "array" @@ -1638,7 +1638,7 @@ async def test_computed_field_schema(self): }, "talks_to": { "items": { - "$ref": "#/$defs/Employee_lqnwvu_leaf" + "$ref": "#/$defs/Employee_6tkbjb_leaf" }, "title": "Talks To", "type": "array" @@ -1660,7 +1660,7 @@ async def test_computed_field_schema(self): }, "team_members": { "items": { - "$ref": "#/$defs/Employee_lqnwvu_leaf" + "$ref": "#/$defs/Employee_6tkbjb_leaf" }, "title": "Team Members", "type": "array" diff --git a/tests/test_early_init.py b/tests/test_early_init.py index 14dd7ce13..24b454a8a 100644 --- a/tests/test_early_init.py +++ b/tests/test_early_init.py @@ -167,7 +167,7 @@ def test_early_init(self): Event_Pydantic.model_json_schema(), { "$defs": { - "Tournament_z7k3ew_leaf": { + "Tournament_aapnxb_leaf": { "additionalProperties": False, "properties": { "id": { @@ -211,7 +211,7 @@ def test_early_init(self): "type": "string", }, "tournament": { - "anyOf": [{"$ref": "#/$defs/Tournament_z7k3ew_leaf"}, {"type": "null"}], + "anyOf": [{"$ref": "#/$defs/Tournament_aapnxb_leaf"}, {"type": "null"}], "nullable": True, "title": "Tournament", }, diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index 283533cd7..a42866f3f 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -290,7 +290,7 @@ def __init__( def _hash(self): if self.__hash == "": hashval = ( - f"{self._fqname};{self._properties.keys()};{self._relational_fields_index};" + f"{self._fqname};{self._properties.keys()};{self._relational_fields_index};{self._optional};" f"{self.meta.allow_cycles}" ) self.__hash = b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6] From 5b77fd8078920208950bd6c1cfd5c293ee3f17bd Mon Sep 17 00:00:00 2001 From: Markus Beckschulte Date: Fri, 8 Nov 2024 11:31:15 +0100 Subject: [PATCH 25/27] move dataclasses.py to descriptions.py --- tortoise/contrib/pydantic/creator.py | 2 +- tortoise/contrib/pydantic/{dataclasses.py => descriptions.py} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename tortoise/contrib/pydantic/{dataclasses.py => descriptions.py} (100%) diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py index a42866f3f..efa13350b 100644 --- a/tortoise/contrib/pydantic/creator.py +++ b/tortoise/contrib/pydantic/creator.py @@ -14,7 +14,7 @@ from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel from tortoise.contrib.pydantic.utils import get_annotations from tortoise.fields import JSONField, Field -from tortoise.contrib.pydantic.dataclasses import ModelDescription, PydanticMetaData, ComputedFieldDescription +from tortoise.contrib.pydantic.descriptions import ModelDescription, PydanticMetaData, ComputedFieldDescription if TYPE_CHECKING: # pragma: nocoverage from tortoise.models import Model diff --git a/tortoise/contrib/pydantic/dataclasses.py b/tortoise/contrib/pydantic/descriptions.py similarity index 100% rename from tortoise/contrib/pydantic/dataclasses.py rename to 

From 5b77fd8078920208950bd6c1cfd5c293ee3f17bd Mon Sep 17 00:00:00 2001
From: Markus Beckschulte
Date: Fri, 8 Nov 2024 11:31:15 +0100
Subject: [PATCH 25/27] move dataclasses.py to descriptions.py

---
 tortoise/contrib/pydantic/creator.py                          | 2 +-
 tortoise/contrib/pydantic/{dataclasses.py => descriptions.py} | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename tortoise/contrib/pydantic/{dataclasses.py => descriptions.py} (100%)

diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py
index a42866f3f..efa13350b 100644
--- a/tortoise/contrib/pydantic/creator.py
+++ b/tortoise/contrib/pydantic/creator.py
@@ -14,7 +14,7 @@
 from tortoise.contrib.pydantic.base import PydanticListModel, PydanticModel
 from tortoise.contrib.pydantic.utils import get_annotations
 from tortoise.fields import JSONField, Field
-from tortoise.contrib.pydantic.dataclasses import ModelDescription, PydanticMetaData, ComputedFieldDescription
+from tortoise.contrib.pydantic.descriptions import ModelDescription, PydanticMetaData, ComputedFieldDescription

 if TYPE_CHECKING:  # pragma: nocoverage
     from tortoise.models import Model
diff --git a/tortoise/contrib/pydantic/dataclasses.py b/tortoise/contrib/pydantic/descriptions.py
similarity index 100%
rename from tortoise/contrib/pydantic/dataclasses.py
rename to tortoise/contrib/pydantic/descriptions.py

From ef2a7a4233b572a7b50438c695eca9d1683215d3 Mon Sep 17 00:00:00 2001
From: Markus Beckschulte
Date: Fri, 8 Nov 2024 11:35:44 +0100
Subject: [PATCH 26/27] stupid unused import.

---
 tortoise/contrib/pydantic/creator.py      | 3 ---
 tortoise/contrib/pydantic/descriptions.py | 2 +-
 2 files changed, 1 insertion(+), 4 deletions(-)

diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py
index efa13350b..243b47f04 100644
--- a/tortoise/contrib/pydantic/creator.py
+++ b/tortoise/contrib/pydantic/creator.py
@@ -87,9 +87,6 @@ def _pydantic_recursion_protector(
     return pmc.create_pydantic_model()


-# FieldDescriptionT = TypeVar('FieldDescriptionT', bound=FieldDescriptionBase)
-
-
 class FieldMap(MutableMapping[str, Union[Field, ComputedFieldDescription]]):
     def __init__(self, meta: PydanticMetaData, pk_field: Optional[Field] = None):
         self._field_map: Dict[str, Union[Field, ComputedFieldDescription]] = {}
diff --git a/tortoise/contrib/pydantic/descriptions.py b/tortoise/contrib/pydantic/descriptions.py
index 74c3b545b..8d3770b8a 100644
--- a/tortoise/contrib/pydantic/descriptions.py
+++ b/tortoise/contrib/pydantic/descriptions.py
@@ -1,6 +1,6 @@
 import dataclasses
 import sys
-from typing import Type, Optional, Any, TYPE_CHECKING, Dict, List, Tuple, Callable
+from typing import Type, Optional, Any, TYPE_CHECKING, List, Tuple, Callable

 if sys.version_info >= (3, 11):
     from typing import Self

From 3d796a63f7a53916f5f524f838269f05b358e04d Mon Sep 17 00:00:00 2001
From: Markus Beckschulte
Date: Sun, 17 Nov 2024 17:29:08 +0100
Subject: [PATCH 27/27] formatting

---
 tortoise/contrib/pydantic/creator.py | 32 +++++++++++++++++++-------------
 1 file changed, 19 insertions(+), 13 deletions(-)

diff --git a/tortoise/contrib/pydantic/creator.py b/tortoise/contrib/pydantic/creator.py
index 243b47f04..8d0e3c828 100644
--- a/tortoise/contrib/pydantic/creator.py
+++ b/tortoise/contrib/pydantic/creator.py
@@ -194,7 +194,7 @@ def pydantic_queryset_creator(
     model = create_model(
         lname,
         __base__=PydanticListModel,
-        root=(List[submodel], Field(default_factory=list)),  # type: ignore
+        root=(List[submodel], PydanticField(default_factory=list)),  # type: ignore
     )
     # Copy the Model docstring over
     model.__doc__ = _cleandoc(cls)
@@ -251,7 +251,12 @@ def __init__(
         if meta_override:
             meta_from_class = meta_from_class.construct_pydantic_meta(meta_override)
         self.meta = meta_from_class.finalize_meta(
-            exclude, include, computed, allow_cycles, sort_alphabetically, model_config
+            exclude=exclude,
+            include=include,
+            computed=computed,
+            allow_cycles=allow_cycles,
+            sort_alphabetically=sort_alphabetically,
+            model_config=model_config,
         )

         self._exclude_read_only: bool = exclude_readonly
@@ -323,9 +328,11 @@ def _initialize_pconfig(self) -> ConfigDict:
         return pconfig

     def _initialize_field_map(self) -> FieldMap:
-        return FieldMap(self.meta) \
-            if self._exclude_read_only \
+        return (
+            FieldMap(self.meta)
+            if self._exclude_read_only
             else FieldMap(self.meta, pk_field=self._model_description.pk_field)
+        )

     def _construct_field_map(self) -> None:
         self._field_map.field_map_update(fields=self._model_description.data_fields, meta=self.meta)
@@ -524,17 +531,16 @@ def _get_submodel(self, _model: Optional["Type[Model]"], field_name: str) -> Opt
         # Get pydantic schema for the submodel
         prefix_len = len(field_name) + 1
+
+        def get_fields_to_carry_on(field_tuple: Tuple[str, ...]) -> Tuple[str, ...]:
+            return tuple(
+                str(v[prefix_len:]) for v in field_tuple if v.startswith(field_name + ".")
+            )
         pmodel = _pydantic_recursion_protector(
             _model,
-            exclude=tuple(
-                str(v[prefix_len:]) for v in self.meta.exclude if v.startswith(field_name + ".")
-            ),
-            include=tuple(
-                str(v[prefix_len:]) for v in self.meta.include if v.startswith(field_name + ".")
-            ),
-            computed=tuple(
-                str(v[prefix_len:]) for v in self.meta.computed if v.startswith(field_name + ".")
-            ),
+            exclude=get_fields_to_carry_on(self.meta.exclude),
+            include=get_fields_to_carry_on(self.meta.include),
+            computed=get_fields_to_carry_on(self.meta.computed),
             stack=new_stack,
             allow_cycles=self.meta.allow_cycles,
             sort_alphabetically=self.meta.sort_alphabetically,
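
The hunk above only factors the relation-prefix stripping, previously inlined three times, into a single nested helper. A standalone sketch of that behaviour, using illustrative names rather than the module's internals: a path qualified with the relation name, e.g. ``tournament.created_at``, is carried into the nested model creator as ``created_at``, while paths that do not descend into that relation are dropped.

    from typing import Tuple

    def strip_relation_prefix(paths: Tuple[str, ...], field_name: str) -> Tuple[str, ...]:
        # Keep only paths that descend into `field_name` and pass the remainder on,
        # mirroring what the helper does with self.meta.exclude/include/computed.
        prefix_len = len(field_name) + 1
        return tuple(p[prefix_len:] for p in paths if p.startswith(field_name + "."))

    assert strip_relation_prefix(("tournament.created_at", "name"), "tournament") == ("created_at",)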