Skip to content

Commit

Permalink
Fix tests
Browse files — browse the repository at this point in the history
  • Loading branch information
a_bondar committed Apr 23, 2024
1 parent 89322b9 commit 63b008d
Show file tree
Hide file tree
Showing 4 changed files with 49 additions and 27 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ jobs:
TORTOISE_MSSQL_DRIVER: ODBC Driver 18 for SQL Server
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/cache@v3
with:
Expand Down
24 changes: 12 additions & 12 deletions tests/contrib/test_pydantic.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ def test_eventlist_schema(self):
self.Event_Pydantic_List.model_json_schema(),
{
"$defs": {
"Event": {
"Event_ct5gv4": {
"additionalProperties": False,
"description": "Events on the calendar",
"properties": {
Expand Down Expand Up @@ -405,7 +405,7 @@ def test_eventlist_schema(self):
},
},
"description": "Events on the calendar",
"items": {"$ref": "#/$defs/Event"},
"items": {"$ref": "#/$defs/Event_ct5gv4"},
"title": "Event_list",
"type": "array",
},
Expand All @@ -416,7 +416,7 @@ def test_address_schema(self):
self.Address_Pydantic.model_json_schema(),
{
"$defs": {
"orhjcw": {
"Event_aajoh6": {
"additionalProperties": False,
"description": "Events on the calendar",
"properties": {
Expand Down Expand Up @@ -563,7 +563,7 @@ def test_address_schema(self):
"properties": {
"city": {"maxLength": 64, "title": "City", "type": "string"},
"street": {"maxLength": 128, "title": "Street", "type": "string"},
"event": {"$ref": "#/$defs/orhjcw"},
"event": {"$ref": "#/$defs/Event_aajoh6"},
"event_id": {
"maximum": 9223372036854775807,
"minimum": 1,
Expand All @@ -582,7 +582,7 @@ def test_tournament_schema(self):
self.Tournament_Pydantic.model_json_schema(),
{
"$defs": {
"b4oydv": {
"Event_h4reuz": {
"additionalProperties": False,
"description": "Events on the calendar",
"properties": {
Expand Down Expand Up @@ -734,7 +734,7 @@ def test_tournament_schema(self):
},
"events": {
"description": "What tournaments is a happenin'",
"items": {"$ref": "#/$defs/b4oydv"},
"items": {"$ref": "#/$defs/Event_h4reuz"},
"title": "Events",
"type": "array",
},
Expand All @@ -750,7 +750,7 @@ def test_team_schema(self):
self.Team_Pydantic.model_json_schema(),
{
"$defs": {
"dlqoeq": {
"Event_mfn2l6": {
"additionalProperties": False,
"description": "Events on the calendar",
"properties": {
Expand Down Expand Up @@ -898,7 +898,7 @@ def test_team_schema(self):
"title": "Alias",
},
"events": {
"items": {"$ref": "#/$defs/dlqoeq"},
"items": {"$ref": "#/$defs/Event_mfn2l6"},
"title": "Events",
"type": "array",
},
Expand Down Expand Up @@ -1302,7 +1302,7 @@ def test_schema(self):
self.Employee_Pydantic.model_json_schema(),
{
"$defs": {
"4fgkwn": {
"Employee_4fgkwn": {
"additionalProperties": False,
"properties": {
"id": {
Expand Down Expand Up @@ -1335,7 +1335,7 @@ def test_schema(self):
"title": "Employee",
"type": "object",
},
"5gupxf": {
"Employee_5gupxf": {
"additionalProperties": False,
"properties": {
"id": {
Expand Down Expand Up @@ -1397,7 +1397,7 @@ def test_schema(self):
"id": {"maximum": 2147483647, "minimum": 1, "title": "Id", "type": "integer"},
"name": {"maxLength": 50, "title": "Name", "type": "string"},
"talks_to": {
"items": {"$ref": "#/$defs/5gupxf"},
"items": {"$ref": "#/$defs/Employee_5gupxf"},
"title": "Talks To",
"type": "array",
},
Expand All @@ -1410,7 +1410,7 @@ def test_schema(self):
"title": "Manager Id",
},
"team_members": {
"items": {"$ref": "#/$defs/4fgkwn"},
"items": {"$ref": "#/$defs/Employee_4fgkwn"},
"title": "Team Members",
"type": "array",
},
Expand Down
48 changes: 35 additions & 13 deletions tortoise/contrib/pydantic/creator.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ def get_name() -> str:
f"{fqname};{exclude};{include};{computed};{_stack}:{sort_alphabetically}:{allow_cycles}"
)
postfix = (
"." + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6]
":" + b32encode(sha3_224(hashval.encode("utf-8")).digest()).decode("utf-8").lower()[:6]
if not is_default
else ""
)
Expand All @@ -203,7 +203,9 @@ def get_name() -> str:

def get_param(attr: str) -> Any:
if meta_override:
return getattr(meta_override, attr, getattr(meta, attr, getattr(PydanticMeta, attr)))
return getattr(
meta_override, attr, getattr(meta, attr, getattr(PydanticMeta, attr))
)
return getattr(meta, attr, getattr(PydanticMeta, attr))

default_include: Tuple[str, ...] = tuple(get_param("include"))
Expand All @@ -220,7 +222,9 @@ def get_param(attr: str) -> Any:
if sort_alphabetically is None
else sort_alphabetically
)
_allow_cycles: bool = bool(get_param("allow_cycles") if allow_cycles is None else allow_cycles)
_allow_cycles: bool = bool(
get_param("allow_cycles") if allow_cycles is None else allow_cycles
)

# Update parameters with defaults
include = tuple(include) + default_include
Expand Down Expand Up @@ -264,7 +268,11 @@ def field_map_update(keys: tuple, is_relation=True) -> None:
continue
# Remove raw fields
raw_field = fd.get("raw_field", None)
if raw_field is not None and exclude_raw_fields and raw_field != pk_raw_field:
if (
raw_field is not None
and exclude_raw_fields
and raw_field != pk_raw_field
):
del field_map[raw_field]
field_map[n] = fd

Expand All @@ -289,7 +297,11 @@ def field_map_update(keys: tuple, is_relation=True) -> None:
# Add possible computed fields
field_map.update(
{
k: {"field_type": callable, "function": getattr(cls, k), "description": None}
k: {
"field_type": callable,
"function": getattr(cls, k),
"description": None,
}
for k in computed
}
)
Expand All @@ -301,7 +313,9 @@ def field_map_update(keys: tuple, is_relation=True) -> None:
else:
# Sort to definition order
field_map = {
k: field_map[k] for k in tuple(cls._meta.fields_map.keys()) + computed if k in field_map
k: field_map[k]
for k in tuple(cls._meta.fields_map.keys()) + computed
if k in field_map
}
# Process fields
for fname, fdesc in field_map.items():
Expand All @@ -325,13 +339,19 @@ def get_submodel(_model: "Type[Model]") -> Optional[Type[PydanticModel]]:
pmodel = _pydantic_recursion_protector(
_model,
exclude=tuple(
str(v[prefix_len:]) for v in exclude if v.startswith(fname + ".")
str(v[prefix_len:])
for v in exclude
if v.startswith(fname + ".")
),
include=tuple(
str(v[prefix_len:]) for v in include if v.startswith(fname + ".")
str(v[prefix_len:])
for v in include
if v.startswith(fname + ".")
),
computed=tuple(
str(v[prefix_len:]) for v in computed if v.startswith(fname + ".")
str(v[prefix_len:])
for v in computed
if v.startswith(fname + ".")
),
stack=new_stack,
allow_cycles=_allow_cycles,
Expand Down Expand Up @@ -383,9 +403,9 @@ def get_submodel(_model: "Type[Model]") -> Optional[Type[PydanticModel]]:
annotation = get_annotations(cls, func).get("return", None)
comment = _cleandoc(func)
if annotation is not None:
properties[fname] = computed_field(return_type=annotation, description=comment)(
func
)
properties[fname] = computed_field(
return_type=annotation, description=comment
)(func)

# Json fields
elif field_type is JSONField:
Expand All @@ -407,7 +427,9 @@ def get_submodel(_model: "Type[Model]") -> Optional[Type[PydanticModel]]:

if fname in properties and not isinstance(properties[fname], tuple):
fconfig["title"] = fname.replace("_", " ").title()
description = comment or _br_it(fdesc.get("docstring") or fdesc["description"] or "")
description = comment or _br_it(
fdesc.get("docstring") or fdesc["description"] or ""
)
if description:
fconfig["description"] = description
ftype = properties[fname]
Expand Down
2 changes: 1 addition & 1 deletion tortoise/queryset.py
Original file line number Diff line number Diff line change
Expand Up @@ -1616,7 +1616,7 @@ def _make_query(self) -> None:
self.resolve_filters(
model=self.model,
q_objects=self.q_objects,
annotations=self._annotations,
annotations=self.annotations,
custom_filters=self.custom_filters,
)
if self.limit:
Expand Down

0 comments on commit 63b008d

Please sign in to comment.