From dc120b13993cac8d2f2058c7cf5b10b295c5b8e3 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Sat, 24 Jun 2023 21:19:58 +0300 Subject: [PATCH 01/19] no message --- pydango/connection/session.py | 31 +++++++++++++++++++++++----- pydango/orm/query.py | 21 ++++++++++--------- pydango/query/expressions.py | 5 +++-- tests/session/test_social_network.py | 3 ++- 4 files changed, 42 insertions(+), 18 deletions(-) diff --git a/pydango/connection/session.py b/pydango/connection/session.py index 2a43df9..b4ae9f4 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -1,6 +1,7 @@ import dataclasses import logging from collections import OrderedDict, defaultdict +from enum import Enum from itertools import groupby from typing import Iterator, Optional, Type, Union, cast @@ -20,6 +21,7 @@ from pydango.query.expressions import NEW, IteratorExpression, VariableExpression from pydango.query.functions import First, Length, Merge, UnionArrays from pydango.query.operations import RangeExpression +from pydango.query.options import UpsertOptions logger = logging.getLogger(__name__) @@ -50,6 +52,11 @@ def _group_by_relation( yield m, thing +class UpdateStrategy(str, Enum): + UPDATE = "update" + REPLACE = "replace" + + class PydangoSession: def __init__(self, database: StandardDatabase): self.database = database @@ -108,7 +115,7 @@ def _bind_edge(cls, from_model, instance, rels, to_model, vertex_collections, ve from_var = vertex_let_queries[from_model] to_var = vertex_let_queries[to_model] iterator = IteratorExpression() - ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} + ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} # noqa: PyProtectedMember return iterator, new_rels, ret @classmethod @@ -123,9 +130,9 @@ def _build_vertex_query(cls, v, vertex_collections, vertex_let_queries): def _build_graph(cls, document: VertexModel, _visited: set[int]): vertex_collections: dict[Type[VertexModel], IndexedOrderedDict[ArangoModel]] = OrderedDict() edge_collections: dict[Type[EdgeModel], IndexedOrderedDict[list[TEdge]]] = OrderedDict() - edge_vertex_index: dict[EdgeModel, dict[tuple[Type[VertexModel], Type[VertexModel]], dict[int, list[int]]]] = ( - defaultdict(lambda: defaultdict(lambda: defaultdict(list))) - ) + edge_vertex_index: dict[ + Type[EdgeModel], dict[tuple[Type[VertexModel], Type[VertexModel]], dict[int, list[int]]] + ] = defaultdict(lambda: defaultdict(lambda: defaultdict(list))) def _prepare_relation(model, edge_cls, edge_doc, relation_doc, visited): if edge_doc: @@ -180,9 +187,23 @@ async def init(self, model: Type[BaseArangoModel]): else: await index.mapping[i.__class__](collection, **dataclasses.asdict(i)) - async def save(self, document: ArangoModel) -> ArangoModel: + async def save( + self, + document: ArangoModel, + strategy: UpdateStrategy = UpdateStrategy.UPDATE, + follow_links: bool = False, + options: Union[UpsertOptions, None] = None, + ) -> ArangoModel: if isinstance(document, VertexModel): query = self._build_graph_query(document) + else: + if strategy == UpdateStrategy.UPDATE: + query = ORMQuery().upsert(document, document, update=document, options=options) + elif strategy == UpdateStrategy.REPLACE: + query = ORMQuery().upsert(document, document, replace=document, options=options) + else: + raise ValueError(f"strategy must be instance of {UpdateStrategy.__name__}") + cursor = await query.execute(self.database) result = await cursor.next() logger.debug("cursor stats", extra=cursor.statistics()) diff --git 
a/pydango/orm/query.py b/pydango/orm/query.py index f6aec12..fe4d3db 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -46,7 +46,7 @@ def _bind(query: "ORMQuery", node: Expression): if isinstance(node, FieldExpression): if node.parent and isinstance(node.parent, type) and issubclass(node.parent, BaseArangoModel): - node.parent = query.orm_bound_vars[node.parent] + node.parent = query.orm_bound_vars[cast(Type[BaseArangoModel], node.parent)] # if isinstance(node.parent, Aliased): if isinstance(node, FieldExpression) and isinstance(node.parent, Aliased): @@ -154,9 +154,10 @@ def insert( raise ValueError(IMPLICIT_COLLECTION_ERROR) return super().insert(doc, collection) - # noinspection PyMethodOverriding @overload - def remove(self, expression: BaseArangoModel, *, options: Optional[RemoveOptions] = None): + def remove( # noqa: PyMethodOverriding + self, expression: BaseArangoModel, *, options: Optional[RemoveOptions] = None + ): ... @overload @@ -187,7 +188,7 @@ def remove( return super().remove(expression, collection, options=options) @overload - def update(self, key, doc, *, options: Optional[UpdateOptions] = None) -> Self: + def update(self, key, doc, *, options: Optional[UpdateOptions] = None) -> Self: # noqa: PyMethodOverriding ... @overload @@ -204,7 +205,7 @@ def update(self, key, doc, coll=None, *, options: Optional[UpdateOptions] = None return self @overload - def replace( + def replace( # noqa: PyMethodOverriding self, key: Union[str, dict, BaseArangoModel], doc: BaseArangoModel, @@ -214,7 +215,7 @@ def replace( ... @overload - def replace( + def replace( # noqa: PyMethodOverriding self, key: BaseArangoModel, doc: Union[dict, BaseArangoModel], @@ -257,7 +258,7 @@ def replace( return super().replace(key, doc, collection, options=options) @overload - def upsert( + def upsert( # noqa: PyMethodOverriding self, filter_: BaseArangoModel, insert: Union[dict, BaseModel, ObjectExpression, BaseArangoModel], @@ -268,7 +269,7 @@ def upsert( ... @overload - def upsert( + def upsert( # noqa: PyMethodOverriding self, filter_: BaseArangoModel, insert: Union[dict, BaseModel, ObjectExpression, BaseArangoModel], @@ -279,7 +280,7 @@ def upsert( ... @overload - def upsert( + def upsert( # noqa: PyMethodOverriding self, filter_: Union[dict, BaseModel, ObjectExpression], insert: Union[dict, BaseModel, ObjectExpression], @@ -291,7 +292,7 @@ def upsert( ... 
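# Note: the UpdateStrategy introduced in this patch decides which AQL clause
# save() emits for non-vertex documents. A minimal sketch of the roughly
# equivalent query built by hand with ORMQuery; `user` stands in for any
# hypothetical BaseArangoModel instance.
from pydango.orm.query import ORMQuery

def manual_upsert_query(user):
    # UPDATE patches the matched document with the fields of `user`;
    # passing replace=user instead would overwrite the stored document.
    return ORMQuery().upsert(user, user, update=user)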
@overload - def upsert( + def upsert( # noqa: PyMethodOverriding self, filter_: Union[dict, BaseModel, ObjectExpression], insert: Union[dict, BaseModel, ObjectExpression], diff --git a/pydango/query/expressions.py b/pydango/query/expressions.py index 3b63b43..4782b06 100644 --- a/pydango/query/expressions.py +++ b/pydango/query/expressions.py @@ -98,13 +98,14 @@ class OLD(ModificationVariable): _keyword = "OLD" -# noinspection PyTypeChecker class FieldExpression(Expression, ReturnableMixin): """ Expression class for field access of objects and documents """ - def __init__(self, field: Union[str, Expression], parent: Optional[VariableExpression] = None): + def __init__( + self, field: Union[str, Expression], parent: Union[VariableExpression, "FieldExpression", None] = None + ): self.parent = parent self.field = field diff --git a/tests/session/test_social_network.py b/tests/session/test_social_network.py index b9a43a2..5813f23 100644 --- a/tests/session/test_social_network.py +++ b/tests/session/test_social_network.py @@ -2,6 +2,7 @@ from typing import Annotated, List, Optional, Type, Union import pytest +from aioarango.database import StandardDatabase from pydango.connection.session import PydangoSession from pydango.orm.models import ( @@ -177,7 +178,7 @@ class Collection(EdgeCollectionConfig): @pytest.mark.asyncio -async def test_save(database): +async def test_save(database: StandardDatabase): session = PydangoSession(database) models: list[Type[BaseArangoModel]] = [] models += VertexModel.__subclasses__() From a81015484be12fdf09947652c7255c64bf8e60a1 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Fri, 30 Jun 2023 12:26:25 +0300 Subject: [PATCH 02/19] feat: save supports upsert with UpdateStrategy --- .pre-commit-config.yaml | 2 +- poetry.lock | 62 +++++++++++++++++------------------ pydango/connection/session.py | 48 +++++++++++++++++++++++---- pydango/index.py | 11 ++++++- pydango/orm/models.py | 4 +-- pydango/orm/query.py | 24 +++++++------- pydango/query/operations.py | 20 +++++------ pydango/query/query.py | 20 +++++------ pyproject.toml | 7 ++-- requirements.txt | 6 ++-- tests/conftest.py | 7 ++-- 11 files changed, 127 insertions(+), 84 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a4a6a5d..d2678c5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -103,4 +103,4 @@ repos: hooks: - id: poetry-check - id: poetry-lock - - id: poetry-export +# - id: poetry-export diff --git a/poetry.lock b/poetry.lock index daf74af..0c8c864 100644 --- a/poetry.lock +++ b/poetry.lock @@ -446,43 +446,43 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "mypy" -version = "1.4.0" +version = "1.4.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3af348e0925a59213244f28c7c0c3a2c2088b4ba2fe9d6c8d4fbb0aba0b7d05"}, - {file = "mypy-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0b2e0da7ff9dd8d2066d093d35a169305fc4e38db378281fce096768a3dbdbf"}, - {file = "mypy-1.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210fe0f39ec5be45dd9d0de253cb79245f0a6f27631d62e0c9c7988be7152965"}, - {file = "mypy-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f7a5971490fd4a5a436e143105a1f78fa8b3fe95b30fff2a77542b4f3227a01f"}, - {file = "mypy-1.4.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:50f65f0e9985f1e50040e603baebab83efed9eb37e15a22a4246fa7cd660f981"}, - {file = "mypy-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1b5c875fcf3e7217a3de7f708166f641ca154b589664c44a6fd6d9f17d9e7e"}, - {file = "mypy-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4c734d947e761c7ceb1f09a98359dd5666460acbc39f7d0a6b6beec373c5840"}, - {file = "mypy-1.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5984a8d13d35624e3b235a793c814433d810acba9eeefe665cdfed3d08bc3af"}, - {file = "mypy-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0f98973e39e4a98709546a9afd82e1ffcc50c6ec9ce6f7870f33ebbf0bd4f26d"}, - {file = "mypy-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:19d42b08c7532d736a7e0fb29525855e355fa51fd6aef4f9bbc80749ff64b1a2"}, - {file = "mypy-1.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ba9a69172abaa73910643744d3848877d6aac4a20c41742027dcfd8d78f05d9"}, - {file = "mypy-1.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a34eed094c16cad0f6b0d889811592c7a9b7acf10d10a7356349e325d8704b4f"}, - {file = "mypy-1.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:53c2a1fed81e05ded10a4557fe12bae05b9ecf9153f162c662a71d924d504135"}, - {file = "mypy-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bba57b4d2328740749f676807fcf3036e9de723530781405cc5a5e41fc6e20de"}, - {file = "mypy-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:653863c75f0dbb687d92eb0d4bd9fe7047d096987ecac93bb7b1bc336de48ebd"}, - {file = "mypy-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7461469e163f87a087a5e7aa224102a30f037c11a096a0ceeb721cb0dce274c8"}, - {file = "mypy-1.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf0ca95e4b8adeaf07815a78b4096b65adf64ea7871b39a2116c19497fcd0dd"}, - {file = "mypy-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94a81b9354545123feb1a99b960faeff9e1fa204fce47e0042335b473d71530d"}, - {file = "mypy-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:67242d5b28ed0fa88edd8f880aed24da481929467fdbca6487167cb5e3fd31ff"}, - {file = "mypy-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f2b353eebef669529d9bd5ae3566905a685ae98b3af3aad7476d0d519714758"}, - {file = "mypy-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62bf18d97c6b089f77f0067b4e321db089d8520cdeefc6ae3ec0f873621c22e5"}, - {file = "mypy-1.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca33ab70a4aaa75bb01086a0b04f0ba8441e51e06fc57e28585176b08cad533b"}, - {file = "mypy-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5a0ee54c2cb0f957f8a6f41794d68f1a7e32b9968675ade5846f538504856d42"}, - {file = "mypy-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c34d43e3d54ad05024576aef28081d9d0580f6fa7f131255f54020eb12f5352"}, - {file = "mypy-1.4.0-py3-none-any.whl", hash = "sha256:f051ca656be0c179c735a4c3193f307d34c92fdc4908d44fd4516fbe8b10567d"}, - {file = "mypy-1.4.0.tar.gz", hash = "sha256:de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = 
"mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -835,13 +835,13 @@ files = [ [[package]] name = "typing-extensions" -version = 
"4.6.3" +version = "4.7.0" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, - {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, + {file = "typing_extensions-4.7.0-py3-none-any.whl", hash = "sha256:5d8c9dac95c27d20df12fb1d97b9793ab8b2af8a3a525e68c80e21060c161771"}, + {file = "typing_extensions-4.7.0.tar.gz", hash = "sha256:935ccf31549830cda708b42289d44b6f74084d616a00be651601a4f968e77c82"}, ] [[package]] diff --git a/pydango/connection/session.py b/pydango/connection/session.py index b4ae9f4..5865f9a 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -62,14 +62,14 @@ def __init__(self, database: StandardDatabase): self.database = database @classmethod - def _build_graph_query(cls, document: VertexModel) -> ORMQuery: + def _build_graph_query(cls, document: VertexModel, strategy: UpdateStrategy = UpdateStrategy.UPDATE) -> ORMQuery: query = ORMQuery() _visited: set[int] = set() edge_collections, edge_vertex_index, vertex_collections = cls._build_graph(document, _visited) vertex_let_queries: dict[Type[VertexModel], VariableExpression] = {} for v in vertex_collections: - from_var, vertices = cls._build_vertex_query(v, vertex_collections, vertex_let_queries) + from_var, vertices = cls._build_vertex_query(v, vertex_collections, vertex_let_queries, strategy) query.let(from_var, vertices) main = VariableExpression() @@ -104,7 +104,8 @@ def _build_graph_query(cls, document: VertexModel) -> ORMQuery: continue edge_iter = IteratorExpression() - query.let(VariableExpression(), for_(edge_iter, edges).insert(edge_iter, e.Collection.name)) + + query.let(VariableExpression(), cls.build_upsert_query(edge_iter, strategy, e, edges)) return query.return_(main) @@ -119,12 +120,45 @@ def _bind_edge(cls, from_model, instance, rels, to_model, vertex_collections, ve return iterator, new_rels, ret @classmethod - def _build_vertex_query(cls, v, vertex_collections, vertex_let_queries): + def _build_vertex_query(cls, v, vertex_collections, vertex_let_queries, strategy: UpdateStrategy): i = IteratorExpression() from_var = VariableExpression() vertex_let_queries[v] = from_var - vertices = for_(i, in_=list(vertex_collections[v].values())).insert(i, v.Collection.name).return_(NEW()) - return from_var, vertices + + # vertices = ( + # for_(i, in_=list(vertex_collections[v].values())) + # .insert(i, v.Collection.name) + # .return_(NEW()) + # ) + + vertices_docs = list(vertex_collections[v].values()) + query = cls.build_upsert_query(i, strategy, v, vertices_docs) + return from_var, query + + @classmethod + def build_upsert_query( + cls, + i: IteratorExpression, + strategy: UpdateStrategy, + model: Type[BaseArangoModel], + docs: Union[VariableExpression, list[VariableExpression]], + ): + filter_ = {} + for model_index in model.Collection.indexes: + if hasattr(model_index, "unique") and model_index.unique: + filter_ = {j: getattr(i, j) for j in model_index.fields} + if isinstance(model_index, dict) and model_index.get("unique"): + filter_ = {j: getattr(i, j) for j in model_index.get("fields", [])} + if isinstance(docs, list): + if not filter_ and all([x.get("_key") for x in docs]): + filter_ = {"_key": i._key} # noqa: PyProtectedMember + query = for_(i, in_=docs) + if strategy == strategy.UPDATE: + query = 
query.upsert(filter_, i, model.Collection.name, update=i) + elif strategy == strategy.REPLACE: + query = query.upsert(filter_, i, model.Collection.name, replace=i) + query = query.return_(NEW()) + return query @classmethod def _build_graph(cls, document: VertexModel, _visited: set[int]): @@ -191,7 +225,7 @@ async def save( self, document: ArangoModel, strategy: UpdateStrategy = UpdateStrategy.UPDATE, - follow_links: bool = False, + # todo: follow_links: bool = False, options: Union[UpsertOptions, None] = None, ) -> ArangoModel: if isinstance(document, VertexModel): diff --git a/pydango/index.py b/pydango/index.py index 2b70492..f56705e 100644 --- a/pydango/index.py +++ b/pydango/index.py @@ -1,5 +1,12 @@ +import sys from dataclasses import dataclass -from typing import Optional, Sequence +from typing import Optional, Sequence, Union + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + import aioarango.database from aioarango.typings import Fields @@ -71,3 +78,5 @@ class TTLIndex(Index): TTLIndex: aioarango.database.StandardCollection.add_ttl_index, HashIndex: aioarango.database.StandardCollection.add_hash_index, } + +Indexes: TypeAlias = Union[GeoIndex, HashIndex, SkipListIndex, FullTextIndex, PersistentIndex, TTLIndex] diff --git a/pydango/orm/models.py b/pydango/orm/models.py index 2a566d2..b0a57a0 100644 --- a/pydango/orm/models.py +++ b/pydango/orm/models.py @@ -56,7 +56,7 @@ from pydantic.utils import GetterDict from pydango import NAO -from pydango.index import Index +from pydango.index import Indexes from pydango.orm.fields import DocFieldDescriptor from pydango.orm.relations import LIST_TYPES, LinkTypes @@ -178,7 +178,7 @@ class CollectionConfig: type: CollectionType wait_for_sync: Optional[bool] = False sync_json_schema: Optional[bool] = True - indexes: Optional[list[Union[Index, dict]]] = [] + indexes: list[Indexes] = [] OPERATIONAL_FIELDS = {"key", "id", "rev"} diff --git a/pydango/orm/query.py b/pydango/orm/query.py index fe4d3db..b385ca4 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -261,9 +261,9 @@ def replace( def upsert( # noqa: PyMethodOverriding self, filter_: BaseArangoModel, - insert: Union[dict, BaseModel, ObjectExpression, BaseArangoModel], + insert: Union[dict, BaseModel, ObjectExpression, BaseArangoModel, VariableExpression], *, - replace: Union[dict, BaseModel, ObjectExpression, BaseArangoModel], + replace: Union[dict, BaseModel, ObjectExpression, BaseArangoModel, VariableExpression], options: Optional[UpsertOptions] = None, ) -> Self: ... @@ -272,9 +272,9 @@ def upsert( # noqa: PyMethodOverriding def upsert( # noqa: PyMethodOverriding self, filter_: BaseArangoModel, - insert: Union[dict, BaseModel, ObjectExpression, BaseArangoModel], + insert: Union[dict, BaseModel, ObjectExpression, BaseArangoModel, VariableExpression], *, - update: Union[dict, BaseModel, ObjectExpression, BaseArangoModel], + update: Union[dict, BaseModel, ObjectExpression, BaseArangoModel, VariableExpression], options: Optional[UpsertOptions] = None, ) -> Self: ... 
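# Note: widening these overloads to accept VariableExpression is what lets the
# session upsert whole batches by iterating a bound list, as in this sketch.
# The "users" collection name, the "name" field and the `docs` payload are
# assumptions, not part of the patch.
from pydango.orm.query import for_
from pydango.query.expressions import NEW, IteratorExpression

docs = [{"name": "alice"}, {"name": "bob"}]
i = IteratorExpression()
# Roughly: FOR i IN @docs UPSERT {name: i.name} INSERT i UPDATE i IN users RETURN NEW
batch_query = for_(i, in_=docs).upsert({"name": i.name}, i, "users", update=i).return_(NEW())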
@@ -282,11 +282,11 @@ def upsert( # noqa: PyMethodOverriding @overload def upsert( # noqa: PyMethodOverriding self, - filter_: Union[dict, BaseModel, ObjectExpression], - insert: Union[dict, BaseModel, ObjectExpression], + filter_: Union[dict, BaseModel, ObjectExpression, VariableExpression], + insert: Union[dict, BaseModel, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression], *, - replace: Union[dict, BaseModel, ObjectExpression], + replace: Union[dict, BaseModel, ObjectExpression, VariableExpression], options: Optional[UpsertOptions] = None, ) -> Self: ... @@ -294,19 +294,19 @@ def upsert( # noqa: PyMethodOverriding @overload def upsert( # noqa: PyMethodOverriding self, - filter_: Union[dict, BaseModel, ObjectExpression], - insert: Union[dict, BaseModel, ObjectExpression], + filter_: Union[dict, BaseModel, ObjectExpression, VariableExpression], + insert: Union[dict, BaseModel, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression], *, - update: Union[dict, BaseModel, ObjectExpression], + update: Union[dict, BaseModel, ObjectExpression, VariableExpression], options: Optional[UpsertOptions] = None, ) -> Self: ... def upsert( self, - filter_: Union[dict, ObjectExpression, BaseModel, BaseArangoModel], - insert: Union[dict, ObjectExpression, BaseModel, BaseArangoModel], + filter_: Union[dict, ObjectExpression, BaseModel, BaseArangoModel, VariableExpression], + insert: Union[dict, ObjectExpression, BaseModel, BaseArangoModel, VariableExpression], collection: Union[str, CollectionExpression, None] = None, **kwargs, # update: Union[dict,ObjectExpression, BaseModel, BaseArangoModel, None] = None, diff --git a/pydango/query/operations.py b/pydango/query/operations.py index 23f589a..81802d4 100644 --- a/pydango/query/operations.py +++ b/pydango/query/operations.py @@ -499,11 +499,11 @@ class UpsertOperation(Operation): def __init__( self, query_ref: "AQLQuery", - filter_: Union[dict, ObjectExpression], + filter_: Union[dict, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression], - insert: Union[dict, ObjectExpression], + insert: Union[dict, ObjectExpression, VariableExpression], *, - update: Union[dict, ObjectExpression], + update: Union[dict, ObjectExpression, VariableExpression], options: Optional[UpsertOptions] = None, ): ... @@ -512,11 +512,11 @@ def __init__( def __init__( self, query_ref: "AQLQuery", - filter_: Union[dict, ObjectExpression], + filter_: Union[dict, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression], - insert: Union[dict, ObjectExpression], + insert: Union[dict, ObjectExpression, VariableExpression], *, - replace: Union[dict, ObjectExpression], + replace: Union[dict, ObjectExpression, VariableExpression], options: Optional[UpsertOptions] = None, ): ... 
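# Note: UpsertOperation is reached through the query builder's upsert(); a short
# sketch of the two modes it now accepts for query variables as well as plain
# dicts. The "users" collection, the "email" key, and constructing AQLQuery
# directly are assumptions for illustration only.
from pydango.query import AQLQuery

doc = {"email": "alice@example.com", "name": "Alice"}
# UPDATE keeps attributes the new document does not mention...
update_query = AQLQuery().upsert({"email": doc["email"]}, doc, "users", update=doc)
# ...while REPLACE overwrites the matched document entirely.
replace_query = AQLQuery().upsert({"email": doc["email"]}, doc, "users", replace=doc)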
@@ -524,12 +524,12 @@ def __init__( def __init__( self, query_ref: "AQLQuery", - filter_: Union[dict, ObjectExpression], + filter_: Union[dict, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression], - insert: Union[dict, ObjectExpression], + insert: Union[dict, ObjectExpression, VariableExpression], *, - update: Union[dict, ObjectExpression, None] = None, - replace: Union[dict, ObjectExpression, None] = None, + update: Union[dict, ObjectExpression, VariableExpression, None] = None, + replace: Union[dict, ObjectExpression, VariableExpression, None] = None, options: Optional[UpsertOptions] = None, ): super().__init__(query_ref) diff --git a/pydango/query/query.py b/pydango/query/query.py index f5af9a7..c98985e 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -270,11 +270,11 @@ def replace( @overload def upsert( self, - filter_: Union[dict, ObjectExpression], - insert: Union[dict, ObjectExpression], + filter_: Union[dict, ObjectExpression, VariableExpression], + insert: Union[dict, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression], *, - replace: Union[dict, ObjectExpression], + replace: Union[dict, ObjectExpression, VariableExpression], options: Optional[UpsertOptions] = None, ) -> Self: ... @@ -282,23 +282,23 @@ def upsert( @overload def upsert( self, - filter_: Union[dict, ObjectExpression], - insert: Union[dict, ObjectExpression], + filter_: Union[dict, ObjectExpression, VariableExpression], + insert: Union[dict, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression], *, - update: Union[dict, ObjectExpression], + update: Union[dict, ObjectExpression, VariableExpression], options: Optional[UpsertOptions] = None, ) -> Self: ... def upsert( self, - filter_: Union[dict, ObjectExpression], - insert: Union[dict, ObjectExpression], + filter_: Union[dict, ObjectExpression, VariableExpression], + insert: Union[dict, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression], *, - update: Union[dict, ObjectExpression, None] = None, - replace: Union[dict, ObjectExpression, None] = None, + update: Union[dict, ObjectExpression, VariableExpression, None] = None, + replace: Union[dict, ObjectExpression, VariableExpression, None] = None, options: Optional[UpsertOptions] = None, ) -> Self: self.__is_modification_query__ = True diff --git a/pyproject.toml b/pyproject.toml index e133b90..00a220f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ readme = "README.md" python = ">=3.9,<4.0" aioarango = "^1.0.0" pydantic = "^1.10.7" -urllib3= "==1.26.15" +urllib3 = "==1.26.15" indexed = "^1.3.0" @@ -71,6 +71,7 @@ profile = "black" [tool.pytest.ini_options] addopts = "-ra" +asyncio_mode = "auto" #testpaths = [ # "tests", #] @@ -79,12 +80,12 @@ addopts = "-ra" skips = ['**/test_*.py', '**/test_*.py'] [tool.ruff] -line-length=120 +line-length = 120 [tool.mypy] mypy_path = "./stubs/" #explicit_package_bases = true -exclude=""" +exclude = """ ( /( backups # exclude a few common directories in the diff --git a/requirements.txt b/requirements.txt index 9ccfcbe..6d281f3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -153,9 +153,9 @@ rfc3986[idna2008]==1.5.0 ; python_version >= "3.9" and python_version < "4.0" \ sniffio==1.3.0 ; python_version >= "3.9" and python_version < "4.0" \ --hash=sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101 \ --hash=sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384 -typing-extensions==4.6.3 
; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26 \ - --hash=sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5 +typing-extensions==4.7.0 ; python_version >= "3.9" and python_version < "4.0" \ + --hash=sha256:5d8c9dac95c27d20df12fb1d97b9793ab8b2af8a3a525e68c80e21060c161771 \ + --hash=sha256:935ccf31549830cda708b42289d44b6f74084d616a00be651601a4f968e77c82 urllib3==1.26.15 ; python_version >= "3.9" and python_version < "4.0" \ --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \ --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42 diff --git a/tests/conftest.py b/tests/conftest.py index 2b6096b..b92e9af 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,6 @@ from typing import AsyncGenerator, TypeVar import pytest -import pytest_asyncio from aioarango import ArangoClient from aioarango.database import Database @@ -75,21 +74,21 @@ def format(self, record): AsyncFixture = AsyncGenerator[T, None] -@pytest_asyncio.fixture(scope="session") +@pytest.fixture(scope="session") async def client() -> AsyncFixture[ArangoClient]: client = ArangoClient() yield client await client.close() -@pytest_asyncio.fixture(scope="session") +@pytest.fixture(scope="session") async def database(client: ArangoClient) -> AsyncFixture[Database]: db = await get_or_create_db(client, "pydango") yield db # await (await client.db("_system")).delete_database("pydango") -@pytest_asyncio.fixture(scope="session", autouse=True) +@pytest.fixture(scope="session", autouse=True) async def populate(database: Database): responses = defaultdict(list) for coll in DATA: From 66fee3644aa5750beca539d3cff6e721b66026f8 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Sat, 1 Jul 2023 11:53:47 +0300 Subject: [PATCH 03/19] sync 1 --- poetry.lock | 76 ++++---- pydango/connection/session.py | 330 +++++++++++++++++++++++----------- pydango/orm/models.py | 85 +++++++-- pydango/orm/query.py | 23 ++- pydango/query/expressions.py | 7 +- pydango/query/functions.py | 9 +- pydango/query/query.py | 11 +- pyproject.toml | 2 +- tests/session/test_family.py | 68 ++++++- 9 files changed, 448 insertions(+), 163 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0c8c864..ac5dad6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -587,47 +587,47 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "1.10.9" +version = "1.10.10" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca"}, - {file = "pydantic-1.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f"}, - {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896"}, - {file = "pydantic-1.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d"}, - {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f"}, - {file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4"}, - {file = "pydantic-1.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f"}, - {file = "pydantic-1.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0"}, - {file = "pydantic-1.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7"}, - {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d"}, - {file = "pydantic-1.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c"}, - {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91"}, - {file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8"}, - {file = "pydantic-1.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f"}, - {file = "pydantic-1.10.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece"}, - {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a"}, - {file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a"}, - {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60"}, - {file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf"}, - {file = "pydantic-1.10.9-cp37-cp37m-win_amd64.whl", hash = "sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29"}, - {file = "pydantic-1.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82"}, - {file = "pydantic-1.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6"}, - {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766"}, - {file = "pydantic-1.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3"}, - {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572"}, - {file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"}, - {file = "pydantic-1.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb"}, - {file = "pydantic-1.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298"}, - {file = "pydantic-1.10.9-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276"}, - {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60"}, - {file = "pydantic-1.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc"}, - {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a"}, - {file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4"}, - {file = "pydantic-1.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1"}, - {file = "pydantic-1.10.9-py3-none-any.whl", hash = "sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305"}, - {file = "pydantic-1.10.9.tar.gz", hash = "sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be"}, + {file = "pydantic-1.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adad1ee4ab9888f12dac2529276704e719efcf472e38df7813f5284db699b4ec"}, + {file = "pydantic-1.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a7db03339893feef2092ff7b1afc9497beed15ebd4af84c3042a74abce02d48"}, + {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b3714b97ff84b2689654851c2426389bcabfac9080617bcf4306c69db606f6"}, + {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edfdf0a5abc5c9bf2052ebaec20e67abd52e92d257e4f2d30e02c354ed3e6030"}, + {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a3b30fd255eeeb63caa9483502ba96b7795ce5bf895c6a179b3d909d9f53a6"}, + {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db4c7f7e60ca6f7d6c1785070f3e5771fcb9b2d88546e334d2f2c3934d949028"}, + {file = "pydantic-1.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:a2d5be50ac4a0976817144c7d653e34df2f9436d15555189f5b6f61161d64183"}, + {file = "pydantic-1.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:566a04ba755e8f701b074ffb134ddb4d429f75d5dced3fbd829a527aafe74c71"}, + {file = "pydantic-1.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f79db3652ed743309f116ba863dae0c974a41b688242482638b892246b7db21d"}, + {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62376890b819bebe3c717a9ac841a532988372b7e600e76f75c9f7c128219d5"}, + {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4870f13a4fafd5bc3e93cff3169222534fad867918b188e83ee0496452978437"}, + {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:990027e77cda6072a566e433b6962ca3b96b4f3ae8bd54748e9d62a58284d9d7"}, + {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c40964596809eb616d94f9c7944511f620a1103d63d5510440ed2908fc410af"}, + {file = "pydantic-1.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:ea9eebc2ebcba3717e77cdeee3f6203ffc0e78db5f7482c68b1293e8cc156e5e"}, + {file = "pydantic-1.10.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:762aa598f79b4cac2f275d13336b2dd8662febee2a9c450a49a2ab3bec4b385f"}, + {file = 
"pydantic-1.10.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dab5219659f95e357d98d70577b361383057fb4414cfdb587014a5f5c595f7b"}, + {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3d4ee957a727ccb5a36f1b0a6dbd9fad5dedd2a41eada99a8df55c12896e18d"}, + {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b69f9138dec566962ec65623c9d57bee44412d2fc71065a5f3ebb3820bdeee96"}, + {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7aa75d1bd9cc275cf9782f50f60cddaf74cbaae19b6ada2a28e737edac420312"}, + {file = "pydantic-1.10.10-cp37-cp37m-win_amd64.whl", hash = "sha256:9f62a727f5c590c78c2d12fda302d1895141b767c6488fe623098f8792255fe5"}, + {file = "pydantic-1.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aac218feb4af73db8417ca7518fb3bade4534fcca6e3fb00f84966811dd94450"}, + {file = "pydantic-1.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88546dc10a40b5b52cae87d64666787aeb2878f9a9b37825aedc2f362e7ae1da"}, + {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c41bbaae89e32fc582448e71974de738c055aef5ab474fb25692981a08df808a"}, + {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b71bd504d1573b0b722ae536e8ffb796bedeef978979d076bf206e77dcc55a5"}, + {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e088e3865a2270ecbc369924cd7d9fbc565667d9158e7f304e4097ebb9cf98dd"}, + {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3403a090db45d4027d2344859d86eb797484dfda0706cf87af79ace6a35274ef"}, + {file = "pydantic-1.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:e0014e29637125f4997c174dd6167407162d7af0da73414a9340461ea8573252"}, + {file = "pydantic-1.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9965e49c6905840e526e5429b09e4c154355b6ecc0a2f05492eda2928190311d"}, + {file = "pydantic-1.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:748d10ab6089c5d196e1c8be9de48274f71457b01e59736f7a09c9dc34f51887"}, + {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86936c383f7c38fd26d35107eb669c85d8f46dfceae873264d9bab46fe1c7dde"}, + {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a26841be620309a9697f5b1ffc47dce74909e350c5315ccdac7a853484d468a"}, + {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:409b810f387610cc7405ab2fa6f62bdf7ea485311845a242ebc0bd0496e7e5ac"}, + {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ce937a2a2c020bcad1c9fde02892392a1123de6dda906ddba62bfe8f3e5989a2"}, + {file = "pydantic-1.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:37ebddef68370e6f26243acc94de56d291e01227a67b2ace26ea3543cf53dd5f"}, + {file = "pydantic-1.10.10-py3-none-any.whl", hash = "sha256:a5939ec826f7faec434e2d406ff5e4eaf1716eb1f247d68cd3d0b3612f7b4c8a"}, + {file = "pydantic-1.10.10.tar.gz", hash = "sha256:3b8d5bd97886f9eb59260594207c9f57dce14a6f869c6ceea90188715d29921a"}, ] [package.dependencies] @@ -883,4 +883,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "157485b029db7fbcc5d05ed69ee2c1ef6d76992e146b29e87772aa52fc87b767" +content-hash = 
"00e74057465e8df418904d57bd1d2ad2a6ae268ccfdb07cf21b73265065d293b" diff --git a/pydango/connection/session.py b/pydango/connection/session.py index 5865f9a..4be19d4 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -1,9 +1,18 @@ +import ctypes import dataclasses import logging +import sys from collections import OrderedDict, defaultdict from enum import Enum from itertools import groupby -from typing import Iterator, Optional, Type, Union, cast +from typing import Any, Iterator, Optional, Type, Union, cast + +from pydango.orm.relations import LIST_TYPES + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias from aioarango.collection import StandardCollection from aioarango.database import StandardDatabase @@ -17,7 +26,8 @@ from pydango.orm.query import ORMQuery, for_ from pydango.orm.types import TEdge, TVertexModel from pydango.orm.utils import convert_edge_data_to_valid_kwargs -from pydango.query.consts import FROM, TO +from pydango.query import AQLQuery +from pydango.query.consts import FROM, KEY, TO from pydango.query.expressions import NEW, IteratorExpression, VariableExpression from pydango.query.functions import First, Length, Merge, UnionArrays from pydango.query.operations import RangeExpression @@ -50,6 +60,7 @@ def _group_by_relation( ): for thing in group: yield m, thing + return None class UpdateStrategy(str, Enum): @@ -57,29 +68,118 @@ class UpdateStrategy(str, Enum): REPLACE = "replace" +def _make_upsert_query( + filter_: Any, + i: Any, + model: Union[Type[BaseArangoModel], BaseArangoModel], + query: AQLQuery, + strategy: UpdateStrategy, + options: Union[UpsertOptions, None] = None, +): + if strategy == strategy.UPDATE: + query = query.upsert(filter_, i, model.Collection.name, update=i, options=options) + elif strategy == strategy.REPLACE: + query = query.upsert(filter_, i, model.Collection.name, replace=i, options=options) + else: + raise ValueError(f"strategy must be instance of {UpdateStrategy.__name__}") + + return query + + +def _get_upsert_filter( + document: Union[BaseArangoModel, VariableExpression], model: Union[Type[BaseArangoModel], None] = None +): + if not isinstance(document, BaseArangoModel) and model is not None: + indexes = model.Collection.indexes + elif isinstance(document, BaseArangoModel): + indexes = document.Collection.indexes + else: + indexes = tuple() + + # todo: check first by _key or _id + + filter_ = {} + for model_index in indexes: + if hasattr(model_index, "unique") and model_index.unique: + filter_ = {index_field: getattr(document, index_field) for index_field in model_index.fields} + + if isinstance(model_index, dict) and model_index.get("unique"): + filter_ = {j: getattr(document, j) for j in model_index.get("fields", [])} + + if not filter_: + key = getattr(document, KEY) + filter_ = {"_key": key} if key is not None else {} # noqa: PyProtectedMember + + return filter_ + + +CollectionUpsertOptions: TypeAlias = dict[Union[str, Type[BaseArangoModel]], UpsertOptions] + + +def _bind_edge(from_model, instance, rels, to_model, vertex_collections, vertex_let_queries): + from_ = vertex_collections[from_model].keys().index(instance) + new_rels = [vertex_collections[to_model].keys().index(x) for x in rels] + from_var = vertex_let_queries[from_model] + to_var = vertex_let_queries[to_model] + iterator = IteratorExpression() + ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} # noqa: PyProtectedMember + return iterator, new_rels, ret + + +def 
_build_upsert_query( + i: IteratorExpression, + strategy: UpdateStrategy, + model: Type[BaseArangoModel], + docs: Union[VariableExpression, list[VariableExpression]], +): + filter_ = _get_upsert_filter(i, model) + query = for_(i, in_=docs) + query = _make_upsert_query(filter_, i, model, query, strategy, None).return_(NEW()) + return query + + +def _build_vertex_collection_query(v, vertices_docs, strategy: UpdateStrategy): + i = IteratorExpression() + from_var = VariableExpression() + query = _build_upsert_query(i, strategy, v, vertices_docs) + return from_var, query + + class PydangoSession: def __init__(self, database: StandardDatabase): self.database = database @classmethod - def _build_graph_query(cls, document: VertexModel, strategy: UpdateStrategy = UpdateStrategy.UPDATE) -> ORMQuery: + def _build_graph_query( + cls, + document: VertexModel, + strategy: UpdateStrategy = UpdateStrategy.UPDATE, + collection_options: Union[CollectionUpsertOptions, None] = None, + ) -> ORMQuery: query = ORMQuery() _visited: set[int] = set() - edge_collections, edge_vertex_index, vertex_collections = cls._build_graph(document, _visited) + edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping = cls._build_graph( + document, _visited + ) vertex_let_queries: dict[Type[VertexModel], VariableExpression] = {} - + vertices_ids = {} + edge_ids = defaultdict(lambda: defaultdict(list)) for v in vertex_collections: - from_var, vertices = cls._build_vertex_query(v, vertex_collections, vertex_let_queries, strategy) - query.let(from_var, vertices) + vertex_docs = list(vertex_collections[v].values()) + vertices_ids[v] = [id(doc) for doc in vertex_docs] + from_var, vertex_query = _build_vertex_collection_query(v, vertex_docs, strategy) + vertex_let_queries[v] = from_var + + query.let(from_var, vertex_query) main = VariableExpression() query.let(main, First(vertex_let_queries[document.__class__])) - + edge_let_queries = {} for e, coll in edge_vertex_index.items(): edge_vars = [] for (from_model, to_model), instances in coll.items(): for instance, rels in instances.items(): - iterator, new_rels, ret = cls._bind_edge( + iterator, new_rels, ret = _bind_edge( from_model, instance, rels, to_model, vertex_collections, vertex_let_queries ) v = VariableExpression() @@ -88,6 +188,7 @@ def _build_graph_query(cls, document: VertexModel, strategy: UpdateStrategy = Up merger = IteratorExpression() edge = VariableExpression() query.let(edge, edge_collections[e][instance]) + edge_ids[e][instance].extend([id(doc) for doc in edge_collections[e][instance]]) merged = VariableExpression() query.let( @@ -104,62 +205,66 @@ def _build_graph_query(cls, document: VertexModel, strategy: UpdateStrategy = Up continue edge_iter = IteratorExpression() + # edge_let_queries[e] = edges + edge_let_queries[e] = VariableExpression() + query.let(edge_let_queries[e], _build_upsert_query(edge_iter, strategy, e, edges)) + + fields = defaultdict(list) + + for vertex_cls, vertex_ids in vertices_ids.items(): + if vertex_cls == document.__class__: + vertex_ids = vertex_ids[1:] + + for i, v_id in enumerate(vertex_ids): + obj2 = ctypes.cast(v_id, ctypes.py_object).value + model_fields = model_fields_mapping[v_id].get(vertex_cls, {}) + for j, field in enumerate(model_fields.values()): + if vertex_cls == document.__class__: + if vertex_cls.__relationships__[field].link_type in LIST_TYPES: + fields[field].append(vertex_let_queries[vertex_cls][i + j + 1]) + else: + fields[field] = vertex_let_queries[vertex_cls][i + j + 1] - 
query.let(VariableExpression(), cls.build_upsert_query(edge_iter, strategy, e, edges)) - - return query.return_(main) - - @classmethod - def _bind_edge(cls, from_model, instance, rels, to_model, vertex_collections, vertex_let_queries): - from_ = vertex_collections[from_model].keys().index(instance) - new_rels = [vertex_collections[to_model].keys().index(x) for x in rels] - from_var = vertex_let_queries[from_model] - to_var = vertex_let_queries[to_model] - iterator = IteratorExpression() - ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} # noqa: PyProtectedMember - return iterator, new_rels, ret - - @classmethod - def _build_vertex_query(cls, v, vertex_collections, vertex_let_queries, strategy: UpdateStrategy): - i = IteratorExpression() - from_var = VariableExpression() - vertex_let_queries[v] = from_var - - # vertices = ( - # for_(i, in_=list(vertex_collections[v].values())) - # .insert(i, v.Collection.name) - # .return_(NEW()) + else: + if vertex_cls.__relationships__[field].link_type in LIST_TYPES: + fields[field].append(vertex_let_queries[vertex_cls][i + j]) + else: + fields[field] = vertex_let_queries[vertex_cls][i + j + 1] + # todo: handle recursive + # break + + edges = defaultdict(list) + # edges ={} + # for edge_cls, edge_ids in edge_ids.items(): + # for i, e_id in enumerate(edge_ids): + # obj2 = ctypes.cast(e_id, ctypes.py_object).value + # model_fields = model_fields_mapping[e_id].get(edge_cls, {}) + # for j, field in enumerate(model_fields.values()): + # obj2 = ctypes.cast(relation_doc, ctypes.py_object).value + # var = VariableExpression() + # query.let(var, edge_let_queries[edge_cls]) + # if vertex_cls.__relationships__[field].link_type in LIST_TYPES: + # edges[field].append(var[i + j]) + # else: + # edges[field] = var[i + j] + + # todo: handle recursive + # break + # break + + # for vertex_id,v in model_fields_mapping.items(): + # vertices_ids[] + # pass + return query.return_(Merge(main, fields, {"edges": edges})) + # { + # "main": main, + # "fields":fields, + # "vertex": {k.Collection.name: v for k, v in vertex_let_queries.items()}, + # "edges": {k.Collection.name: v for k, v in edge_let_queries.items()}, + # "edges": edges, + # } # ) - vertices_docs = list(vertex_collections[v].values()) - query = cls.build_upsert_query(i, strategy, v, vertices_docs) - return from_var, query - - @classmethod - def build_upsert_query( - cls, - i: IteratorExpression, - strategy: UpdateStrategy, - model: Type[BaseArangoModel], - docs: Union[VariableExpression, list[VariableExpression]], - ): - filter_ = {} - for model_index in model.Collection.indexes: - if hasattr(model_index, "unique") and model_index.unique: - filter_ = {j: getattr(i, j) for j in model_index.fields} - if isinstance(model_index, dict) and model_index.get("unique"): - filter_ = {j: getattr(i, j) for j in model_index.get("fields", [])} - if isinstance(docs, list): - if not filter_ and all([x.get("_key") for x in docs]): - filter_ = {"_key": i._key} # noqa: PyProtectedMember - query = for_(i, in_=docs) - if strategy == strategy.UPDATE: - query = query.upsert(filter_, i, model.Collection.name, update=i) - elif strategy == strategy.REPLACE: - query = query.upsert(filter_, i, model.Collection.name, replace=i) - query = query.return_(NEW()) - return query - @classmethod def _build_graph(cls, document: VertexModel, _visited: set[int]): vertex_collections: dict[Type[VertexModel], IndexedOrderedDict[ArangoModel]] = OrderedDict() @@ -167,49 +272,72 @@ def _build_graph(cls, document: VertexModel, _visited: set[int]): 
edge_vertex_index: dict[ Type[EdgeModel], dict[tuple[Type[VertexModel], Type[VertexModel]], dict[int, list[int]]] ] = defaultdict(lambda: defaultdict(lambda: defaultdict(list))) + model_fields_mapping: dict[int, defaultdict[str, list[tuple[int, int]]]] = {} - def _prepare_relation(model, edge_cls, edge_doc, relation_doc, visited): + def _prepare_relation(field, model, edge_cls, edge_doc, relation_doc): + model_id = id(model) if edge_doc: - edge_collections.setdefault(edge_cls, IndexedOrderedDict()).setdefault(id(model), []).append( - edge_doc.save_dict() - ) - traverse(relation_doc, visited) + edge_collections.setdefault(edge_cls, IndexedOrderedDict()).setdefault(model_id, []).append(edge_doc) + + if model_id not in model_fields_mapping: + model_fields_mapping[model_id] = {relation_doc.__class__: {}} - edge_vertex_index[edge_cls][model.__class__, relation_doc.__class__][id(model)].append(id(relation_doc)) + if edge_cls not in model_fields_mapping[model_id]: + model_fields_mapping[model_id][edge_cls] = {} + pass + + model_fields_mapping[model_id][relation_doc.__class__][id(relation_doc)] = field + model_fields_mapping[model_id][edge_cls][id(edge_doc)] = field + + edge_vertex_index[edge_cls][model.__class__, relation_doc.__class__][model_id].append(id(relation_doc)) def traverse(model: TVertexModel, visited: set): - if id(model) in visited: + model_id = id(model) + if model_id in visited: return if isinstance(model, VertexModel): - vertex_collections.setdefault(model.__class__, IndexedOrderedDict())[id(model)] = model.save_dict() - visited.add(id(model)) + vertex_collections.setdefault(model.__class__, IndexedOrderedDict())[model_id] = model + visited.add(model_id) models: tuple[Type[VertexModel], Optional[Type[EdgeModel]]] - for models, field in _group_by_relation(model): - edge_cls: Optional[Type[EdgeModel]] = models[1] - relation_doc = getattr(model, field) - if not relation_doc: - continue - - if isinstance(relation_doc, LazyProxy): - relation_doc = relation_doc.__instance__ - - if model.edges: - if isinstance(model.edges, dict): - convert_edge_data_to_valid_kwargs(model.edges) - model.edges = model.__fields__[EDGES].type_(**model.edges) - - if isinstance(relation_doc, list): - z = zip(relation_doc, getattr(model.edges, field, [])) - for vertex_doc, edge_doc in z: - _prepare_relation(model, edge_cls, edge_doc, vertex_doc, visited) + relations = list(_group_by_relation(model)) + if relations: + for models, field in relations: + edge_cls: Optional[Type[EdgeModel]] = models[1] + relation_doc = getattr(model, field) + if not relation_doc: + model_fields_mapping[model_id] = {} + continue + + if isinstance(relation_doc, LazyProxy): + relation_doc = relation_doc.__instance__ + + if model.edges: + if isinstance(model.edges, dict): + convert_edge_data_to_valid_kwargs(model.edges) + model.edges = model.__fields__[EDGES].type_(**model.edges) + + if isinstance(relation_doc, list): + z = zip(relation_doc, getattr(model.edges, field, [])) + for vertex_doc, edge_doc in z: + _prepare_relation(field, model, edge_cls, edge_doc, vertex_doc) + traverse(vertex_doc, visited) + else: + edge_doc = getattr(model.edges, field) + _prepare_relation(field, model, edge_cls, edge_doc, relation_doc) + traverse(relation_doc, visited) else: - edge_doc = getattr(model.edges, field) - _prepare_relation(model, edge_cls, edge_doc, relation_doc, visited) + # todo: insert join relation + pass + else: + pass + # if not model_id in model_fields_mapping: + # model_fields_mapping[model_id]={} + # 
model_fields_mapping[model_id][model.__class__] = {} traverse(document, _visited) - return edge_collections, edge_vertex_index, vertex_collections + return edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping async def init(self, model: Type[BaseArangoModel]): collection = await get_or_create_collection(self.database, model) @@ -226,17 +354,19 @@ async def save( document: ArangoModel, strategy: UpdateStrategy = UpdateStrategy.UPDATE, # todo: follow_links: bool = False, - options: Union[UpsertOptions, None] = None, + collection_options: Union[CollectionUpsertOptions, None] = None, ) -> ArangoModel: if isinstance(document, VertexModel): - query = self._build_graph_query(document) + query = self._build_graph_query(document, collection_options=collection_options) else: - if strategy == UpdateStrategy.UPDATE: - query = ORMQuery().upsert(document, document, update=document, options=options) - elif strategy == UpdateStrategy.REPLACE: - query = ORMQuery().upsert(document, document, replace=document, options=options) - else: - raise ValueError(f"strategy must be instance of {UpdateStrategy.__name__}") + options = ( + collection_options + and (collection_options.get(document.Collection.name) or collection_options.get(document.__class__)) + or None + ) + + filter_ = _get_upsert_filter(document) + query = _make_upsert_query(filter_, document, document, ORMQuery(), strategy, options) cursor = await query.execute(self.database) result = await cursor.next() diff --git a/pydango/orm/models.py b/pydango/orm/models.py index b0a57a0..e347b6d 100644 --- a/pydango/orm/models.py +++ b/pydango/orm/models.py @@ -1,7 +1,7 @@ from __future__ import annotations import sys -from abc import ABC +from abc import ABC, abstractmethod from enum import Enum from typing import ( TYPE_CHECKING, @@ -13,6 +13,7 @@ Generic, Mapping, Optional, + Sequence, Type, Union, cast, @@ -21,8 +22,10 @@ ) import pydantic.typing +from pydantic.fields import ConfigError from pydango.orm.consts import EDGES +from pydango.orm.encoders import jsonable_encoder from pydango.orm.types import ArangoModel, TEdge from pydango.orm.utils import convert_edge_data_to_valid_kwargs, get_globals from pydango.query.consts import FROM, ID, KEY, REV, TO @@ -178,7 +181,7 @@ class CollectionConfig: type: CollectionType wait_for_sync: Optional[bool] = False sync_json_schema: Optional[bool] = True - indexes: list[Indexes] = [] + indexes: Sequence[Indexes] = [] OPERATIONAL_FIELDS = {"key", "id", "rev"} @@ -222,7 +225,7 @@ def ArangoField(model_field, relation) -> DocFieldDescriptor: def edge_data_validator(*args, **kwargs): - print(args, kwargs) + # print(args, kwargs) return args, kwargs @@ -260,6 +263,7 @@ def __new__(mcs, name, bases, namespace, **kwargs): __edge_namespace__[field] = (relation_info.via_model, ...) 
m = create_model(f"{name}Edges", **__edge_namespace__, __base__=EdgeData) + namespace[EDGES] = Field(None, exclude=True) original_annotations[EDGES] = Optional[m] @@ -270,7 +274,8 @@ def __new__(mcs, name, bases, namespace, **kwargs): "__annotations__": original_annotations, "__relationships__": relationships, } - + if VertexModel in bases: + dict_used.update({"__edges_model__": m}) new_cls = super().__new__( mcs, name, @@ -328,6 +333,7 @@ class BaseArangoModel(BaseModel, ABC, metaclass=ArangoModelMeta): if TYPE_CHECKING: __relationships__: Relationships = {} __relationships_fields__: RelationshipFields = {} + __edges_model__: Union[Type[EdgeData], None] = None class Config(BaseConfig): arbitrary_types_allowed = True @@ -373,8 +379,10 @@ def from_orm(cls: Type[ArangoModel], obj: Any, *, session=None) -> ArangoModel: continue if field.required: obj[field_name] = NAO - - obj = super().from_orm(obj) + try: + obj = super().from_orm(obj) + except ConfigError as e: + raise e obj.__dali__session__ = session # object_setattr(obj, DALI_SESSION_KW, session) return obj @@ -389,6 +397,19 @@ def update_forward_refs(cls, **localns: Any) -> None: relation.link_model = cls.__fields__[name].type_ if isinstance(relation.via_model, ForwardRef): relation.via_model = pydantic.typing.evaluate_forwardref(relation.via_model, get_globals(cls), localns) + # cls.__edges_model__.update_forward_refs(**localns) + for field in cls.__edges_model__.__fields__.values(): + # update_field_forward_refs(field, get_globals(cls), localns) + # field.type_ = pydantic.typing.evaluate_forwardref(field.type_, get_globals(cls), localns) + # field.outer_type_ = pydantic.typing.evaluate_forwardref(field.outer_type_, get_globals(cls), localns) + # relation.via_model = pydantic.typing.evaluate_forwardref(relation.via_model, get_globals(cls), localns) + pass + # + # print(field) + + @abstractmethod + def save_dict(self) -> DictStrAny: + ... 
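A minimal usage sketch of the new abstract save_dict() contract introduced here, assuming the Person/LivesIn models from tests/session/test_cities.py are in scope; field values and the exact output shapes are illustrative only:

    import datetime

    person = Person(name="John", age=35)
    edge = LivesIn(since=datetime.datetime(2023, 7, 1))

    # VertexModel.save_dict() encodes by alias and excludes relationship fields,
    # so only plain document attributes are bound into the UPSERT statement.
    person.save_dict()  # roughly {"_id": None, "_key": None, "_rev": None, "name": "John", "age": 35}

    # EdgeModel.save_dict() additionally drops _from/_to while they are still unset;
    # the graph query fills them in from the vertex LET variables at execution time.
    edge.save_dict()    # roughly {"_id": None, "_key": None, "_rev": None, "since": "2023-07-01T00:00:00"}
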
class VertexCollectionConfig(CollectionConfig): @@ -400,25 +421,29 @@ class EdgeCollectionConfig(CollectionConfig): class EdgeModel(BaseArangoModel, ABC): - from_: Optional[Union[str, VertexModel]] = Field(None, alias=FROM) - to: Optional[Union[str, VertexModel]] = Field(None, alias=TO) + from_: Optional[Union[str]] = Field(None, alias=FROM) + to: Optional[Union[str]] = Field(None, alias=TO) class Collection(EdgeCollectionConfig): pass def save_dict(self) -> DictStrAny: - exclude = set() + exclude: set[Union[int, str]] = set() for key in ["from_", "to"]: if self.__getattribute__(key) is None: exclude.add(key) - - return self.dict(by_alias=True, exclude=exclude) + return jsonable_encoder(self, by_alias=True, exclude=exclude) + # return self.dict(by_alias=True, exclude=exclude) # EdgeModel.update_forward_refs() # VertexModel.update_forward_refs() +def save_dict(model: BaseArangoModel): + return model.save_dict() + + class VertexModel(BaseArangoModel, ABC): if TYPE_CHECKING: edges: Union[dict, EdgeData, None] = None @@ -431,8 +456,44 @@ def __init__(self, **data: Any): convert_edge_data_to_valid_kwargs(data[EDGES]) super().__init__(**data) + def dict( + self, + *, + include: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, + exclude: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, + by_alias: bool = False, + skip_defaults: Optional[bool] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + include_edges: bool = False, + ) -> DictStrAny: + if include_edges: + self.__exclude_fields__.pop("edges") + # if include_edges and include: + # include_keys = {"edges"} + # include_keys &= include.keys() + # elif include_edges: + # include_keys = set(self.__dict__.keys()) + # include_keys = include_keys.union( {"edges"}) + # else: + # include_keys = None + + super__dict = super().dict( + include=include, + exclude=exclude, + by_alias=by_alias, + skip_defaults=skip_defaults, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + self.__exclude_fields__["edges"] = True + return super__dict + def save_dict(self) -> DictStrAny: - return self.dict(by_alias=True, exclude=self.__relationships_fields__.keys()) + return jsonable_encoder(self, by_alias=True, exclude=cast(set, self.__relationships_fields__.keys())) @classmethod def update_forward_refs(cls, **localns: Any) -> None: diff --git a/pydango/orm/query.py b/pydango/orm/query.py index b385ca4..c995d59 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -1,6 +1,8 @@ +import logging import sys -from typing import Optional, Type, Union, cast, overload +from typing import Optional, Sequence, Type, Union, cast, overload +from pydango.orm.encoders import jsonable_encoder from pydango.orm.fields import ModelFieldExpression if sys.version_info >= (3, 10): @@ -11,12 +13,13 @@ from pydantic import BaseModel from pydantic.utils import lenient_issubclass -from pydango.orm.models import BaseArangoModel +from pydango.orm.models import BaseArangoModel, save_dict from pydango.orm.proxy import LazyProxy from pydango.orm.utils import Aliased from pydango.query.expressions import ( BinaryExpression, BinaryLogicalExpression, + BindableExpression, CollectionExpression, ConditionExpression, Expression, @@ -38,6 +41,8 @@ ) from pydango.query.query import AQLQuery +logger = logging.getLogger(__name__) + ORMForParams = Union[ForParams, Type[BaseArangoModel], Aliased[Type[BaseArangoModel]]] IMPLICIT_COLLECTION_ERROR = "you must specify collection 
when the collection cannot be implicitly resolved" MULTIPLE_COLLECTIONS_RESOLVED = "multiple collections resolved" @@ -76,6 +81,7 @@ def _find_models_and_bind(condition: Union[ConditionExpression, BinaryLogicalExp class ORMQuery(AQLQuery): def __init__(self, parent: Optional[AQLQuery] = None): super().__init__(parent) + # self.bind_parameter_to_sequence = {} self.orm_bound_vars: dict[Union[Type[BaseArangoModel], Aliased, ModelFieldExpression], VariableExpression] = {} def for_( @@ -111,8 +117,13 @@ def for_( elif isinstance(in_, LazyProxy): super().for_(collection_or_variable, in_.dict(by_alias=True)) - else: + elif isinstance(in_, Sequence): + # if in_ and isinstance(in_[0], BaseArangoModel) and len(set([x.Collection.name for x in in_])) == 1: + # self.bind_parameter_to_sequence[id(in_[0])] = in_[0].Collection.name super().for_(cast(Union[str, IteratorExpression], collection_or_variable), cast(list, in_)) + else: + # logger.info("couldn't resolver at orm layer",extra={"vars":[collection_or_variable,in_]}) + super().for_(cast(Union[str, IteratorExpression], collection_or_variable), in_) return self @@ -187,6 +198,9 @@ def remove( return super().remove(expression, collection, options=options) + def bind_parameter(self, parameter: BindableExpression, override_var_name: Optional[str] = None) -> str: + return super().bind_parameter(parameter) + @overload def update(self, key, doc, *, options: Optional[UpdateOptions] = None) -> Self: # noqa: PyMethodOverriding ... @@ -357,6 +371,9 @@ def return_(self, return_expr: Union[Type[BaseArangoModel], Aliased, ReturnableM super().return_(return_expr) return self + def _serialize_vars(self): + return jsonable_encoder(self.bind_vars, by_alias=True, custom_encoder={BaseArangoModel: save_dict}) + def for_( collection_or_variable: ORMForParams, diff --git a/pydango/query/expressions.py b/pydango/query/expressions.py index 4782b06..0a26406 100644 --- a/pydango/query/expressions.py +++ b/pydango/query/expressions.py @@ -211,6 +211,7 @@ class IterableExpression(Expression, ReturnableMixin, ABC): Base class for iterable expressions """ + # todo: move this to a more relevant place def __init__(self, iterator: Optional[Union[IteratorExpression, str]] = None): if iterator is None: self.iterator = IteratorExpression() @@ -449,7 +450,7 @@ def __init__(self, value: ListValues, iterator: Optional[Union[IteratorExpressio if isinstance(i, QueryExpression): self._copy.append(SubQueryExpression(i)) self._need_compile = True - elif isinstance(i, VariableExpression): + elif isinstance(i, (VariableExpression, FieldExpression)): self._copy.append(i) self._need_compile = True elif isinstance(i, Expression): @@ -508,10 +509,10 @@ def __init__(self, value: ObjectParams, parent: Optional[Union[VariableExpressio for field, mapped_field in self.value.items(): if isinstance(mapped_field, list): self.value[field] = ListExpression(mapped_field) - self.__all_literals__ = False + self.__all_literals__ = self.__all_literals__ or not self.value[field]._need_compile elif isinstance(mapped_field, dict): self.value[field] = ObjectExpression(mapped_field, self.parent) - self.__all_literals__ = False + self.__all_literals__ = self.__all_literals__ or self.value[field].__all_literals__ elif isinstance(mapped_field, QueryExpression): subquery = SubQueryExpression(mapped_field) diff --git a/pydango/query/functions.py b/pydango/query/functions.py index 58be5f3..45e5f77 100644 --- a/pydango/query/functions.py +++ b/pydango/query/functions.py @@ -268,7 +268,12 @@ def __init__(self, array): 
super().__init__(array) -class Slice(FunctionExpression, IterableExpression): +class ArrayFunctionMixin: + def __getitem__(self, item): + return self.arguments[item] + + +class Slice(FunctionExpression, IterableExpression, ArrayFunctionMixin): name = "SLICE" def __init__(self, array, start, count=None): @@ -278,7 +283,7 @@ def __init__(self, array, start, count=None): super().__init__(array, start, count) -class UnionArrays(FunctionExpression, IterableExpression): +class UnionArrays(FunctionExpression, IterableExpression, ArrayFunctionMixin): name = "UNION" def __init__(self, *arrays: Union[ListExpression, VariableExpression]): diff --git a/pydango/query/query.py b/pydango/query/query.py index c98985e..1c5e239 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -3,6 +3,8 @@ import sys from typing import Any, Dict, List, Optional, Union, overload +# from pydango.orm.models import BaseArangoModel, save_dict + if sys.version_info >= (3, 10): from typing import Self, TypeAlias else: @@ -198,7 +200,7 @@ def compile(self, *args, **kwargs) -> str: def bind_variable(self) -> str: return self._get_var_name() - def bind_parameter(self, parameter: BindableExpression) -> str: + def bind_parameter(self, parameter: BindableExpression, override_var_name: Optional[str] = None) -> str: if self.parent: return self.parent.bind_parameter(parameter) is_hashable = False @@ -211,7 +213,7 @@ def bind_parameter(self, parameter: BindableExpression) -> str: if str(parameter.value) in self._parameters: return self._parameters[str(parameter.value)] - var = self._get_param_var() + var = override_var_name or self._get_param_var() self.bind_vars[var[1:]] = parameter.value @@ -391,8 +393,11 @@ def collect( ) return self + def _serialize_vars(self): + self.compiled_vars = jsonable_encoder(self.bind_vars, by_alias=True) + async def execute(self, db: Database, **options): compiled = self.compile() - self.compiled_vars = jsonable_encoder(self.bind_vars) + self.compiled_vars = self._serialize_vars() logger.debug("executing query", extra={"query": compiled, "bind_vars": json.dumps(self.compiled_vars)}) return await db.aql.execute(compiled, bind_vars=self.compiled_vars, **options) diff --git a/pyproject.toml b/pyproject.toml index 00a220f..5b61615 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" [tool.poetry.dependencies] python = ">=3.9,<4.0" aioarango = "^1.0.0" -pydantic = "^1.10.7" +pydantic = "==1.10.10" urllib3 = "==1.26.15" indexed = "^1.3.0" diff --git a/tests/session/test_family.py b/tests/session/test_family.py index 5286d34..46da435 100644 --- a/tests/session/test_family.py +++ b/tests/session/test_family.py @@ -1,3 +1,4 @@ +from pprint import pprint from typing import Annotated, Optional import pytest @@ -36,6 +37,71 @@ class Collection(EdgeCollectionConfig): Person.update_forward_refs() +def test_obj(): + a = Person.parse_obj( + { + "_id": "people/29887", + "_key": "29887", + "_rev": "_gO2JSqS---", + "age": 35, + "brothers": [{"_key": "29888", "_id": "people/29888", "_rev": "_gO2JSqS--_", "name": "Ben", "age": 45}], + "father": {"_key": "29891", "_id": "people/29891", "_rev": "_gO2JSqS--C", "name": "Father", "age": 70}, + "mother": {"_key": "29892", "_id": "people/29892", "_rev": "_gO2JSqS--D", "name": "Mother", "age": 70}, + "name": "John", + "sisters": [ + {"_key": "29889", "_id": "people/29889", "_rev": "_gO2JSqS--A", "name": "Fiona", "age": 12}, + {"_key": "29890", "_id": "people/29890", "_rev": "_gO2JSqS--B", "name": "Jessica", "age": 12}, + ], + "edges": { + 
"brothers": [ + { + "_key": "29893", + "_id": "siblings/29893", + "_from": "people/29887", + "_to": "people/29888", + "_rev": "_gO2JSqW---", + "connection": "Brother", + } + ], + "sisters": [ + { + "_key": "29894", + "_id": "siblings/29894", + "_from": "people/29887", + "_to": "people/29889", + "_rev": "_gO2JSqm---", + "connection": "Sister", + }, + { + "_key": "29895", + "_id": "siblings/29895", + "_from": "people/29887", + "_to": "people/29890", + "_rev": "_gO2JSqm--_", + "connection": "Sister", + }, + ], + "father": { + "_key": "29896", + "_id": "siblings/29896", + "_from": "people/29887", + "_to": "people/29891", + "_rev": "_gO2JSqq---", + "connection": "Father", + }, + "mother": { + "_key": "29897", + "_id": "siblings/29897", + "_from": "people/29887", + "_to": "people/29892", + "_rev": "_gO2JSqq--_", + "connection": "Mother", + }, + }, + } + ) + + @pytest.mark.asyncio async def test_save(database): session = PydangoSession(database) @@ -92,4 +158,4 @@ async def test_save(database): p = await session.save(john) - print(p) + pprint(p.dict(include_edges=True)) From 0eaf312174bc30708b8d40bc3695a6590c446d8b Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Sat, 1 Jul 2023 11:58:13 +0300 Subject: [PATCH 04/19] half working --- pydango/connection/session.py | 122 ++++++++++++++++------------------ 1 file changed, 59 insertions(+), 63 deletions(-) diff --git a/pydango/connection/session.py b/pydango/connection/session.py index 4be19d4..97900bc 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -116,35 +116,6 @@ def _get_upsert_filter( CollectionUpsertOptions: TypeAlias = dict[Union[str, Type[BaseArangoModel]], UpsertOptions] -def _bind_edge(from_model, instance, rels, to_model, vertex_collections, vertex_let_queries): - from_ = vertex_collections[from_model].keys().index(instance) - new_rels = [vertex_collections[to_model].keys().index(x) for x in rels] - from_var = vertex_let_queries[from_model] - to_var = vertex_let_queries[to_model] - iterator = IteratorExpression() - ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} # noqa: PyProtectedMember - return iterator, new_rels, ret - - -def _build_upsert_query( - i: IteratorExpression, - strategy: UpdateStrategy, - model: Type[BaseArangoModel], - docs: Union[VariableExpression, list[VariableExpression]], -): - filter_ = _get_upsert_filter(i, model) - query = for_(i, in_=docs) - query = _make_upsert_query(filter_, i, model, query, strategy, None).return_(NEW()) - return query - - -def _build_vertex_collection_query(v, vertices_docs, strategy: UpdateStrategy): - i = IteratorExpression() - from_var = VariableExpression() - query = _build_upsert_query(i, strategy, v, vertices_docs) - return from_var, query - - class PydangoSession: def __init__(self, database: StandardDatabase): self.database = database @@ -158,7 +129,7 @@ def _build_graph_query( ) -> ORMQuery: query = ORMQuery() _visited: set[int] = set() - edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping = cls._build_graph( + edge_collections, edge_vertex_index, vertex_collections, model_field_mapping = cls._build_graph( document, _visited ) vertex_let_queries: dict[Type[VertexModel], VariableExpression] = {} @@ -167,7 +138,7 @@ def _build_graph_query( for v in vertex_collections: vertex_docs = list(vertex_collections[v].values()) vertices_ids[v] = [id(doc) for doc in vertex_docs] - from_var, vertex_query = _build_vertex_collection_query(v, vertex_docs, strategy) + from_var, vertex_query 
= cls._build_vertex_query(v, vertex_docs, strategy) vertex_let_queries[v] = from_var query.let(from_var, vertex_query) @@ -179,7 +150,7 @@ def _build_graph_query( edge_vars = [] for (from_model, to_model), instances in coll.items(): for instance, rels in instances.items(): - iterator, new_rels, ret = _bind_edge( + iterator, new_rels, ret = cls._bind_edge( from_model, instance, rels, to_model, vertex_collections, vertex_let_queries ) v = VariableExpression() @@ -207,7 +178,7 @@ def _build_graph_query( edge_iter = IteratorExpression() # edge_let_queries[e] = edges edge_let_queries[e] = VariableExpression() - query.let(edge_let_queries[e], _build_upsert_query(edge_iter, strategy, e, edges)) + query.let(edge_let_queries[e], cls.build_upsert_query(edge_iter, strategy, e, edges)) fields = defaultdict(list) @@ -217,7 +188,7 @@ def _build_graph_query( for i, v_id in enumerate(vertex_ids): obj2 = ctypes.cast(v_id, ctypes.py_object).value - model_fields = model_fields_mapping[v_id].get(vertex_cls, {}) + model_fields = model_field_mapping[v_id].get(vertex_cls, {}) for j, field in enumerate(model_fields.values()): if vertex_cls == document.__class__: if vertex_cls.__relationships__[field].link_type in LIST_TYPES: @@ -231,28 +202,28 @@ def _build_graph_query( else: fields[field] = vertex_let_queries[vertex_cls][i + j + 1] # todo: handle recursive - # break + break edges = defaultdict(list) # edges ={} - # for edge_cls, edge_ids in edge_ids.items(): - # for i, e_id in enumerate(edge_ids): - # obj2 = ctypes.cast(e_id, ctypes.py_object).value - # model_fields = model_fields_mapping[e_id].get(edge_cls, {}) - # for j, field in enumerate(model_fields.values()): - # obj2 = ctypes.cast(relation_doc, ctypes.py_object).value - # var = VariableExpression() - # query.let(var, edge_let_queries[edge_cls]) - # if vertex_cls.__relationships__[field].link_type in LIST_TYPES: - # edges[field].append(var[i + j]) - # else: - # edges[field] = var[i + j] - - # todo: handle recursive - # break - # break - - # for vertex_id,v in model_fields_mapping.items(): + for edge_cls, edge_ids in edge_ids.items(): + for i, e_id in enumerate(edge_ids): + obj2 = ctypes.cast(e_id, ctypes.py_object).value + model_fields = model_field_mapping[e_id].get(edge_cls, {}) + for j, field in enumerate(model_fields.values()): + # obj2 = ctypes.cast(relation_doc, ctypes.py_object).value + var = VariableExpression() + query.let(var, edge_let_queries[edge_cls]) + if vertex_cls.__relationships__[field].link_type in LIST_TYPES: + edges[field].append(var[i + j]) + else: + edges[field] = var[i + j] + + # todo: handle recursive + break + break + + # for vertex_id,v in model_field_mapping.items(): # vertices_ids[] # pass return query.return_(Merge(main, fields, {"edges": edges})) @@ -265,6 +236,36 @@ def _build_graph_query( # } # ) + @classmethod + def _bind_edge(cls, from_model, instance, rels, to_model, vertex_collections, vertex_let_queries): + from_ = vertex_collections[from_model].keys().index(instance) + new_rels = [vertex_collections[to_model].keys().index(x) for x in rels] + from_var = vertex_let_queries[from_model] + to_var = vertex_let_queries[to_model] + iterator = IteratorExpression() + ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} # noqa: PyProtectedMember + return iterator, new_rels, ret + + @classmethod + def _build_vertex_query(cls, v, vertices_docs, strategy: UpdateStrategy): + i = IteratorExpression() + from_var = VariableExpression() + query = cls.build_upsert_query(i, strategy, v, vertices_docs) + return from_var, query + + 
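A minimal sketch of the per-collection upsert LET assembled by these helpers, assuming a Person vertex model (as in the tests) whose collection declares a unique index on "name"; document values and the compiled variable names are illustrative:

    from pydango.connection.session import PydangoSession, UpdateStrategy
    from pydango.query.expressions import IteratorExpression

    docs = [{"name": "John", "age": 35}, {"name": "Ben", "age": 45}]
    i = IteratorExpression()
    query = PydangoSession.build_upsert_query(i, UpdateStrategy.UPDATE, Person, docs)

    # query.compile() is expected to yield something along the lines of:
    #   FOR iter_0 IN @param_0
    #       UPSERT {"name": iter_0.name} INSERT iter_0 UPDATE iter_0 IN people
    #       RETURN NEW
    # With UpdateStrategy.REPLACE the UPDATE clause becomes REPLACE; when no unique
    # index is declared, the filter may fall back to matching on the document _key.
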
@classmethod + def build_upsert_query( + cls, + i: IteratorExpression, + strategy: UpdateStrategy, + model: Type[BaseArangoModel], + docs: Union[VariableExpression, list[VariableExpression]], + ): + filter_ = _get_upsert_filter(i, model) + query = for_(i, in_=docs) + query = _make_upsert_query(filter_, i, model, query, strategy, None).return_(NEW()) + return query + @classmethod def _build_graph(cls, document: VertexModel, _visited: set[int]): vertex_collections: dict[Type[VertexModel], IndexedOrderedDict[ArangoModel]] = OrderedDict() @@ -284,7 +285,6 @@ def _prepare_relation(field, model, edge_cls, edge_doc, relation_doc): if edge_cls not in model_fields_mapping[model_id]: model_fields_mapping[model_id][edge_cls] = {} - pass model_fields_mapping[model_id][relation_doc.__class__][id(relation_doc)] = field model_fields_mapping[model_id][edge_cls][id(edge_doc)] = field @@ -292,13 +292,12 @@ def _prepare_relation(field, model, edge_cls, edge_doc, relation_doc): edge_vertex_index[edge_cls][model.__class__, relation_doc.__class__][model_id].append(id(relation_doc)) def traverse(model: TVertexModel, visited: set): - model_id = id(model) - if model_id in visited: + if id(model) in visited: return if isinstance(model, VertexModel): - vertex_collections.setdefault(model.__class__, IndexedOrderedDict())[model_id] = model - visited.add(model_id) + vertex_collections.setdefault(model.__class__, IndexedOrderedDict())[id(model)] = model + visited.add(id(model)) models: tuple[Type[VertexModel], Optional[Type[EdgeModel]]] relations = list(_group_by_relation(model)) @@ -307,7 +306,7 @@ def traverse(model: TVertexModel, visited: set): edge_cls: Optional[Type[EdgeModel]] = models[1] relation_doc = getattr(model, field) if not relation_doc: - model_fields_mapping[model_id] = {} + model_fields_mapping[id(model)] = {} continue if isinstance(relation_doc, LazyProxy): @@ -331,10 +330,7 @@ def traverse(model: TVertexModel, visited: set): # todo: insert join relation pass else: - pass - # if not model_id in model_fields_mapping: - # model_fields_mapping[model_id]={} - # model_fields_mapping[model_id][model.__class__] = {} + model_fields_mapping[id(model)] = {} traverse(document, _visited) return edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping From 49cd4ad20fdbabb2d717cef41d5e19773e2d62a7 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Sun, 2 Jul 2023 01:14:50 +0300 Subject: [PATCH 05/19] no message --- .gitignore | 1 + .pre-commit-config.yaml | 5 +- pydango/connection/session.py | 383 +++++++++++++++------------ pydango/connection/utils.py | 8 +- pydango/index.py | 29 +- pydango/orm/fields.py | 6 +- pydango/orm/models.py | 94 +++---- pydango/orm/proxy.py | 4 +- pydango/orm/types.py | 2 +- pydango/query/expressions.py | 8 +- pydango/query/functions.py | 2 - pydango/query/operations.py | 3 +- pydango/query/query.py | 2 +- pydango/query/utils.py | 15 +- pyproject.toml | 10 +- tests/conftest.py | 11 +- tests/session/test_cities.py | 79 ++++-- tests/session/test_family.py | 33 ++- tests/session/test_social_network.py | 138 +++++++++- tests/test_orm_query.py | 12 +- tests/utils.py | 39 +++ 21 files changed, 591 insertions(+), 293 deletions(-) create mode 100644 tests/utils.py diff --git a/.gitignore b/.gitignore index 47a2c14..bf1fa12 100644 --- a/.gitignore +++ b/.gitignore @@ -162,3 +162,4 @@ cython_debug/ __pycache__ .idea local +stubs/* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d2678c5..c0d4fcf 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -73,7 +73,10 @@ repos: hooks: - id: mypy additional_dependencies: - - 'pydantic' + - pydantic==1.10.10 + - mypy-extensions +# - pytest +# - httpx # - repo: https://github.com/jendrikseipp/vulture diff --git a/pydango/connection/session.py b/pydango/connection/session.py index 97900bc..993f89e 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -1,4 +1,3 @@ -import ctypes import dataclasses import logging import sys @@ -8,6 +7,8 @@ from typing import Any, Iterator, Optional, Type, Union, cast from pydango.orm.relations import LIST_TYPES +from pydango.orm.types import ArangoModel +from pydango.query.utils import new if sys.version_info >= (3, 10): from typing import TypeAlias @@ -16,20 +17,19 @@ from aioarango.collection import StandardCollection from aioarango.database import StandardDatabase -from indexed import IndexedOrderedDict +from indexed import IndexedOrderedDict # type: ignore[attr-defined] from pydango import index from pydango.connection.utils import get_or_create_collection from pydango.orm.consts import EDGES -from pydango.orm.models import ArangoModel, BaseArangoModel, EdgeModel, VertexModel +from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel from pydango.orm.proxy import LazyProxy from pydango.orm.query import ORMQuery, for_ -from pydango.orm.types import TEdge, TVertexModel from pydango.orm.utils import convert_edge_data_to_valid_kwargs from pydango.query import AQLQuery -from pydango.query.consts import FROM, KEY, TO -from pydango.query.expressions import NEW, IteratorExpression, VariableExpression -from pydango.query.functions import First, Length, Merge, UnionArrays +from pydango.query.consts import FROM, ID, KEY, REV, TO +from pydango.query.expressions import IteratorExpression, VariableExpression +from pydango.query.functions import Length, Merge, UnionArrays from pydango.query.operations import RangeExpression from pydango.query.options import UpsertOptions @@ -46,7 +46,7 @@ class DocumentNotFoundError(Exception): # document.rev = result["_rev"] -def _collection_from_model(database: StandardDatabase, model: Type[ArangoModel]) -> StandardCollection: +def _collection_from_model(database: StandardDatabase, model: Type[BaseArangoModel]) -> StandardCollection: return database.collection(model.Collection.name) @@ -87,7 +87,7 @@ def _make_upsert_query( def _get_upsert_filter( - document: Union[BaseArangoModel, VariableExpression], model: Union[Type[BaseArangoModel], None] = None + document: Union["BaseArangoModel", VariableExpression], model: Union[Type["BaseArangoModel"], None] = None ): if not isinstance(document, BaseArangoModel) and model is not None: indexes = model.Collection.indexes @@ -113,185 +113,148 @@ def _get_upsert_filter( return filter_ -CollectionUpsertOptions: TypeAlias = dict[Union[str, Type[BaseArangoModel]], UpsertOptions] +def _build_upsert_query( + i: IteratorExpression, + strategy: UpdateStrategy, + model: Type["BaseArangoModel"], + docs: Union[VariableExpression, list[VariableExpression]], + *, + edge: bool = False, +): + filter_ = _get_upsert_filter(i, model) + query = for_(i, in_=docs) + query = _make_upsert_query(filter_, i, model, query, strategy, None).return_(new(edge=edge)) + return query -class PydangoSession: - def __init__(self, database: StandardDatabase): - self.database = database +def _build_vertex_query(v, vertices_docs, strategy: UpdateStrategy): + i = IteratorExpression() + from_var = VariableExpression() + query = 
_build_upsert_query(i, strategy, v, vertices_docs) + return from_var, query - @classmethod - def _build_graph_query( - cls, - document: VertexModel, - strategy: UpdateStrategy = UpdateStrategy.UPDATE, - collection_options: Union[CollectionUpsertOptions, None] = None, - ) -> ORMQuery: - query = ORMQuery() - _visited: set[int] = set() - edge_collections, edge_vertex_index, vertex_collections, model_field_mapping = cls._build_graph( - document, _visited - ) - vertex_let_queries: dict[Type[VertexModel], VariableExpression] = {} - vertices_ids = {} - edge_ids = defaultdict(lambda: defaultdict(list)) - for v in vertex_collections: - vertex_docs = list(vertex_collections[v].values()) - vertices_ids[v] = [id(doc) for doc in vertex_docs] - from_var, vertex_query = cls._build_vertex_query(v, vertex_docs, strategy) - vertex_let_queries[v] = from_var - query.let(from_var, vertex_query) +def _bind_edge(from_model, instance, rels, to_model, vertex_collections, vertex_let_queries): + from_ = vertex_collections[from_model].keys().index(instance) + new_rels = [vertex_collections[to_model].keys().index(x) for x in rels] + from_var = vertex_let_queries[from_model] + to_var = vertex_let_queries[to_model] + iterator = IteratorExpression() + ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} # noqa: PyProtectedMember + return iterator, new_rels, ret - main = VariableExpression() - query.let(main, First(vertex_let_queries[document.__class__])) - edge_let_queries = {} - for e, coll in edge_vertex_index.items(): - edge_vars = [] - for (from_model, to_model), instances in coll.items(): - for instance, rels in instances.items(): - iterator, new_rels, ret = cls._bind_edge( - from_model, instance, rels, to_model, vertex_collections, vertex_let_queries - ) - v = VariableExpression() - query.let(v, for_(iterator, new_rels).return_(ret)) - merger = IteratorExpression() - edge = VariableExpression() - query.let(edge, edge_collections[e][instance]) - edge_ids[e][instance].extend([id(doc) for doc in edge_collections[e][instance]]) +CollectionUpsertOptions: TypeAlias = dict[Union[str, Type["BaseArangoModel"]], UpsertOptions] +ModelFieldMapping: TypeAlias = dict[int, defaultdict[str, list[tuple[int, int]]]] +VerticesIdsMapping: TypeAlias = dict[Type[VertexModel], dict[int, int]] +EdgesIdsMapping: TypeAlias = defaultdict[Type[EdgeModel], defaultdict[int, dict[int, int]]] - merged = VariableExpression() - query.let( - merged, - for_(merger, RangeExpression(0, Length(edge) - 1)).return_(Merge(edge[merger], v[merger])), - ) - edge_vars.append(merged) - edges: Union[VariableExpression, list[VariableExpression]] - if len(edge_vars) > 1: - edges = cast(list[VariableExpression], UnionArrays(*edge_vars)) - elif len(edge_vars) == 1: - edges = edge_vars[0] - else: - continue - edge_iter = IteratorExpression() - # edge_let_queries[e] = edges - edge_let_queries[e] = VariableExpression() - query.let(edge_let_queries[e], cls.build_upsert_query(edge_iter, strategy, e, edges)) +def traverse2( + model: VertexModel, + visited: set, + result, + model_fields_mapping: ModelFieldMapping, + vertices_ids: VerticesIdsMapping, + edges_ids: EdgesIdsMapping, +): + model_id = id(model) + if model_id in visited: + return + + if isinstance(model, VertexModel): + visited.add(model_id) + + v_index = vertices_ids[model.__class__][model_id] + v_obj = result["vertex"][model.Collection.name][v_index] + model.id = v_obj[ID] + model.key = v_obj[KEY] + model.rev = v_obj[REV] + + models: tuple[Type[VertexModel], Optional[Type[EdgeModel]]] + relations = 
list(_group_by_relation(model)) + if relations: + for models, field in relations: + relation_doc = getattr(model, field) + if not relation_doc: + continue + + if isinstance(relation_doc, LazyProxy): + relation_doc = relation_doc.__instance__ + + if model.edges: + for edge_field, obj in model.edges.__dict__.items(): + if isinstance(obj, list): + for i in obj: + _set_edge_operational_fields(result, model_id, edges_ids, i) + else: + _set_edge_operational_fields(result, model_id, edges_ids, obj) + if isinstance(relation_doc, list): + z = zip(relation_doc, getattr(model.edges, field, [])) + for vertex_doc, edge_doc in z: + traverse2(vertex_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) + else: + getattr(model.edges, field) + traverse2(relation_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) + else: + # todo: insert join relation + pass + else: + pass + # model_fields_mapping[id(model)] = {} - fields = defaultdict(list) - for vertex_cls, vertex_ids in vertices_ids.items(): - if vertex_cls == document.__class__: - vertex_ids = vertex_ids[1:] +def _set_edge_operational_fields(result, model_id, edges_ids, i): + e_obj = result["edges"][i.Collection.name][edges_ids[i.__class__][model_id][id(i)]] + i.id = e_obj[ID] + i.key = e_obj[KEY] + i.rev = e_obj[REV] + i.from_ = e_obj[FROM] + i.to = e_obj[TO] - for i, v_id in enumerate(vertex_ids): - obj2 = ctypes.cast(v_id, ctypes.py_object).value - model_fields = model_field_mapping[v_id].get(vertex_cls, {}) - for j, field in enumerate(model_fields.values()): - if vertex_cls == document.__class__: - if vertex_cls.__relationships__[field].link_type in LIST_TYPES: - fields[field].append(vertex_let_queries[vertex_cls][i + j + 1]) - else: - fields[field] = vertex_let_queries[vertex_cls][i + j + 1] - else: - if vertex_cls.__relationships__[field].link_type in LIST_TYPES: - fields[field].append(vertex_let_queries[vertex_cls][i + j]) - else: - fields[field] = vertex_let_queries[vertex_cls][i + j + 1] - # todo: handle recursive - break - - edges = defaultdict(list) - # edges ={} - for edge_cls, edge_ids in edge_ids.items(): - for i, e_id in enumerate(edge_ids): - obj2 = ctypes.cast(e_id, ctypes.py_object).value - model_fields = model_field_mapping[e_id].get(edge_cls, {}) - for j, field in enumerate(model_fields.values()): - # obj2 = ctypes.cast(relation_doc, ctypes.py_object).value - var = VariableExpression() - query.let(var, edge_let_queries[edge_cls]) - if vertex_cls.__relationships__[field].link_type in LIST_TYPES: - edges[field].append(var[i + j]) - else: - edges[field] = var[i + j] - - # todo: handle recursive - break - break - - # for vertex_id,v in model_field_mapping.items(): - # vertices_ids[] - # pass - return query.return_(Merge(main, fields, {"edges": edges})) - # { - # "main": main, - # "fields":fields, - # "vertex": {k.Collection.name: v for k, v in vertex_let_queries.items()}, - # "edges": {k.Collection.name: v for k, v in edge_let_queries.items()}, - # "edges": edges, - # } - # ) +EdgeCollectionsMapping: TypeAlias = dict[Type[EdgeModel], IndexedOrderedDict[list[EdgeModel]]] +EdgeVerticesIndexMapping = dict[ + Type[EdgeModel], dict[tuple[Type[VertexModel], Type[VertexModel]], dict[int, list[int]]] +] - @classmethod - def _bind_edge(cls, from_model, instance, rels, to_model, vertex_collections, vertex_let_queries): - from_ = vertex_collections[from_model].keys().index(instance) - new_rels = [vertex_collections[to_model].keys().index(x) for x in rels] - from_var = vertex_let_queries[from_model] - to_var = 
vertex_let_queries[to_model] - iterator = IteratorExpression() - ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} # noqa: PyProtectedMember - return iterator, new_rels, ret +VertexCollectionsMapping = dict[Type[VertexModel], IndexedOrderedDict[BaseArangoModel]] - @classmethod - def _build_vertex_query(cls, v, vertices_docs, strategy: UpdateStrategy): - i = IteratorExpression() - from_var = VariableExpression() - query = cls.build_upsert_query(i, strategy, v, vertices_docs) - return from_var, query - @classmethod - def build_upsert_query( - cls, - i: IteratorExpression, - strategy: UpdateStrategy, - model: Type[BaseArangoModel], - docs: Union[VariableExpression, list[VariableExpression]], - ): - filter_ = _get_upsert_filter(i, model) - query = for_(i, in_=docs) - query = _make_upsert_query(filter_, i, model, query, strategy, None).return_(NEW()) - return query +class PydangoSession: + def __init__(self, database: StandardDatabase): + self.database = database @classmethod - def _build_graph(cls, document: VertexModel, _visited: set[int]): - vertex_collections: dict[Type[VertexModel], IndexedOrderedDict[ArangoModel]] = OrderedDict() - edge_collections: dict[Type[EdgeModel], IndexedOrderedDict[list[TEdge]]] = OrderedDict() - edge_vertex_index: dict[ - Type[EdgeModel], dict[tuple[Type[VertexModel], Type[VertexModel]], dict[int, list[int]]] - ] = defaultdict(lambda: defaultdict(lambda: defaultdict(list))) - model_fields_mapping: dict[int, defaultdict[str, list[tuple[int, int]]]] = {} + def _build_graph( + cls, document: VertexModel, _visited: set[int] + ) -> tuple[EdgeCollectionsMapping, EdgeVerticesIndexMapping, VertexCollectionsMapping, ModelFieldMapping]: + vertex_collections: VertexCollectionsMapping = OrderedDict() + edge_collections: EdgeCollectionsMapping = OrderedDict() + edge_vertex_index: EdgeVerticesIndexMapping = defaultdict(lambda: defaultdict(lambda: defaultdict(list))) + model_fields_mapping: ModelFieldMapping = {} def _prepare_relation(field, model, edge_cls, edge_doc, relation_doc): model_id = id(model) if edge_doc: edge_collections.setdefault(edge_cls, IndexedOrderedDict()).setdefault(model_id, []).append(edge_doc) - if model_id not in model_fields_mapping: - model_fields_mapping[model_id] = {relation_doc.__class__: {}} + _add_model_field_to_mapping(model, field, relation_doc, edge_doc) + + edge_vertex_index[edge_cls][model.__class__, relation_doc.__class__][model_id].append(id(relation_doc)) - if edge_cls not in model_fields_mapping[model_id]: - model_fields_mapping[model_id][edge_cls] = {} + def _add_model_field_to_mapping(model, field, relation_doc, edge_doc): + model_id = id(model) - model_fields_mapping[model_id][relation_doc.__class__][id(relation_doc)] = field - model_fields_mapping[model_id][edge_cls][id(edge_doc)] = field + mapping = model_fields_mapping.setdefault(model.__class__, {}) + model_mapping = mapping.setdefault(model_id, {}) - edge_vertex_index[edge_cls][model.__class__, relation_doc.__class__][model_id].append(id(relation_doc)) + if model.__relationships__[field].link_type in LIST_TYPES: + model_mapping.setdefault(field, []).append({"v": id(relation_doc), "e": id(edge_doc)}) + else: + model_mapping[field] = {"v": id(relation_doc), "e": id(edge_doc)} - def traverse(model: TVertexModel, visited: set): + def traverse(model: VertexModel, visited: set[int]): if id(model) in visited: return @@ -306,7 +269,7 @@ def traverse(model: TVertexModel, visited: set): edge_cls: Optional[Type[EdgeModel]] = models[1] relation_doc = getattr(model, field) if not 
relation_doc: - model_fields_mapping[id(model)] = {} + _add_model_field_to_mapping(model, field, None, None) continue if isinstance(relation_doc, LazyProxy): @@ -315,9 +278,12 @@ def traverse(model: TVertexModel, visited: set): if model.edges: if isinstance(model.edges, dict): convert_edge_data_to_valid_kwargs(model.edges) + # todo: this initiate the class edge model so it validates the edges, should we do that? model.edges = model.__fields__[EDGES].type_(**model.edges) if isinstance(relation_doc, list): + if len(getattr(model.edges, field, [])) != len(relation_doc): + raise AssertionError(f"{model.__class__.__name__} vertex edges {field} number mismatch") z = zip(relation_doc, getattr(model.edges, field, [])) for vertex_doc, edge_doc in z: _prepare_relation(field, model, edge_cls, edge_doc, vertex_doc) @@ -330,15 +296,91 @@ def traverse(model: TVertexModel, visited: set): # todo: insert join relation pass else: - model_fields_mapping[id(model)] = {} + pass + # model_fields_mapping[id(model)] = {} traverse(document, _visited) return edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping + @classmethod + def _build_graph_query( + cls, + document: VertexModel, + strategy: UpdateStrategy = UpdateStrategy.UPDATE, + collection_options: Union[CollectionUpsertOptions, None] = None, + ) -> tuple[ModelFieldMapping, VerticesIdsMapping, EdgesIdsMapping, ORMQuery]: + query = ORMQuery() + _visited: set[int] = set() + edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping = cls._build_graph( + document, _visited + ) + vertex_let_queries: dict[Type[VertexModel], VariableExpression] = {} + vertices_ids: VerticesIdsMapping = {} + edge_ids: EdgesIdsMapping = defaultdict(lambda: defaultdict(dict)) + for v in vertex_collections: + vertex_docs = list(vertex_collections[v].values()) + vertices_ids[v] = {id(doc): i for i, doc in enumerate(vertex_docs)} + from_var, vertex_query = _build_vertex_query(v, vertex_docs, strategy) + vertex_let_queries[v] = from_var + + query.let(from_var, vertex_query) + + edge_let_queries = {} + + for e, coll in edge_vertex_index.items(): + counter = 0 + edge_vars = [] + for (from_model, to_model), instances in coll.items(): + for instance, rels in instances.items(): + iterator, new_rels, ret = _bind_edge( + from_model, instance, rels, to_model, vertex_collections, vertex_let_queries + ) + var = VariableExpression() + query.let(var, for_(iterator, new_rels).return_(ret)) + + merger = IteratorExpression() + edge = VariableExpression() + query.let(edge, edge_collections[e][instance]) + edge_ids[e][instance].update( + {id(doc): i + counter for i, doc in enumerate(edge_collections[e][instance])} + ) + + merged = VariableExpression() + query.let( + merged, + for_(merger, RangeExpression(0, Length(edge) - 1)).return_(Merge(edge[merger], var[merger])), + ) + edge_vars.append(merged) + counter += len(edge_collections[e][instance]) + edges: Union[VariableExpression, list[VariableExpression]] + if len(edge_vars) > 1: + edges = cast(list[VariableExpression], UnionArrays(*edge_vars)) + elif len(edge_vars) == 1: + edges = edge_vars[0] + else: + continue + + edge_iter = IteratorExpression() + edge_let_queries[e] = VariableExpression() + query.let(edge_let_queries[e], _build_upsert_query(edge_iter, strategy, e, edges, edge=True)) + + return ( + model_fields_mapping, + vertices_ids, + edge_ids, + query.return_( + { + "vertex": {k.Collection.name: v for k, v in vertex_let_queries.items()}, + "edges": {k.Collection.name: v for k, v in 
edge_let_queries.items()}, + } + ), + ) + async def init(self, model: Type[BaseArangoModel]): collection = await get_or_create_collection(self.database, model) if model.Collection.indexes: logger.debug("creating indexes", extra=dict(indexes=model.Collection.indexes, model=model)) + for i in model.Collection.indexes or []: if isinstance(i, dict): await index.mapping[i.__class__](collection, **i) @@ -352,8 +394,11 @@ async def save( # todo: follow_links: bool = False, collection_options: Union[CollectionUpsertOptions, None] = None, ) -> ArangoModel: + model_fields_mapping = None if isinstance(document, VertexModel): - query = self._build_graph_query(document, collection_options=collection_options) + model_fields_mapping, vertices_ids, edge_ids, query = self._build_graph_query( + document, collection_options=collection_options + ) else: options = ( collection_options @@ -366,10 +411,12 @@ async def save( cursor = await query.execute(self.database) result = await cursor.next() + if model_fields_mapping: + traverse2(cast(VertexModel, document), set(), result, model_fields_mapping, vertices_ids, edge_ids) logger.debug("cursor stats", extra=cursor.statistics()) - return document.__class__.from_orm(result, session=self) + return document - async def get(self, model: Type[ArangoModel], _id: str, should_raise=False) -> Optional[ArangoModel]: + async def get(self, model: Type[BaseArangoModel], _id: str, should_raise=False) -> Optional[BaseArangoModel]: collection_name = model.Collection.name collection = self.database.collection(collection_name) try: @@ -377,11 +424,11 @@ async def get(self, model: Type[ArangoModel], _id: str, should_raise=False) -> O # result[DALI_SESSION_KW] = self if result is None and should_raise: raise DocumentNotFoundError() - document: ArangoModel = model.from_orm(result, session=self) + document: BaseArangoModel = model.from_orm(result, session=self) return document except DocumentNotFoundError: return None - async def find(self, model: Type[ArangoModel], filters=None, skip=None, limit=None): + async def find(self, model: Type[BaseArangoModel], filters=None, skip=None, limit=None): collection = _collection_from_model(self.database, model) return await collection.find(filters, skip, limit) diff --git a/pydango/connection/utils.py b/pydango/connection/utils.py index 4ef4cea..8daa387 100644 --- a/pydango/connection/utils.py +++ b/pydango/connection/utils.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Optional, Type, Union, overload +from typing import TYPE_CHECKING, Awaitable, Optional, Type, Union, cast, overload import aioarango @@ -37,12 +37,12 @@ async def get_or_create_collection( if not await db.has_collection(collection_name): try: - return await db.create_collection(collection_name, edge=edge) + return await cast(Awaitable[StandardCollection], db.create_collection(collection_name, edge=edge)) except aioarango.exceptions.CollectionCreateError as e: if e.error_code != 1207: raise e - else: - return db.collection(collection_name) + + return db.collection(collection_name) async def get_or_create_db(client: "ArangoClient", db: str, user: str = "", password: str = "") -> "StandardDatabase": diff --git a/pydango/index.py b/pydango/index.py index f56705e..d2513ea 100644 --- a/pydango/index.py +++ b/pydango/index.py @@ -1,15 +1,18 @@ import sys from dataclasses import dataclass -from typing import Optional, Sequence, Union +from typing import TYPE_CHECKING, Awaitable, Callable, Optional, Sequence, Type, Union if sys.version_info >= (3, 10): from typing import TypeAlias 
else: from typing_extensions import TypeAlias +from aioarango.collection import Collection +from aioarango.result import Result +from aioarango.typings import Json -import aioarango.database -from aioarango.typings import Fields +if TYPE_CHECKING: + from aioarango.typings import Fields @dataclass() @@ -19,7 +22,7 @@ class Index: @dataclass() class GeoIndex(Index): - fields: Fields + fields: "Fields" ordered: Optional[bool] = None name: Optional[str] = None in_background: Optional[bool] = None @@ -70,13 +73,13 @@ class TTLIndex(Index): in_background: Optional[bool] = None -mapping = { - GeoIndex: aioarango.database.StandardCollection.add_geo_index, - PersistentIndex: aioarango.database.StandardCollection.add_persistent_index, - FullTextIndex: aioarango.database.StandardCollection.add_fulltext_index, - SkipListIndex: aioarango.database.StandardCollection.add_skiplist_index, - TTLIndex: aioarango.database.StandardCollection.add_ttl_index, - HashIndex: aioarango.database.StandardCollection.add_hash_index, -} - Indexes: TypeAlias = Union[GeoIndex, HashIndex, SkipListIndex, FullTextIndex, PersistentIndex, TTLIndex] + +mapping: dict[Type[Indexes], Callable[..., Awaitable[Result[Json]]]] = { + GeoIndex: Collection.add_geo_index, + HashIndex: Collection.add_hash_index, + SkipListIndex: Collection.add_skiplist_index, + FullTextIndex: Collection.add_fulltext_index, + PersistentIndex: Collection.add_persistent_index, + TTLIndex: Collection.add_ttl_index, +} diff --git a/pydango/orm/fields.py b/pydango/orm/fields.py index 0650400..a0f22ac 100644 --- a/pydango/orm/fields.py +++ b/pydango/orm/fields.py @@ -2,8 +2,6 @@ from typing import TYPE_CHECKING, Generic, Optional, Type, TypeVar, Union, cast -from pydantic.fields import ModelField - from pydango.connection import DALI_SESSION_KW from pydango.orm.proxy import LazyProxy from pydango.query.expressions import ( @@ -14,6 +12,8 @@ ) if TYPE_CHECKING: + from pydantic.fields import ModelField # type: ignore[attr-defined] + from pydango.orm.models import BaseArangoModel, Relationship from pydango.orm.types import ArangoModel from pydango.query import AQLQuery @@ -51,7 +51,7 @@ def __set__(self, instance, value): def __get__( self, instance: Optional[ArangoModel], owner: Type[BaseArangoModel] - ) -> Union[LazyProxy, ModelFieldExpression, None]: + ) -> Union[LazyProxy[ArangoModel], ModelFieldExpression, None]: if not instance and self.field.name in owner.__fields__.keys(): return ModelFieldExpression(self.field.name, owner) diff --git a/pydango/orm/models.py b/pydango/orm/models.py index e347b6d..2cdc5db 100644 --- a/pydango/orm/models.py +++ b/pydango/orm/models.py @@ -1,7 +1,8 @@ from __future__ import annotations +import logging import sys -from abc import ABC, abstractmethod +from abc import ABC, ABCMeta, abstractmethod from enum import Enum from typing import ( TYPE_CHECKING, @@ -22,11 +23,11 @@ ) import pydantic.typing -from pydantic.fields import ConfigError +from pydantic.fields import ConfigError # type: ignore[attr-defined] from pydango.orm.consts import EDGES from pydango.orm.encoders import jsonable_encoder -from pydango.orm.types import ArangoModel, TEdge +from pydango.orm.types import ArangoModel from pydango.orm.utils import convert_edge_data_to_valid_kwargs, get_globals from pydango.query.consts import FROM, ID, KEY, REV, TO @@ -36,7 +37,7 @@ from typing_extensions import TypeAlias, dataclass_transform from pydantic import BaseConfig, BaseModel -from pydantic.fields import ( +from pydantic.fields import ( # type: ignore[attr-defined] 
SHAPE_FROZENSET, SHAPE_ITERABLE, SHAPE_LIST, @@ -63,11 +64,18 @@ from pydango.orm.fields import DocFieldDescriptor from pydango.orm.relations import LIST_TYPES, LinkTypes +logger = logging.getLogger(__name__) + if TYPE_CHECKING: - from pydantic.fields import LocStr, ValidateReturn + from pydantic.fields import LocStr, ValidateReturn # type: ignore[attr-defined] from pydantic.main import Model from pydantic.types import ModelOrDc - from pydantic.typing import DictStrAny, MappingIntStrAny, ReprArgs + from pydantic.typing import ( + AbstractSetIntStr, + DictStrAny, + MappingIntStrAny, + ReprArgs, + ) from pydango.connection.session import PydangoSession @@ -111,7 +119,7 @@ def __init__( field: ModelField, back_populates: Optional[str] = None, link_model: Type[VertexModel], - via_model: Optional[Type[TEdge]] = None, + via_model: Optional[Type[EdgeModel]] = None, link_type: LinkTypes, ): self.via_model = via_model @@ -234,7 +242,7 @@ class EdgeData(BaseModel): @dataclass_transform(kw_only_default=True, field_specifiers=(ArangoField,)) -class ArangoModelMeta(ModelMetaclass): +class ArangoModelMeta(ModelMetaclass, ABCMeta): def __new__(mcs, name, bases, namespace, **kwargs): parents = [b for b in bases if isinstance(b, mcs)] if not parents or BaseArangoModel in parents: @@ -283,14 +291,14 @@ def __new__(mcs, name, bases, namespace, **kwargs): dict_used, **kwargs, ) - relationship_fields = {} + __relationship_fields__ = {} for field_name, field in [(k, v) for k, v in new_cls.__fields__.items() if k != EDGES]: if field_name in relationships: model_field = get_pydango_field(field, RelationModelField) # todo improve this relationships[field_name].field = model_field - relationship_fields[field_name] = model_field + __relationship_fields__[field_name] = model_field new_cls.__fields__[field_name] = model_field setattr( @@ -306,7 +314,7 @@ def __new__(mcs, name, bases, namespace, **kwargs): new_cls.__relationships__ = relationships - new_cls.__relationships_fields__ = relationship_fields + new_cls.__relationships_fields__ = __relationship_fields__ new_cls.__annotations__ = { # **relationship_annotations, **original_annotations, @@ -323,7 +331,7 @@ def __new__(mcs, name, bases, namespace, **kwargs): Relationships: TypeAlias = dict[str, Relationship] -class BaseArangoModel(BaseModel, ABC, metaclass=ArangoModelMeta): +class BaseArangoModel(BaseModel, metaclass=ArangoModelMeta): id: Optional[str] = Field(None, alias=ID) key: Optional[str] = Field(None, alias=KEY) rev: Optional[str] = Field(None, alias=REV) @@ -398,14 +406,14 @@ def update_forward_refs(cls, **localns: Any) -> None: if isinstance(relation.via_model, ForwardRef): relation.via_model = pydantic.typing.evaluate_forwardref(relation.via_model, get_globals(cls), localns) # cls.__edges_model__.update_forward_refs(**localns) - for field in cls.__edges_model__.__fields__.values(): - # update_field_forward_refs(field, get_globals(cls), localns) - # field.type_ = pydantic.typing.evaluate_forwardref(field.type_, get_globals(cls), localns) - # field.outer_type_ = pydantic.typing.evaluate_forwardref(field.outer_type_, get_globals(cls), localns) - # relation.via_model = pydantic.typing.evaluate_forwardref(relation.via_model, get_globals(cls), localns) - pass - # - # print(field) + # for field in cls.__edges_model__.__fields__.values(): + # update_field_forward_refs(field, get_globals(cls), localns) + # field.type_ = pydantic.typing.evaluate_forwardref(field.type_, get_globals(cls), localns) + # field.outer_type_ = 
pydantic.typing.evaluate_forwardref(field.outer_type_, get_globals(cls), localns) + # relation.via_model = pydantic.typing.evaluate_forwardref(relation.via_model, get_globals(cls), localns) + # pass + # + # print(field) @abstractmethod def save_dict(self) -> DictStrAny: @@ -436,10 +444,6 @@ def save_dict(self) -> DictStrAny: # return self.dict(by_alias=True, exclude=exclude) -# EdgeModel.update_forward_refs() -# VertexModel.update_forward_refs() - - def save_dict(model: BaseArangoModel): return model.save_dict() @@ -468,28 +472,28 @@ def dict( exclude_none: bool = False, include_edges: bool = False, ) -> DictStrAny: - if include_edges: - self.__exclude_fields__.pop("edges") - # if include_edges and include: - # include_keys = {"edges"} - # include_keys &= include.keys() - # elif include_edges: - # include_keys = set(self.__dict__.keys()) - # include_keys = include_keys.union( {"edges"}) - # else: - # include_keys = None - - super__dict = super().dict( - include=include, - exclude=exclude, - by_alias=by_alias, - skip_defaults=skip_defaults, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) + d = cast(dict, self.__exclude_fields__) + if include_edges and self.__exclude_fields__: + d.pop("edges") + + try: + super__dict = super().dict( + include=include, + exclude=exclude, + by_alias=by_alias, + skip_defaults=skip_defaults, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + except RecursionError as e: + raise AssertionError( + "is not possible to call .dict() when using recursive model, instead traverse the graph and collect" + " data or exclude recursive fields" + ) from e + if self.__exclude_fields__: + d["edges"] = True - self.__exclude_fields__["edges"] = True return super__dict def save_dict(self) -> DictStrAny: diff --git a/pydango/orm/proxy.py b/pydango/orm/proxy.py index 4779dfa..910b8e6 100644 --- a/pydango/orm/proxy.py +++ b/pydango/orm/proxy.py @@ -15,9 +15,9 @@ def __new__(cls, name, bases, namespace, **kwargs): return super().__new__(cls, name, bases, namespace, **kwargs) -class LazyProxy(Generic[ArangoModel], metaclass=LazyProxyMeta): +class LazyProxy(Generic[ArangoModel]): _initialized: bool = False - __instance__ = None + __instance__: Optional[ArangoModel] = None def __init__(self, instance, field, session: Optional["PydangoSession"]): self.session = session diff --git a/pydango/orm/types.py b/pydango/orm/types.py index 4142333..9226fc4 100644 --- a/pydango/orm/types.py +++ b/pydango/orm/types.py @@ -4,5 +4,5 @@ from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel # noqa: F401 ArangoModel = TypeVar("ArangoModel", bound="BaseArangoModel") -TEdge = TypeVar("TEdge", bound="EdgeModel") +# TEdge = TypeVar("TEdge", bound="EdgeModel") TVertexModel = TypeVar("TVertexModel", bound="VertexModel") diff --git a/pydango/query/expressions.py b/pydango/query/expressions.py index 0a26406..9ebb0cc 100644 --- a/pydango/query/expressions.py +++ b/pydango/query/expressions.py @@ -2,6 +2,7 @@ import sys from abc import ABC, abstractmethod from copy import deepcopy +from enum import Enum from functools import lru_cache from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence, Type, Union, cast @@ -11,12 +12,17 @@ from typing import TypeAlias else: from typing_extensions import TypeAlias -from pydango.query.utils import SortDirection + if TYPE_CHECKING: from pydango.query.query import AQLQuery +class SortDirection(str, Enum): + ASC = "ASC" + DESC = "DESC" + + class 
ReturnableMixin(ABC): """ Base class for returnable expressions diff --git a/pydango/query/functions.py b/pydango/query/functions.py index 45e5f77..630ee0f 100644 --- a/pydango/query/functions.py +++ b/pydango/query/functions.py @@ -18,8 +18,6 @@ class BaseFunctionExpression(Expression): - returns = None - def __init__(self, name, *arguments): self.name = name arguments = list(arguments) diff --git a/pydango/query/operations.py b/pydango/query/operations.py index 81802d4..94a7af9 100644 --- a/pydango/query/operations.py +++ b/pydango/query/operations.py @@ -26,6 +26,7 @@ ObjectExpression, QueryExpression, ReturnableMixin, + SortDirection, SortExpression, VariableExpression, ) @@ -38,7 +39,7 @@ UpdateOptions, UpsertOptions, ) -from pydango.query.utils import Compilable, SortDirection +from pydango.query.utils import Compilable if TYPE_CHECKING: from pydango.query.query import AQLQuery diff --git a/pydango/query/query.py b/pydango/query/query.py index 1c5e239..368ace1 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -394,7 +394,7 @@ def collect( return self def _serialize_vars(self): - self.compiled_vars = jsonable_encoder(self.bind_vars, by_alias=True) + return jsonable_encoder(self.bind_vars, by_alias=True) async def execute(self, db: Database, **options): compiled = self.compile() diff --git a/pydango/query/utils.py b/pydango/query/utils.py index e872fbf..2608dd3 100644 --- a/pydango/query/utils.py +++ b/pydango/query/utils.py @@ -1,15 +1,20 @@ -from enum import Enum from typing import Protocol, TypeVar, Union +from pydango.query.consts import FROM, ID, KEY, REV, TO +from pydango.query.expressions import NEW + class Compilable(Protocol): def compile(self, *args, **kwargs) -> Union[str, None]: ... -class SortDirection(str, Enum): - ASC = "ASC" - DESC = "DESC" +T = TypeVar("T") -T = TypeVar("T") +def new(*, edge=False) -> dict[str, str]: + _new = NEW() + d = {ID: _new[ID], KEY: _new[KEY], REV: _new[REV]} + if edge: + d.update({FROM: _new[FROM], TO: _new[TO]}) + return d diff --git a/pyproject.toml b/pyproject.toml index 5b61615..eada92d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,10 +77,11 @@ asyncio_mode = "auto" #] [tool.bandit.assert_used] -skips = ['**/test_*.py', '**/test_*.py'] +skips = ["tests/utils*.py", '**/test_*.py', '**/test_*.py'] [tool.ruff] line-length = 120 +exclude = ["stubs"] [tool.mypy] mypy_path = "./stubs/" @@ -96,3 +97,10 @@ exclude = """ plugins = ["pydantic.mypy"] warn_redundant_casts = true + +[[tool.mypy.overrides]] +module = [ + "requests_toolbelt", + "indexed" +] +ignore_missing_imports = true diff --git a/tests/conftest.py b/tests/conftest.py index b92e9af..34bdedc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,8 +5,9 @@ from typing import AsyncGenerator, TypeVar import pytest +import pytest_asyncio from aioarango import ArangoClient -from aioarango.database import Database +from aioarango.database import Database, StandardDatabase from pydango.connection.utils import get_or_create_db from pydango.query.expressions import NEW @@ -65,8 +66,8 @@ def format(self, record): handler = logging.StreamHandler(stream=sys.stdout) handler.setFormatter(formatter) logging.getLogger("pydango").addHandler(handler) - with caplog.at_level(logging.DEBUG, "pydango"): - yield + # with caplog.at_level(logging.DEBUG, "pydango"): + # yield T = TypeVar("T") @@ -82,13 +83,13 @@ async def client() -> AsyncFixture[ArangoClient]: @pytest.fixture(scope="session") -async def database(client: ArangoClient) -> AsyncFixture[Database]: +async def 
database(client: ArangoClient) -> AsyncFixture[StandardDatabase]: db = await get_or_create_db(client, "pydango") yield db # await (await client.db("_system")).delete_database("pydango") -@pytest.fixture(scope="session", autouse=True) +@pytest_asyncio.fixture(scope="session", autouse=True) async def populate(database: Database): responses = defaultdict(list) for coll in DATA: diff --git a/tests/session/test_cities.py b/tests/session/test_cities.py index 6a3488e..cc76369 100644 --- a/tests/session/test_cities.py +++ b/tests/session/test_cities.py @@ -1,5 +1,5 @@ import datetime -from typing import Annotated +from typing import TYPE_CHECKING, Annotated import pytest @@ -12,6 +12,10 @@ VertexCollectionConfig, VertexModel, ) +from tests.utils import assert_equals_dicts + +if TYPE_CHECKING: + from aioarango.database import StandardDatabase class Visited(EdgeModel): @@ -64,10 +68,7 @@ class Collection(VertexCollectionConfig): @pytest.mark.asyncio -async def test_save(database): - # await get_or_create_db(client, "pydango") - # db = await client.db("pydango") - +async def test_save(database: "StandardDatabase"): session = PydangoSession(database) await session.init(Person) await session.init(City) @@ -91,27 +92,49 @@ async def test_save(database): ) p = await session.save(p) + from unittest.mock import ANY + + expected = { + "age": 35, + "edges": { + "lives_in": { + "from_": ANY, + "id": ANY, + "key": ANY, + "rev": ANY, + "since": datetime.datetime(2023, 7, 1, 18, 16, 38, 350095), + "to": ANY, + }, + "visited": [ + { + "from_": ANY, + "id": ANY, + "key": ANY, + "on_date": datetime.date(2023, 7, 1), + "rating": 10, + "rev": ANY, + "to": ANY, + }, + { + "from_": ANY, + "id": ANY, + "key": ANY, + "on_date": datetime.date(2023, 7, 1), + "rating": 10, + "rev": ANY, + "to": ANY, + }, + ], + }, + "id": ANY, + "key": ANY, + "lives_in": {"id": ANY, "key": ANY, "name": "tlv", "population": 123, "rev": ANY}, + "name": "John", + "rev": ANY, + "visited": [ + {"id": ANY, "key": ANY, "name": "New York", "population": 123, "rev": ANY}, + {"id": ANY, "key": ANY, "name": "Amsterdam", "population": 123, "rev": ANY}, + ], + } - print(p) - # city = City(name="BsAs", population=33) - # await session.save(city) - # p = await session.get(Person, "356900") - # await session.save() - - # new_q = NEW() - # city_q = ORMQuery().insert(city).return_(new_q._id) - # new_p = NEW() - # person_q = ORMQuery().insert(p).return_(new_p) - # lives_in_q = ORMQuery().insert(LivesIn(from_=new_p._id, to=new_q._id)) - - # await p.lives_in - - # print(type(p.lives_in)) - # print(id(p.lives_in.name)) - # print(p.lives_in.fetch()) - - # print(p.dict(exclude={"id"})) - - -# if __name__ == "__main__": -# asyncio.run(run()) + assert_equals_dicts(p.dict(include_edges=True), expected) diff --git a/tests/session/test_family.py b/tests/session/test_family.py index 46da435..d32561d 100644 --- a/tests/session/test_family.py +++ b/tests/session/test_family.py @@ -1,10 +1,11 @@ -from pprint import pprint -from typing import Annotated, Optional +import json +from typing import Annotated, Optional, Sequence import pytest from pydango.connection.session import PydangoSession from pydango.index import PersistentIndex +from pydango.orm.encoders import jsonable_encoder from pydango.orm.models import ( EdgeCollectionConfig, EdgeModel, @@ -38,7 +39,7 @@ class Collection(EdgeCollectionConfig): def test_obj(): - a = Person.parse_obj( + Person.parse_obj( { "_id": "people/29887", "_key": "29887", @@ -158,4 +159,28 @@ async def test_save(database): p = await 
session.save(john) - pprint(p.dict(include_edges=True)) + def traverse_recursive_fields(p, recursive_fields, visited): + if isinstance(p, Sequence): + for i in p: + traverse_recursive_fields(i, exclude, visited) + + else: + d = p.dict(include_edges=False, exclude=recursive_fields) + for recursive_field in recursive_fields: + attr = getattr(p, recursive_field) + + for i in attr: + d[recursive_field] = i.dict(include_edges=False, exclude=recursive_fields) + visited.add(id(i)) + if id(attr) in visited: + return d + visited.add(id(attr)) + traverse_recursive_fields(attr, exclude, visited) + return d + + exclude = { + "brothers", + "sisters", + } + a = traverse_recursive_fields(p, exclude, visited=set()) + print(json.dumps(jsonable_encoder(a), indent=2)) diff --git a/tests/session/test_social_network.py b/tests/session/test_social_network.py index 5813f23..fdb88d9 100644 --- a/tests/session/test_social_network.py +++ b/tests/session/test_social_network.py @@ -1,5 +1,6 @@ import datetime from typing import Annotated, List, Optional, Type, Union +from unittest.mock import ANY import pytest from aioarango.database import StandardDatabase @@ -13,6 +14,7 @@ VertexCollectionConfig, VertexModel, ) +from tests.utils import assert_equals_dicts class Post(VertexModel): @@ -115,7 +117,7 @@ class Collection(EdgeCollectionConfig): user1.friends = [user2, user3, user4] user1.posts = [post1] user1.comments = [comment1, comment2] -user1.likes = [comment2, post2] +user1.likes = [comment2] # user2.friends = [user1, user3] # user2.posts = [post1] @@ -133,7 +135,7 @@ class Collection(EdgeCollectionConfig): # user4.likes = [comment1, comment2] # user1.edges = { - User.friends: [friendship1, friendship2, friendship3, friendship4], + User.friends: [friendship1, friendship2, friendship3], User.comments: [commentary1, commentary2], User.posts: [authorship1], User.likes: [like1], @@ -187,3 +189,135 @@ async def test_save(database: StandardDatabase): await session.init(i) await session.save(user1) + + expected = { + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "age": 25, + "comments": [ + {"_id": ANY, "_key": ANY, "_rev": ANY, "text": "Great post!"}, + {"_id": ANY, "_key": ANY, "_rev": ANY, "text": "I enjoyed reading this."}, + ], + "edges": { + "comments": [ + { + "_from": ANY, + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "_to": ANY, + "commented_at": datetime.datetime(2023, 7, 1, 18, 53, 20, 121092), + }, + { + "_from": ANY, + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "_to": ANY, + "commented_at": datetime.datetime(2023, 7, 1, 18, 53, 20, 121098), + }, + ], + "friends": [ + { + "_from": ANY, + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "_to": ANY, + "since": datetime.date(2020, 1, 1), + }, + { + "_from": ANY, + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "_to": ANY, + "since": datetime.date(2021, 3, 15), + }, + { + "_from": ANY, + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "_to": ANY, + "since": datetime.date(2022, 5, 10), + }, + ], + "likes": [ + { + "_from": ANY, + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "_to": ANY, + "liked_at": datetime.datetime(2023, 7, 1, 18, 53, 20, 121134), + } + ], + "posts": [ + { + "_from": ANY, + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "_to": ANY, + "created_at": datetime.datetime(2023, 7, 1, 18, 53, 20, 121070), + } + ], + }, + "email": "john@example.com", + "friends": [ + { + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "age": 30, + "comments": None, + "edges": None, + "email": "jane@example.com", + "friends": None, + "likes": None, + "name": "Jane", + "posts": None, + }, + { + 
"_id": ANY, + "_key": ANY, + "_rev": ANY, + "age": 28, + "comments": None, + "edges": None, + "email": "alice@example.com", + "friends": None, + "likes": None, + "name": "Alice", + "posts": None, + }, + { + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "age": 32, + "comments": None, + "edges": None, + "email": "bob@example.com", + "friends": None, + "likes": None, + "name": "Bob", + "posts": None, + }, + ], + "likes": [{"_id": ANY, "_key": ANY, "_rev": ANY, "text": "I enjoyed reading this."}], + "name": "John", + "posts": [ + { + "_id": ANY, + "_key": ANY, + "_rev": ANY, + "comments": [{"_id": "comments/62920", "_key": "62920", "_rev": "_gPGmmm2--D", "text": "Great post!"}], + "content": "This is my first post!", + "title": "First Post", + } + ], + } + assert_equals_dicts(expected, user2.dict(by_alias=True, include_edges=True)) diff --git a/tests/test_orm_query.py b/tests/test_orm_query.py index 3886f39..4d8247c 100644 --- a/tests/test_orm_query.py +++ b/tests/test_orm_query.py @@ -161,7 +161,7 @@ def test_sub_query(): def test_insert(): - aql = ORMQuery().insert(User(name="nadir", age=35)).return_(NEW()) + aql = ORMQuery().insert(User(name="john", age=35)).return_(NEW()) expected_repr = "INSERT {name: ?, age: ?} INTO RETURN NEW" expected_compiled = "INSERT {name: @param1, age: @param2} INTO `users` RETURN NEW" assert repr(aql) == expected_repr @@ -169,7 +169,7 @@ def test_insert(): def test_remove(): - aql = ORMQuery().remove(User(key="user/123", name="nadir", age=35)).return_(OLD()) + aql = ORMQuery().remove(User(key="user/123", name="john", age=35)).return_(OLD()) expected_repr = "REMOVE {_key: ?} IN RETURN OLD" expected_compiled = "REMOVE {_key: @param1} IN `users` RETURN OLD" assert repr(aql) == expected_repr @@ -177,7 +177,7 @@ def test_remove(): def test_replace(): - aql = ORMQuery().replace(User(name="nadir", age=35), User(name="nadir", age=36)).return_(NEW()) + aql = ORMQuery().replace(User(name="john", age=35), User(name="john", age=36)).return_(NEW()) expected_repr = "REPLACE {name: ?, age: ?} IN RETURN NEW" expected_compiled = "REPLACE {name: @param1, age: @param2} IN `users` RETURN NEW" assert repr(aql) == expected_repr @@ -185,7 +185,7 @@ def test_replace(): def test_update(): - aql = ORMQuery().update(User(name="nadir", age=35), User(name="nadir", age=36)).return_(NEW()) + aql = ORMQuery().update(User(name="john", age=35), User(name="john", age=36)).return_(NEW()) expected_repr = "UPDATE {name: ?, age: ?} IN RETURN NEW" expected_compiled = "UPDATE {name: @param1, age: @param2} IN `users` RETURN NEW" assert repr(aql) == expected_repr @@ -193,8 +193,8 @@ def test_update(): def test_upsert(): - user = User(name="nadir", age=36) - aql = ORMQuery().upsert(User(name="nadir", age=35), insert=user, update=user).return_(NEW()) + user = User(name="john", age=36) + aql = ORMQuery().upsert(User(name="john", age=35), insert=user, update=user).return_(NEW()) expected_repr = ( "UPSERT {name: ?, age: ?} INSERT {name: ?, age: ?} UPDATE {name: ?, age: ?} IN " " RETURN NEW" diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..ac92433 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,39 @@ +from unittest.mock import ANY + + +def _assert(actual, expected, key): + expected_value = expected[key] + actual_value = actual[key] + if expected_value == ANY: + return + + if isinstance(expected_value, dict) and isinstance(actual_value, dict): + assert_equals_dicts(expected_value, actual_value) + elif isinstance(expected_value, list) and isinstance(actual_value, list): + 
assert_equals_lists(expected_value, actual_value) + else: + assert expected_value == actual_value, f"Values for key '{key}' do not match" + + +def assert_equals_dicts(expected, actual): + assert isinstance(actual, dict), "Expected a dictionary for actual value" + assert isinstance(expected, dict), "Expected a dictionary for expected value" + + expected_keys = expected.keys() + actual_keys = actual.keys() + _expected_keys = set(expected_keys) + _actual_keys = set(actual_keys) + assert _actual_keys == _expected_keys, ("Keys in dictionaries do not match", _actual_keys, _expected_keys) + assert actual_keys == expected_keys, "Keys in dictionaries do not match" + for key in expected: + _assert(actual, expected, key) + + +def assert_equals_lists(expected, actual): + assert isinstance(actual, list), "Expected a list for actual value" + assert isinstance(expected, list), "Expected a list for expected value" + + assert len(actual) == len(expected), "Lists have different lengths" + + for i in range(len(expected)): + _assert(actual, expected, i) From 80039e930b6f125fb416805841c508cb708ea112 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Fri, 22 Sep 2023 15:30:29 +0300 Subject: [PATCH 06/19] working tests --- .pre-commit-config.yaml | 5 +- poetry.lock | 236 +++++---- pydango/connection/session.py | 492 +++++++++++++++--- pydango/connection/utils.py | 2 +- pydango/orm/fields.py | 12 +- pydango/orm/models.py | 148 ++++-- pydango/orm/query.py | 30 +- pydango/orm/types.py | 2 +- pydango/orm/utils.py | 8 + pydango/query/expressions.py | 42 +- pydango/query/functions.py | 7 + pydango/query/operations.py | 36 +- pydango/query/query.py | 8 +- pydango/query/utils.py | 4 +- pydango/utils.py | 17 + pyproject.toml | 2 + requirements.txt | 161 ------ stubs/aioarango/__init__.pyi | 2 + tests/conftest.py | 32 +- tests/session/conftest.py | 16 + tests/session/test_cities.py | 148 ++++-- tests/session/test_family.py | 217 ++++++-- tests/session/test_social_network.py | 382 ++++++-------- tests/test_queries/__init__.py | 0 tests/test_queries/conftest.py | 24 + tests/{ => test_queries}/data.py | 0 tests/{ => test_queries}/ecommerce_queries.py | 0 tests/{ => test_queries}/test_ecommerce.py | 4 +- .../test_queries_integration.py | 2 +- 29 files changed, 1314 insertions(+), 725 deletions(-) create mode 100644 pydango/utils.py delete mode 100644 requirements.txt create mode 100644 tests/session/conftest.py create mode 100644 tests/test_queries/__init__.py create mode 100644 tests/test_queries/conftest.py rename tests/{ => test_queries}/data.py (100%) rename tests/{ => test_queries}/ecommerce_queries.py (100%) rename tests/{ => test_queries}/test_ecommerce.py (96%) rename tests/{ => test_queries}/test_queries_integration.py (97%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c0d4fcf..122d177 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -65,6 +65,7 @@ repos: args: - -c - pyproject.toml + - --quiet additional_dependencies: - toml @@ -106,4 +107,6 @@ repos: hooks: - id: poetry-check - id: poetry-lock -# - id: poetry-export + args: + - --no-update + # - id: poetry-export diff --git a/poetry.lock b/poetry.lock index ac5dad6..31b88ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -18,13 +18,13 @@ requests-toolbelt = ">=0.9.1,<0.10.0" [[package]] name = "anyio" -version = "3.7.0" +version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.7" 
files = [ - {file = "anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"}, - {file = "anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce"}, + {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, ] [package.dependencies] @@ -33,7 +33,7 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=6.1.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "sphinxcontrib-jquery"] +doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (<0.22)"] @@ -110,97 +110,97 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", 
hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - 
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + 
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] [[package]] name = "click" -version = "8.1.3" +version = "8.1.4" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.4-py3-none-any.whl", hash = "sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3"}, + {file = "click-8.1.4.tar.gz", hash = "sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37"}, ] [package.dependencies] @@ -302,13 +302,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.1.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, ] [package.extras] @@ -329,6 +329,20 @@ files = [ docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +[[package]] +name = "freezegun" +version = "1.2.2" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.6" +files = [ + {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, + {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + [[package]] name = "h11" version = "0.12.0" @@ -539,13 +553,13 @@ files = [ [[package]] name = "platformdirs" -version = "3.8.0" +version = "3.8.1" description = "A small Python package for determining appropriate 
platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, - {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, + {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, + {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, ] [package.extras] @@ -637,6 +651,17 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pydiction" +version = "0.1.0" +description = "" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pydiction-0.1.0-py3-none-any.whl", hash = "sha256:58f3679ceff25967ddc3fb9bfb2a5ac5f200d8317fe03021d11083cd75112a3c"}, + {file = "pydiction-0.1.0.tar.gz", hash = "sha256:9f93d7bd2e2a9f9be7e10ead3d9e8ea75ef3257a2c2452f0b2a775dddcd29206"}, +] + [[package]] name = "pyjwt" version = "2.7.0" @@ -694,6 +719,20 @@ pytest = ">=7.0.0" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "pyyaml" version = "6.0" @@ -811,6 +850,17 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-g testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "sniffio" version = "1.3.0" @@ -835,13 +885,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.7.0" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.7.0-py3-none-any.whl", hash = "sha256:5d8c9dac95c27d20df12fb1d97b9793ab8b2af8a3a525e68c80e21060c161771"}, - {file = 
"typing_extensions-4.7.0.tar.gz", hash = "sha256:935ccf31549830cda708b42289d44b6f74084d616a00be651601a4f968e77c82"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] @@ -883,4 +933,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "00e74057465e8df418904d57bd1d2ad2a6ae268ccfdb07cf21b73265065d293b" +content-hash = "8aec742783e1c98e1d774e53b914636d541a068b2ee876dca6c2354a17d2bc57" diff --git a/pydango/connection/session.py b/pydango/connection/session.py index 993f89e..9777ca6 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -1,14 +1,19 @@ import dataclasses import logging import sys -from collections import OrderedDict, defaultdict +from collections import OrderedDict, defaultdict, namedtuple from enum import Enum -from itertools import groupby -from typing import Any, Iterator, Optional, Type, Union, cast +from typing import Any, Iterator, Optional, Type, Union, cast, get_args, get_origin +from aioarango import AQLQueryExecuteError +from pydantic import BaseModel +from pydantic.fields import ModelField + +from pydango.connection import DALI_SESSION_KW from pydango.orm.relations import LIST_TYPES -from pydango.orm.types import ArangoModel +from pydango.orm.types import ArangoModel, TVertexModel from pydango.query.utils import new +from pydango.utils import get_collection_from_document if sys.version_info >= (3, 10): from typing import TypeAlias @@ -29,8 +34,8 @@ from pydango.query import AQLQuery from pydango.query.consts import FROM, ID, KEY, REV, TO from pydango.query.expressions import IteratorExpression, VariableExpression -from pydango.query.functions import Length, Merge, UnionArrays -from pydango.query.operations import RangeExpression +from pydango.query.functions import Document, Length, Merge, UnionArrays +from pydango.query.operations import RangeExpression, TraversalDirection from pydango.query.options import UpsertOptions logger = logging.getLogger(__name__) @@ -46,6 +51,51 @@ class DocumentNotFoundError(Exception): # document.rev = result["_rev"] +class MySpecialIter: + def __init__(self, d): + self.d = d + + def __iter__(self): + return iter(self.d) + + +class groupby: + # [k for k, g in groupby('AAAABBBCCDAABBB')] --> A B C D A B + # [list(g) for k, g in groupby('AAAABBBCCD')] --> AAAA BBB CC D + + def __init__(self, iterable, key=None): + if key is None: + + def key(x): + return x + + self.keyfunc = key + self.it = iter(iterable) + self.tgtkey = self.currkey = self.currvalue = object() + + def __iter__(self): + return self + + def __next__(self): + self.id = object() + while self.currkey == self.tgtkey: + self.currvalue = next(self.it) # Exit on StopIteration + self.currkey = self.keyfunc(self.currvalue) + self.tgtkey = self.currkey + return self.currkey, self._grouper(self.tgtkey, self.id) + + def _grouper(self, tgtkey, id): + while self.id is id and self.currkey == tgtkey: + yield self.currvalue + if get_origin(self.currvalue) is Union: + continue + try: + self.currvalue = next(self.it) + except StopIteration: + return + self.currkey = self.keyfunc(self.currvalue) + + def _collection_from_model(database: StandardDatabase, model: Type[BaseArangoModel]) -> StandardCollection: return 
database.collection(model.Collection.name) @@ -54,15 +104,49 @@ def _group_by_relation( model: BaseArangoModel, ) -> Iterator[tuple[tuple[Type[VertexModel], Optional[Type[EdgeModel]]], str]]: relationships = model.__relationships__ + + def grouper(x): + source = relationships[x].link_model + if source is Union: + source = get_args(source) + + dst = relationships[x].via_model + if dst is Union: + dst = get_args(source) + + return source, dst + for m, group in groupby( relationships, lambda x: (relationships[x].link_model, relationships[x].via_model), ): for thing in group: - yield m, thing + if get_origin(m[0]) is Union: + for i in get_args(m[0]): + yield (i, m[1]), thing + else: + yield m, thing return None +RelationGroup = namedtuple("RelationGroup", ["collection", "field", "model", "via_model"]) + + +def _group_by_relation2( + model: BaseArangoModel, +) -> Iterator[RelationGroup]: + relationships = model.__relationships__ + for field, relation in relationships.items(): + if get_origin(relation.link_model) is Union: + for model_option in get_args(relation.link_model): + # model_option: ArangoModel + yield RelationGroup(model_option.Collection.name, field, model_option, relation.via_model) + # result[model_option.Collection.name][field][model_option] = relation.via_model + else: + yield RelationGroup(relation.link_model.Collection.name, field, relation.link_model, relation.via_model) + # result[relation.link_model.Collection.name][field][relation.link_model] = relation.via_model + + class UpdateStrategy(str, Enum): UPDATE = "update" REPLACE = "replace" @@ -129,7 +213,7 @@ def _build_upsert_query( def _build_vertex_query(v, vertices_docs, strategy: UpdateStrategy): i = IteratorExpression() - from_var = VariableExpression() + from_var = VariableExpression(v.Collection.name) query = _build_upsert_query(i, strategy, v, vertices_docs) return from_var, query @@ -187,7 +271,7 @@ def traverse2( if isinstance(obj, list): for i in obj: _set_edge_operational_fields(result, model_id, edges_ids, i) - else: + elif obj is not None: _set_edge_operational_fields(result, model_id, edges_ids, obj) if isinstance(relation_doc, list): z = zip(relation_doc, getattr(model.edges, field, [])) @@ -231,17 +315,33 @@ def _build_graph( ) -> tuple[EdgeCollectionsMapping, EdgeVerticesIndexMapping, VertexCollectionsMapping, ModelFieldMapping]: vertex_collections: VertexCollectionsMapping = OrderedDict() edge_collections: EdgeCollectionsMapping = OrderedDict() - edge_vertex_index: EdgeVerticesIndexMapping = defaultdict(lambda: defaultdict(lambda: defaultdict(list))) + edge_vertex_index: EdgeVerticesIndexMapping = {} # defaultdict(lambda: defaultdict(lambda: defaultdict(list))) model_fields_mapping: ModelFieldMapping = {} def _prepare_relation(field, model, edge_cls, edge_doc, relation_doc): model_id = id(model) + if id(relation_doc) in ( + edge_vertex_index.setdefault(edge_cls, {}) + .setdefault(model_id, {}) + .setdefault((model.__class__, relation_doc.__class__), []) + ): + return False + if edge_doc: edge_collections.setdefault(edge_cls, IndexedOrderedDict()).setdefault(model_id, []).append(edge_doc) _add_model_field_to_mapping(model, field, relation_doc, edge_doc) - edge_vertex_index[edge_cls][model.__class__, relation_doc.__class__][model_id].append(id(relation_doc)) + ( + edge_vertex_index.setdefault(edge_cls, {}) + .setdefault(model_id, {}) + .setdefault((model.__class__, relation_doc.__class__), []) + .append(id(relation_doc)) + ) + + # edge_vertex_index.setdefault(edge_cls, {}).setdefault((model.__class__, 
relation_doc.__class__), + # {}).setdefault(model_id, set[int]()).add( + # id(relation_doc)) def _add_model_field_to_mapping(model, field, relation_doc, edge_doc): model_id = id(model) @@ -254,7 +354,7 @@ def _add_model_field_to_mapping(model, field, relation_doc, edge_doc): else: model_mapping[field] = {"v": id(relation_doc), "e": id(edge_doc)} - def traverse(model: VertexModel, visited: set[int]): + def traverse_old(model: VertexModel, visited: set[int]): if id(model) in visited: return @@ -263,11 +363,11 @@ def traverse(model: VertexModel, visited: set[int]): visited.add(id(model)) models: tuple[Type[VertexModel], Optional[Type[EdgeModel]]] - relations = list(_group_by_relation(model)) + relations = list(_group_by_relation2(model)) if relations: for models, field in relations: edge_cls: Optional[Type[EdgeModel]] = models[1] - relation_doc = getattr(model, field) + relation_doc: ModelField = getattr(model, field) if not relation_doc: _add_model_field_to_mapping(model, field, None, None) continue @@ -287,11 +387,11 @@ def traverse(model: VertexModel, visited: set[int]): z = zip(relation_doc, getattr(model.edges, field, [])) for vertex_doc, edge_doc in z: _prepare_relation(field, model, edge_cls, edge_doc, vertex_doc) - traverse(vertex_doc, visited) + traverse_old(vertex_doc, visited) else: edge_doc = getattr(model.edges, field) _prepare_relation(field, model, edge_cls, edge_doc, relation_doc) - traverse(relation_doc, visited) + traverse_old(relation_doc, visited) else: # todo: insert join relation pass @@ -299,7 +399,56 @@ def traverse(model: VertexModel, visited: set[int]): pass # model_fields_mapping[id(model)] = {} - traverse(document, _visited) + def traverse_new(model: VertexModel, visited: set[int]): + nonlocal edge_collections + if id(model) in visited: + return + + if isinstance(model, VertexModel): + vertex_collections.setdefault(model.__class__, IndexedOrderedDict())[id(model)] = model + visited.add(id(model)) + + relations = list(_group_by_relation2(model)) + if relations: + for relation_group in relations: + relation_doc: VertexModel = getattr(model, relation_group.field) + if not relation_doc: + _add_model_field_to_mapping(model, relation_group.field, None, None) + continue + + edge_cls: Optional[Type[EdgeModel]] = relation_group.via_model + + if isinstance(relation_doc, LazyProxy): + relation_doc = relation_doc.__instance__ + + if model.edges: + if isinstance(model.edges, dict): + convert_edge_data_to_valid_kwargs(model.edges) + # todo: this initiate the class edge model so it validates the edges, should we do that? 
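+                        # in the tests the edges mapping is keyed by field descriptors (e.g. User.friends),
+                        # so convert_edge_data_to_valid_kwargs() presumably normalises those keys first;
+                        # instantiating the generated edges model below then validates every edge document
+                        # before the graph query is built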
+ model.edges = model.__fields__[EDGES].type_(**model.edges) + + if isinstance(relation_doc, list): + if len(getattr(model.edges, relation_group.field, [])) != len(relation_doc): + raise AssertionError( + f"{model.__class__.__name__} vertex edges {relation_group.field} number mismatch" + ) + z = zip(relation_doc, getattr(model.edges, relation_group.field, [])) + for vertex_doc, edge_doc in z: + _prepare_relation(relation_group.field, model, edge_cls, edge_doc, vertex_doc) + traverse_new(vertex_doc, visited) + + else: + edge_doc = getattr(model.edges, relation_group.field) + _prepare_relation(relation_group.field, model, edge_cls, edge_doc, relation_doc) + traverse_new(relation_doc, visited) + else: + # todo: insert join relation + pass + else: + pass + # model_fields_mapping[id(model)] = {} + + traverse_new(document, _visited) return edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping @classmethod @@ -316,7 +465,7 @@ def _build_graph_query( ) vertex_let_queries: dict[Type[VertexModel], VariableExpression] = {} vertices_ids: VerticesIdsMapping = {} - edge_ids: EdgesIdsMapping = defaultdict(lambda: defaultdict(dict)) + edge_ids: EdgesIdsMapping = {} for v in vertex_collections: vertex_docs = list(vertex_collections[v].values()) vertices_ids[v] = {id(doc): i for i, doc in enumerate(vertex_docs)} @@ -327,42 +476,57 @@ def _build_graph_query( edge_let_queries = {} + def invert_edge_index(d: dict): + r = {} + for k, v in d.items(): + for nested_key, nested_value in v.items(): + r.setdefault(nested_key, {})[k] = nested_value + return r + for e, coll in edge_vertex_index.items(): counter = 0 edge_vars = [] - for (from_model, to_model), instances in coll.items(): - for instance, rels in instances.items(): - iterator, new_rels, ret = _bind_edge( - from_model, instance, rels, to_model, vertex_collections, vertex_let_queries - ) - var = VariableExpression() - query.let(var, for_(iterator, new_rels).return_(ret)) - - merger = IteratorExpression() - edge = VariableExpression() - query.let(edge, edge_collections[e][instance]) - edge_ids[e][instance].update( - {id(doc): i + counter for i, doc in enumerate(edge_collections[e][instance])} - ) - - merged = VariableExpression() + for j, (instance, mapping) in enumerate(list(coll.items())): + iterator = IteratorExpression() + edge_ids.setdefault(e, {}).setdefault(instance, {}).update( + {id(doc): i + counter for i, doc in enumerate(edge_collections[e][instance])} + ) + edge_var_name = f"{e.Collection.name}_{j + 1}" + edge = VariableExpression(edge_var_name) + query.let(edge, edge_collections[e][instance]) + for k, ((from_model, to_model), rels) in enumerate(mapping.items()): + from_ = vertex_collections[from_model].keys().index(instance) + new_rels = [vertex_collections[to_model].keys().index(x) for x in rels] + from_var = vertex_let_queries[from_model] + to_var = vertex_let_queries[to_model] + ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id} + + edge_from_to = VariableExpression(edge_var_name + f"_{k}_from_to") + query.let(edge_from_to, for_(iterator, new_rels).return_(ret)) + + merger = IteratorExpression("merger") + + merged = VariableExpression(edge_var_name + f"_{k}_merged") query.let( merged, - for_(merger, RangeExpression(0, Length(edge) - 1)).return_(Merge(edge[merger], var[merger])), + for_(merger, RangeExpression(0, Length(edge_from_to) - 1)).return_( + Merge(edge[merger], edge_from_to[merger]) + ), ) edge_vars.append(merged) - counter += len(edge_collections[e][instance]) - edges: Union[VariableExpression, 
list[VariableExpression]] - if len(edge_vars) > 1: - edges = cast(list[VariableExpression], UnionArrays(*edge_vars)) - elif len(edge_vars) == 1: - edges = edge_vars[0] - else: - continue + counter += len(rels) - edge_iter = IteratorExpression() - edge_let_queries[e] = VariableExpression() - query.let(edge_let_queries[e], _build_upsert_query(edge_iter, strategy, e, edges, edge=True)) + edges: Union[VariableExpression, list[VariableExpression]] + if len(edge_vars) > 1: + edges = cast(list[VariableExpression], UnionArrays(*edge_vars)) + elif len(edge_vars) == 1: + edges = edge_vars[0] + else: + continue + + edge_iter = IteratorExpression() + edge_let_queries[e] = VariableExpression(edge_var_name + "_result") + query.let(edge_let_queries[e], _build_upsert_query(edge_iter, strategy, e, edges, edge=True)) return ( model_fields_mapping, @@ -378,9 +542,12 @@ def _build_graph_query( async def init(self, model: Type[BaseArangoModel]): collection = await get_or_create_collection(self.database, model) + await self.create_indexes(collection, model) + + @staticmethod + async def create_indexes(collection, model): if model.Collection.indexes: logger.debug("creating indexes", extra=dict(indexes=model.Collection.indexes, model=model)) - for i in model.Collection.indexes or []: if isinstance(i, dict): await index.mapping[i.__class__](collection, **i) @@ -393,7 +560,7 @@ async def save( strategy: UpdateStrategy = UpdateStrategy.UPDATE, # todo: follow_links: bool = False, collection_options: Union[CollectionUpsertOptions, None] = None, - ) -> ArangoModel: + ) -> Union[ArangoModel, TVertexModel]: model_fields_mapping = None if isinstance(document, VertexModel): model_fields_mapping, vertices_ids, edge_ids, query = self._build_graph_query( @@ -409,26 +576,227 @@ async def save( filter_ = _get_upsert_filter(document) query = _make_upsert_query(filter_, document, document, ORMQuery(), strategy, options) - cursor = await query.execute(self.database) - result = await cursor.next() + try: + cursor = await query.execute(self.database) + except AQLQueryExecuteError as e: + logger.exception(query) + raise e + else: + result = await cursor.next() if model_fields_mapping: traverse2(cast(VertexModel, document), set(), result, model_fields_mapping, vertices_ids, edge_ids) logger.debug("cursor stats", extra=cursor.statistics()) return document - async def get(self, model: Type[BaseArangoModel], _id: str, should_raise=False) -> Optional[BaseArangoModel]: - collection_name = model.Collection.name - collection = self.database.collection(collection_name) - try: - result = await collection.get(_id) - # result[DALI_SESSION_KW] = self - if result is None and should_raise: - raise DocumentNotFoundError() - document: BaseArangoModel = model.from_orm(result, session=self) - return document - except DocumentNotFoundError: - return None + async def get( + self, + model: Type[BaseArangoModel], + key: str, + should_raise: bool = False, + fetch_edges: Union[set[str], bool] = False, + fetch_edges_data: Union[set[str], bool] = False, + fetch_path: bool = False, + depth: range = range(1, 1), + prune: bool = False, + projection: Optional[Type[BaseArangoModel]] = None, + return_raw: bool = False, + ) -> Optional[Union[TVertexModel, ArangoModel]]: + collection = model.Collection.name + _id = f"{collection}/{key}" + d = Document(_id) + doc = VariableExpression() + main_query = ORMQuery().let(doc, d) + return_ = doc + if fetch_edges: + if isinstance(fetch_edges, set): + edges = fetch_edges + else: + edges = tuple({i.via_model.Collection.name for 
i in model.__relationships__.values()}) + + v = IteratorExpression("v") + iterators = [v] + e = IteratorExpression("e") + iterators.append(e) + # if fetch_edges_data: + + if fetch_path: + p = IteratorExpression("p") + iterators.append(p) + traversal_result = VariableExpression() + traversal = ( + ORMQuery() + .traverse(tuple(iterators), edges, _id, depth, TraversalDirection.OUTBOUND) + .return_({"v": iterators[0], "e": iterators[1]}) + ) + main_query.let(traversal_result, traversal) + return_ = {"doc": return_, "edges": traversal_result} + + main_query.return_(return_) + # logger.debug(str(main_query)) + cursor = await main_query.execute(self.database) + result = await cursor.next() + result, recursive = construct(result, model) + + if return_raw: + return result + + result[DALI_SESSION_KW] = self + if result is None and should_raise: + raise DocumentNotFoundError() + + if projection: + document = projection.from_orm(result, session=self) + else: + document = model.from_orm(result, session=self) + + return document + + # except DocumentNotFoundError: + # return None async def find(self, model: Type[BaseArangoModel], filters=None, skip=None, limit=None): collection = _collection_from_model(self.database, model) return await collection.find(filters, skip, limit) + + +def traverse_model_and_map(pydantic_model: Type[BaseModel], variable: VariableExpression): + result = {} + + for field, value in pydantic_model.__fields__.items(): + if value.alias: + field = value.alias + if issubclass(value.type_, BaseModel): + result[field] = traverse_model_and_map(value.type_, variable) + elif isinstance(value.type_, list): + result[field] = [] + for item in value.type_: + if issubclass(item, BaseModel): + result[field].append(traverse_model_and_map(item, variable)) + elif isinstance(item, dict): + mapped_item = {} + for key, val in item.items(): + mapped_item[key] = variable[val] + result[field].append(mapped_item) + elif isinstance(value.type_, dict): + mapped_value = {} + for key, val in value.type_.items(): + mapped_value[key] = variable[val] + result[field] = mapped_value + else: + result[field] = variable[field] + + return result + + +def construct(traversal_result: dict, model: Type[VertexModel]): + doc = traversal_result["doc"] + + # for relation in traversal_result["edges"]: + # v = relation["v"] + # e = relation["e"] + # coll, _, __ = e[ID].partition("/") + # for func in model.__edge_to_field_mapping__[coll]: + # if func(e, v): + # link_type = model.__relationships__[func.__name__].link_type + # if link_type in LIST_TYPES: + # if func.__name__ not in new_d: + # new_d[func.__name__] = [] + # new_d[func.__name__].append(v) + # if func.__name__ not in new_d[EDGES]: + # new_d[EDGES][func.__name__] = [] + # new_d[EDGES][func.__name__].append(e) + # else: + # new_d[func.__name__] = v + # new_d[EDGES][func.__name__] = e + # break + + vertices = defaultdict(dict) + edges = {} + if doc: + vertices[doc[ID]] = doc + edge_count = 0 + for relation in traversal_result["edges"]: + v = relation["v"] + e = relation["e"] + edge_coll = get_collection_from_document(e) + vertices[v[ID]] = v + coordinate = (e[FROM], e[TO]) + + if coordinate not in edges: + edges.setdefault(edge_coll, {})[coordinate] = e + + elif not isinstance(edges[coordinate], list): + edges.setdefault(edge_coll, {})[coordinate] = [edges[coordinate]] + edges[edge_coll][coordinate].append(e) + + else: + edges[edge_coll][coordinate].append(e) + + edge_count += 1 + if len(traversal_result["edges"]) != edge_count: + raise AssertionError("something happend 
edges are not the same length") + + new_d, recursive = reorder_graph({"start": doc[ID], "vertices": vertices, "edges": edges}, model) + + return new_d, recursive + + +def reorder_graph(graph, model): + vertices = graph["vertices"] + edges = graph["edges"] + start = graph["start"] + visited = set() + recursive = False + for coll, _edges in edges.items(): + for (f, t), e in _edges.items(): + to = vertices[t] + coll, _, __ = e[ID].partition("/") + + for func in model.__edge_to_field_mapping__[coll]: + if to[ID] == start: + continue + if callable(func): + if func(e, to): + map_edge(e, f, func.__name__, model, to, vertices) + break + else: + map_edge(e, f, func, model, to, vertices) + + if id(to) in visited: + recursive = True + visited.add(id(to)) + + return vertices[start], recursive + + +def map_edge(e, f, func, model, to, vertices): + link_type = model.__relationships__[func].link_type + if link_type in LIST_TYPES: + if func not in vertices[f]: + vertices[f][func] = [] + + vertices[f][func].append(to) + + if func not in vertices[f].setdefault(EDGES, {}): + vertices[f][EDGES][func] = [] + vertices[f][EDGES][func].append(e) + else: + vertices[f][func] = to + vertices[f].setdefault(EDGES, {})[func] = e + + +def remove_circular_refs(ob, _seen=None): + if _seen is None: + _seen = set() + if id(ob) in _seen: + return None + _seen.add(id(ob)) + res = ob + if isinstance(ob, dict): + res = {remove_circular_refs(k, _seen): remove_circular_refs(v, _seen) for k, v in ob.items()} + elif isinstance(ob, (list, tuple, set, frozenset)): + objs = type(ob)(remove_circular_refs(v, _seen) for v in ob) + res = all(objs) and objs or None + + _seen.remove(id(ob)) + return res diff --git a/pydango/connection/utils.py b/pydango/connection/utils.py index 8daa387..7116ffa 100644 --- a/pydango/connection/utils.py +++ b/pydango/connection/utils.py @@ -37,7 +37,7 @@ async def get_or_create_collection( if not await db.has_collection(collection_name): try: - return await cast(Awaitable[StandardCollection], db.create_collection(collection_name, edge=edge)) + return await cast(Awaitable["StandardCollection"], db.create_collection(collection_name, edge=edge)) except aioarango.exceptions.CollectionCreateError as e: if e.error_code != 1207: raise e diff --git a/pydango/orm/fields.py b/pydango/orm/fields.py index a0f22ac..da69d01 100644 --- a/pydango/orm/fields.py +++ b/pydango/orm/fields.py @@ -1,4 +1,4 @@ -from __future__ import annotations +# from __future__ import annotations from typing import TYPE_CHECKING, Generic, Optional, Type, TypeVar, Union, cast @@ -22,11 +22,11 @@ class ModelFieldExpression(FieldExpression): - def __init__(self, field: Union[str, Expression], parent: Type[BaseArangoModel]): + def __init__(self, field: Union[str, Expression], parent: Type["BaseArangoModel"]): super().__init__(field, cast(VariableExpression, parent)) self.parent = parent # type: ignore[assignment] - def compile(self, query_ref: AQLQuery) -> str: + def compile(self, query_ref: "AQLQuery") -> str: if isinstance(self.field, Expression): return super().compile(query_ref) else: @@ -41,7 +41,7 @@ def __hash__(self): class DocFieldDescriptor(Generic[FieldType]): - def __init__(self, field: ModelField, relation: Optional[Relationship] = None): + def __init__(self, field: "ModelField", relation: Optional["Relationship"] = None): self.relation = relation self.field = field @@ -50,8 +50,8 @@ def __set__(self, instance, value): # instance.__dict__[self.name] = LazyProxy(value) def __get__( - self, instance: Optional[ArangoModel], owner: 
Type[BaseArangoModel] - ) -> Union[LazyProxy[ArangoModel], ModelFieldExpression, None]: + self, instance: Optional["ArangoModel"], owner: Type["BaseArangoModel"] + ) -> Union[LazyProxy["ArangoModel"], ModelFieldExpression, None]: if not instance and self.field.name in owner.__fields__.keys(): return ModelFieldExpression(self.field.name, owner) diff --git a/pydango/orm/models.py b/pydango/orm/models.py index 2cdc5db..f228374 100644 --- a/pydango/orm/models.py +++ b/pydango/orm/models.py @@ -25,10 +25,15 @@ import pydantic.typing from pydantic.fields import ConfigError # type: ignore[attr-defined] +import pydango.orm.fields from pydango.orm.consts import EDGES from pydango.orm.encoders import jsonable_encoder from pydango.orm.types import ArangoModel -from pydango.orm.utils import convert_edge_data_to_valid_kwargs, get_globals +from pydango.orm.utils import ( + convert_edge_data_to_valid_kwargs, + evaluate_forward_ref, + get_globals, +) from pydango.query.consts import FROM, ID, KEY, REV, TO if sys.version_info >= (3, 10): @@ -212,7 +217,7 @@ def validate( return super().validate(v, values, loc=loc, cls=cls) if v is not NAO else (v, None) -def get_pydango_field(field, cls=RelationModelField): +def get_pydango_field(field: ModelField, cls: Type[RelationModelField] = RelationModelField) -> RelationModelField: return cls( name=field.name, type_=field.annotation, @@ -243,14 +248,14 @@ class EdgeData(BaseModel): @dataclass_transform(kw_only_default=True, field_specifiers=(ArangoField,)) class ArangoModelMeta(ModelMetaclass, ABCMeta): - def __new__(mcs, name, bases, namespace, **kwargs): + def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): parents = [b for b in bases if isinstance(b, mcs)] if not parents or BaseArangoModel in parents: - new_cls = super().__new__(mcs, name, bases, namespace, **kwargs) - new_cls.__relationships__ = {} - new_cls.__relationships_fields__ = {} - return new_cls - relationships = {} + skipped_cls: BaseArangoModel = super().__new__(mcs, name, bases, namespace, **kwargs) + skipped_cls.__relationships__ = {} + skipped_cls.__relationships_fields__ = {} + return skipped_cls + _relationships: dict[str, Relationship] = {} original_annotations = resolve_annotations( namespace.get("__annotations__", {}), namespace.get("__module__", None) @@ -259,62 +264,101 @@ def __new__(mcs, name, bases, namespace, **kwargs): for k, v in original_annotations.items(): relation = get_relation(k, v, namespace.get(k, Undefined), BaseConfig) if relation: - relationships[k] = relation - original_annotations[k] = Union[original_annotations[k]] + _relationships[k] = relation + # original_annotations[k] = Union[original_annotations[k]] if VertexModel in bases: - __edge_namespace__ = {} - for field, relation_info in relationships.items(): + __edge_namespace__: dict[str, Any] = {} + for field, relation_info in _relationships.items(): + via_model = relation_info.via_model if relation_info.link_type in LIST_TYPES: - __edge_namespace__[field] = (list[relation_info.via_model], ...) + if relation_info.link_type in (LinkTypes.OPTIONAL_EDGE_LIST, LinkTypes.OPTIONAL_LIST): + __edge_namespace__[field] = (Optional[list[via_model]], None) # type: ignore[valid-type] + else: + __edge_namespace__[field] = (list[via_model], ...) # type: ignore[valid-type] + + elif relation_info.link_type in (LinkTypes.OPTIONAL_EDGE, LinkTypes.OPTIONAL_DIRECT): + __edge_namespace__[field] = (Optional[via_model], None) else: - __edge_namespace__[field] = (relation_info.via_model, ...) 
+ __edge_namespace__[field] = (via_model, ...) # type: ignore[assignment] m = create_model(f"{name}Edges", **__edge_namespace__, __base__=EdgeData) namespace[EDGES] = Field(None, exclude=True) - - original_annotations[EDGES] = Optional[m] + original_annotations[EDGES] = cast(Any, Optional[m]) + else: + namespace[EDGES] = Field(None, exclude=True) + original_annotations[EDGES] = cast(Any, None) dict_used = { **namespace, "__weakref__": None, "__annotations__": original_annotations, - "__relationships__": relationships, + "__relationships__": _relationships, } - if VertexModel in bases: - dict_used.update({"__edges_model__": m}) - new_cls = super().__new__( - mcs, - name, - bases, - dict_used, - **kwargs, - ) + + new_cls: BaseArangoModel = super().__new__(mcs, name, bases, dict_used, **kwargs) + + __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] = {} + for relation_field, relation_info in _relationships.items(): + if not relation_info.via_model: + continue + if isinstance(relation_info.via_model, ForwardRef): + __edge_to_field_mapping__.setdefault(relation_info.via_model, []).append(cast(str, relation_field)) + elif issubclass(relation_info.via_model, BaseArangoModel): + __edge_to_field_mapping__.setdefault(relation_info.via_model.Collection.name, []).append(relation_field) + + errors: dict[Union[str, ForwardRef], list[str]] = {} + + items = __edge_to_field_mapping__.items() + + for coll_or_forward_ref, fields in items: + if len(fields) > 1: + for i, f in enumerate(fields): + func = getattr(new_cls.Collection, f) + if func: + if not callable(func): + raise ValueError(f"{func} is not callable") + fields[i] = func + + else: + errors.setdefault(coll_or_forward_ref, []).append(f) + + if errors: + raise AttributeError(f"you must define the following Collection functions for distinction {dict(errors)}") + __relationship_fields__ = {} - for field_name, field in [(k, v) for k, v in new_cls.__fields__.items() if k != EDGES]: - if field_name in relationships: - model_field = get_pydango_field(field, RelationModelField) + for field_name, model_field in [(x, y) for x, y in new_cls.__fields__.items() if x != EDGES]: + if field_name in _relationships: + pydango_field = get_pydango_field(model_field, RelationModelField) # todo improve this - relationships[field_name].field = model_field - __relationship_fields__[field_name] = model_field - new_cls.__fields__[field_name] = model_field + # todo: check why fully qualified module name needed + relationship = cast( # type: ignore[redundant-cast] + pydango.orm.models.Relationship, _relationships[field_name] + ) + relationship.field = pydango_field + __relationship_fields__[field_name] = pydango_field + new_cls.__fields__[field_name] = pydango_field setattr( new_cls, field_name, - DocFieldDescriptor[model_field.type_](model_field, relationships[field_name]), + DocFieldDescriptor[pydango_field.type_](pydango_field, relationship), # type: ignore[name-defined] ) - new_cls.__annotations__.update({field_name: DocFieldDescriptor[model_field.type_]}) - # if issubclass(new_cls, VertexModel): - # pass - else: - setattr(new_cls, field_name, DocFieldDescriptor[field.type_](field)) - new_cls.__relationships__ = relationships + field_annotation = {field_name: DocFieldDescriptor[pydango_field.type_]} # type: ignore[name-defined] + new_cls.__annotations__.update(field_annotation) + else: + setattr( + new_cls, + field_name, + DocFieldDescriptor[model_field.type_](model_field), # type: ignore[name-defined] + ) + new_cls.__relationships__ = _relationships 
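# The ``__edge_to_field_mapping__`` / ``errors`` check above enforces a
# contract: when two or more relation fields of a vertex share the same edge
# collection, the vertex ``Collection`` has to expose one callable per field
# so edges read back from that collection can be routed to the right field;
# otherwise class creation fails with AttributeError. A condensed sketch of
# that contract, modelled on tests/session/test_family.py further down in
# this patch (the class and collection names here are hypothetical):
from typing import Annotated, List, Optional

from pydango.orm.models import (
    EdgeCollectionConfig,
    EdgeModel,
    Relation,
    VertexCollectionConfig,
    VertexModel,
)


class Knows(EdgeModel):
    connection: str

    class Collection(EdgeCollectionConfig):
        name = "knows"


class FamilyMember(VertexModel):
    name: str
    father: Annotated[Optional["FamilyMember"], Relation[Knows]] = None
    brothers: Annotated[Optional[List["FamilyMember"]], Relation[Knows]] = None

    class Collection(VertexCollectionConfig):
        name = "family_members"

        # both relation fields map to the "knows" edge collection, so each
        # needs a discriminator deciding whether a fetched edge belongs to it
        @staticmethod
        def father(e: dict, _: dict) -> bool:
            return e["connection"] == "Father"

        @staticmethod
        def brothers(e: dict, _: dict) -> bool:
            return e["connection"] == "Brother"


FamilyMember.update_forward_refs()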
new_cls.__relationships_fields__ = __relationship_fields__ + new_cls.__edge_to_field_mapping__ = __edge_to_field_mapping__ new_cls.__annotations__ = { # **relationship_annotations, **original_annotations, @@ -341,7 +385,8 @@ class BaseArangoModel(BaseModel, metaclass=ArangoModelMeta): if TYPE_CHECKING: __relationships__: Relationships = {} __relationships_fields__: RelationshipFields = {} - __edges_model__: Union[Type[EdgeData], None] = None + # __edges_model__: Union[Type[EdgeData], None] = None + __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] class Config(BaseConfig): arbitrary_types_allowed = True @@ -404,16 +449,19 @@ def update_forward_refs(cls, **localns: Any) -> None: relation.field = cls.__fields__[name] relation.link_model = cls.__fields__[name].type_ if isinstance(relation.via_model, ForwardRef): - relation.via_model = pydantic.typing.evaluate_forwardref(relation.via_model, get_globals(cls), localns) - # cls.__edges_model__.update_forward_refs(**localns) - # for field in cls.__edges_model__.__fields__.values(): - # update_field_forward_refs(field, get_globals(cls), localns) - # field.type_ = pydantic.typing.evaluate_forwardref(field.type_, get_globals(cls), localns) - # field.outer_type_ = pydantic.typing.evaluate_forwardref(field.outer_type_, get_globals(cls), localns) - # relation.via_model = pydantic.typing.evaluate_forwardref(relation.via_model, get_globals(cls), localns) - # pass - # - # print(field) + relation.via_model = evaluate_forward_ref(cls, relation.via_model, **localns) + + if isinstance(relation.link_model, ForwardRef): + relation.link_model = evaluate_forward_ref(cls, relation.link_model, **localns) + + for k in cls.__edge_to_field_mapping__.copy(): + if isinstance(k, ForwardRef): + funcs = cls.__edge_to_field_mapping__.pop(k) + new_k = evaluate_forward_ref(cls, k, **localns) + if new_k in cls.__edge_to_field_mapping__: + cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs) + else: + cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs @abstractmethod def save_dict(self) -> DictStrAny: diff --git a/pydango/orm/query.py b/pydango/orm/query.py index c995d59..a0bc0ca 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -32,7 +32,12 @@ SortExpression, VariableExpression, ) -from pydango.query.operations import ForParams, SortParams +from pydango.query.operations import ( + ForParams, + RangeExpression, + SortParams, + TraversalDirection, +) from pydango.query.options import ( RemoveOptions, ReplaceOptions, @@ -374,9 +379,32 @@ def return_(self, return_expr: Union[Type[BaseArangoModel], Aliased, ReturnableM def _serialize_vars(self): return jsonable_encoder(self.bind_vars, by_alias=True, custom_encoder={BaseArangoModel: save_dict}) + def traverse( + self, + iterators: Union[ + IteratorExpression, + tuple[IteratorExpression], + tuple[IteratorExpression, IteratorExpression], + tuple[IteratorExpression, IteratorExpression, IteratorExpression], + ], + edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], + start: Union["LiteralExpression", VariableExpression, FieldExpression, str], + depth: Union[RangeExpression, range, tuple[int, int]], + direction: TraversalDirection, + ): + return super().traverse(iterators, edges, start, depth, direction) + # return self + def for_( collection_or_variable: ORMForParams, in_: Optional[Union[IterableExpression, VariableExpression, list[VariableExpression], list]] = None, ) -> ORMQuery: return ORMQuery().for_(collection_or_variable, in_) + + +def traverse( 
+ collection_or_variable: ORMForParams, + in_: Optional[Union[IterableExpression, VariableExpression, list[VariableExpression], list]] = None, +) -> ORMQuery: + return ORMQuery().for_(collection_or_variable, in_) diff --git a/pydango/orm/types.py b/pydango/orm/types.py index 9226fc4..4142333 100644 --- a/pydango/orm/types.py +++ b/pydango/orm/types.py @@ -4,5 +4,5 @@ from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel # noqa: F401 ArangoModel = TypeVar("ArangoModel", bound="BaseArangoModel") -# TEdge = TypeVar("TEdge", bound="EdgeModel") +TEdge = TypeVar("TEdge", bound="EdgeModel") TVertexModel = TypeVar("TVertexModel", bound="VertexModel") diff --git a/pydango/orm/utils.py b/pydango/orm/utils.py index 257a8a9..4e190de 100644 --- a/pydango/orm/utils.py +++ b/pydango/orm/utils.py @@ -1,6 +1,9 @@ import sys +from functools import lru_cache from typing import TYPE_CHECKING, Generic, TypeVar +from pydantic.typing import evaluate_forwardref + from pydango.orm.fields import ModelFieldExpression from pydango.query.expressions import FieldExpression @@ -95,3 +98,8 @@ def get_globals(cls): else: globalns = {} return globalns + + +@lru_cache +def evaluate_forward_ref(source, model, **localns): + return evaluate_forwardref(model, get_globals(source), localns) diff --git a/pydango/query/expressions.py b/pydango/query/expressions.py index 9ebb0cc..42aa1c9 100644 --- a/pydango/query/expressions.py +++ b/pydango/query/expressions.py @@ -13,7 +13,6 @@ else: from typing_extensions import TypeAlias - if TYPE_CHECKING: from pydango.query.query import AQLQuery @@ -383,6 +382,7 @@ def __init__(self, iterator): class SubQueryExpression(Expression, ReturnableMixin): def __init__(self, query: QueryExpression): self.query = query + self.query.sep = " " def __repr__(self): if self.query.sep == "\n": @@ -390,8 +390,9 @@ def __repr__(self): return f"({repr(self.query)})" def compile(self, query_ref) -> str: + # self.query.sep = "\n" if self.query.sep == "\n": - return f"(\t{self.query.compile(query_ref)})" + return f"({self.query.compile(query_ref)})" return f"({self.query.compile(query_ref)})" @@ -444,7 +445,7 @@ class ListExpression( BindableExpression, IterableExpression, ): - def __init__(self, value: ListValues, iterator: Optional[Union[IteratorExpression, str]] = None): + def __init__(self, value: ListValues, iterator: Optional[Union[IteratorExpression, str]] = None, brackets=True): if isinstance(value, list): value = tuple(value) @@ -452,6 +453,7 @@ def __init__(self, value: ListValues, iterator: Optional[Union[IteratorExpressio super(BindableExpression, self).__init__(iterator) self._copy: list[Expression] = [] self._need_compile = False + self._brackets = brackets for i in self.value: if isinstance(i, QueryExpression): self._copy.append(SubQueryExpression(i)) @@ -478,7 +480,10 @@ def compile(self, query_ref: "AQLQuery", **kwargs) -> str: if isinstance(i, SubQueryExpression): i.query.parent = cast(QueryExpression, query_ref) result.append(i.compile(query_ref)) - return f'[{", ".join(result)}]' + if self._brackets: + return f'[{", ".join(result)}]' + else: + return ", ".join(result) return super().compile(query_ref) @@ -519,7 +524,6 @@ def __init__(self, value: ObjectParams, parent: Optional[Union[VariableExpressio elif isinstance(mapped_field, dict): self.value[field] = ObjectExpression(mapped_field, self.parent) self.__all_literals__ = self.__all_literals__ or self.value[field].__all_literals__ - elif isinstance(mapped_field, QueryExpression): subquery = SubQueryExpression(mapped_field) 
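# Around this branch, a query used as a value inside an ObjectExpression is
# wrapped in a SubQueryExpression, and (per the SubQueryExpression change
# above) the wrapped query's separator is forced to a single space so it
# compiles inline. A rough sketch of the intended usage, under the assumption
# that an ORMQuery is itself a QueryExpression; the compiled AQL in the final
# comment is indicative only, not taken from a real run:
from pydango.orm.query import for_
from pydango.query.expressions import IteratorExpression, ObjectExpression

i = IteratorExpression()
inner = for_(i, [1, 2, 3]).return_({"value": i})
projection = ObjectExpression({"nums": inner})
# projection.value["nums"] is now a SubQueryExpression and should compile to
# something like: {nums: (FOR <iter> IN <bound list> RETURN {value: <iter>})}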
self.value[field] = subquery @@ -541,6 +545,8 @@ def compile(self, query_ref: "AQLQuery") -> str: if isinstance(self.value, dict): for field, mapped_field in self.value.items(): + if isinstance(field, Expression): + field = field.compile(query_ref) pairs.append(f"{field}: {mapped_field.compile(query_ref)}") return f"{{{', '.join(pairs)}}}" @@ -556,8 +562,9 @@ def __repr__(self): return f"{{{', '.join(pairs)}}}" -# class BaseAQLVariableExpressionMixin: -# ... +class BaseAQLVariableExpressionMixin(Expression): + def __init__(self, value: str): + self.value = value # class AQLVariableExpression(BaseAQLVariableExpressionMixin): @@ -568,9 +575,12 @@ def __repr__(self): # return f"@{super().compile(*args, **kwargs)}" -# class AQLCollectionVariableExpression(BaseAQLVariableExpressionMixin): -# def compile(self, *args, **kwargs) -> str: -# return f"@@{super().compile(*args, **kwargs)}" +class AQLCollectionVariableExpression(VariableExpression): + def __init__(self, value: str): + super().__init__(value) + + def compile(self, *args, **kwargs) -> str: + return f"@@{super().compile(*args, **kwargs)}" def _set_operator(self, operator, other, cls: Type[BinaryExpression]) -> BinaryExpression: @@ -600,3 +610,15 @@ def __invert__(self): self.direction = SortDirection.DESC else: self.direction = SortDirection.ASC + + +class DynamicFieldExpression(FieldExpression): + def compile(self, query_ref: "AQLQuery") -> str: + return f"[{super().compile(query_ref)}]" + + def __hash__(self): + field = "" + if self.parent: + field += f"{self.parent}." + field += f"{self.field}" + return hash(field) diff --git a/pydango/query/functions.py b/pydango/query/functions.py index 630ee0f..78f0d6b 100644 --- a/pydango/query/functions.py +++ b/pydango/query/functions.py @@ -61,6 +61,13 @@ def __init__(self, *arguments): # document +class Document(FunctionExpression): + name = "DOCUMENT" + + def __init__(self, _id: str): + super().__init__(_id) + + class Unset(FunctionExpression): name = "UNSET" diff --git a/pydango/query/operations.py b/pydango/query/operations.py index 94a7af9..dfee9a5 100644 --- a/pydango/query/operations.py +++ b/pydango/query/operations.py @@ -190,16 +190,19 @@ def __init__( tuple[IteratorExpression, IteratorExpression, IteratorExpression], tuple[IteratorExpression, ...], ], - edge: Union[str, CollectionExpression], + edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], start: Union[str, "LiteralExpression", VariableExpression, FieldExpression], depth: Union[range, tuple[int, int], RangeExpression], direction: TraversalDirection, query_ref: "AQLQuery", ): super().__init__(query_ref) - if isinstance(edge, str): - edge = CollectionExpression(edge) - # handle iterators + if isinstance(edges, str): + edges = CollectionExpression(edges) + elif isinstance(edges, Sequence): + edges = [CollectionExpression(i) if isinstance(i, str) else i for i in edges] + # for i in enumerate(edges): + # if isinstance(i,str): if isinstance(iterators, IteratorExpression): iterators = (iterators,) @@ -215,7 +218,7 @@ def __init__( if isinstance(start, str): start = LiteralExpression(start) self.iterators = iterators - self.edge = edge + self.edges = edges self.start = start self.direction = direction self.depth = depth @@ -224,11 +227,15 @@ def compile(self, *args, **kwargs): compiled_iterators = [] for i in self.iterators: compiled_iterators.append(i.compile(self.query_ref)) - + edges = ( + isinstance(self.edges, list) + and ", ".join([i.compile(self.query_ref) for i in self.edges]) + or 
self.edges.compile(self.query_ref) + ) return ( f"FOR {', '.join(compiled_iterators)} IN " f"{self.depth.compile(self.query_ref)} {self.direction.value} {self.start.compile(self.query_ref)} " - f"{self.edge.compile(self.query_ref)}" + f"{edges}" ) def __repr__(self): @@ -237,7 +244,8 @@ def __repr__(self): compiled_iterators.append(repr(i)) return ( - f"FOR {', '.join(compiled_iterators)} IN {repr(self.depth)} {self.direction.value} {self.start} {self.edge}" + f"FOR {', '.join(compiled_iterators)} IN" + f" {repr(self.depth)} {self.direction.value} {self.start} {self.edges}" ) @@ -271,6 +279,10 @@ def __init__( self.expression = AssignmentExpression(variable, expression) def compile(self, *args, **kwargs): + if isinstance(self.expression, AssignmentExpression) and isinstance( + self.expression.expression, QueryExpression + ): + self.expression.expression.sep = " " return f"LET {self.expression.compile(self.query_ref)}" def __repr__(self): @@ -320,9 +332,11 @@ def __init__(self, return_expr: Union[dict, ReturnableMixin], query_ref: "AQLQue return_expr = return_expr.iterator elif isinstance(return_expr, list): - for i in return_expr: - if not isinstance(i, FieldExpression): - raise Exception("todo: check this") + return_expr = ListExpression(return_expr) + # for i in return_expr: + # if isinstance(i,str) + # if not isinstance(i, FieldExpression): + # raise Exception("todo: check this") elif isinstance(return_expr, Mapping): return_expr = ObjectExpression(return_expr) diff --git a/pydango/query/query.py b/pydango/query/query.py index 368ace1..f3a2d80 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -1,7 +1,7 @@ import json import logging import sys -from typing import Any, Dict, List, Optional, Union, overload +from typing import Any, Dict, List, Optional, Sequence, Union, overload # from pydango.orm.models import BaseArangoModel, save_dict @@ -133,15 +133,15 @@ def traverse( tuple[IteratorExpression, IteratorExpression], tuple[IteratorExpression, IteratorExpression, IteratorExpression], ], - edge: Union[str, CollectionExpression], + edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], start: Union["LiteralExpression", VariableExpression, FieldExpression, str], depth: Union[RangeExpression, range, tuple[int, int]], direction: TraversalDirection, - ): + ) -> Self: self._ops.append( TraversalOperation( iterators=iterators, - edge=edge, + edges=edges, start=start, depth=depth, direction=direction, diff --git a/pydango/query/utils.py b/pydango/query/utils.py index 2608dd3..f53fb2d 100644 --- a/pydango/query/utils.py +++ b/pydango/query/utils.py @@ -12,8 +12,10 @@ def compile(self, *args, **kwargs) -> Union[str, None]: T = TypeVar("T") -def new(*, edge=False) -> dict[str, str]: +def new(*, edge=False, debug=True) -> Union[dict[str, str], NEW]: _new = NEW() + if debug: + return _new d = {ID: _new[ID], KEY: _new[KEY], REV: _new[REV]} if edge: d.update({FROM: _new[FROM], TO: _new[TO]}) diff --git a/pydango/utils.py b/pydango/utils.py new file mode 100644 index 0000000..5efe4b7 --- /dev/null +++ b/pydango/utils.py @@ -0,0 +1,17 @@ +from typing import Union + +from pydango.orm.models import BaseArangoModel +from pydango.orm.types import ArangoModel +from pydango.query.consts import ID + + +def get_collection_from_document(obj: Union[str, dict, ArangoModel]) -> str: + if isinstance(obj, dict): + obj = obj.get(ID) + elif isinstance(obj, BaseArangoModel): + obj = obj.id + + if not isinstance(obj, str): + raise ValueError("o") + + return 
obj.partition("/")[0] diff --git a/pyproject.toml b/pyproject.toml index eada92d..95ce847 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,8 @@ pytest-asyncio = "^0.21.0" coverage = "^7.2.5" isort = "^5.12.0" mypy = "^1.3.0" +freezegun = "^1.2.2" +pydiction = "^0" [build-system] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 6d281f3..0000000 --- a/requirements.txt +++ /dev/null @@ -1,161 +0,0 @@ -aioarango==1.0.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:8a7e06814c95323a5a29c4ac73eb300f451f7e894e2a1dc5a064506a5d0a81af \ - --hash=sha256:9a8983234c252375cda763105460906565045e4886d167179d3c037e6b7ddfe6 -anyio==3.7.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce \ - --hash=sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0 -certifi==2023.5.7 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \ - --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716 -charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \ - --hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \ - --hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \ - --hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \ - --hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \ - --hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \ - --hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \ - --hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \ - --hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \ - --hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \ - --hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \ - --hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \ - --hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \ - --hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \ - --hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \ - --hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \ - --hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \ - --hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \ - --hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \ - --hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \ - --hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \ - --hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \ - --hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \ - --hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \ - --hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \ - --hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \ - --hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \ - 
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \ - --hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \ - --hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \ - --hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \ - --hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \ - --hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \ - --hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \ - --hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \ - --hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \ - --hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \ - --hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \ - --hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \ - --hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \ - --hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \ - --hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \ - --hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \ - --hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \ - --hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \ - --hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \ - --hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \ - --hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \ - --hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \ - --hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \ - --hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \ - --hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \ - --hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \ - --hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \ - --hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \ - --hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \ - --hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \ - --hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \ - --hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \ - --hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \ - --hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \ - --hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \ - --hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \ - --hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \ - --hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \ - --hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \ - --hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \ - --hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \ - --hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \ - 
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \ - --hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \ - --hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \ - --hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \ - --hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \ - --hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab -exceptiongroup==1.1.1 ; python_version >= "3.9" and python_version < "3.11" \ - --hash=sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e \ - --hash=sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785 -h11==0.12.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6 \ - --hash=sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042 -httpcore==0.13.7 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:036f960468759e633574d7c121afba48af6419615d36ab8ede979f1ad6276fa3 \ - --hash=sha256:369aa481b014cf046f7067fddd67d00560f2f00426e79569d99cb11245134af0 -httpx==0.18.2 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:979afafecb7d22a1d10340bafb403cf2cb75aff214426ff206521fc79d26408c \ - --hash=sha256:9f99c15d33642d38bce8405df088c1c4cfd940284b4290cacbfb02e64f4877c6 -idna==3.4 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 -indexed==1.3.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:6a0dd1f164db2eef6f9983bf1c5302d4b250a05b784f15c4c3f436d8778243d9 \ - --hash=sha256:a35db8644bef9273be710f5f06b5ffe71b8699d9212593cbae422b5e3c5f64c6 -pydantic==1.10.9 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d \ - --hash=sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a \ - --hash=sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc \ - --hash=sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3 \ - --hash=sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a \ - --hash=sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7 \ - --hash=sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf \ - --hash=sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f \ - --hash=sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91 \ - --hash=sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece \ - --hash=sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29 \ - --hash=sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60 \ - --hash=sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a \ - --hash=sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305 \ - --hash=sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766 \ - --hash=sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f \ - --hash=sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8 \ - --hash=sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276 \ - --hash=sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c \ 
- --hash=sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60 \ - --hash=sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896 \ - --hash=sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be \ - --hash=sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb \ - --hash=sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298 \ - --hash=sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4 \ - --hash=sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572 \ - --hash=sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d \ - --hash=sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82 \ - --hash=sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0 \ - --hash=sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4 \ - --hash=sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca \ - --hash=sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1 \ - --hash=sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f \ - --hash=sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f \ - --hash=sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6 \ - --hash=sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e -pyjwt==2.7.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:ba2b425b15ad5ef12f200dc67dd56af4e26de2331f965c5439994dad075876e1 \ - --hash=sha256:bd6ca4a3c4285c1a2d4349e5a035fdf8fb94e04ccd0fcbe6ba289dae9cc3e074 -requests-toolbelt==0.9.1 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 -requests==2.31.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 -rfc3986[idna2008]==1.5.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835 \ - --hash=sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97 -sniffio==1.3.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101 \ - --hash=sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384 -typing-extensions==4.7.0 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:5d8c9dac95c27d20df12fb1d97b9793ab8b2af8a3a525e68c80e21060c161771 \ - --hash=sha256:935ccf31549830cda708b42289d44b6f74084d616a00be651601a4f968e77c82 -urllib3==1.26.15 ; python_version >= "3.9" and python_version < "4.0" \ - --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \ - --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42 diff --git a/stubs/aioarango/__init__.pyi b/stubs/aioarango/__init__.pyi index 07a59cc..e44baca 100644 --- a/stubs/aioarango/__init__.pyi +++ b/stubs/aioarango/__init__.pyi @@ -1 +1,3 @@ +from aioarango.exceptions import * +from aioarango.http import * from aioarango.client import ArangoClient as ArangoClient diff --git a/tests/conftest.py b/tests/conftest.py index 34bdedc..8eb1546 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,18 +1,13 @@ import 
asyncio import logging import sys -from collections import defaultdict from typing import AsyncGenerator, TypeVar import pytest -import pytest_asyncio from aioarango import ArangoClient -from aioarango.database import Database, StandardDatabase +from aioarango.database import StandardDatabase from pydango.connection.utils import get_or_create_db -from pydango.query.expressions import NEW -from tests.data import DATA -from tests.queries import insert_return_new_query @pytest.fixture(scope="session", autouse=True) @@ -58,7 +53,7 @@ def format(self, record): for i in record.__dict__: if i not in exclude: - formatted_record += f" | {i}={record.__dict__[i]}" + formatted_record += f"\n{i}=\n{record.__dict__[i]}" return formatted_record @@ -66,8 +61,8 @@ def format(self, record): handler = logging.StreamHandler(stream=sys.stdout) handler.setFormatter(formatter) logging.getLogger("pydango").addHandler(handler) - # with caplog.at_level(logging.DEBUG, "pydango"): - # yield + with caplog.at_level(logging.DEBUG, "pydango"): + yield T = TypeVar("T") @@ -84,22 +79,9 @@ async def client() -> AsyncFixture[ArangoClient]: @pytest.fixture(scope="session") async def database(client: ArangoClient) -> AsyncFixture[StandardDatabase]: + # await (await client.db("_system")).delete_database("pydango") + # exit() + db = await get_or_create_db(client, "pydango") yield db # await (await client.db("_system")).delete_database("pydango") - - -@pytest_asyncio.fixture(scope="session", autouse=True) -async def populate(database: Database): - responses = defaultdict(list) - for coll in DATA: - await database.delete_collection(coll, ignore_missing=True) - await database.create_collection(coll) - for coll in DATA: - for i, row in enumerate(DATA[coll]): - aql, _, __ = insert_return_new_query(coll, row, NEW()) - response = await aql.execute(database) - next_ = await response.next() - DATA[coll][i] = next_ - responses[coll].append(next_) - yield diff --git a/tests/session/conftest.py b/tests/session/conftest.py new file mode 100644 index 0000000..6827e78 --- /dev/null +++ b/tests/session/conftest.py @@ -0,0 +1,16 @@ +import pytest +from aioarango.database import StandardDatabase +from pydiction import Matcher + +from pydango.connection.session import PydangoSession +from tests.conftest import AsyncFixture + + +@pytest.fixture(scope="package") +async def session(database: StandardDatabase) -> AsyncFixture[PydangoSession]: + yield PydangoSession(database) + + +@pytest.fixture(scope="package") +def matcher(): + return Matcher() diff --git a/tests/session/test_cities.py b/tests/session/test_cities.py index cc76369..d380e92 100644 --- a/tests/session/test_cities.py +++ b/tests/session/test_cities.py @@ -1,7 +1,11 @@ +import asyncio import datetime from typing import TYPE_CHECKING, Annotated import pytest +from _pytest.fixtures import FixtureRequest +from pydantic import Field +from pydiction import ANY_NOT_NONE, Matcher from pydango.connection.session import PydangoSession from pydango.index import PersistentIndex @@ -12,10 +16,13 @@ VertexCollectionConfig, VertexModel, ) -from tests.utils import assert_equals_dicts +from pydango.query.consts import ID + +# from tests.utils import find_dict_diffs, ANY_NOT_NONE +# from tests.utils2 import Matcher if TYPE_CHECKING: - from aioarango.database import StandardDatabase + pass class Visited(EdgeModel): @@ -65,15 +72,86 @@ class Collection(VertexCollectionConfig): # LivesIn.update_forward_refs() # Person.update_forward_refs() +def expected_person(person: Person): + expected = { + "_id": 
ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "name": person.name, + "age": person.age, + "lives_in": { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "name": "tlv", + "population": person.lives_in.population, + }, + "visited": [ + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "name": person.visited[0].name, + "population": person.visited[0].population, + }, + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "name": person.visited[1].name, + "population": person.visited[1].population, + }, + ], + "edges": { + "lives_in": { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_from": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "since": person.edges.lives_in.since, + }, + "visited": [ + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_from": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "on_date": person.edges.visited[0].on_date, + "rating": person.edges.visited[0].rating, + }, + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_from": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "on_date": person.edges.visited[1].on_date, + "rating": person.edges.visited[1].rating, + }, + ], + }, + } + return expected + +@pytest.fixture(scope="module", autouse=True) +async def init_collections(session: PydangoSession): + await asyncio.gather(*[session.init(coll) for coll in (Person, City, LivesIn, Visited)]) + +@pytest.mark.run(order=1) @pytest.mark.asyncio -async def test_save(database: "StandardDatabase"): - session = PydangoSession(database) - await session.init(Person) - await session.init(City) - await session.init(LivesIn) - await session.init(Visited) +async def test_save(matcher: Matcher, session: PydangoSession, request: FixtureRequest, person): + p = await session.save(person) + + request.config.cache.set("person_key", p.key) + matcher.assert_declarative_object(p.dict(by_alias=True, include_edges=True), expected_person(p)) + + +@pytest.fixture +def person(): p = Person( name="John", age=35, @@ -90,51 +168,15 @@ async def test_save(database: "StandardDatabase"): ], }, ) + return p - p = await session.save(p) - from unittest.mock import ANY - expected = { - "age": 35, - "edges": { - "lives_in": { - "from_": ANY, - "id": ANY, - "key": ANY, - "rev": ANY, - "since": datetime.datetime(2023, 7, 1, 18, 16, 38, 350095), - "to": ANY, - }, - "visited": [ - { - "from_": ANY, - "id": ANY, - "key": ANY, - "on_date": datetime.date(2023, 7, 1), - "rating": 10, - "rev": ANY, - "to": ANY, - }, - { - "from_": ANY, - "id": ANY, - "key": ANY, - "on_date": datetime.date(2023, 7, 1), - "rating": 10, - "rev": ANY, - "to": ANY, - }, - ], - }, - "id": ANY, - "key": ANY, - "lives_in": {"id": ANY, "key": ANY, "name": "tlv", "population": 123, "rev": ANY}, - "name": "John", - "rev": ANY, - "visited": [ - {"id": ANY, "key": ANY, "name": "New York", "population": 123, "rev": ANY}, - {"id": ANY, "key": ANY, "name": "Amsterdam", "population": 123, "rev": ANY}, - ], - } +class IdProjection(VertexModel): + id: str = Field(alias=ID) + - assert_equals_dicts(p.dict(include_edges=True), expected) +@pytest.mark.run(order=2) +async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest): + _id = request.config.cache.get("person_key", None) + result = await session.get(Person, _id, fetch_edges=True) + matcher.assert_declarative_object(result.dict(by_alias=True, include_edges=True), expected_person(result)) diff --git a/tests/session/test_family.py 
b/tests/session/test_family.py index d32561d..9933bf3 100644 --- a/tests/session/test_family.py +++ b/tests/session/test_family.py @@ -1,11 +1,12 @@ -import json -from typing import Annotated, Optional, Sequence +import asyncio +from typing import Annotated, Optional import pytest +from _pytest.fixtures import FixtureRequest +from pydiction import ANY_NOT_NONE, Contains, Matcher from pydango.connection.session import PydangoSession from pydango.index import PersistentIndex -from pydango.orm.encoders import jsonable_encoder from pydango.orm.models import ( EdgeCollectionConfig, EdgeModel, @@ -27,6 +28,22 @@ class Collection(VertexCollectionConfig): name = "people" indexes = [PersistentIndex(fields=["name"]), PersistentIndex(fields=["age"])] + @staticmethod + def brothers(e: dict, _: dict) -> bool: + return e["connection"] == "Brother" + + @staticmethod + def sisters(e: dict, _: dict) -> bool: + return e["connection"] == "Sister" + + @staticmethod + def father(e: dict, _: dict) -> bool: + return e["connection"] == "Father" + + @staticmethod + def mother(e: dict, _: dict) -> bool: + return e["connection"] == "Mother" + class Sibling(EdgeModel): connection: str @@ -103,11 +120,136 @@ def test_obj(): ) +@pytest.fixture(scope="module", autouse=True) +async def init_collections(session: PydangoSession): + await asyncio.gather(*[session.init(coll) for coll in (Person, Sibling)]) + + +def expected_person(person: Person): + return { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "age": person.age, + "brothers": [ + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "age": person.brothers[0].age, + "brothers": None, + "edges": None, + "father": None, + "mother": None, + "name": person.brothers[0].name, + "sisters": None, + } + ], + "edges": { + "brothers": [ + { + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "connection": person.edges.brothers[0].connection, + } + ], + "father": { + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "connection": person.edges.father.connection, + }, + "mother": { + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "connection": person.edges.mother.connection, + }, + "sisters": [ + { + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "connection": person.edges.sisters[0].connection, + }, + { + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "connection": person.edges.sisters[1].connection, + }, + ], + }, + "father": { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "age": person.father.age, + "brothers": None, + "edges": None, + "father": None, + "mother": None, + "name": person.father.name, + "sisters": None, + }, + "mother": { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "age": person.mother.age, + "brothers": None, + "edges": None, + "father": None, + "mother": None, + "name": person.mother.name, + "sisters": None, + }, + "name": person.name, + "sisters": Contains( + [ + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "age": person.sisters[0].age, + "brothers": None, + "edges": None, + "father": None, + "mother": None, + "name": person.sisters[0].name, + "sisters": None, + }, + { + "_id": 
ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "age": person.sisters[1].age, + "brothers": None, + "edges": None, + "father": None, + "mother": None, + "name": person.sisters[1].name, + "sisters": None, + }, + ] + ), + } + + +@pytest.mark.run(order=1) @pytest.mark.asyncio -async def test_save(database): - session = PydangoSession(database) - await session.init(Person) - await session.init(Sibling) +async def test_save(session: PydangoSession, request: FixtureRequest): fiona = Person(name="Fiona", age=12) jessica = Person(name="Jessica", age=12) ben = Person(name="Ben", age=45) @@ -158,29 +300,40 @@ async def test_save(database): ben.edges = brother_edges.copy() p = await session.save(john) + request.config.cache.set("person_key", p.key) - def traverse_recursive_fields(p, recursive_fields, visited): - if isinstance(p, Sequence): - for i in p: - traverse_recursive_fields(i, exclude, visited) - - else: - d = p.dict(include_edges=False, exclude=recursive_fields) - for recursive_field in recursive_fields: - attr = getattr(p, recursive_field) - - for i in attr: - d[recursive_field] = i.dict(include_edges=False, exclude=recursive_fields) - visited.add(id(i)) - if id(attr) in visited: - return d - visited.add(id(attr)) - traverse_recursive_fields(attr, exclude, visited) - return d - - exclude = { - "brothers", - "sisters", - } - a = traverse_recursive_fields(p, exclude, visited=set()) - print(json.dumps(jsonable_encoder(a), indent=2)) + # todo: there is currently a caveat with pydantic v1 with circular references, in pydantic v2 this is resolved + # def traverse_recursive_fields(p, recursive_fields, visited): + # if isinstance(p, Sequence): + # for i in p: + # traverse_recursive_fields(i, exclude, visited) + # + # else: + # d = p.dict(include_edges=False, by_alias=True, exclude=recursive_fields) + # for recursive_field in recursive_fields: + # attr = getattr(p, recursive_field) + # + # for i in attr: + # d[recursive_field] = i.dict(include_edges=False, by_alias=True, exclude=recursive_fields) + # visited.add(id(i)) + # if id(attr) in visited: + # return d + # visited.add(id(attr)) + # traverse_recursive_fields(attr, exclude, visited) + # return d + # exclude = { + # "brothers", + # "sisters", + # } + # actual = traverse_recursive_fields(p, exclude, visited=set()) + # person = expected_person(p) + # Matcher().assert_declarative_object(actual, person) + + +@pytest.mark.run(order=2) +async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest): + _id = request.config.cache.get("person_key", None) + result = await session.get(Person, _id, fetch_edges=True) + result_dict = result.dict(by_alias=True, include_edges=True) + person = expected_person(result) + matcher.assert_declarative_object(result_dict, person) diff --git a/tests/session/test_social_network.py b/tests/session/test_social_network.py index fdb88d9..a0de462 100644 --- a/tests/session/test_social_network.py +++ b/tests/session/test_social_network.py @@ -1,20 +1,20 @@ +import asyncio import datetime -from typing import Annotated, List, Optional, Type, Union -from unittest.mock import ANY +from typing import Annotated, List, Optional, Union import pytest -from aioarango.database import StandardDatabase +from _pytest.fixtures import FixtureRequest +from pydiction import ANY_NOT_NONE, Contains, Matcher from pydango.connection.session import PydangoSession from pydango.orm.models import ( - BaseArangoModel, EdgeCollectionConfig, EdgeModel, Relation, VertexCollectionConfig, VertexModel, ) -from 
tests.utils import assert_equals_dicts +from pydango.orm.types import ArangoModel class Post(VertexModel): @@ -22,7 +22,7 @@ class Post(VertexModel): content: str # todo: make this work # author: Annotated["User", BackRelation["Authorship"]] - comments: Annotated[Optional[List["Comment"]], Relation["PostComment"]] = None + comments: Annotated[Optional[List["Comment"]], Relation["Commentary"]] = None class Collection(VertexCollectionConfig): name = "posts" @@ -41,7 +41,7 @@ class User(VertexModel): age: int friends: Annotated[Optional[List["User"]], Relation["Friendship"]] = None posts: Annotated[Optional[List["Post"]], Relation["Authorship"]] = None - comments: Annotated[Optional[List["Comment"]], Relation["Commentary"]] = None + comments: Annotated[Optional[List["Comment"]], Relation["Commentary"]] likes: Annotated[Optional[List[Union["Post", "Comment"]]], Relation["Like"]] = None class Collection(VertexCollectionConfig): @@ -69,13 +69,6 @@ class Collection(EdgeCollectionConfig): name = "commentaries" -class PostComment(EdgeModel): - connection: str - - class Collection(EdgeCollectionConfig): - name = "post_comments" - - class Like(EdgeModel): liked_at: datetime.datetime @@ -87,204 +80,111 @@ class Collection(EdgeCollectionConfig): Comment.update_forward_refs() User.update_forward_refs() -user1 = User(name="John", email="john@example.com", age=25) -user2 = User(name="Jane", email="jane@example.com", age=30) -user3 = User(name="Alice", email="alice@example.com", age=28) -user4 = User(name="Bob", email="bob@example.com", age=32) - -friendship1 = Friendship(since=datetime.date(2020, 1, 1)) -friendship2 = Friendship(since=datetime.date(2021, 3, 15)) -friendship3 = Friendship(since=datetime.date(2022, 5, 10)) -friendship4 = Friendship(since=datetime.date(2023, 2, 20)) - -authorship1 = Authorship(created_at=datetime.datetime.now()) -authorship2 = Authorship(created_at=datetime.datetime.now()) -authorship3 = Authorship(created_at=datetime.datetime.now()) - -commentary1 = Commentary(commented_at=datetime.datetime.now()) -commentary2 = Commentary(commented_at=datetime.datetime.now()) - -post1 = Post(title="First Post", content="This is my first post!") -post2 = Post(title="Second Post", content="This is my second post!") - -comment1 = Comment(text="Great post!") -comment2 = Comment(text="I enjoyed reading this.") - -like1 = Like(liked_at=datetime.datetime.now()) -like2 = Like(liked_at=datetime.datetime.now()) -like3 = Like(liked_at=datetime.datetime.now()) - -user1.friends = [user2, user3, user4] -user1.posts = [post1] -user1.comments = [comment1, comment2] -user1.likes = [comment2] - -# user2.friends = [user1, user3] -# user2.posts = [post1] -# user2.comments = [comment1] -# user2.likes = [post1] -# -# user3.friends = [user1, user2] -# user3.posts = [post2] -# user3.comments = [comment2] -# user3.likes = [comment1] -# -# user4.friends = [user1] -# user4.posts = [post2] -# user4.comments = [comment2] -# user4.likes = [comment1, comment2] -# -user1.edges = { - User.friends: [friendship1, friendship2, friendship3], - User.comments: [commentary1, commentary2], - User.posts: [authorship1], - User.likes: [like1], -} - -# user2.edges = { -# User.friends: [friendship1, friendship2], -# User.posts: [authorship1], -# User.comments: [commentary1], -# User.likes: [like1], -# } -# -# user3.edges = { -# User.friends: [friendship1, friendship3], -# User.posts: [authorship2], -# User.comments: [commentary2], -# User.likes: [comment1], -# } -# -# user4.edges = { -# User.friends: [friendship4], -# User.posts: 
[authorship2], -# User.comments: [comment2], -# User.likes: [comment1, comment2], -# } - -# post1.author = user1 -post1.comments = [comment1] - - -# post2.author = user1 -# post2.comments = [comment2] - -# comment1.author = user1 -# comment1.post = post1 - -# comment2.author = user3 -# comment2.post = post2 - -# comment1.likes = [like1, like2] -# comment2.likes = [like3] +@pytest.fixture(scope="module", autouse=True) +async def init_collections(session: PydangoSession): + await asyncio.gather( + *[session.init(coll) for coll in (Post, Comment, User, Friendship, Authorship, Commentary, Like)] + ) + + +@pytest.fixture() +def user(): + user1 = User(name="John", email="john@example.com", age=25) + user2 = User(name="Alice", email="alice@example.com", age=21) + post1 = Post(title="First Post", content="This is my first post!") + comment1 = Comment( + text="Great post!", + ) + + now = datetime.datetime.now() + authorship1 = Authorship(created_at=now) + commentary1 = Commentary(commented_at=now) + like1 = Like(liked_at=now) + like2 = Like(liked_at=now) + + user1.likes = [comment1, post1] + user1.comments = [comment1] + user1.posts = [post1] + user1.friends = [user2] + + user1.edges = { + User.comments: [commentary1], + User.posts: [authorship1], + User.likes: [like1, like2], + User.friends: [Friendship(since=now)], + } -@pytest.mark.asyncio -async def test_save(database: StandardDatabase): - session = PydangoSession(database) - models: list[Type[BaseArangoModel]] = [] - models += VertexModel.__subclasses__() - models += EdgeModel.__subclasses__() - for i in models: - await session.init(i) - - await session.save(user1) - - expected = { - "_id": ANY, - "_key": ANY, - "_rev": ANY, + post1.comments = [comment1] + post1.edges = {Post.comments: [commentary1]} + return user1 + + +def expected_user_depth1(user: VertexModel): + return { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "name": "John", "age": 25, - "comments": [ - {"_id": ANY, "_key": ANY, "_rev": ANY, "text": "Great post!"}, - {"_id": ANY, "_key": ANY, "_rev": ANY, "text": "I enjoyed reading this."}, - ], + "comments": [{"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"}], "edges": { "comments": [ { - "_from": ANY, - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "_to": ANY, - "commented_at": datetime.datetime(2023, 7, 1, 18, 53, 20, 121092), - }, - { - "_from": ANY, - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "_to": ANY, - "commented_at": datetime.datetime(2023, 7, 1, 18, 53, 20, 121098), + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "commented_at": user.edges.comments[0].commented_at, }, ], "friends": [ { - "_from": ANY, - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "_to": ANY, - "since": datetime.date(2020, 1, 1), - }, + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "since": user.edges.friends[0].since, + } + ], + "likes": [ { - "_from": ANY, - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "_to": ANY, - "since": datetime.date(2021, 3, 15), + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "liked_at": user.edges.likes[0].liked_at, }, { - "_from": ANY, - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "_to": ANY, - "since": datetime.date(2022, 5, 10), + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + 
"liked_at": user.edges.likes[1].liked_at, }, ], - "likes": [ - { - "_from": ANY, - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "_to": ANY, - "liked_at": datetime.datetime(2023, 7, 1, 18, 53, 20, 121134), - } - ], "posts": [ { - "_from": ANY, - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "_to": ANY, - "created_at": datetime.datetime(2023, 7, 1, 18, 53, 20, 121070), + "_from": ANY_NOT_NONE, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "_to": ANY_NOT_NONE, + "created_at": user.edges.posts[0].created_at, } ], }, "email": "john@example.com", "friends": [ { - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "age": 30, - "comments": None, - "edges": None, - "email": "jane@example.com", - "friends": None, - "likes": None, - "name": "Jane", - "posts": None, - }, - { - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "age": 28, + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "age": 21, "comments": None, "edges": None, "email": "alice@example.com", @@ -292,32 +192,94 @@ async def test_save(database: StandardDatabase): "likes": None, "name": "Alice", "posts": None, - }, - { - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "age": 32, - "comments": None, - "edges": None, - "email": "bob@example.com", - "friends": None, - "likes": None, - "name": "Bob", - "posts": None, - }, + } ], - "likes": [{"_id": ANY, "_key": ANY, "_rev": ANY, "text": "I enjoyed reading this."}], - "name": "John", + "likes": Contains( + [ + {"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"}, + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "content": "This is my first post!", + "title": "First Post", + }, + ] + ), "posts": [ { - "_id": ANY, - "_key": ANY, - "_rev": ANY, - "comments": [{"_id": "comments/62920", "_key": "62920", "_rev": "_gPGmmm2--D", "text": "Great post!"}], + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "comments": None, "content": "This is my first post!", "title": "First Post", } ], } - assert_equals_dicts(expected, user2.dict(by_alias=True, include_edges=True)) + + +def expected_user_depth2(user: ArangoModel): + user = expected_user_depth1(user) + user.update( + { + "likes": Contains( + [ + {"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"}, + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "content": "This is my first post!", + "title": "First Post", + "comments": [ + {"text": "Great post!", "_id": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "_key": ANY_NOT_NONE} + ], + }, + ], + ), + "posts": [ + { + "_id": ANY_NOT_NONE, + "_key": ANY_NOT_NONE, + "_rev": ANY_NOT_NONE, + "comments": [ + {"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"} + ], + "content": "This is my first post!", + "title": "First Post", + } + ], + } + ) + return user + + +@pytest.mark.run(order=1) +@pytest.mark.asyncio +async def test_save(matcher: Matcher, session: PydangoSession, request: FixtureRequest, user: User): + await session.save(user) + request.config.cache.set("user_key", user.key) + matcher.assert_declarative_object(user.dict(by_alias=True, include_edges=True), expected_user_depth2(user)) + + +@pytest.mark.run(order=2) +async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest): + _id = request.config.cache.get("user_key", None) + result = await session.get(User, _id, fetch_edges=True, depth=range(1, 1)) + expected_user = expected_user_depth1(result) + 
matcher.assert_declarative_object( + result.dict(by_alias=True, include_edges=True), + expected_user, + check_order=False, + ) + + +@pytest.mark.run(order=2) +async def test_get2(matcher: Matcher, session: PydangoSession, request: FixtureRequest): + _id = request.config.cache.get("user_key", None) + result = await session.get(User, _id, fetch_edges=True, depth=range(1, 2)) + + result_dict = result.dict(by_alias=True, include_edges=True) + depth = expected_user_depth2(result) + matcher.assert_declarative_object(result_dict, depth, check_order=False) diff --git a/tests/test_queries/__init__.py b/tests/test_queries/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_queries/conftest.py b/tests/test_queries/conftest.py new file mode 100644 index 0000000..f405398 --- /dev/null +++ b/tests/test_queries/conftest.py @@ -0,0 +1,24 @@ +from collections import defaultdict + +import pytest_asyncio +from aioarango.database import Database + +from pydango.query.expressions import NEW +from tests.queries import insert_return_new_query +from tests.test_queries.data import DATA + + +@pytest_asyncio.fixture(scope="package", autouse=True) +async def populate(database: Database): + responses = defaultdict(list) + for coll in DATA: + await database.delete_collection(coll, ignore_missing=True) + await database.create_collection(coll) + for coll in DATA: + for i, row in enumerate(DATA[coll]): + aql, _, __ = insert_return_new_query(coll, row, NEW()) + response = await aql.execute(database) + next_ = await response.next() + DATA[coll][i] = next_ + responses[coll].append(next_) + yield diff --git a/tests/data.py b/tests/test_queries/data.py similarity index 100% rename from tests/data.py rename to tests/test_queries/data.py diff --git a/tests/ecommerce_queries.py b/tests/test_queries/ecommerce_queries.py similarity index 100% rename from tests/ecommerce_queries.py rename to tests/test_queries/ecommerce_queries.py diff --git a/tests/test_ecommerce.py b/tests/test_queries/test_ecommerce.py similarity index 96% rename from tests/test_ecommerce.py rename to tests/test_queries/test_ecommerce.py index 8911d93..322ff96 100644 --- a/tests/test_ecommerce.py +++ b/tests/test_queries/test_ecommerce.py @@ -2,8 +2,8 @@ from aioarango.database import Database from pydango.connection.utils import deplete_cursor -from tests.data import DATA -from tests.ecommerce_queries import ( +from tests.test_queries.data import DATA +from tests.test_queries.ecommerce_queries import ( get_ordered_products_with_reviews_query, get_product_orders_reviews_query, get_product_reviews_query, diff --git a/tests/test_queries_integration.py b/tests/test_queries/test_queries_integration.py similarity index 97% rename from tests/test_queries_integration.py rename to tests/test_queries/test_queries_integration.py index 92e276c..a943c06 100644 --- a/tests/test_queries_integration.py +++ b/tests/test_queries/test_queries_integration.py @@ -5,8 +5,8 @@ from aioarango.database import Database from pydango.connection.utils import deplete_cursor, iterate_cursor -from tests.data import DATA from tests.queries import multiple_filters_query, projection_complex_query, simple_query +from tests.test_queries.data import DATA @pytest.mark.asyncio From 0c59e4abca6f0cfb1913b7e52b2d3f523dd129ca Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Sat, 23 Sep 2023 13:07:30 +0300 Subject: [PATCH 07/19] mypy passed --- .pre-commit-config.yaml | 26 +- poetry.lock | 116 ++++--- pydango/connection/session.py | 174 
+++++------ pydango/connection/utils.py | 4 +- pydango/orm/__init__.py | 3 + pydango/orm/fields.py | 69 ----- pydango/orm/models.py | 431 +++++++++++++++++++-------- pydango/orm/proxy.py | 68 ----- pydango/orm/query.py | 22 +- pydango/orm/types.py | 13 +- pydango/orm/utils.py | 88 ------ pydango/query/operations.py | 2 +- pydango/query/query.py | 14 +- pydango/utils.py | 17 -- pyproject.toml | 2 +- tests/conftest.py | 2 +- tests/session/test_cities.py | 13 +- tests/session/test_family.py | 15 +- tests/session/test_social_network.py | 28 +- tests/test_orm_query.py | 3 +- tests/utils.py | 39 --- 21 files changed, 553 insertions(+), 596 deletions(-) delete mode 100644 tests/utils.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 122d177..28c17c7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ default_stages: - commit repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.7.0 + rev: v3.12.0 hooks: - id: pyupgrade args: @@ -17,15 +17,15 @@ repos: - --profile=black - - repo: https://github.com/psf/black - rev: 23.3.0 + - repo: https://github.com/psf/black-pre-commit-mirror + rev: 23.7.0 hooks: - id: black args: - --config=pyproject.toml - repo: https://github.com/myint/autoflake - rev: v2.1.1 + rev: v2.2.1 hooks: - id: autoflake exclude: .*/__init__.py @@ -37,7 +37,7 @@ repos: - --remove-unused-variables - repo: https://github.com/pre-commit/mirrors-autopep8 - rev: v2.0.2 + rev: v2.0.4 hooks: - id: autopep8 @@ -48,13 +48,13 @@ repos: - id: python-check-blanket-noqa - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: 'v0.0.275' + rev: 'v0.0.290' hooks: - id: ruff args: - --config - ./pyproject.toml # args: -# - --fix + - --fix - repo: https://github.com/PyCQA/bandit rev: 1.7.5 @@ -70,14 +70,18 @@ repos: - toml - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.4.0 + rev: v1.5.1 hooks: - id: mypy + args: + - --show-traceback additional_dependencies: - pydantic==1.10.10 - mypy-extensions -# - pytest -# - httpx + - pytest~=7.3.1 + - httpx~=0.18.2 + - pydiction~=0.1.0 + - pytest-asyncio~=0.21.0 # - repo: https://github.com/jendrikseipp/vulture @@ -103,7 +107,7 @@ repos: - repo: https://github.com/python-poetry/poetry - rev: '1.5.0' + rev: '1.4.0' hooks: - id: poetry-check - id: poetry-lock diff --git a/poetry.lock b/poetry.lock index 31b88ea..b367f1c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. [[package]] name = "aioarango" version = "1.0.0" description = "Asynchronous driver for ArangoDB" +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -20,6 +21,7 @@ requests-toolbelt = ">=0.9.1,<0.10.0" name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -41,6 +43,7 @@ trio = ["trio (<0.22)"] name = "black" version = "23.3.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -90,6 +93,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -101,6 +105,7 @@ files = [ name = "cfgv" version = "3.3.1" description = "Validate configuration and produce human readable error messages." +category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -112,6 +117,7 @@ files = [ name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -196,6 +202,7 @@ files = [ name = "click" version = "8.1.4" description = "Composable command line interface toolkit" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -210,6 +217,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -221,6 +229,7 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -293,6 +302,7 @@ toml = ["tomli"] name = "distlib" version = "0.3.6" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -304,6 +314,7 @@ files = [ name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -318,6 +329,7 @@ test = ["pytest (>=6)"] name = "filelock" version = "3.12.2" description = "A platform independent file lock." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -333,6 +345,7 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -347,6 +360,7 @@ python-dateutil = ">=2.7" name = "h11" version = "0.12.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -358,6 +372,7 @@ files = [ name = "httpcore" version = "0.13.7" description = "A minimal low-level HTTP client." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -366,9 +381,9 @@ files = [ ] [package.dependencies] -anyio = "==3.*" +anyio = ">=3.0.0,<4.0.0" h11 = ">=0.11,<0.13" -sniffio = "==1.*" +sniffio = ">=1.0.0,<2.0.0" [package.extras] http2 = ["h2 (>=3,<5)"] @@ -377,6 +392,7 @@ http2 = ["h2 (>=3,<5)"] name = "httpx" version = "0.18.2" description = "The next generation HTTP client." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -391,13 +407,14 @@ rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" [package.extras] -brotli = ["brotlicffi (==1.*)"] -http2 = ["h2 (==3.*)"] +brotli = ["brotlicffi (>=1.0.0,<2.0.0)"] +http2 = ["h2 (>=3.0.0,<4.0.0)"] [[package]] name = "identify" version = "2.5.24" description = "File identification library for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -412,6 +429,7 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -423,6 +441,7 @@ files = [ name = "indexed" version = "1.3.0" description = "A dictionary that is indexed by insertion order." +category = "main" optional = false python-versions = "*" files = [ @@ -434,6 +453,7 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -445,6 +465,7 @@ files = [ name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -460,37 +481,39 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "mypy" -version = "1.4.1" +version = "1.5.1" description = "Optional static typing for Python" +category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = 
"mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, ] [package.dependencies] @@ -501,13 +524,13 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -519,6 +542,7 @@ files = [ name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -533,6 +557,7 @@ setuptools = "*" name = "packaging" version = "23.1" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -544,6 +569,7 @@ files = [ name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -555,6 +581,7 @@ files = [ name = "platformdirs" version = "3.8.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -570,6 +597,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -585,6 +613,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pre-commit" version = "3.3.3" description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -603,6 +632,7 @@ virtualenv = ">=20.10.0" name = "pydantic" version = "1.10.10" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -655,6 +685,7 @@ email = ["email-validator (>=1.0.3)"] name = "pydiction" version = "0.1.0" description = "" +category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -666,6 +697,7 @@ files = [ name = "pyjwt" version = "2.7.0" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -683,6 +715,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -705,6 +738,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.21.0" description = "Pytest support for asyncio" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -723,6 +757,7 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -737,6 +772,7 @@ six = ">=1.5" name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -786,6 +822,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -807,6 +844,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-toolbelt" version = "0.9.1" description = "A utility belt for advanced users of python-requests" +category = "main" optional = false python-versions = "*" files = [ @@ -821,6 +859,7 @@ requests = ">=2.0.1,<3.0.0" name = "rfc3986" version = "1.5.0" description = "Validating URI References per RFC 3986" +category = "main" optional = false python-versions = "*" files = [ @@ -838,6 +877,7 @@ idna2008 = ["idna"] name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -854,6 +894,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -865,6 +906,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -876,6 +918,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -887,6 +930,7 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -898,6 +942,7 @@ files = [ name = "urllib3" version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -914,6 +959,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "virtualenv" version = "20.23.1" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -933,4 +979,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "8aec742783e1c98e1d774e53b914636d541a068b2ee876dca6c2354a17d2bc57" +content-hash = "d7f5ad86c1566899c8641217554daa6f668ff060781525dcc3c3422650080105" diff --git a/pydango/connection/session.py b/pydango/connection/session.py index 9777ca6..471ef9a 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -3,17 +3,28 @@ import sys from collections import OrderedDict, defaultdict, namedtuple from enum import Enum -from typing import Any, Iterator, Optional, Type, Union, cast, get_args, get_origin +from typing import ( + Any, + DefaultDict, + Iterator, + Optional, + Sequence, + Type, + Union, + cast, + get_args, + get_origin, +) from aioarango import AQLQueryExecuteError from pydantic import BaseModel -from pydantic.fields import ModelField from pydango.connection import DALI_SESSION_KW from pydango.orm.relations import LIST_TYPES -from pydango.orm.types import ArangoModel, TVertexModel +from pydango.query.query import TraverseIterators from pydango.query.utils import new -from pydango.utils import get_collection_from_document + +# from pydango.utils import get_collection_from_document if sys.version_info >= (3, 10): from typing import TypeAlias @@ -27,10 +38,16 @@ from pydango import index from pydango.connection.utils import get_or_create_collection from pydango.orm.consts import EDGES -from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel -from pydango.orm.proxy import LazyProxy +from pydango.orm.models import ( + ArangoModel, + BaseArangoModel, + EdgeModel, + LazyProxy, + TVertexModel, + VertexModel, + convert_edge_data_to_valid_kwargs, +) from pydango.orm.query import ORMQuery, for_ -from pydango.orm.utils import convert_edge_data_to_valid_kwargs from pydango.query import AQLQuery from pydango.query.consts import FROM, ID, KEY, REV, TO from pydango.query.expressions import IteratorExpression, VariableExpression @@ -41,6 +58,19 @@ logger = logging.getLogger(__name__) +def get_collection_from_document(obj: Union[str, dict, "ArangoModel"]) -> str: + _obj = None + if isinstance(obj, dict): + _obj = obj.get(ID) + elif isinstance(obj, BaseArangoModel): + _obj = obj.id + + if not _obj or not isinstance(_obj, str): + raise ValueError("cannot parse collection") + + return _obj.partition("/")[0] + + class DocumentNotFoundError(Exception): pass @@ -231,10 +261,10 @@ def _bind_edge(from_model, instance, rels, to_model, vertex_collections, vertex_ CollectionUpsertOptions: TypeAlias = dict[Union[str, Type["BaseArangoModel"]], UpsertOptions] ModelFieldMapping: TypeAlias = dict[int, defaultdict[str, list[tuple[int, int]]]] VerticesIdsMapping: TypeAlias = dict[Type[VertexModel], dict[int, int]] -EdgesIdsMapping: TypeAlias = defaultdict[Type[EdgeModel], defaultdict[int, dict[int, int]]] +EdgesIdsMapping: TypeAlias = dict[Type[EdgeModel], dict[int, dict[int, int]]] -def traverse2( +def db_traverse( model: VertexModel, visited: set, result, @@ -276,10 +306,10 @@ def traverse2( if isinstance(relation_doc, list): z = zip(relation_doc, 
getattr(model.edges, field, [])) for vertex_doc, edge_doc in z: - traverse2(vertex_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) + db_traverse(vertex_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) else: getattr(model.edges, field) - traverse2(relation_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) + db_traverse(relation_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) else: # todo: insert join relation pass @@ -299,7 +329,7 @@ def _set_edge_operational_fields(result, model_id, edges_ids, i): EdgeCollectionsMapping: TypeAlias = dict[Type[EdgeModel], IndexedOrderedDict[list[EdgeModel]]] EdgeVerticesIndexMapping = dict[ - Type[EdgeModel], dict[tuple[Type[VertexModel], Type[VertexModel]], dict[int, list[int]]] + Type[EdgeModel], dict[int, dict[tuple[Type[VertexModel], Type[VertexModel]], list[int]]] ] VertexCollectionsMapping = dict[Type[VertexModel], IndexedOrderedDict[BaseArangoModel]] @@ -354,52 +384,7 @@ def _add_model_field_to_mapping(model, field, relation_doc, edge_doc): else: model_mapping[field] = {"v": id(relation_doc), "e": id(edge_doc)} - def traverse_old(model: VertexModel, visited: set[int]): - if id(model) in visited: - return - - if isinstance(model, VertexModel): - vertex_collections.setdefault(model.__class__, IndexedOrderedDict())[id(model)] = model - visited.add(id(model)) - - models: tuple[Type[VertexModel], Optional[Type[EdgeModel]]] - relations = list(_group_by_relation2(model)) - if relations: - for models, field in relations: - edge_cls: Optional[Type[EdgeModel]] = models[1] - relation_doc: ModelField = getattr(model, field) - if not relation_doc: - _add_model_field_to_mapping(model, field, None, None) - continue - - if isinstance(relation_doc, LazyProxy): - relation_doc = relation_doc.__instance__ - - if model.edges: - if isinstance(model.edges, dict): - convert_edge_data_to_valid_kwargs(model.edges) - # todo: this initiate the class edge model so it validates the edges, should we do that? 
- model.edges = model.__fields__[EDGES].type_(**model.edges) - - if isinstance(relation_doc, list): - if len(getattr(model.edges, field, [])) != len(relation_doc): - raise AssertionError(f"{model.__class__.__name__} vertex edges {field} number mismatch") - z = zip(relation_doc, getattr(model.edges, field, [])) - for vertex_doc, edge_doc in z: - _prepare_relation(field, model, edge_cls, edge_doc, vertex_doc) - traverse_old(vertex_doc, visited) - else: - edge_doc = getattr(model.edges, field) - _prepare_relation(field, model, edge_cls, edge_doc, relation_doc) - traverse_old(relation_doc, visited) - else: - # todo: insert join relation - pass - else: - pass - # model_fields_mapping[id(model)] = {} - - def traverse_new(model: VertexModel, visited: set[int]): + def pydantic_traverse(model: TVertexModel, visited: set[int]): nonlocal edge_collections if id(model) in visited: return @@ -411,7 +396,7 @@ def traverse_new(model: VertexModel, visited: set[int]): relations = list(_group_by_relation2(model)) if relations: for relation_group in relations: - relation_doc: VertexModel = getattr(model, relation_group.field) + relation_doc: Union[TVertexModel, None] = getattr(model, relation_group.field) if not relation_doc: _add_model_field_to_mapping(model, relation_group.field, None, None) continue @@ -419,7 +404,7 @@ def traverse_new(model: VertexModel, visited: set[int]): edge_cls: Optional[Type[EdgeModel]] = relation_group.via_model if isinstance(relation_doc, LazyProxy): - relation_doc = relation_doc.__instance__ + relation_doc = cast(VertexModel, relation_doc.__instance__) if model.edges: if isinstance(model.edges, dict): @@ -432,15 +417,18 @@ def traverse_new(model: VertexModel, visited: set[int]): raise AssertionError( f"{model.__class__.__name__} vertex edges {relation_group.field} number mismatch" ) - z = zip(relation_doc, getattr(model.edges, relation_group.field, [])) - for vertex_doc, edge_doc in z: + vertex_doc: VertexModel + edge_doc: EdgeModel + for vertex_doc, edge_doc in zip( + relation_doc, getattr(model.edges, relation_group.field, []) + ): _prepare_relation(relation_group.field, model, edge_cls, edge_doc, vertex_doc) - traverse_new(vertex_doc, visited) + pydantic_traverse(vertex_doc, visited) else: edge_doc = getattr(model.edges, relation_group.field) _prepare_relation(relation_group.field, model, edge_cls, edge_doc, relation_doc) - traverse_new(relation_doc, visited) + pydantic_traverse(relation_doc, visited) else: # todo: insert join relation pass @@ -448,7 +436,7 @@ def traverse_new(model: VertexModel, visited: set[int]): pass # model_fields_mapping[id(model)] = {} - traverse_new(document, _visited) + pydantic_traverse(document, _visited) return edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping @classmethod @@ -476,13 +464,6 @@ def _build_graph_query( edge_let_queries = {} - def invert_edge_index(d: dict): - r = {} - for k, v in d.items(): - for nested_key, nested_value in v.items(): - r.setdefault(nested_key, {})[k] = nested_value - return r - for e, coll in edge_vertex_index.items(): counter = 0 edge_vars = [] @@ -494,6 +475,8 @@ def invert_edge_index(d: dict): edge_var_name = f"{e.Collection.name}_{j + 1}" edge = VariableExpression(edge_var_name) query.let(edge, edge_collections[e][instance]) + from_model: Type[VertexModel] + to_model: Type[VertexModel] for k, ((from_model, to_model), rels) in enumerate(mapping.items()): from_ = vertex_collections[from_model].keys().index(instance) new_rels = [vertex_collections[to_model].keys().index(x) for x in rels] 
@@ -540,17 +523,17 @@ def invert_edge_index(d: dict): ), ) - async def init(self, model: Type[BaseArangoModel]): + async def init(self, model: type[ArangoModel]): collection = await get_or_create_collection(self.database, model) await self.create_indexes(collection, model) @staticmethod - async def create_indexes(collection, model): + async def create_indexes(collection: StandardCollection, model: Type[ArangoModel]): if model.Collection.indexes: logger.debug("creating indexes", extra=dict(indexes=model.Collection.indexes, model=model)) for i in model.Collection.indexes or []: if isinstance(i, dict): - await index.mapping[i.__class__](collection, **i) + await index.mapping[i["type"]](collection, **i) else: await index.mapping[i.__class__](collection, **dataclasses.asdict(i)) @@ -584,13 +567,13 @@ async def save( else: result = await cursor.next() if model_fields_mapping: - traverse2(cast(VertexModel, document), set(), result, model_fields_mapping, vertices_ids, edge_ids) + db_traverse(cast(VertexModel, document), set(), result, model_fields_mapping, vertices_ids, edge_ids) logger.debug("cursor stats", extra=cursor.statistics()) return document async def get( self, - model: Type[BaseArangoModel], + model: Type[ArangoModel], key: str, should_raise: bool = False, fetch_edges: Union[set[str], bool] = False, @@ -598,7 +581,7 @@ async def get( fetch_path: bool = False, depth: range = range(1, 1), prune: bool = False, - projection: Optional[Type[BaseArangoModel]] = None, + projection: Optional[Type[ArangoModel]] = None, return_raw: bool = False, ) -> Optional[Union[TVertexModel, ArangoModel]]: collection = model.Collection.name @@ -606,36 +589,43 @@ async def get( d = Document(_id) doc = VariableExpression() main_query = ORMQuery().let(doc, d) - return_ = doc + return_: Union[VariableExpression, dict[str, VariableExpression]] = doc + edges: Sequence[str] if fetch_edges: if isinstance(fetch_edges, set): - edges = fetch_edges + edges = cast(Sequence[str], fetch_edges) else: - edges = tuple({i.via_model.Collection.name for i in model.__relationships__.values()}) + _edges = [] + for i in model.__relationships__.values(): + if i.via_model: + _edges.append(i.via_model.Collection.name) + edges = _edges v = IteratorExpression("v") iterators = [v] e = IteratorExpression("e") iterators.append(e) - # if fetch_edges_data: if fetch_path: p = IteratorExpression("p") iterators.append(p) traversal_result = VariableExpression() + + traversal_iterators: TraverseIterators = cast(TraverseIterators, tuple(iterators)) traversal = ( ORMQuery() - .traverse(tuple(iterators), edges, _id, depth, TraversalDirection.OUTBOUND) + .traverse(traversal_iterators, edges, _id, depth, TraversalDirection.OUTBOUND) .return_({"v": iterators[0], "e": iterators[1]}) ) main_query.let(traversal_result, traversal) - return_ = {"doc": return_, "edges": traversal_result} + return_ = {"doc": doc, "edges": traversal_result} main_query.return_(return_) # logger.debug(str(main_query)) cursor = await main_query.execute(self.database) result = await cursor.next() - result, recursive = construct(result, model) + if issubclass(model, VertexModel): + result, recursive = construct(result, model) if return_raw: return result @@ -710,8 +700,8 @@ def construct(traversal_result: dict, model: Type[VertexModel]): # new_d[EDGES][func.__name__] = e # break - vertices = defaultdict(dict) - edges = {} + vertices: DefaultDict[str, dict[str, Any]] = defaultdict(dict) + edges: dict[str, dict[tuple[str, str], Union[list[dict[str, Any]], dict[str, Any]]]] = {} if doc: 
vertices[doc[ID]] = doc edge_count = 0 @@ -724,13 +714,11 @@ def construct(traversal_result: dict, model: Type[VertexModel]): if coordinate not in edges: edges.setdefault(edge_coll, {})[coordinate] = e - - elif not isinstance(edges[coordinate], list): - edges.setdefault(edge_coll, {})[coordinate] = [edges[coordinate]] - edges[edge_coll][coordinate].append(e) - + elif isinstance(edges[edge_coll][coordinate], list): + cast(list, edges[edge_coll][coordinate]).append(e) else: - edges[edge_coll][coordinate].append(e) + edges.setdefault(edge_coll, {})[coordinate] = [cast(dict[str, Any], edges[edge_coll][coordinate])] + cast(list, edges[edge_coll][coordinate]).append(e) edge_count += 1 if len(traversal_result["edges"]) != edge_count: diff --git a/pydango/connection/utils.py b/pydango/connection/utils.py index 7116ffa..9f05869 100644 --- a/pydango/connection/utils.py +++ b/pydango/connection/utils.py @@ -7,7 +7,7 @@ from aioarango.collection import StandardCollection from aioarango.database import StandardDatabase - from pydango.orm.models import BaseArangoModel + from pydango.orm.models import ArangoModel, BaseArangoModel @overload @@ -23,7 +23,7 @@ async def get_or_create_collection(db: "StandardDatabase", model: str, *, edge=N async def get_or_create_collection( - db: "StandardDatabase", model: Union[str, Type["BaseArangoModel"]], *, edge: Optional[bool] = None + db: "StandardDatabase", model: Union[str, Type["ArangoModel"]], *, edge: Optional[bool] = None ) -> "StandardCollection": if isinstance(model, str): collection_name = model diff --git a/pydango/orm/__init__.py b/pydango/orm/__init__.py index e69de29..2cfe4e9 100644 --- a/pydango/orm/__init__.py +++ b/pydango/orm/__init__.py @@ -0,0 +1,3 @@ +from .models import ArangoModel, EdgeModel, TEdge, TVertexModel, VertexModel + +__all__ = ["VertexModel", "EdgeModel", "ArangoModel", "TVertexModel", "TEdge"] diff --git a/pydango/orm/fields.py b/pydango/orm/fields.py index da69d01..e69de29 100644 --- a/pydango/orm/fields.py +++ b/pydango/orm/fields.py @@ -1,69 +0,0 @@ -# from __future__ import annotations - -from typing import TYPE_CHECKING, Generic, Optional, Type, TypeVar, Union, cast - -from pydango.connection import DALI_SESSION_KW -from pydango.orm.proxy import LazyProxy -from pydango.query.expressions import ( - Expression, - FieldExpression, - IteratorExpression, - VariableExpression, -) - -if TYPE_CHECKING: - from pydantic.fields import ModelField # type: ignore[attr-defined] - - from pydango.orm.models import BaseArangoModel, Relationship - from pydango.orm.types import ArangoModel - from pydango.query import AQLQuery - -FieldType = TypeVar("FieldType") - - -class ModelFieldExpression(FieldExpression): - def __init__(self, field: Union[str, Expression], parent: Type["BaseArangoModel"]): - super().__init__(field, cast(VariableExpression, parent)) - self.parent = parent # type: ignore[assignment] - - def compile(self, query_ref: "AQLQuery") -> str: - if isinstance(self.field, Expression): - return super().compile(query_ref) - else: - if not isinstance(self.parent, IteratorExpression): - # currently importing ORMQuery creates a circular dependency - compiled = query_ref.orm_bound_vars[self.parent] # type: ignore[attr-defined] - return f"{compiled.compile(query_ref)}.{self.field}" - return super().compile(query_ref) - - def __hash__(self): - return hash(self.field) - - -class DocFieldDescriptor(Generic[FieldType]): - def __init__(self, field: "ModelField", relation: Optional["Relationship"] = None): - self.relation = relation - self.field = 
field - - def __set__(self, instance, value): - raise AssertionError() - # instance.__dict__[self.name] = LazyProxy(value) - - def __get__( - self, instance: Optional["ArangoModel"], owner: Type["BaseArangoModel"] - ) -> Union[LazyProxy["ArangoModel"], ModelFieldExpression, None]: - if not instance and self.field.name in owner.__fields__.keys(): - return ModelFieldExpression(self.field.name, owner) - - field_value = instance.__dict__.get(self.field.name) - if field_value is not None: - return field_value - - if self.relation: - return LazyProxy[owner]( # type: ignore[valid-type] - field_value, self.field, getattr(instance, DALI_SESSION_KW, None) # type: ignore[arg-type] - ) - return None - - def __set_name__(self, owner, name): - self.name = name diff --git a/pydango/orm/models.py b/pydango/orm/models.py index f228374..be7b63c 100644 --- a/pydango/orm/models.py +++ b/pydango/orm/models.py @@ -4,6 +4,7 @@ import sys from abc import ABC, ABCMeta, abstractmethod from enum import Enum +from functools import partial from typing import ( TYPE_CHECKING, AbstractSet, @@ -24,24 +25,29 @@ import pydantic.typing from pydantic.fields import ConfigError # type: ignore[attr-defined] +from pydantic.generics import GenericModel -import pydango.orm.fields +from pydango.connection import DALI_SESSION_KW from pydango.orm.consts import EDGES from pydango.orm.encoders import jsonable_encoder -from pydango.orm.types import ArangoModel -from pydango.orm.utils import ( - convert_edge_data_to_valid_kwargs, - evaluate_forward_ref, - get_globals, -) +from pydango.orm.utils import evaluate_forward_ref, get_globals from pydango.query.consts import FROM, ID, KEY, REV, TO +from pydango.query.expressions import ( + Expression, + FieldExpression, + IteratorExpression, + ObjectExpression, + VariableExpression, +) if sys.version_info >= (3, 10): - from typing import TypeAlias, dataclass_transform + from typing import TypeAlias else: - from typing_extensions import TypeAlias, dataclass_transform + from typing_extensions import TypeAlias -from pydantic import BaseConfig, BaseModel +from typing import TypeVar + +from pydantic import BaseConfig from pydantic.fields import ( # type: ignore[attr-defined] SHAPE_FROZENSET, SHAPE_ITERABLE, @@ -55,20 +61,18 @@ PrivateAttr, Undefined, ) -from pydantic.main import ( # noqa: ignore - ModelMetaclass, - create_model, - object_setattr, - validate_model, -) +from pydantic.main import BaseModel, ModelMetaclass, create_model from pydantic.typing import resolve_annotations from pydantic.utils import GetterDict -from pydango import NAO +from pydango import NAO, NotAnObject from pydango.index import Indexes -from pydango.orm.fields import DocFieldDescriptor from pydango.orm.relations import LIST_TYPES, LinkTypes +ArangoModel = TypeVar("ArangoModel", bound="BaseArangoModel") + +TVertexModel = TypeVar("TVertexModel", bound="VertexModel") + logger = logging.getLogger(__name__) if TYPE_CHECKING: @@ -83,7 +87,7 @@ ) from pydango.connection.session import PydangoSession - + from pydango.query import AQLQuery LIST_SHAPES = { SHAPE_LIST, SHAPE_TUPLE_ELLIPSIS, @@ -242,11 +246,15 @@ def edge_data_validator(*args, **kwargs): return args, kwargs -class EdgeData(BaseModel): - pass +class VertexCollectionConfig(CollectionConfig): + type = CollectionType.NODE + + +class EdgeCollectionConfig(CollectionConfig): + type = CollectionType.EDGE -@dataclass_transform(kw_only_default=True, field_specifiers=(ArangoField,)) +# @dataclass_transform(kw_only_default=True, field_specifiers=(ArangoField,)) class 
ArangoModelMeta(ModelMetaclass, ABCMeta): def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): parents = [b for b in bases if isinstance(b, mcs)] @@ -255,40 +263,8 @@ def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): skipped_cls.__relationships__ = {} skipped_cls.__relationships_fields__ = {} return skipped_cls - _relationships: dict[str, Relationship] = {} - original_annotations = resolve_annotations( - namespace.get("__annotations__", {}), namespace.get("__module__", None) - ) - - for k, v in original_annotations.items(): - relation = get_relation(k, v, namespace.get(k, Undefined), BaseConfig) - if relation: - _relationships[k] = relation - # original_annotations[k] = Union[original_annotations[k]] - - if VertexModel in bases: - __edge_namespace__: dict[str, Any] = {} - for field, relation_info in _relationships.items(): - via_model = relation_info.via_model - if relation_info.link_type in LIST_TYPES: - if relation_info.link_type in (LinkTypes.OPTIONAL_EDGE_LIST, LinkTypes.OPTIONAL_LIST): - __edge_namespace__[field] = (Optional[list[via_model]], None) # type: ignore[valid-type] - else: - __edge_namespace__[field] = (list[via_model], ...) # type: ignore[valid-type] - - elif relation_info.link_type in (LinkTypes.OPTIONAL_EDGE, LinkTypes.OPTIONAL_DIRECT): - __edge_namespace__[field] = (Optional[via_model], None) - else: - __edge_namespace__[field] = (via_model, ...) # type: ignore[assignment] - - m = create_model(f"{name}Edges", **__edge_namespace__, __base__=EdgeData) - - namespace[EDGES] = Field(None, exclude=True) - original_annotations[EDGES] = cast(Any, Optional[m]) - else: - namespace[EDGES] = Field(None, exclude=True) - original_annotations[EDGES] = cast(Any, None) + _relationships, original_annotations = ArangoModelMeta.get_relations_from_namespace(namespace) dict_used = { **namespace, @@ -299,55 +275,50 @@ def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): new_cls: BaseArangoModel = super().__new__(mcs, name, bases, dict_used, **kwargs) - __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] = {} - for relation_field, relation_info in _relationships.items(): - if not relation_info.via_model: - continue - if isinstance(relation_info.via_model, ForwardRef): - __edge_to_field_mapping__.setdefault(relation_info.via_model, []).append(cast(str, relation_field)) - elif issubclass(relation_info.via_model, BaseArangoModel): - __edge_to_field_mapping__.setdefault(relation_info.via_model.Collection.name, []).append(relation_field) - - errors: dict[Union[str, ForwardRef], list[str]] = {} - - items = __edge_to_field_mapping__.items() + __relationship_fields__ = ArangoModelMeta.set_field_descriptors(_relationships, new_cls) - for coll_or_forward_ref, fields in items: - if len(fields) > 1: - for i, f in enumerate(fields): - func = getattr(new_cls.Collection, f) - if func: - if not callable(func): - raise ValueError(f"{func} is not callable") - fields[i] = func + new_cls.__relationships__ = _relationships + new_cls.__relationships_fields__ = __relationship_fields__ + new_cls.__annotations__ = { + # **relationship_annotations, + **original_annotations, + **new_cls.__annotations__, + } - else: - errors.setdefault(coll_or_forward_ref, []).append(f) + return new_cls - if errors: - raise AttributeError(f"you must define the following Collection functions for distinction {dict(errors)}") + @staticmethod + def get_relations_from_namespace(namespace: dict[str, Any]) -> tuple[Relationships, dict[str, 
Any]]: + _relationships: dict[str, Relationship] = {} + original_annotations = resolve_annotations( + namespace.get("__annotations__", {}), namespace.get("__module__", None) + ) + for k, v in original_annotations.items(): + relation = get_relation(k, v, namespace.get(k, Undefined), BaseConfig) + if relation: + _relationships[k] = relation + # original_annotations[k] = Union[original_annotations[k]] + return _relationships, original_annotations + @staticmethod + def set_field_descriptors(_relationships, new_cls): __relationship_fields__ = {} - for field_name, model_field in [(x, y) for x, y in new_cls.__fields__.items() if x != EDGES]: if field_name in _relationships: pydango_field = get_pydango_field(model_field, RelationModelField) - # todo improve this - # todo: check why fully qualified module name needed - relationship = cast( # type: ignore[redundant-cast] - pydango.orm.models.Relationship, _relationships[field_name] - ) + relationship = _relationships[field_name] relationship.field = pydango_field __relationship_fields__[field_name] = pydango_field new_cls.__fields__[field_name] = pydango_field + type_ = cast(ModelField, pydango_field).type_ setattr( new_cls, field_name, - DocFieldDescriptor[pydango_field.type_](pydango_field, relationship), # type: ignore[name-defined] + DocFieldDescriptor[type_](pydango_field, relationship), # type: ignore[valid-type] ) - field_annotation = {field_name: DocFieldDescriptor[pydango_field.type_]} # type: ignore[name-defined] + field_annotation = {field_name: DocFieldDescriptor[type_]} # type: ignore[valid-type] new_cls.__annotations__.update(field_annotation) else: setattr( @@ -355,17 +326,7 @@ def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): field_name, DocFieldDescriptor[model_field.type_](model_field), # type: ignore[name-defined] ) - - new_cls.__relationships__ = _relationships - new_cls.__relationships_fields__ = __relationship_fields__ - new_cls.__edge_to_field_mapping__ = __edge_to_field_mapping__ - new_cls.__annotations__ = { - # **relationship_annotations, - **original_annotations, - **new_cls.__annotations__, - } - - return new_cls + return __relationship_fields__ # def __hash__(self): # return hash(self.Collection.name) @@ -373,6 +334,7 @@ def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): RelationshipFields: TypeAlias = dict[str, RelationModelField] Relationships: TypeAlias = dict[str, Relationship] +EdgeFieldMapping: TypeAlias = dict[Union[str, ForwardRef], list[str]] class BaseArangoModel(BaseModel, metaclass=ArangoModelMeta): @@ -385,8 +347,6 @@ class BaseArangoModel(BaseModel, metaclass=ArangoModelMeta): if TYPE_CHECKING: __relationships__: Relationships = {} __relationships_fields__: RelationshipFields = {} - # __edges_model__: Union[Type[EdgeData], None] = None - __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] class Config(BaseConfig): arbitrary_types_allowed = True @@ -424,7 +384,7 @@ def _calculate_keys( return super()._calculate_keys(include, exclude, exclude_unset, update) @classmethod - def from_orm(cls: Type[ArangoModel], obj: Any, *, session=None) -> ArangoModel: # type: ignore[misc] + def from_orm(cls: Type[ArangoModel], obj: Any, *, session=None) -> ArangoModel: for field_name, field in cls.__relationships_fields__.items(): exists_in_orm = obj.get(field_name, None) if exists_in_orm: @@ -433,7 +393,7 @@ def from_orm(cls: Type[ArangoModel], obj: Any, *, session=None) -> ArangoModel: if field.required: obj[field_name] = NAO try: - obj = 
super().from_orm(obj) + obj = cast(Type[ArangoModel], super()).from_orm(obj) except ConfigError as e: raise e obj.__dali__session__ = session @@ -454,28 +414,20 @@ def update_forward_refs(cls, **localns: Any) -> None: if isinstance(relation.link_model, ForwardRef): relation.link_model = evaluate_forward_ref(cls, relation.link_model, **localns) - for k in cls.__edge_to_field_mapping__.copy(): - if isinstance(k, ForwardRef): - funcs = cls.__edge_to_field_mapping__.pop(k) - new_k = evaluate_forward_ref(cls, k, **localns) - if new_k in cls.__edge_to_field_mapping__: - cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs) - else: - cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs + # for k in cls.__edge_to_field_mapping__.copy(): + # if isinstance(k, ForwardRef): + # funcs = cls.__edge_to_field_mapping__.pop(k) + # new_k = evaluate_forward_ref(cls, k, **localns) + # if new_k in cls.__edge_to_field_mapping__: + # cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs) + # else: + # cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs @abstractmethod def save_dict(self) -> DictStrAny: ... -class VertexCollectionConfig(CollectionConfig): - type = CollectionType.NODE - - -class EdgeCollectionConfig(CollectionConfig): - type = CollectionType.EDGE - - class EdgeModel(BaseArangoModel, ABC): from_: Optional[Union[str]] = Field(None, alias=FROM) to: Optional[Union[str]] = Field(None, alias=TO) @@ -496,9 +448,238 @@ def save_dict(model: BaseArangoModel): return model.save_dict() -class VertexModel(BaseArangoModel, ABC): +class LazyProxy(Generic[ArangoModel]): + _initialized: bool = False + __instance__: Union[ArangoModel, NotAnObject] + + def __init__(self, instance: Union[ArangoModel, NotAnObject], field, session: Optional[PydangoSession]): + self.session = session + self._field = field + if instance is not NAO: + self._initialized = True + + self.__instance__ = instance + + def __getattr__(self, item): + if item in getattr(self, "__dict__"): + return getattr(self, item) + + if isinstance(self.__instance__, list): + if item in ["dict"]: + return partial(jsonable_encoder, obj=self.__instance__) + + # if item in getattr(getattr(self, '_instance'), item): + attr = getattr(self.__instance__, item, None) + if attr: + return attr + else: + return getattr(self._field.type_, item) + + def __repr__(self): + return repr(self.__instance__) + + def __getitem__(self, item): + if self: + return self.__instance__[item] + raise AttributeError( + "you are attempting to access " + f"{self._field.type_.__name__} via {self._field.name} which is not initialized use fetch" + ) + + def __bool__(self): + return self._initialized and bool(self.__instance__) + + def fetch(self): + self.session.get( + self._field.type_, + ) + + def compile(self, query_ref): + return ObjectExpression(self.dict()).compile(query_ref) + + def dict(self, *args, by_alias=True, **kwargs): + return jsonable_encoder(self.__instance__, by_alias=by_alias, *args, **kwargs) + + +class ModelFieldExpression(FieldExpression): + def __init__(self, field: Union[str, Expression], parent: Type[BaseArangoModel]): + super().__init__(field, cast(VariableExpression, parent)) + self.parent = parent # type: ignore[assignment] + + def compile(self, query_ref: AQLQuery) -> str: + if isinstance(self.field, Expression): + return super().compile(query_ref) + else: + if not isinstance(self.parent, IteratorExpression): + # currently importing ORMQuery creates a circular dependency + compiled = query_ref.orm_bound_vars[self.parent] # 
type: ignore[attr-defined] + return f"{compiled.compile(query_ref)}.{self.field}" + return super().compile(query_ref) + + def __hash__(self): + return hash(self.field) + + +FieldType = TypeVar("FieldType") + + +class DocFieldDescriptor(Generic[FieldType]): + def __init__(self, field: ModelField, relation: Optional[Relationship] = None): + self.relation = relation + self.field = field + + def __set__(self, instance, value: FieldType): + raise AssertionError() + # instance.__dict__[self.name] = LazyProxy(value) + + def __get__( + self, instance: Optional[ArangoModel], owner: Type[BaseArangoModel] + ) -> Union[LazyProxy[ArangoModel], ModelFieldExpression, None]: + if not instance and self.field.name in owner.__fields__.keys(): + return ModelFieldExpression(self.field.name, owner) + + field_value = instance.__dict__.get(self.field.name) + if field_value is not None: + return field_value + + if self.relation: + return LazyProxy[owner]( # type: ignore[valid-type] + field_value, self.field, getattr(instance, DALI_SESSION_KW, None) + ) + return None + + def __set_name__(self, owner, name): + self.name = name + + +class Aliased(Generic[ArangoModel]): + def __init__(self, entity: ArangoModel, alias=None): + self.entity: ArangoModel = entity + self.alias = alias + + def __getattr__(self, item): + # if item == "Collection": + # Temp = namedtuple("Temp", ["name"]) + # return Temp(name=self.entity.Collection.name) + # + # if item == "var_name": + # return self.alias + + attr = getattr(self.entity, item) + if isinstance(attr, FieldExpression): + attr.parent = self + + return attr + + def __str__(self): + return str(self.alias or "") + + def __repr__(self): + return f"" + + # def compile(self, query_ref: "ORMQuery") -> str: + # return query_ref.orm_bound_vars[self].compile(query_ref) + + +def convert_edge_data_to_valid_kwargs(edge_dict): + for i in edge_dict.copy(): + if isinstance(i, ModelFieldExpression): + edge_dict[i.field] = edge_dict.pop(i) + + +TEdge = TypeVar("TEdge", bound="EdgeModel") + + +class EdgeData(GenericModel, Generic[TEdge]): + pass + + +TEdges = TypeVar("TEdges", bound=EdgeData) + + +class VertexMeta(ArangoModelMeta): + def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): + parents = [b for b in bases if isinstance(b, mcs)] + if not parents: + return super().__new__(mcs, name, bases, namespace, **kwargs) + _relationships, original_annotations = mcs.get_relations_from_namespace(namespace) + __edge_to_field_mapping__, edge_annotation = mcs.build_edges_model(_relationships, bases, name, namespace) + + namespace["__edge_to_field_mapping__"] = __edge_to_field_mapping__ + namespace["__annotations__"][EDGES] = edge_annotation + + return super().__new__(mcs, name, bases, namespace, **kwargs) + + @staticmethod + def build_edges_model( + _relationships: Relationships, bases: tuple[Type[Any]], name: str, namespace: dict[str, Any] + ) -> tuple[EdgeFieldMapping, ModelField]: + if VertexModel in bases: + edges_model = VertexMeta._build_model(_relationships, name) + namespace[EDGES] = Field(None, exclude=True) + edge_annotation = cast(Any, Optional[edges_model]) + else: + namespace[EDGES] = Field(None, exclude=True) + edge_annotation = cast(Any, None) + + __edge_to_field_mapping__ = VertexMeta._build_edge_to_field_mapping(_relationships) + + VertexMeta._validate_edges(__edge_to_field_mapping__, namespace) + return __edge_to_field_mapping__, edge_annotation + + @staticmethod + def _build_edge_to_field_mapping(relationships: Relationships) -> EdgeFieldMapping: + 
__edge_to_field_mapping__: EdgeFieldMapping = {} + for relation_field, relation_info in relationships.items(): + if not relation_info.via_model: + continue + if isinstance(relation_info.via_model, ForwardRef): + __edge_to_field_mapping__.setdefault(relation_info.via_model, []).append(cast(str, relation_field)) + elif issubclass(relation_info.via_model, BaseArangoModel): + __edge_to_field_mapping__.setdefault(relation_info.via_model.Collection.name, []).append(relation_field) + return __edge_to_field_mapping__ + + @staticmethod + def _validate_edges(edge_to_field_mapping: EdgeFieldMapping, namespace: dict[str, Any]) -> None: + errors: dict[Union[str, ForwardRef], list[str]] = {} + items = edge_to_field_mapping.items() + for coll_or_forward_ref, fields in items: + if len(fields) > 1: + for i, f in enumerate(fields): + func = getattr(namespace.get("Collection"), f) + if func: + if not callable(func): + raise ValueError(f"{func} is not callable") + fields[i] = func + + else: + errors.setdefault(coll_or_forward_ref, []).append(f) + if errors: + raise AttributeError(f"you must define the following Collection functions for distinction {dict(errors)}") + + @staticmethod + def _build_model(relationships: Relationships, name: str): + __edge_namespace__: dict[str, Any] = {} + for field, relation_info in relationships.items(): + via_model = relation_info.via_model + if relation_info.link_type in LIST_TYPES: + if relation_info.link_type in (LinkTypes.OPTIONAL_EDGE_LIST, LinkTypes.OPTIONAL_LIST): + __edge_namespace__[field] = (Optional[list[via_model]], None) # type: ignore[valid-type] + else: + __edge_namespace__[field] = (list[via_model], ...) # type: ignore[valid-type] + + elif relation_info.link_type in (LinkTypes.OPTIONAL_EDGE, LinkTypes.OPTIONAL_DIRECT): + __edge_namespace__[field] = (Optional[via_model], None) + else: + __edge_namespace__[field] = (via_model, ...) # type: ignore[assignment] + m = create_model(f"{name}Edges", **__edge_namespace__, __base__=EdgeData) + return m + + +class VertexModel(BaseArangoModel, Generic[TEdges], metaclass=VertexMeta): if TYPE_CHECKING: - edges: Union[dict, EdgeData, None] = None + edges: TEdges + __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] = {} class Collection(VertexCollectionConfig): ... 
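The VertexMeta hunks above derive a per-model `{Name}Edges` container and an edge-collection-to-field mapping from the `Relation`-annotated fields. A minimal sketch of the declaration style this implies, assuming the `Annotated[..., Relation[SomeEdgeModel]]` pattern used in the test modules further down; the Author/Book/Wrote names and the `since` field are hypothetical and only illustrate the expected wiring:

from typing import Annotated, Optional

from pydango.orm.models import (
    EdgeCollectionConfig,
    EdgeModel,
    Relation,
    VertexCollectionConfig,
    VertexModel,
)


class Wrote(EdgeModel):  # hypothetical edge model
    since: Optional[int] = None

    class Collection(EdgeCollectionConfig):
        name = "wrote"


class Book(VertexModel):  # hypothetical vertex model
    title: str

    class Collection(VertexCollectionConfig):
        name = "books"


class Author(VertexModel):  # hypothetical vertex model
    name: str
    # Optional edge list: per _build_model above, the generated AuthorEdges model
    # should expose a `books` field typed Optional[list[Wrote]].
    books: Annotated[Optional[list[Book]], Relation[Wrote]] = None

    class Collection(VertexCollectionConfig):
        name = "authors"


# Expected result of class creation (a sketch of what VertexMeta builds, not
# something this patch asserts directly):
#   Author.__edge_to_field_mapping__ maps the edge collection to the relation
#   field, i.e. {"wrote": ["books"]}
#   Author.__fields__[EDGES].type_ is the generated AuthorEdges model
#   (EDGES imported from pydango.orm.consts)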
@@ -550,6 +731,16 @@ def save_dict(self) -> DictStrAny: @classmethod def update_forward_refs(cls, **localns: Any) -> None: super().update_forward_refs(**localns) + + for k in cls.__edge_to_field_mapping__.copy(): + if isinstance(k, ForwardRef): + funcs = cls.__edge_to_field_mapping__.pop(k) + new_k = evaluate_forward_ref(cls, k, **localns) + if new_k in cls.__edge_to_field_mapping__: + cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs) + else: + cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs + globalns = get_globals(cls) for fields, model_field in cls.__fields__[EDGES].type_.__fields__.items(): diff --git a/pydango/orm/proxy.py b/pydango/orm/proxy.py index 910b8e6..e69de29 100644 --- a/pydango/orm/proxy.py +++ b/pydango/orm/proxy.py @@ -1,68 +0,0 @@ -from functools import partial -from typing import TYPE_CHECKING, Generic, Optional - -from pydango import NAO -from pydango.orm.encoders import jsonable_encoder -from pydango.orm.types import ArangoModel -from pydango.query.expressions import ObjectExpression - -if TYPE_CHECKING: - from pydango.connection.session import PydangoSession - - -class LazyProxyMeta(type): - def __new__(cls, name, bases, namespace, **kwargs): - return super().__new__(cls, name, bases, namespace, **kwargs) - - -class LazyProxy(Generic[ArangoModel]): - _initialized: bool = False - __instance__: Optional[ArangoModel] = None - - def __init__(self, instance, field, session: Optional["PydangoSession"]): - self.session = session - self._field = field - if instance is not NAO: - self._initialized = True - - self.__instance__ = instance - - def __getattr__(self, item): - if item in getattr(self, "__dict__"): - return getattr(self, item) - - if isinstance(self.__instance__, list): - if item in ["dict"]: - return partial(jsonable_encoder, obj=self.__instance__) - - # if item in getattr(getattr(self, '_instance'), item): - attr = getattr(self.__instance__, item, None) - if attr: - return attr - else: - return getattr(self._field.type_, item) - - def __repr__(self): - return repr(self.__instance__) - - def __getitem__(self, item): - if self: - return self.__instance__[item] - raise AttributeError( - "you are attempting to access " - f"{self._field.type_.__name__} via {self._field.name} which is not initialized use fetch" - ) - - def __bool__(self): - return self._initialized and bool(self.__instance__) - - def fetch(self): - self.session.get( - self._field.type_, - ) - - def compile(self, query_ref): - return ObjectExpression(self.dict()).compile(query_ref) - - def dict(self, *args, by_alias=True, **kwargs): - return jsonable_encoder(self.__instance__, by_alias=by_alias, *args, **kwargs) diff --git a/pydango/orm/query.py b/pydango/orm/query.py index a0bc0ca..dfb62d8 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -3,7 +3,6 @@ from typing import Optional, Sequence, Type, Union, cast, overload from pydango.orm.encoders import jsonable_encoder -from pydango.orm.fields import ModelFieldExpression if sys.version_info >= (3, 10): from typing import Self @@ -13,9 +12,13 @@ from pydantic import BaseModel from pydantic.utils import lenient_issubclass -from pydango.orm.models import BaseArangoModel, save_dict -from pydango.orm.proxy import LazyProxy -from pydango.orm.utils import Aliased +from pydango.orm.models import ( + Aliased, + BaseArangoModel, + LazyProxy, + ModelFieldExpression, + save_dict, +) from pydango.query.expressions import ( BinaryExpression, BinaryLogicalExpression, @@ -44,11 +47,11 @@ UpdateOptions, UpsertOptions, ) -from 
pydango.query.query import AQLQuery +from pydango.query.query import AQLQuery, TraverseIterators logger = logging.getLogger(__name__) -ORMForParams = Union[ForParams, Type[BaseArangoModel], Aliased[Type[BaseArangoModel]]] +ORMForParams = Union[ForParams, Type[BaseArangoModel], Aliased[BaseArangoModel]] IMPLICIT_COLLECTION_ERROR = "you must specify collection when the collection cannot be implicitly resolved" MULTIPLE_COLLECTIONS_RESOLVED = "multiple collections resolved" @@ -381,12 +384,7 @@ def _serialize_vars(self): def traverse( self, - iterators: Union[ - IteratorExpression, - tuple[IteratorExpression], - tuple[IteratorExpression, IteratorExpression], - tuple[IteratorExpression, IteratorExpression, IteratorExpression], - ], + iterators: TraverseIterators, edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], start: Union["LiteralExpression", VariableExpression, FieldExpression, str], depth: Union[RangeExpression, range, tuple[int, int]], diff --git a/pydango/orm/types.py b/pydango/orm/types.py index 4142333..ba69d92 100644 --- a/pydango/orm/types.py +++ b/pydango/orm/types.py @@ -1,8 +1,5 @@ -from typing import TYPE_CHECKING, TypeVar - -if TYPE_CHECKING: - from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel # noqa: F401 - -ArangoModel = TypeVar("ArangoModel", bound="BaseArangoModel") -TEdge = TypeVar("TEdge", bound="EdgeModel") -TVertexModel = TypeVar("TVertexModel", bound="VertexModel") +# from typing import TypeVar +# +# ArangoModel = TypeVar("ArangoModel", bound="BaseArangoModel") +# TEdge = TypeVar("TEdge", bound="EdgeModel") +# TVertexModel = TypeVar("TVertexModel", bound="VertexModel") diff --git a/pydango/orm/utils.py b/pydango/orm/utils.py index 4e190de..77235ba 100644 --- a/pydango/orm/utils.py +++ b/pydango/orm/utils.py @@ -1,96 +1,8 @@ import sys from functools import lru_cache -from typing import TYPE_CHECKING, Generic, TypeVar from pydantic.typing import evaluate_forwardref -from pydango.orm.fields import ModelFieldExpression -from pydango.query.expressions import FieldExpression - -if TYPE_CHECKING: - from pydango.orm.query import ORMQuery - -# class QueryableProjectableModelMeta(BaseModel.__class__, Expression.__class__): -# def __new__(mcs, name, bases, namespace, **kwargs): -# parents = [b for b in bases if isinstance(b, mcs)] -# if not parents: -# return cast(QueryableProjectableModelMeta, super().__new__(mcs, name, bases, namespace)) -# -# new_cls = super().__new__(mcs, name, bases, namespace, **kwargs) -# -# for field_name, field in new_cls.__fields__.items(): -# # model_field = get_pydango_field(field) -# setattr(new_cls, field_name, DocFieldDescriptor(field)) -# -# return new_cls -# -# -# class PydangoSchema(BaseModel, Expression, metaclass=QueryableProjectableModelMeta): -# @classmethod -# def compile(cls, query_ref: "AQLQuery"): -# d = {} -# for name, field_info in cls.__fields__.items(): -# d[name] = field_info.default_factory and field_info.default_factory() or field_info.default -# return str(d) -# -# def __repr__(self): -# d = {} -# for name, field_info in self.__fields__.items(): -# d[name] = field_info.default_factory and field_info.default_factory() or field_info.default -# return str(d) -# -# @classmethod -# def to_aql(cls, iterator: IteratorExpression) -> dict: -# def create_fields_dict(fields): -# result = {} -# for name, field in fields.items(): -# if issubclass(field.type_, BaseModel): -# result[name] = create_fields_dict(field.type_.__fields__) -# else: -# result[name] = f"{iterator}.{name}" -# 
return result -# -# result = create_fields_dict(cls.__fields__) -# return result - - -T = TypeVar("T") - - -class Aliased(Generic[T]): - def __init__(self, entity, alias=None): - self.entity = entity - self.alias = alias - - def __getattr__(self, item): - # if item == "Collection": - # Temp = namedtuple("Temp", ["name"]) - # return Temp(name=self.entity.Collection.name) - # - # if item == "var_name": - # return self.alias - - attr = getattr(self.entity, item) - if isinstance(attr, FieldExpression): - attr.parent = self - - return attr - - def __str__(self): - return str(self.alias or "") - - def __repr__(self): - return f"" - - def compile(self, query_ref: "ORMQuery") -> str: - return query_ref.orm_bound_vars[self].compile(query_ref) - - -def convert_edge_data_to_valid_kwargs(edge_dict): - for i in edge_dict.copy(): - if isinstance(i, ModelFieldExpression): - edge_dict[i.field] = edge_dict.pop(i) - def get_globals(cls): if cls.__module__ in sys.modules: diff --git a/pydango/query/operations.py b/pydango/query/operations.py index dfee9a5..0053b42 100644 --- a/pydango/query/operations.py +++ b/pydango/query/operations.py @@ -642,7 +642,7 @@ def __init__( if assignment.variable.var_name: self.query_ref.__used_vars__.add(assignment.variable.var_name) - for param in [into or [], with_count_into or []]: + for param in [into, with_count_into]: if isinstance(param, VariableExpression): if param.var_name: self.query_ref.__used_vars__.add(param.var_name) diff --git a/pydango/query/query.py b/pydango/query/query.py index f3a2d80..220fee6 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -66,6 +66,13 @@ JsonType: TypeAlias = Union[None, int, str, bool, List["JsonType"], Dict[str, "JsonType"]] +TraverseIterators: TypeAlias = Union[ + IteratorExpression, + tuple[IteratorExpression], + tuple[IteratorExpression, IteratorExpression], + tuple[IteratorExpression, IteratorExpression, IteratorExpression], +] + class AQLQuery(QueryExpression): sep = " " @@ -127,12 +134,7 @@ def for_( def traverse( self, - iterators: Union[ - IteratorExpression, - tuple[IteratorExpression], - tuple[IteratorExpression, IteratorExpression], - tuple[IteratorExpression, IteratorExpression, IteratorExpression], - ], + iterators: TraverseIterators, edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], start: Union["LiteralExpression", VariableExpression, FieldExpression, str], depth: Union[RangeExpression, range, tuple[int, int]], diff --git a/pydango/utils.py b/pydango/utils.py index 5efe4b7..e69de29 100644 --- a/pydango/utils.py +++ b/pydango/utils.py @@ -1,17 +0,0 @@ -from typing import Union - -from pydango.orm.models import BaseArangoModel -from pydango.orm.types import ArangoModel -from pydango.query.consts import ID - - -def get_collection_from_document(obj: Union[str, dict, ArangoModel]) -> str: - if isinstance(obj, dict): - obj = obj.get(ID) - elif isinstance(obj, BaseArangoModel): - obj = obj.id - - if not isinstance(obj, str): - raise ValueError("o") - - return obj.partition("/")[0] diff --git a/pyproject.toml b/pyproject.toml index 95ce847..bb94c8e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ pre-commit = "^3.3.1" pytest-asyncio = "^0.21.0" coverage = "^7.2.5" isort = "^5.12.0" -mypy = "^1.3.0" +mypy = "^1.5.1" freezegun = "^1.2.2" pydiction = "^0" diff --git a/tests/conftest.py b/tests/conftest.py index 8eb1546..33bef2d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -84,4 +84,4 @@ async def database(client: ArangoClient) -> 
AsyncFixture[StandardDatabase]: db = await get_or_create_db(client, "pydango") yield db - # await (await client.db("_system")).delete_database("pydango") + await (await client.db("_system")).delete_database("pydango") diff --git a/tests/session/test_cities.py b/tests/session/test_cities.py index d380e92..b49c1d3 100644 --- a/tests/session/test_cities.py +++ b/tests/session/test_cities.py @@ -1,6 +1,6 @@ import asyncio import datetime -from typing import TYPE_CHECKING, Annotated +from typing import TYPE_CHECKING, Annotated, Iterable, Type import pytest from _pytest.fixtures import FixtureRequest @@ -10,6 +10,7 @@ from pydango.connection.session import PydangoSession from pydango.index import PersistentIndex from pydango.orm.models import ( + BaseArangoModel, EdgeCollectionConfig, EdgeModel, Relation, @@ -138,15 +139,16 @@ def expected_person(person: Person): @pytest.fixture(scope="module", autouse=True) async def init_collections(session: PydangoSession): - await asyncio.gather(*[session.init(coll) for coll in (Person, City, LivesIn, Visited)]) + models: Iterable[Type[BaseArangoModel]] = (Person, City, LivesIn, Visited) + await asyncio.gather(*[session.init(coll) for coll in models]) @pytest.mark.run(order=1) @pytest.mark.asyncio async def test_save(matcher: Matcher, session: PydangoSession, request: FixtureRequest, person): p = await session.save(person) - - request.config.cache.set("person_key", p.key) + print(p) + request.config.cache.set("person_key", p.key) # type: ignore[union-attr] matcher.assert_declarative_object(p.dict(by_alias=True, include_edges=True), expected_person(p)) @@ -177,6 +179,7 @@ class IdProjection(VertexModel): @pytest.mark.run(order=2) async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest): - _id = request.config.cache.get("person_key", None) + _id = request.config.cache.get("person_key", None) # type: ignore[union-attr] result = await session.get(Person, _id, fetch_edges=True) + assert result is not None matcher.assert_declarative_object(result.dict(by_alias=True, include_edges=True), expected_person(result)) diff --git a/tests/session/test_family.py b/tests/session/test_family.py index 9933bf3..65b322e 100644 --- a/tests/session/test_family.py +++ b/tests/session/test_family.py @@ -1,5 +1,5 @@ import asyncio -from typing import Annotated, Optional +from typing import Annotated, Iterable, Optional, Type import pytest from _pytest.fixtures import FixtureRequest @@ -8,6 +8,7 @@ from pydango.connection.session import PydangoSession from pydango.index import PersistentIndex from pydango.orm.models import ( + BaseArangoModel, EdgeCollectionConfig, EdgeModel, Relation, @@ -122,10 +123,15 @@ def test_obj(): @pytest.fixture(scope="module", autouse=True) async def init_collections(session: PydangoSession): - await asyncio.gather(*[session.init(coll) for coll in (Person, Sibling)]) + models: Iterable[Type[BaseArangoModel]] = (Person, Sibling) + await asyncio.gather(*[session.init(coll) for coll in models]) def expected_person(person: Person): + assert person.sisters + assert person.brothers + assert person.father + assert person.mother return { "_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, @@ -300,7 +306,7 @@ async def test_save(session: PydangoSession, request: FixtureRequest): ben.edges = brother_edges.copy() p = await session.save(john) - request.config.cache.set("person_key", p.key) + request.config.cache.set("person_key", p.key) # type: ignore[union-attr] # todo: there is currently a caveat with pydantic v1 with circular references, in 
pydantic v2 this is resolved # def traverse_recursive_fields(p, recursive_fields, visited): @@ -332,8 +338,9 @@ async def test_save(session: PydangoSession, request: FixtureRequest): @pytest.mark.run(order=2) async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest): - _id = request.config.cache.get("person_key", None) + _id = request.config.cache.get("person_key", None) # type: ignore[union-attr] result = await session.get(Person, _id, fetch_edges=True) + assert result result_dict = result.dict(by_alias=True, include_edges=True) person = expected_person(result) matcher.assert_declarative_object(result_dict, person) diff --git a/tests/session/test_social_network.py b/tests/session/test_social_network.py index a0de462..397e008 100644 --- a/tests/session/test_social_network.py +++ b/tests/session/test_social_network.py @@ -1,6 +1,6 @@ import asyncio import datetime -from typing import Annotated, List, Optional, Union +from typing import Annotated, Any, Iterable, List, Optional, Type, Union import pytest from _pytest.fixtures import FixtureRequest @@ -8,13 +8,13 @@ from pydango.connection.session import PydangoSession from pydango.orm.models import ( + BaseArangoModel, EdgeCollectionConfig, EdgeModel, Relation, VertexCollectionConfig, VertexModel, ) -from pydango.orm.types import ArangoModel class Post(VertexModel): @@ -83,9 +83,8 @@ class Collection(EdgeCollectionConfig): @pytest.fixture(scope="module", autouse=True) async def init_collections(session: PydangoSession): - await asyncio.gather( - *[session.init(coll) for coll in (Post, Comment, User, Friendship, Authorship, Commentary, Like)] - ) + models: Iterable[Type[BaseArangoModel]] = (Post, Comment, User, Friendship, Authorship, Commentary, Like) + await asyncio.gather(*[session.init(coll) for coll in models]) @pytest.fixture() @@ -120,7 +119,7 @@ def user(): return user1 -def expected_user_depth1(user: VertexModel): +def expected_user_depth1(user: VertexModel) -> dict[str, Any]: return { "_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, @@ -219,9 +218,9 @@ def expected_user_depth1(user: VertexModel): } -def expected_user_depth2(user: ArangoModel): - user = expected_user_depth1(user) - user.update( +def expected_user_depth2(user: VertexModel): + new_user: dict[str, Any] = expected_user_depth1(user) + new_user.update( { "likes": Contains( [ @@ -252,21 +251,22 @@ def expected_user_depth2(user: ArangoModel): ], } ) - return user + return new_user @pytest.mark.run(order=1) @pytest.mark.asyncio async def test_save(matcher: Matcher, session: PydangoSession, request: FixtureRequest, user: User): await session.save(user) - request.config.cache.set("user_key", user.key) + request.config.cache.set("user_key", user.key) # type: ignore[union-attr] matcher.assert_declarative_object(user.dict(by_alias=True, include_edges=True), expected_user_depth2(user)) @pytest.mark.run(order=2) async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest): - _id = request.config.cache.get("user_key", None) + _id = request.config.cache.get("user_key", None) # type: ignore[union-attr] result = await session.get(User, _id, fetch_edges=True, depth=range(1, 1)) + assert result expected_user = expected_user_depth1(result) matcher.assert_declarative_object( result.dict(by_alias=True, include_edges=True), @@ -277,9 +277,9 @@ async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRe @pytest.mark.run(order=2) async def test_get2(matcher: Matcher, session: PydangoSession, request: FixtureRequest): - _id = 
request.config.cache.get("user_key", None) + _id = request.config.cache.get("user_key", None) # type: ignore[union-attr] result = await session.get(User, _id, fetch_edges=True, depth=range(1, 2)) - + assert result result_dict = result.dict(by_alias=True, include_edges=True) depth = expected_user_depth2(result) matcher.assert_declarative_object(result_dict, depth, check_order=False) diff --git a/tests/test_orm_query.py b/tests/test_orm_query.py index 4d8247c..23d727d 100644 --- a/tests/test_orm_query.py +++ b/tests/test_orm_query.py @@ -1,8 +1,7 @@ import datetime -from pydango.orm.models import VertexCollectionConfig, VertexModel +from pydango.orm.models import Aliased, VertexCollectionConfig, VertexModel from pydango.orm.query import ORMQuery -from pydango.orm.utils import Aliased from pydango.query.expressions import ( NEW, OLD, diff --git a/tests/utils.py b/tests/utils.py deleted file mode 100644 index ac92433..0000000 --- a/tests/utils.py +++ /dev/null @@ -1,39 +0,0 @@ -from unittest.mock import ANY - - -def _assert(actual, expected, key): - expected_value = expected[key] - actual_value = actual[key] - if expected_value == ANY: - return - - if isinstance(expected_value, dict) and isinstance(actual_value, dict): - assert_equals_dicts(expected_value, actual_value) - elif isinstance(expected_value, list) and isinstance(actual_value, list): - assert_equals_lists(expected_value, actual_value) - else: - assert expected_value == actual_value, f"Values for key '{key}' do not match" - - -def assert_equals_dicts(expected, actual): - assert isinstance(actual, dict), "Expected a dictionary for actual value" - assert isinstance(expected, dict), "Expected a dictionary for expected value" - - expected_keys = expected.keys() - actual_keys = actual.keys() - _expected_keys = set(expected_keys) - _actual_keys = set(actual_keys) - assert _actual_keys == _expected_keys, ("Keys in dictionaries do not match", _actual_keys, _expected_keys) - assert actual_keys == expected_keys, "Keys in dictionaries do not match" - for key in expected: - _assert(actual, expected, key) - - -def assert_equals_lists(expected, actual): - assert isinstance(actual, list), "Expected a list for actual value" - assert isinstance(expected, list), "Expected a list for expected value" - - assert len(actual) == len(expected), "Lists have different lengths" - - for i in range(len(expected)): - _assert(actual, expected, i) From 64b7b2722d10f0ba89c6aa590e0d1c1dca5b6dda Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Sat, 23 Sep 2023 16:34:23 +0300 Subject: [PATCH 08/19] working --- .editorconfig | 26 + ... 
tests.session.test_social_network.run.xml | 29 + .tool-versions | 1 + poetry.toml | 5 + pydango/__init__.py | 6 - pydango/connection/session.py | 58 +- pydango/connection/utils.py | 4 +- pydango/orm/__init__.py | 4 +- pydango/orm/fields.py | 0 pydango/orm/models.py | 750 ------------------ pydango/orm/models/__init__.py | 5 + pydango/orm/models/base.py | 419 ++++++++++ pydango/orm/models/edge.py | 40 + pydango/orm/models/fields.py | 63 ++ pydango/orm/models/relations.py | 33 + pydango/orm/models/sentinel.py | 6 + pydango/orm/models/shapes.py | 17 + pydango/orm/models/types.py | 14 + pydango/orm/models/utils.py | 16 + pydango/orm/models/vertex.py | 187 +++++ pydango/orm/proxy.py | 0 pydango/orm/query.py | 120 +-- pydango/orm/relations.py | 34 - pydango/orm/types.py | 5 - pydango/query/operations.py | 9 +- pydango/query/query.py | 14 +- tests/session/test_cities.py | 12 +- tests/session/test_family.py | 12 +- tests/session/test_social_network.py | 12 +- tests/test_orm_query.py | 4 +- 30 files changed, 979 insertions(+), 926 deletions(-) create mode 100644 .editorconfig create mode 100644 .run/pytest for tests.session.test_social_network.run.xml create mode 100644 .tool-versions create mode 100644 poetry.toml delete mode 100644 pydango/orm/fields.py delete mode 100644 pydango/orm/models.py create mode 100644 pydango/orm/models/__init__.py create mode 100644 pydango/orm/models/base.py create mode 100644 pydango/orm/models/edge.py create mode 100644 pydango/orm/models/fields.py create mode 100644 pydango/orm/models/relations.py create mode 100644 pydango/orm/models/sentinel.py create mode 100644 pydango/orm/models/shapes.py create mode 100644 pydango/orm/models/types.py create mode 100644 pydango/orm/models/utils.py create mode 100644 pydango/orm/models/vertex.py delete mode 100644 pydango/orm/proxy.py delete mode 100644 pydango/orm/relations.py delete mode 100644 pydango/orm/types.py diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..7be6b33 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,26 @@ +# EditorConfig is awesome: http://EditorConfig.org + +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true +charset = utf-8 + +# 4 space indentation +[*.{py,java,r,R}] +indent_style = space +indent_size = 4 + +# 2 space indentation +# [*.{js,json,y{a,}ml,html,cwl}] +# indent_style = space +# indent_size = 2 + +[*.{md,Rmd,rst}] +trim_trailing_whitespace = false +indent_style = space +indent_size = 2 diff --git a/.run/pytest for tests.session.test_social_network.run.xml b/.run/pytest for tests.session.test_social_network.run.xml new file mode 100644 index 0000000..38c14fb --- /dev/null +++ b/.run/pytest for tests.session.test_social_network.run.xml @@ -0,0 +1,29 @@ + + + + + diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 0000000..bb44e0d --- /dev/null +++ b/.tool-versions @@ -0,0 +1 @@ +python 3.9.5 diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 0000000..c0c666b --- /dev/null +++ b/poetry.toml @@ -0,0 +1,5 @@ +[installer] +no-binary = ["pydantic"] + +#[installer] +#no-binary = [":all:"] diff --git a/pydango/__init__.py b/pydango/__init__.py index 5d2ce29..e69de29 100644 --- a/pydango/__init__.py +++ b/pydango/__init__.py @@ -1,6 +0,0 @@ -class NotAnObject: - def __repr__(self): - return "NAO" - - -NAO = NotAnObject() diff --git a/pydango/connection/session.py b/pydango/connection/session.py index 
471ef9a..dc769f0 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -4,6 +4,7 @@ from collections import OrderedDict, defaultdict, namedtuple from enum import Enum from typing import ( + TYPE_CHECKING, Any, DefaultDict, Iterator, @@ -17,36 +18,18 @@ ) from aioarango import AQLQueryExecuteError -from pydantic import BaseModel - -from pydango.connection import DALI_SESSION_KW -from pydango.orm.relations import LIST_TYPES -from pydango.query.query import TraverseIterators -from pydango.query.utils import new - -# from pydango.utils import get_collection_from_document - -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - from aioarango.collection import StandardCollection from aioarango.database import StandardDatabase from indexed import IndexedOrderedDict # type: ignore[attr-defined] +from pydantic import BaseModel from pydango import index +from pydango.connection import DALI_SESSION_KW from pydango.connection.utils import get_or_create_collection from pydango.orm.consts import EDGES -from pydango.orm.models import ( - ArangoModel, - BaseArangoModel, - EdgeModel, - LazyProxy, - TVertexModel, - VertexModel, - convert_edge_data_to_valid_kwargs, -) +from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel +from pydango.orm.models.base import LIST_TYPES, LazyProxy +from pydango.orm.models.vertex import convert_edge_data_to_valid_kwargs from pydango.orm.query import ORMQuery, for_ from pydango.query import AQLQuery from pydango.query.consts import FROM, ID, KEY, REV, TO @@ -54,6 +37,19 @@ from pydango.query.functions import Document, Length, Merge, UnionArrays from pydango.query.operations import RangeExpression, TraversalDirection from pydango.query.options import UpsertOptions +from pydango.query.query import TraverseIterators +from pydango.query.utils import new + +# from pydango.utils import get_collection_from_document + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if TYPE_CHECKING: + from pydango.orm.models.base import ArangoModel + from pydango.orm.models.vertex import TVertexModel logger = logging.getLogger(__name__) @@ -384,7 +380,7 @@ def _add_model_field_to_mapping(model, field, relation_doc, edge_doc): else: model_mapping[field] = {"v": id(relation_doc), "e": id(edge_doc)} - def pydantic_traverse(model: TVertexModel, visited: set[int]): + def pydantic_traverse(model: "TVertexModel", visited: set[int]): nonlocal edge_collections if id(model) in visited: return @@ -523,12 +519,12 @@ def _build_graph_query( ), ) - async def init(self, model: type[ArangoModel]): + async def init(self, model: type["ArangoModel"]): collection = await get_or_create_collection(self.database, model) await self.create_indexes(collection, model) @staticmethod - async def create_indexes(collection: StandardCollection, model: Type[ArangoModel]): + async def create_indexes(collection: StandardCollection, model: Type["ArangoModel"]): if model.Collection.indexes: logger.debug("creating indexes", extra=dict(indexes=model.Collection.indexes, model=model)) for i in model.Collection.indexes or []: @@ -539,11 +535,11 @@ async def create_indexes(collection: StandardCollection, model: Type[ArangoModel async def save( self, - document: ArangoModel, + document: "ArangoModel", strategy: UpdateStrategy = UpdateStrategy.UPDATE, # todo: follow_links: bool = False, collection_options: Union[CollectionUpsertOptions, None] = None, - ) -> Union[ArangoModel, 
TVertexModel]: + ) -> Union["ArangoModel", "TVertexModel"]: model_fields_mapping = None if isinstance(document, VertexModel): model_fields_mapping, vertices_ids, edge_ids, query = self._build_graph_query( @@ -573,7 +569,7 @@ async def save( async def get( self, - model: Type[ArangoModel], + model: Type["ArangoModel"], key: str, should_raise: bool = False, fetch_edges: Union[set[str], bool] = False, @@ -581,9 +577,9 @@ async def get( fetch_path: bool = False, depth: range = range(1, 1), prune: bool = False, - projection: Optional[Type[ArangoModel]] = None, + projection: Optional[Type["ArangoModel"]] = None, return_raw: bool = False, - ) -> Optional[Union[TVertexModel, ArangoModel]]: + ) -> Optional[Union["TVertexModel", "ArangoModel"]]: collection = model.Collection.name _id = f"{collection}/{key}" d = Document(_id) diff --git a/pydango/connection/utils.py b/pydango/connection/utils.py index 9f05869..5eb6a88 100644 --- a/pydango/connection/utils.py +++ b/pydango/connection/utils.py @@ -7,12 +7,12 @@ from aioarango.collection import StandardCollection from aioarango.database import StandardDatabase - from pydango.orm.models import ArangoModel, BaseArangoModel + from pydango.orm.models.base import ArangoModel @overload async def get_or_create_collection( - db: "StandardDatabase", model: Type["BaseArangoModel"], *, edge=None + db: "StandardDatabase", model: Type["ArangoModel"], *, edge=None ) -> "StandardCollection": ... diff --git a/pydango/orm/__init__.py b/pydango/orm/__init__.py index 2cfe4e9..fba59e2 100644 --- a/pydango/orm/__init__.py +++ b/pydango/orm/__init__.py @@ -1,3 +1,3 @@ -from .models import ArangoModel, EdgeModel, TEdge, TVertexModel, VertexModel +# from .models import ArangoModel, EdgeModel, TEdge, TVertexModel, VertexModel -__all__ = ["VertexModel", "EdgeModel", "ArangoModel", "TVertexModel", "TEdge"] +# __all__ = ["VertexModel", "EdgeModel", "ArangoModel", "TVertexModel", "TEdge"] diff --git a/pydango/orm/fields.py b/pydango/orm/fields.py deleted file mode 100644 index e69de29..0000000 diff --git a/pydango/orm/models.py b/pydango/orm/models.py deleted file mode 100644 index be7b63c..0000000 --- a/pydango/orm/models.py +++ /dev/null @@ -1,750 +0,0 @@ -from __future__ import annotations - -import logging -import sys -from abc import ABC, ABCMeta, abstractmethod -from enum import Enum -from functools import partial -from typing import ( - TYPE_CHECKING, - AbstractSet, - Annotated, - Any, - Dict, - ForwardRef, - Generic, - Mapping, - Optional, - Sequence, - Type, - Union, - cast, - get_args, - get_origin, -) - -import pydantic.typing -from pydantic.fields import ConfigError # type: ignore[attr-defined] -from pydantic.generics import GenericModel - -from pydango.connection import DALI_SESSION_KW -from pydango.orm.consts import EDGES -from pydango.orm.encoders import jsonable_encoder -from pydango.orm.utils import evaluate_forward_ref, get_globals -from pydango.query.consts import FROM, ID, KEY, REV, TO -from pydango.query.expressions import ( - Expression, - FieldExpression, - IteratorExpression, - ObjectExpression, - VariableExpression, -) - -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - -from typing import TypeVar - -from pydantic import BaseConfig -from pydantic.fields import ( # type: ignore[attr-defined] - SHAPE_FROZENSET, - SHAPE_ITERABLE, - SHAPE_LIST, - SHAPE_SEQUENCE, - SHAPE_SET, - SHAPE_SINGLETON, - SHAPE_TUPLE_ELLIPSIS, - Field, - ModelField, - PrivateAttr, - Undefined, -) -from pydantic.main import 
BaseModel, ModelMetaclass, create_model -from pydantic.typing import resolve_annotations -from pydantic.utils import GetterDict - -from pydango import NAO, NotAnObject -from pydango.index import Indexes -from pydango.orm.relations import LIST_TYPES, LinkTypes - -ArangoModel = TypeVar("ArangoModel", bound="BaseArangoModel") - -TVertexModel = TypeVar("TVertexModel", bound="VertexModel") - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from pydantic.fields import LocStr, ValidateReturn # type: ignore[attr-defined] - from pydantic.main import Model - from pydantic.types import ModelOrDc - from pydantic.typing import ( - AbstractSetIntStr, - DictStrAny, - MappingIntStrAny, - ReprArgs, - ) - - from pydango.connection.session import PydangoSession - from pydango.query import AQLQuery -LIST_SHAPES = { - SHAPE_LIST, - SHAPE_TUPLE_ELLIPSIS, - SHAPE_SEQUENCE, - SHAPE_SET, - SHAPE_FROZENSET, - SHAPE_ITERABLE, -} - - -class RelationMetaclass(type): - def __new__(mcs, name, bases, namespace, **kwargs): - parents = [b for b in bases if isinstance(b, mcs)] - if not parents: - return super().__new__(mcs, name, bases, namespace) - - model = namespace["__orig_bases__"][0].__args__[0] - - if model is ArangoModel: - return super().__new__(mcs, name, bases, namespace) - - return super().__new__(mcs, name, bases, {"model": model, **namespace}) - - -class Relation(Generic[ArangoModel]): - def __init__(self, *args, **kwargs): - pass - - -class EdgeRelation(Relation[ArangoModel]): - pass - - -class Relationship: - def __init__( - self, - *, - field: ModelField, - back_populates: Optional[str] = None, - link_model: Type[VertexModel], - via_model: Optional[Type[EdgeModel]] = None, - link_type: LinkTypes, - ): - self.via_model = via_model - self.link_type = link_type - self.field = field - self.link_model = link_model - self.back_populates = back_populates - - def __repr_args__(self) -> ReprArgs: - name = self.link_model.__name__ if not isinstance(self.link_model, ForwardRef) else self.link_model - args = [("link_model", name), ("link_type", self.link_type.value)] - if self.via_model: - args.append(("via_model", self.via_model.__name__)) - return args - - -def get_relation(field_name: str, annotation: Any, value: Any, config: Type[BaseConfig]) -> Optional[Relationship]: - if get_origin(annotation) is not Annotated: - return None - - args = get_args(annotation) - relation_infos = [arg for arg in args[1:] if arg is Relation or get_origin(arg) is Relation] - if len(relation_infos) > 1: - raise ValueError(f"cannot specify multiple `Annotated` `Field`s for {field_name!r}") - relation_info = next(iter(relation_infos), None) - via_model = get_args(relation_info)[0] if relation_info else None - field = ModelField.infer( - name=field_name, - value=value, - annotation=annotation, - class_validators=None, - config=BaseConfig, - ) - if field.shape in LIST_SHAPES: - if field.sub_fields: - link_model = field.sub_fields[0].type_ - - if field.allow_none is True: - link_type = via_model and LinkTypes.OPTIONAL_EDGE_LIST or LinkTypes.OPTIONAL_LIST - else: - link_type = via_model and LinkTypes.EDGE_LIST or LinkTypes.LIST - - elif field.shape == SHAPE_SINGLETON: - link_model = field.type_ - if field.allow_none is True: - link_type = via_model and LinkTypes.OPTIONAL_EDGE or LinkTypes.OPTIONAL_DIRECT - else: - link_type = via_model and LinkTypes.EDGE or LinkTypes.DIRECT - else: - raise AssertionError() - - return Relationship( - field=field, - link_model=link_model, - link_type=link_type, - via_model=via_model, - ) - - -class 
CollectionType(int, Enum): - NODE = 2 - EDGE = 3 - - -class CollectionConfig: - name: str - type: CollectionType - wait_for_sync: Optional[bool] = False - sync_json_schema: Optional[bool] = True - indexes: Sequence[Indexes] = [] - - -OPERATIONAL_FIELDS = {"key", "id", "rev"} - - -# todo: check if this is in use -# class PydangoModelField(ModelField, Compilable): -# pass - - -class RelationModelField(ModelField): - def validate( - self, - v: Any, - values: Dict[str, Any], - *, - loc: LocStr, - cls: Optional[ModelOrDc] = None, - ) -> ValidateReturn: - return super().validate(v, values, loc=loc, cls=cls) if v is not NAO else (v, None) - - -def get_pydango_field(field: ModelField, cls: Type[RelationModelField] = RelationModelField) -> RelationModelField: - return cls( - name=field.name, - type_=field.annotation, - alias=field.alias, - class_validators=field.class_validators, - default=field.default, - default_factory=field.default_factory, - required=field.required, - model_config=field.model_config, - final=field.final, - field_info=field.field_info, - ) - - -# noinspection PyPep8Naming -def ArangoField(model_field, relation) -> DocFieldDescriptor: - return DocFieldDescriptor(model_field, relation) - - -def edge_data_validator(*args, **kwargs): - # print(args, kwargs) - return args, kwargs - - -class VertexCollectionConfig(CollectionConfig): - type = CollectionType.NODE - - -class EdgeCollectionConfig(CollectionConfig): - type = CollectionType.EDGE - - -# @dataclass_transform(kw_only_default=True, field_specifiers=(ArangoField,)) -class ArangoModelMeta(ModelMetaclass, ABCMeta): - def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): - parents = [b for b in bases if isinstance(b, mcs)] - if not parents or BaseArangoModel in parents: - skipped_cls: BaseArangoModel = super().__new__(mcs, name, bases, namespace, **kwargs) - skipped_cls.__relationships__ = {} - skipped_cls.__relationships_fields__ = {} - return skipped_cls - - _relationships, original_annotations = ArangoModelMeta.get_relations_from_namespace(namespace) - - dict_used = { - **namespace, - "__weakref__": None, - "__annotations__": original_annotations, - "__relationships__": _relationships, - } - - new_cls: BaseArangoModel = super().__new__(mcs, name, bases, dict_used, **kwargs) - - __relationship_fields__ = ArangoModelMeta.set_field_descriptors(_relationships, new_cls) - - new_cls.__relationships__ = _relationships - new_cls.__relationships_fields__ = __relationship_fields__ - new_cls.__annotations__ = { - # **relationship_annotations, - **original_annotations, - **new_cls.__annotations__, - } - - return new_cls - - @staticmethod - def get_relations_from_namespace(namespace: dict[str, Any]) -> tuple[Relationships, dict[str, Any]]: - _relationships: dict[str, Relationship] = {} - original_annotations = resolve_annotations( - namespace.get("__annotations__", {}), namespace.get("__module__", None) - ) - for k, v in original_annotations.items(): - relation = get_relation(k, v, namespace.get(k, Undefined), BaseConfig) - if relation: - _relationships[k] = relation - # original_annotations[k] = Union[original_annotations[k]] - return _relationships, original_annotations - - @staticmethod - def set_field_descriptors(_relationships, new_cls): - __relationship_fields__ = {} - for field_name, model_field in [(x, y) for x, y in new_cls.__fields__.items() if x != EDGES]: - if field_name in _relationships: - pydango_field = get_pydango_field(model_field, RelationModelField) - relationship = _relationships[field_name] - 
relationship.field = pydango_field - __relationship_fields__[field_name] = pydango_field - new_cls.__fields__[field_name] = pydango_field - - type_ = cast(ModelField, pydango_field).type_ - setattr( - new_cls, - field_name, - DocFieldDescriptor[type_](pydango_field, relationship), # type: ignore[valid-type] - ) - - field_annotation = {field_name: DocFieldDescriptor[type_]} # type: ignore[valid-type] - new_cls.__annotations__.update(field_annotation) - else: - setattr( - new_cls, - field_name, - DocFieldDescriptor[model_field.type_](model_field), # type: ignore[name-defined] - ) - return __relationship_fields__ - - # def __hash__(self): - # return hash(self.Collection.name) - - -RelationshipFields: TypeAlias = dict[str, RelationModelField] -Relationships: TypeAlias = dict[str, Relationship] -EdgeFieldMapping: TypeAlias = dict[Union[str, ForwardRef], list[str]] - - -class BaseArangoModel(BaseModel, metaclass=ArangoModelMeta): - id: Optional[str] = Field(None, alias=ID) - key: Optional[str] = Field(None, alias=KEY) - rev: Optional[str] = Field(None, alias=REV) - - __dali__session__: Optional[PydangoSession] = PrivateAttr() - - if TYPE_CHECKING: - __relationships__: Relationships = {} - __relationships_fields__: RelationshipFields = {} - - class Config(BaseConfig): - arbitrary_types_allowed = True - orm_mode = True - # getter_dict = dict - allow_population_by_field_name = True - - class Collection(CollectionConfig): - ... - - @classmethod - def _decompose_class(cls: Type[Model], obj: Any) -> Union[GetterDict, dict]: # type: ignore[override] - if isinstance(obj, dict): - return obj - decompose_class = super()._decompose_class(obj) - return decompose_class - - def _calculate_keys( - self, - include: Optional[MappingIntStrAny], - exclude: Optional[MappingIntStrAny], - exclude_unset: bool, - update: Optional[DictStrAny] = None, - ) -> Optional[AbstractSet[str]]: - field_set = self.__fields_set__.copy() - keys = self.__dict__.keys() - unset = keys - field_set - if not exclude_unset: - _exclude = cast(Mapping, {field: True for field in unset if field in OPERATIONAL_FIELDS}) - if not exclude: - exclude = _exclude - else: - exclude.update(_exclude) # type: ignore[attr-defined] - - return super()._calculate_keys(include, exclude, exclude_unset, update) - - @classmethod - def from_orm(cls: Type[ArangoModel], obj: Any, *, session=None) -> ArangoModel: - for field_name, field in cls.__relationships_fields__.items(): - exists_in_orm = obj.get(field_name, None) - if exists_in_orm: - obj[field_name] = exists_in_orm - continue - if field.required: - obj[field_name] = NAO - try: - obj = cast(Type[ArangoModel], super()).from_orm(obj) - except ConfigError as e: - raise e - obj.__dali__session__ = session - # object_setattr(obj, DALI_SESSION_KW, session) - return obj - - @classmethod - def update_forward_refs(cls, **localns: Any) -> None: - super().update_forward_refs(**localns) - for name in cls.__relationships_fields__.keys(): - cls.__relationships_fields__[name] = cast(RelationModelField, cls.__fields__[name]) - relation = cls.__relationships__[name] - relation.field = cls.__fields__[name] - relation.link_model = cls.__fields__[name].type_ - if isinstance(relation.via_model, ForwardRef): - relation.via_model = evaluate_forward_ref(cls, relation.via_model, **localns) - - if isinstance(relation.link_model, ForwardRef): - relation.link_model = evaluate_forward_ref(cls, relation.link_model, **localns) - - # for k in cls.__edge_to_field_mapping__.copy(): - # if isinstance(k, ForwardRef): - # funcs = 
cls.__edge_to_field_mapping__.pop(k) - # new_k = evaluate_forward_ref(cls, k, **localns) - # if new_k in cls.__edge_to_field_mapping__: - # cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs) - # else: - # cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs - - @abstractmethod - def save_dict(self) -> DictStrAny: - ... - - -class EdgeModel(BaseArangoModel, ABC): - from_: Optional[Union[str]] = Field(None, alias=FROM) - to: Optional[Union[str]] = Field(None, alias=TO) - - class Collection(EdgeCollectionConfig): - pass - - def save_dict(self) -> DictStrAny: - exclude: set[Union[int, str]] = set() - for key in ["from_", "to"]: - if self.__getattribute__(key) is None: - exclude.add(key) - return jsonable_encoder(self, by_alias=True, exclude=exclude) - # return self.dict(by_alias=True, exclude=exclude) - - -def save_dict(model: BaseArangoModel): - return model.save_dict() - - -class LazyProxy(Generic[ArangoModel]): - _initialized: bool = False - __instance__: Union[ArangoModel, NotAnObject] - - def __init__(self, instance: Union[ArangoModel, NotAnObject], field, session: Optional[PydangoSession]): - self.session = session - self._field = field - if instance is not NAO: - self._initialized = True - - self.__instance__ = instance - - def __getattr__(self, item): - if item in getattr(self, "__dict__"): - return getattr(self, item) - - if isinstance(self.__instance__, list): - if item in ["dict"]: - return partial(jsonable_encoder, obj=self.__instance__) - - # if item in getattr(getattr(self, '_instance'), item): - attr = getattr(self.__instance__, item, None) - if attr: - return attr - else: - return getattr(self._field.type_, item) - - def __repr__(self): - return repr(self.__instance__) - - def __getitem__(self, item): - if self: - return self.__instance__[item] - raise AttributeError( - "you are attempting to access " - f"{self._field.type_.__name__} via {self._field.name} which is not initialized use fetch" - ) - - def __bool__(self): - return self._initialized and bool(self.__instance__) - - def fetch(self): - self.session.get( - self._field.type_, - ) - - def compile(self, query_ref): - return ObjectExpression(self.dict()).compile(query_ref) - - def dict(self, *args, by_alias=True, **kwargs): - return jsonable_encoder(self.__instance__, by_alias=by_alias, *args, **kwargs) - - -class ModelFieldExpression(FieldExpression): - def __init__(self, field: Union[str, Expression], parent: Type[BaseArangoModel]): - super().__init__(field, cast(VariableExpression, parent)) - self.parent = parent # type: ignore[assignment] - - def compile(self, query_ref: AQLQuery) -> str: - if isinstance(self.field, Expression): - return super().compile(query_ref) - else: - if not isinstance(self.parent, IteratorExpression): - # currently importing ORMQuery creates a circular dependency - compiled = query_ref.orm_bound_vars[self.parent] # type: ignore[attr-defined] - return f"{compiled.compile(query_ref)}.{self.field}" - return super().compile(query_ref) - - def __hash__(self): - return hash(self.field) - - -FieldType = TypeVar("FieldType") - - -class DocFieldDescriptor(Generic[FieldType]): - def __init__(self, field: ModelField, relation: Optional[Relationship] = None): - self.relation = relation - self.field = field - - def __set__(self, instance, value: FieldType): - raise AssertionError() - # instance.__dict__[self.name] = LazyProxy(value) - - def __get__( - self, instance: Optional[ArangoModel], owner: Type[BaseArangoModel] - ) -> Union[LazyProxy[ArangoModel], ModelFieldExpression, None]: - 
if not instance and self.field.name in owner.__fields__.keys(): - return ModelFieldExpression(self.field.name, owner) - - field_value = instance.__dict__.get(self.field.name) - if field_value is not None: - return field_value - - if self.relation: - return LazyProxy[owner]( # type: ignore[valid-type] - field_value, self.field, getattr(instance, DALI_SESSION_KW, None) - ) - return None - - def __set_name__(self, owner, name): - self.name = name - - -class Aliased(Generic[ArangoModel]): - def __init__(self, entity: ArangoModel, alias=None): - self.entity: ArangoModel = entity - self.alias = alias - - def __getattr__(self, item): - # if item == "Collection": - # Temp = namedtuple("Temp", ["name"]) - # return Temp(name=self.entity.Collection.name) - # - # if item == "var_name": - # return self.alias - - attr = getattr(self.entity, item) - if isinstance(attr, FieldExpression): - attr.parent = self - - return attr - - def __str__(self): - return str(self.alias or "") - - def __repr__(self): - return f"" - - # def compile(self, query_ref: "ORMQuery") -> str: - # return query_ref.orm_bound_vars[self].compile(query_ref) - - -def convert_edge_data_to_valid_kwargs(edge_dict): - for i in edge_dict.copy(): - if isinstance(i, ModelFieldExpression): - edge_dict[i.field] = edge_dict.pop(i) - - -TEdge = TypeVar("TEdge", bound="EdgeModel") - - -class EdgeData(GenericModel, Generic[TEdge]): - pass - - -TEdges = TypeVar("TEdges", bound=EdgeData) - - -class VertexMeta(ArangoModelMeta): - def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): - parents = [b for b in bases if isinstance(b, mcs)] - if not parents: - return super().__new__(mcs, name, bases, namespace, **kwargs) - _relationships, original_annotations = mcs.get_relations_from_namespace(namespace) - __edge_to_field_mapping__, edge_annotation = mcs.build_edges_model(_relationships, bases, name, namespace) - - namespace["__edge_to_field_mapping__"] = __edge_to_field_mapping__ - namespace["__annotations__"][EDGES] = edge_annotation - - return super().__new__(mcs, name, bases, namespace, **kwargs) - - @staticmethod - def build_edges_model( - _relationships: Relationships, bases: tuple[Type[Any]], name: str, namespace: dict[str, Any] - ) -> tuple[EdgeFieldMapping, ModelField]: - if VertexModel in bases: - edges_model = VertexMeta._build_model(_relationships, name) - namespace[EDGES] = Field(None, exclude=True) - edge_annotation = cast(Any, Optional[edges_model]) - else: - namespace[EDGES] = Field(None, exclude=True) - edge_annotation = cast(Any, None) - - __edge_to_field_mapping__ = VertexMeta._build_edge_to_field_mapping(_relationships) - - VertexMeta._validate_edges(__edge_to_field_mapping__, namespace) - return __edge_to_field_mapping__, edge_annotation - - @staticmethod - def _build_edge_to_field_mapping(relationships: Relationships) -> EdgeFieldMapping: - __edge_to_field_mapping__: EdgeFieldMapping = {} - for relation_field, relation_info in relationships.items(): - if not relation_info.via_model: - continue - if isinstance(relation_info.via_model, ForwardRef): - __edge_to_field_mapping__.setdefault(relation_info.via_model, []).append(cast(str, relation_field)) - elif issubclass(relation_info.via_model, BaseArangoModel): - __edge_to_field_mapping__.setdefault(relation_info.via_model.Collection.name, []).append(relation_field) - return __edge_to_field_mapping__ - - @staticmethod - def _validate_edges(edge_to_field_mapping: EdgeFieldMapping, namespace: dict[str, Any]) -> None: - errors: dict[Union[str, ForwardRef], list[str]] 
= {} - items = edge_to_field_mapping.items() - for coll_or_forward_ref, fields in items: - if len(fields) > 1: - for i, f in enumerate(fields): - func = getattr(namespace.get("Collection"), f) - if func: - if not callable(func): - raise ValueError(f"{func} is not callable") - fields[i] = func - - else: - errors.setdefault(coll_or_forward_ref, []).append(f) - if errors: - raise AttributeError(f"you must define the following Collection functions for distinction {dict(errors)}") - - @staticmethod - def _build_model(relationships: Relationships, name: str): - __edge_namespace__: dict[str, Any] = {} - for field, relation_info in relationships.items(): - via_model = relation_info.via_model - if relation_info.link_type in LIST_TYPES: - if relation_info.link_type in (LinkTypes.OPTIONAL_EDGE_LIST, LinkTypes.OPTIONAL_LIST): - __edge_namespace__[field] = (Optional[list[via_model]], None) # type: ignore[valid-type] - else: - __edge_namespace__[field] = (list[via_model], ...) # type: ignore[valid-type] - - elif relation_info.link_type in (LinkTypes.OPTIONAL_EDGE, LinkTypes.OPTIONAL_DIRECT): - __edge_namespace__[field] = (Optional[via_model], None) - else: - __edge_namespace__[field] = (via_model, ...) # type: ignore[assignment] - m = create_model(f"{name}Edges", **__edge_namespace__, __base__=EdgeData) - return m - - -class VertexModel(BaseArangoModel, Generic[TEdges], metaclass=VertexMeta): - if TYPE_CHECKING: - edges: TEdges - __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] = {} - - class Collection(VertexCollectionConfig): - ... - - def __init__(self, **data: Any): - if EDGES in data: - convert_edge_data_to_valid_kwargs(data[EDGES]) - super().__init__(**data) - - def dict( - self, - *, - include: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, - exclude: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, - by_alias: bool = False, - skip_defaults: Optional[bool] = None, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - include_edges: bool = False, - ) -> DictStrAny: - d = cast(dict, self.__exclude_fields__) - if include_edges and self.__exclude_fields__: - d.pop("edges") - - try: - super__dict = super().dict( - include=include, - exclude=exclude, - by_alias=by_alias, - skip_defaults=skip_defaults, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - except RecursionError as e: - raise AssertionError( - "is not possible to call .dict() when using recursive model, instead traverse the graph and collect" - " data or exclude recursive fields" - ) from e - if self.__exclude_fields__: - d["edges"] = True - - return super__dict - - def save_dict(self) -> DictStrAny: - return jsonable_encoder(self, by_alias=True, exclude=cast(set, self.__relationships_fields__.keys())) - - @classmethod - def update_forward_refs(cls, **localns: Any) -> None: - super().update_forward_refs(**localns) - - for k in cls.__edge_to_field_mapping__.copy(): - if isinstance(k, ForwardRef): - funcs = cls.__edge_to_field_mapping__.pop(k) - new_k = evaluate_forward_ref(cls, k, **localns) - if new_k in cls.__edge_to_field_mapping__: - cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs) - else: - cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs - - globalns = get_globals(cls) - - for fields, model_field in cls.__fields__[EDGES].type_.__fields__.items(): - if isinstance(model_field.type_, ForwardRef): - model_field.type_ = pydantic.typing.evaluate_forwardref(model_field.type_, 
globalns, localns) - - cls.__fields__[EDGES].type_.update_forward_refs(**localns, **globalns) diff --git a/pydango/orm/models/__init__.py b/pydango/orm/models/__init__.py new file mode 100644 index 0000000..916054e --- /dev/null +++ b/pydango/orm/models/__init__.py @@ -0,0 +1,5 @@ +from pydango.orm.models.base import BaseArangoModel, CollectionConfig, CollectionType +from pydango.orm.models.edge import EdgeModel +from pydango.orm.models.vertex import VertexModel + +__all__ = ["BaseArangoModel", "EdgeModel", "VertexModel", "CollectionConfig", "CollectionType"] diff --git a/pydango/orm/models/base.py b/pydango/orm/models/base.py new file mode 100644 index 0000000..373c354 --- /dev/null +++ b/pydango/orm/models/base.py @@ -0,0 +1,419 @@ +from abc import ABCMeta, abstractmethod +from enum import Enum, IntEnum +from functools import partial +from typing import ( + TYPE_CHECKING, + Annotated, + Any, + ForwardRef, + Generic, + Mapping, + Optional, + Sequence, + Type, + TypeVar, + Union, + cast, + get_args, + get_origin, +) + +from pydantic import BaseConfig, ConfigError, Field, PrivateAttr +from pydantic.fields import SHAPE_SINGLETON, ModelField, Undefined +from pydantic.main import BaseModel, ModelMetaclass +from pydantic.typing import resolve_annotations + +from pydango.connection import DALI_SESSION_KW +from pydango.index import Indexes +from pydango.orm.consts import EDGES +from pydango.orm.encoders import jsonable_encoder +from pydango.orm.models.fields import ( + ModelFieldExpression, + RelationModelField, + get_pydango_field, +) +from pydango.orm.models.relations import Relationship +from pydango.orm.models.sentinel import NAO +from pydango.orm.models.shapes import LIST_SHAPES +from pydango.orm.utils import evaluate_forward_ref +from pydango.query.consts import ID, KEY, REV +from pydango.query.expressions import FieldExpression, ObjectExpression + +if TYPE_CHECKING: + from pydantic.main import GetterDict, Model + from pydantic.typing import AbstractSet, DictStrAny, MappingIntStrAny + + from pydango.connection.session import PydangoSession + from pydango.orm.models.sentinel import NotAnObject + from pydango.orm.models.types import RelationshipFields, Relationships + from pydango.orm.models.vertex import TVertexModel + +ArangoModel = TypeVar("ArangoModel", bound="BaseArangoModel") +FieldType = TypeVar("FieldType") +OPERATIONAL_FIELDS = {"key", "id", "rev"} + + +class LinkTypes(str, Enum): + DIRECT = "DIRECT" + OPTIONAL_DIRECT = "OPTIONAL_DIRECT" + LIST = "LIST" + OPTIONAL_LIST = "OPTIONAL_LIST" + EDGE = "EDGE" + OPTIONAL_EDGE = "OPTIONAL_EDGE" + EDGE_LIST = "EDGE_LIST" + OPTIONAL_EDGE_LIST = "OPTIONAL_EDGE_LIST" + + +EDGE_TYPES = ( + LinkTypes.EDGE, + LinkTypes.OPTIONAL_EDGE, + LinkTypes.OPTIONAL_EDGE_LIST, + LinkTypes.EDGE_LIST, +) + +LIST_TYPES = ( + LinkTypes.EDGE_LIST, + LinkTypes.OPTIONAL_EDGE_LIST, + LinkTypes.LIST, + LinkTypes.OPTIONAL_LIST, +) + +SINGLETON_TYPES = ( + LinkTypes.EDGE, + LinkTypes.DIRECT, + LinkTypes.OPTIONAL_EDGE, + LinkTypes.OPTIONAL_DIRECT, +) + + +def get_relation(field_name: str, annotation: Any, value: Any, _: Type["BaseConfig"]) -> Optional["Relationship"]: + if get_origin(annotation) is not Annotated: + return None + + args = get_args(annotation) + relation_infos = [arg for arg in args[1:] if arg is Relation or get_origin(arg) is Relation] + if len(relation_infos) > 1: + raise ValueError(f"cannot specify multiple `Annotated` `Field`s for {field_name!r}") + relation_info = next(iter(relation_infos), None) + via_model = get_args(relation_info)[0] if relation_info 
else None + field = ModelField.infer( + name=field_name, + value=value, + annotation=annotation, + class_validators=None, + config=BaseConfig, + ) + if field.shape in LIST_SHAPES: + if field.sub_fields: + link_model = field.sub_fields[0].type_ + + if field.allow_none is True: + link_type = via_model and LinkTypes.OPTIONAL_EDGE_LIST or LinkTypes.OPTIONAL_LIST + else: + link_type = via_model and LinkTypes.EDGE_LIST or LinkTypes.LIST + + elif field.shape == SHAPE_SINGLETON: + link_model = field.type_ + if field.allow_none is True: + link_type = via_model and LinkTypes.OPTIONAL_EDGE or LinkTypes.OPTIONAL_DIRECT + else: + link_type = via_model and LinkTypes.EDGE or LinkTypes.DIRECT + else: + raise AssertionError() + + return Relationship( + field=field, + link_model=link_model, + link_type=link_type, + via_model=via_model, + ) + + +class Relation(Generic[ArangoModel]): + def __init__(self, *args, **kwargs): + pass + + +class LazyProxy(Generic[ArangoModel]): + _initialized: bool = False + __instance__: Union[ArangoModel, "NotAnObject"] + + def __init__(self, instance: Union[ArangoModel, "NotAnObject"], field, session: Optional["PydangoSession"]): + self.session = session + self._field = field + if instance is not NAO: + self._initialized = True + + self.__instance__ = instance + + def __getattr__(self, item): + if item in getattr(self, "__dict__"): + return getattr(self, item) + + if isinstance(self.__instance__, list): + if item in ["dict"]: + return partial(jsonable_encoder, obj=self.__instance__) + + # if item in getattr(getattr(self, '_instance'), item): + attr = getattr(self.__instance__, item, None) + if attr: + return attr + else: + return getattr(self._field.type_, item) + + def __repr__(self): + return repr(self.__instance__) + + def __getitem__(self, item): + if self: + return self.__instance__[item] + raise AttributeError( + "you are attempting to access " + f"{self._field.type_.__name__} via {self._field.name} which is not initialized use fetch" + ) + + def __bool__(self): + return self._initialized and bool(self.__instance__) + + def fetch(self): + self.session.get( + self._field.type_, + ) + + def compile(self, query_ref): + return ObjectExpression(self.dict()).compile(query_ref) + + def dict(self, *args, by_alias=True, **kwargs): + return jsonable_encoder(self.__instance__, by_alias=by_alias, *args, **kwargs) + + +class DocFieldDescriptor(Generic[FieldType]): + def __init__(self, field: ModelField, relation: Optional[Relationship] = None): + self.relation = relation + self.field = field + + def __set__(self, instance, value: FieldType): + raise AssertionError() + # instance.__dict__[self.name] = LazyProxy(value) + + def __get__( + self, instance: Optional[ArangoModel], owner: Type["TVertexModel"] + ) -> Union[LazyProxy[ArangoModel], ModelFieldExpression, None]: + if not instance and self.field.name in owner.__fields__.keys(): + return ModelFieldExpression(self.field.name, owner) + + field_value = instance.__dict__.get(self.field.name) + if field_value is not None: + return field_value + + if self.relation: + return LazyProxy[owner]( # type: ignore[valid-type] + field_value, self.field, getattr(instance, DALI_SESSION_KW, None) + ) + return None + + def __set_name__(self, owner, name): + self.name = name + + +class ArangoModelMeta(ModelMetaclass, ABCMeta): + def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): + parents = [b for b in bases if isinstance(b, mcs)] + if not parents or BaseArangoModel in parents: + skipped_cls: BaseArangoModel = 
super().__new__(mcs, name, bases, namespace, **kwargs) + skipped_cls.__relationships__ = {} + skipped_cls.__relationships_fields__ = {} + return skipped_cls + + _relationships, original_annotations = ArangoModelMeta.get_relations_from_namespace(namespace) + + dict_used = { + **namespace, + "__weakref__": None, + "__annotations__": original_annotations, + "__relationships__": _relationships, + } + + new_cls: BaseArangoModel = super().__new__(mcs, name, bases, dict_used, **kwargs) + + __relationship_fields__ = ArangoModelMeta.set_field_descriptors(_relationships, new_cls) + + new_cls.__relationships__ = _relationships + new_cls.__relationships_fields__ = __relationship_fields__ + new_cls.__annotations__ = { + # **relationship_annotations, + **original_annotations, + **new_cls.__annotations__, + } + + return new_cls + + @staticmethod + def get_relations_from_namespace(namespace: dict[str, Any]) -> tuple["Relationships", dict[str, Any]]: + _relationships: dict[str, Relationship] = {} + original_annotations = resolve_annotations( + namespace.get("__annotations__", {}), namespace.get("__module__", None) + ) + for k, v in original_annotations.items(): + relation = get_relation(k, v, namespace.get(k, Undefined), BaseConfig) + if relation: + _relationships[k] = relation + # original_annotations[k] = Union[original_annotations[k]] + return _relationships, original_annotations + + @staticmethod + def set_field_descriptors(_relationships, new_cls): + __relationship_fields__ = {} + for field_name, model_field in [(x, y) for x, y in new_cls.__fields__.items() if x != EDGES]: + if field_name in _relationships: + pydango_field = get_pydango_field(model_field, RelationModelField) + relationship = _relationships[field_name] + relationship.field = pydango_field + __relationship_fields__[field_name] = pydango_field + new_cls.__fields__[field_name] = pydango_field + + type_ = cast(ModelField, pydango_field).type_ + setattr( + new_cls, + field_name, + DocFieldDescriptor[type_](pydango_field, relationship), # type: ignore[valid-type] + ) + + field_annotation = {field_name: DocFieldDescriptor[type_]} # type: ignore[valid-type] + new_cls.__annotations__.update(field_annotation) + else: + setattr( + new_cls, + field_name, + DocFieldDescriptor[model_field.type_](model_field), # type: ignore[name-defined] + ) + return __relationship_fields__ + + # def __hash__(self): + # return hash(self.Collection.name) + + +class CollectionType(IntEnum): + NODE = 2 + EDGE = 3 + + +class CollectionConfig: + name: str + type: CollectionType + wait_for_sync: Optional[bool] = False + sync_json_schema: Optional[bool] = True + indexes: Sequence[Indexes] = [] + + +class BaseArangoModel(BaseModel, metaclass=ArangoModelMeta): + id: Optional[str] = Field(None, alias=ID) + key: Optional[str] = Field(None, alias=KEY) + rev: Optional[str] = Field(None, alias=REV) + + __dali__session__: Optional["PydangoSession"] = PrivateAttr() + + if TYPE_CHECKING: + __relationships__: Relationships = {} + __relationships_fields__: RelationshipFields = {} + + class Config(BaseConfig): + arbitrary_types_allowed = True + orm_mode = True + # getter_dict = dict + allow_population_by_field_name = True + + class Collection(CollectionConfig): + ... 
+ + @classmethod + def _decompose_class(cls: Type["Model"], obj: Any) -> Union["GetterDict", dict]: # type: ignore[override] + if isinstance(obj, dict): + return obj + decompose_class = super()._decompose_class(obj) + return decompose_class + + def _calculate_keys( + self, + include: Optional["MappingIntStrAny"], + exclude: Optional["MappingIntStrAny"], + exclude_unset: bool, + update: Optional["DictStrAny"] = None, + ) -> Optional["AbstractSet[str]"]: + field_set = self.__fields_set__.copy() + keys = self.__dict__.keys() + unset = keys - field_set + if not exclude_unset: + _exclude = cast(Mapping, {field: True for field in unset if field in OPERATIONAL_FIELDS}) + if not exclude: + exclude = _exclude + else: + exclude.update(_exclude) # type: ignore[attr-defined] + + return super()._calculate_keys(include, exclude, exclude_unset, update) + + @classmethod + def from_orm(cls: Type[ArangoModel], obj: Any, *, session=None) -> ArangoModel: + for field_name, field in cls.__relationships_fields__.items(): + exists_in_orm = obj.get(field_name, None) + if exists_in_orm: + obj[field_name] = exists_in_orm + continue + if field.required: + obj[field_name] = NAO + try: + obj = cast(Type[ArangoModel], super()).from_orm(obj) + except ConfigError as e: + raise e + obj.__dali__session__ = session + # object_setattr(obj, DALI_SESSION_KW, session) + return obj + + @classmethod + def update_forward_refs(cls, **localns: Any) -> None: + super().update_forward_refs(**localns) + for name in cls.__relationships_fields__.keys(): + cls.__relationships_fields__[name] = cast(RelationModelField, cls.__fields__[name]) + relation = cls.__relationships__[name] + relation.field = cls.__fields__[name] + relation.link_model = cls.__fields__[name].type_ + if isinstance(relation.via_model, ForwardRef): + relation.via_model = evaluate_forward_ref(cls, relation.via_model, **localns) + + if isinstance(relation.link_model, ForwardRef): + relation.link_model = evaluate_forward_ref(cls, relation.link_model, **localns) + + # for k in cls.__edge_to_field_mapping__.copy(): + # if isinstance(k, ForwardRef): + # funcs = cls.__edge_to_field_mapping__.pop(k) + # new_k = evaluate_forward_ref(cls, k, **localns) + # if new_k in cls.__edge_to_field_mapping__: + # cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs) + # else: + # cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs + + @abstractmethod + def save_dict(self) -> "DictStrAny": + ... 
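# --- usage sketch ---------------------------------------------------------
# A minimal, hypothetical example of how the relocated models fit together,
# mirroring the imports the updated tests use. The Author/Post/Authored names,
# collection names, and fields are assumptions made for illustration only.
from typing import Annotated, Optional

from pydango.orm.models import EdgeModel, VertexModel
from pydango.orm.models.base import Relation
from pydango.orm.models.edge import EdgeCollectionConfig
from pydango.orm.models.vertex import VertexCollectionConfig


class Authored(EdgeModel):
    class Collection(EdgeCollectionConfig):
        name = "authored"


class Post(VertexModel):
    title: str

    class Collection(VertexCollectionConfig):
        name = "posts"


class Author(VertexModel):
    name: str
    # Annotated + Relation[EdgeModel] marks a graph relationship traversed via the edge collection.
    posts: Annotated[Optional[list[Post]], Relation[Authored]]

    class Collection(VertexCollectionConfig):
        name = "authors"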
+ + +class Aliased(Generic[ArangoModel]): + def __init__(self, entity: ArangoModel, alias=None): + self.entity: ArangoModel = entity + self.alias = alias + + def __getattr__(self, item): + attr = getattr(self.entity, item) + if isinstance(attr, FieldExpression): + attr.parent = self + + return attr + + def __str__(self): + return str(self.alias or "") + + def __repr__(self): + return f"" + + # def compile(self, query_ref: "ORMQuery") -> str: + # return query_ref.orm_bound_vars[self].compile(query_ref) diff --git a/pydango/orm/models/edge.py b/pydango/orm/models/edge.py new file mode 100644 index 0000000..0104b6e --- /dev/null +++ b/pydango/orm/models/edge.py @@ -0,0 +1,40 @@ +from abc import ABC +from typing import TYPE_CHECKING, Generic, Optional, TypeVar, Union + +from pydantic import BaseModel, Field + +from pydango.orm.encoders import jsonable_encoder +from pydango.orm.models import BaseArangoModel, CollectionConfig, CollectionType +from pydango.query.consts import FROM, TO + +if TYPE_CHECKING: + from pydantic.typing import DictStrAny + +TEdge = TypeVar("TEdge", bound="EdgeModel") + + +class EdgeCollectionConfig(CollectionConfig): + type = CollectionType.EDGE + + +class EdgeModel(BaseArangoModel, ABC): + from_: Optional[str] = Field(None, alias=FROM) + to: Optional[Union[str]] = Field(None, alias=TO) + + class Collection(EdgeCollectionConfig): + pass + + def save_dict(self) -> "DictStrAny": + exclude: set[Union[int, str]] = set() + for key in ["from_", "to"]: + if self.__getattribute__(key) is None: + exclude.add(key) + return jsonable_encoder(self, by_alias=True, exclude=exclude) + # return self.dict(by_alias=True, exclude=exclude) + + +T = TypeVar("T", bound=BaseModel) + + +class EdgeData(BaseModel, ABC, Generic[T]): + pass diff --git a/pydango/orm/models/fields.py b/pydango/orm/models/fields.py new file mode 100644 index 0000000..2dc1acf --- /dev/null +++ b/pydango/orm/models/fields.py @@ -0,0 +1,63 @@ +from typing import TYPE_CHECKING, Any, Dict, Optional, Type, Union, cast + +from pydantic.fields import ModelField + +from pydango.orm.models.sentinel import NAO +from pydango.query.expressions import ( + Expression, + FieldExpression, + IteratorExpression, + VariableExpression, +) + +if TYPE_CHECKING: + from pydantic.fields import LocStr, ModelOrDc, ValidateReturn + + from pydango.orm.models.vertex import TVertexModel + from pydango.query import AQLQuery + + +class ModelFieldExpression(FieldExpression): + def __init__(self, field: Union[str, Expression], parent: Type["TVertexModel"]): + super().__init__(field, cast(VariableExpression, parent)) + self.parent = parent # type: ignore[assignment] + + def compile(self, query_ref: "AQLQuery") -> str: + if isinstance(self.field, Expression): + return super().compile(query_ref) + else: + if not isinstance(self.parent, IteratorExpression): + # currently importing ORMQuery creates a circular dependency + compiled = query_ref.orm_bound_vars[self.parent] # type: ignore[attr-defined] + return f"{compiled.compile(query_ref)}.{self.field}" + return super().compile(query_ref) + + def __hash__(self): + return hash(self.field) + + +class RelationModelField(ModelField): + def validate( + self, + v: Any, + values: Dict[str, Any], + *, + loc: "LocStr", + cls: Optional["ModelOrDc"] = None, + ) -> "ValidateReturn": + return super().validate(v, values, loc=loc, cls=cls) if v is not NAO else (v, None) + + +def get_pydango_field(field: ModelField, cls: Type[RelationModelField] = RelationModelField) -> RelationModelField: + return cls( + name=field.name, + 
type_=field.annotation, + alias=field.alias, + class_validators=field.class_validators, + default=field.default, + default_factory=field.default_factory, + required=field.required, + model_config=field.model_config, + final=field.final, + field_info=field.field_info, + ) diff --git a/pydango/orm/models/relations.py b/pydango/orm/models/relations.py new file mode 100644 index 0000000..e228de8 --- /dev/null +++ b/pydango/orm/models/relations.py @@ -0,0 +1,33 @@ +from typing import TYPE_CHECKING, ForwardRef, Optional, Type + +if TYPE_CHECKING: + from pydantic.fields import ModelField + from pydantic.typing import ReprArgs + + from pydango.orm.models.base import LinkTypes + from pydango.orm.models.edge import TEdge + from pydango.orm.models.vertex import TVertexModel + + +class Relationship: + def __init__( + self, + *, + field: "ModelField", + back_populates: Optional[str] = None, + link_model: Type["TVertexModel"], + via_model: Optional[Type["TEdge"]] = None, + link_type: "LinkTypes", + ): + self.via_model = via_model + self.link_type = link_type + self.field = field + self.link_model = link_model + self.back_populates = back_populates + + def __repr_args__(self) -> "ReprArgs": + name = self.link_model.__name__ if not isinstance(self.link_model, ForwardRef) else self.link_model + args = [("link_model", name), ("link_type", self.link_type.value)] + if self.via_model: + args.append(("via_model", self.via_model.__name__)) + return args diff --git a/pydango/orm/models/sentinel.py b/pydango/orm/models/sentinel.py new file mode 100644 index 0000000..5d2ce29 --- /dev/null +++ b/pydango/orm/models/sentinel.py @@ -0,0 +1,6 @@ +class NotAnObject: + def __repr__(self): + return "NAO" + + +NAO = NotAnObject() diff --git a/pydango/orm/models/shapes.py b/pydango/orm/models/shapes.py new file mode 100644 index 0000000..f521e36 --- /dev/null +++ b/pydango/orm/models/shapes.py @@ -0,0 +1,17 @@ +from pydantic.fields import ( + SHAPE_FROZENSET, + SHAPE_ITERABLE, + SHAPE_LIST, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_TUPLE_ELLIPSIS, +) + +LIST_SHAPES = { + SHAPE_LIST, + SHAPE_TUPLE_ELLIPSIS, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_FROZENSET, + SHAPE_ITERABLE, +} diff --git a/pydango/orm/models/types.py b/pydango/orm/models/types.py new file mode 100644 index 0000000..4efcf62 --- /dev/null +++ b/pydango/orm/models/types.py @@ -0,0 +1,14 @@ +import sys +from typing import ForwardRef, Union + +from pydango.orm.models.fields import RelationModelField +from pydango.orm.models.relations import Relationship + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +RelationshipFields: TypeAlias = dict[str, RelationModelField] +Relationships: TypeAlias = dict[str, Relationship] +EdgeFieldMapping: TypeAlias = dict[Union[str, ForwardRef], list[str]] diff --git a/pydango/orm/models/utils.py b/pydango/orm/models/utils.py new file mode 100644 index 0000000..6700214 --- /dev/null +++ b/pydango/orm/models/utils.py @@ -0,0 +1,16 @@ +from typing import TYPE_CHECKING + +from pydango.orm.models.fields import ModelFieldExpression + +if TYPE_CHECKING: + from pydango.orm.models.base import ArangoModel + + +def save_dict(model: "ArangoModel"): + return model.save_dict() + + +def convert_edge_data_to_valid_kwargs(edge_dict): + for i in edge_dict.copy(): + if isinstance(i, ModelFieldExpression): + edge_dict[i.field] = edge_dict.pop(i) diff --git a/pydango/orm/models/vertex.py b/pydango/orm/models/vertex.py new file mode 100644 index 0000000..098edfc --- /dev/null +++ 
b/pydango/orm/models/vertex.py @@ -0,0 +1,187 @@ +from typing import ( + TYPE_CHECKING, + Any, + ForwardRef, + Generic, + Optional, + Type, + TypeVar, + Union, + cast, +) + +from pydantic.fields import Field, ModelField +from pydantic.main import create_model +from pydantic.typing import evaluate_forwardref + +from pydango.orm.consts import EDGES +from pydango.orm.encoders import jsonable_encoder +from pydango.orm.models import BaseArangoModel, CollectionConfig, CollectionType +from pydango.orm.models.base import LIST_TYPES, ArangoModelMeta, LinkTypes +from pydango.orm.models.edge import EdgeData +from pydango.orm.models.types import EdgeFieldMapping, Relationships +from pydango.orm.models.utils import convert_edge_data_to_valid_kwargs +from pydango.orm.utils import evaluate_forward_ref, get_globals + +if TYPE_CHECKING: + from pydantic.typing import AbstractSetIntStr, DictStrAny, MappingIntStrAny + +TVertexModel = TypeVar("TVertexModel", bound="VertexModel") +TEdges = TypeVar("TEdges", bound=EdgeData) + + +class VertexCollectionConfig(CollectionConfig): + type = CollectionType.NODE + + +class VertexMeta(ArangoModelMeta): + def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any): + parents = [b for b in bases if isinstance(b, mcs)] + if not parents: + return super().__new__(mcs, name, bases, namespace, **kwargs) + _relationships, original_annotations = mcs.get_relations_from_namespace(namespace) + __edge_to_field_mapping__, edge_annotation = mcs.build_edges_model(_relationships, bases, name, namespace) + + namespace["__edge_to_field_mapping__"] = __edge_to_field_mapping__ + namespace["__annotations__"][EDGES] = edge_annotation + + return super().__new__(mcs, name, bases, namespace, **kwargs) + + @staticmethod + def build_edges_model( + _relationships: Relationships, bases: tuple[Type[Any]], name: str, namespace: dict[str, Any] + ) -> tuple[EdgeFieldMapping, ModelField]: + if VertexModel in bases: + edges_model = VertexMeta._build_model(_relationships, name) + namespace[EDGES] = Field(None, exclude=True) + edge_annotation = cast(Any, Optional[edges_model]) + else: + namespace[EDGES] = Field(None, exclude=True) + edge_annotation = cast(Any, None) + + __edge_to_field_mapping__ = VertexMeta._build_edge_to_field_mapping(_relationships) + + VertexMeta._validate_edges(__edge_to_field_mapping__, namespace) + return __edge_to_field_mapping__, edge_annotation + + @staticmethod + def _build_edge_to_field_mapping(relationships: Relationships) -> EdgeFieldMapping: + __edge_to_field_mapping__: EdgeFieldMapping = {} + for relation_field, relation_info in relationships.items(): + if not relation_info.via_model: + continue + if isinstance(relation_info.via_model, ForwardRef): + __edge_to_field_mapping__.setdefault(relation_info.via_model, []).append(cast(str, relation_field)) + elif issubclass(relation_info.via_model, BaseArangoModel): + __edge_to_field_mapping__.setdefault(relation_info.via_model.Collection.name, []).append(relation_field) + return __edge_to_field_mapping__ + + @staticmethod + def _validate_edges(edge_to_field_mapping: EdgeFieldMapping, namespace: dict[str, Any]) -> None: + errors: dict[Union[str, ForwardRef], list[str]] = {} + items = edge_to_field_mapping.items() + for coll_or_forward_ref, fields in items: + if len(fields) > 1: + for i, f in enumerate(fields): + func = getattr(namespace.get("Collection"), f) + if func: + if not callable(func): + raise ValueError(f"{func} is not callable") + fields[i] = func + + else: + errors.setdefault(coll_or_forward_ref, 
[]).append(f) + if errors: + raise AttributeError(f"you must define the following Collection functions for distinction {dict(errors)}") + + @staticmethod + def _build_model(relationships: Relationships, name: str): + __edge_namespace__: dict[str, Any] = {} + for field, relation_info in relationships.items(): + via_model = relation_info.via_model + if relation_info.link_type in LIST_TYPES: + if relation_info.link_type in (LinkTypes.OPTIONAL_EDGE_LIST, LinkTypes.OPTIONAL_LIST): + __edge_namespace__[field] = (Optional[list[via_model]], None) # type: ignore[valid-type] + else: + __edge_namespace__[field] = (list[via_model], ...) # type: ignore[valid-type] + + elif relation_info.link_type in (LinkTypes.OPTIONAL_EDGE, LinkTypes.OPTIONAL_DIRECT): + __edge_namespace__[field] = (Optional[via_model], None) + else: + __edge_namespace__[field] = (via_model, ...) # type: ignore[assignment] + m = create_model(f"{name}Edges", **__edge_namespace__, __base__=EdgeData) + return m + + +class VertexModel(BaseArangoModel, Generic[TEdges], metaclass=VertexMeta): + if TYPE_CHECKING: + edges: TEdges + __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] = {} + + class Collection(VertexCollectionConfig): + ... + + def __init__(self, **data: Any): + if EDGES in data: + convert_edge_data_to_valid_kwargs(data[EDGES]) + super().__init__(**data) + + def dict( + self, + *, + include: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + exclude: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + by_alias: bool = False, + skip_defaults: Optional[bool] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + include_edges: bool = False, + ) -> "DictStrAny": + d = cast(dict, self.__exclude_fields__) + if include_edges and self.__exclude_fields__: + d.pop("edges") + + try: + super__dict = super().dict( + include=include, + exclude=exclude, + by_alias=by_alias, + skip_defaults=skip_defaults, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + except RecursionError as e: + raise AssertionError( + "is not possible to call .dict() when using recursive model, instead traverse the graph and collect" + " data or exclude recursive fields" + ) from e + if self.__exclude_fields__: + d["edges"] = True + + return super__dict + + def save_dict(self) -> "DictStrAny": + return jsonable_encoder(self, by_alias=True, exclude=cast(set, self.__relationships_fields__.keys())) + + @classmethod + def update_forward_refs(cls, **localns: Any) -> None: + super().update_forward_refs(**localns) + + for k in cls.__edge_to_field_mapping__.copy(): + if isinstance(k, ForwardRef): + funcs = cls.__edge_to_field_mapping__.pop(k) + new_k = evaluate_forward_ref(cls, k, **localns) + if new_k in cls.__edge_to_field_mapping__: + cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs) + else: + cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs + + globalns = get_globals(cls) + + for fields, model_field in cls.__fields__[EDGES].type_.__fields__.items(): + if isinstance(model_field.type_, ForwardRef): + model_field.type_ = evaluate_forwardref(model_field.type_, globalns, localns) + + cls.__fields__[EDGES].type_.update_forward_refs(**localns, **globalns) diff --git a/pydango/orm/proxy.py b/pydango/orm/proxy.py deleted file mode 100644 index e69de29..0000000 diff --git a/pydango/orm/query.py b/pydango/orm/query.py index dfb62d8..479257f 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -1,39 
+1,22 @@ import logging import sys -from typing import Optional, Sequence, Type, Union, cast, overload - -from pydango.orm.encoders import jsonable_encoder - -if sys.version_info >= (3, 10): - from typing import Self -else: - from typing_extensions import Self +from typing import TYPE_CHECKING, Optional, Sequence, Type, Union, cast, overload from pydantic import BaseModel from pydantic.utils import lenient_issubclass -from pydango.orm.models import ( - Aliased, - BaseArangoModel, - LazyProxy, - ModelFieldExpression, - save_dict, -) +from pydango.orm.encoders import jsonable_encoder +from pydango.orm.models.base import Aliased, BaseArangoModel, LazyProxy +from pydango.orm.models.fields import ModelFieldExpression +from pydango.orm.models.utils import save_dict from pydango.query.expressions import ( BinaryExpression, - BinaryLogicalExpression, - BindableExpression, CollectionExpression, - ConditionExpression, - Expression, FieldExpression, - IterableExpression, IteratorExpression, LiteralExpression, - ObjectExpression, ReturnableMixin, SortExpression, - VariableExpression, ) from pydango.query.operations import ( ForParams, @@ -49,14 +32,30 @@ ) from pydango.query.query import AQLQuery, TraverseIterators +if sys.version_info >= (3, 10): + from typing import Self +else: + from typing_extensions import Self, TypeAlias + +if TYPE_CHECKING: + from pydango.orm.models.base import ArangoModel + from pydango.query.expressions import ( + BinaryLogicalExpression, + BindableExpression, + ConditionExpression, + Expression, + IterableExpression, + ObjectExpression, + VariableExpression, + ) logger = logging.getLogger(__name__) -ORMForParams = Union[ForParams, Type[BaseArangoModel], Aliased[BaseArangoModel]] +ORMForParams: TypeAlias = Union[ForParams, Type[BaseArangoModel], Aliased[BaseArangoModel]] IMPLICIT_COLLECTION_ERROR = "you must specify collection when the collection cannot be implicitly resolved" MULTIPLE_COLLECTIONS_RESOLVED = "multiple collections resolved" -def _bind(query: "ORMQuery", node: Expression): +def _bind(query: "ORMQuery", node: "Expression"): if isinstance(node, FieldExpression): if node.parent and isinstance(node.parent, type) and issubclass(node.parent, BaseArangoModel): node.parent = query.orm_bound_vars[cast(Type[BaseArangoModel], node.parent)] @@ -66,8 +65,8 @@ def _bind(query: "ORMQuery", node: Expression): node.parent = query.orm_bound_vars[node.parent] -def _find_models_and_bind(condition: Union[ConditionExpression, BinaryLogicalExpression], query: "ORMQuery"): - stack: list[Union[BinaryExpression, Expression]] = [condition] +def _find_models_and_bind(condition: Union["ConditionExpression", "BinaryLogicalExpression"], query: "ORMQuery"): + stack: list[Union[BinaryExpression, "Expression"]] = [condition] while stack: current = stack.pop() if isinstance(current, (LiteralExpression, FieldExpression)): @@ -90,12 +89,16 @@ class ORMQuery(AQLQuery): def __init__(self, parent: Optional[AQLQuery] = None): super().__init__(parent) # self.bind_parameter_to_sequence = {} - self.orm_bound_vars: dict[Union[Type[BaseArangoModel], Aliased, ModelFieldExpression], VariableExpression] = {} + self.orm_bound_vars: dict[Union[Type[BaseArangoModel], Aliased, "ModelFieldExpression"], VariableExpression] = ( + {} + ) def for_( self, collection_or_variable: ORMForParams, - in_: Optional[Union[AQLQuery, IterableExpression, list, VariableExpression, list[VariableExpression]]] = None, + in_: Optional[ + Union[AQLQuery, "IterableExpression", list, "VariableExpression", list["VariableExpression"]] + ] 
= None, ) -> Self: if lenient_issubclass(collection_or_variable, BaseArangoModel): model = cast(BaseArangoModel, collection_or_variable) @@ -135,7 +138,7 @@ def for_( return self - def filter(self, condition: Union[ConditionExpression, BinaryLogicalExpression]) -> Self: + def filter(self, condition: Union["ConditionExpression", "BinaryLogicalExpression"]) -> Self: _find_models_and_bind(condition, self) super().filter(condition) return self @@ -151,19 +154,20 @@ def sort(self, *sort_list: SortParams) -> Self: super().sort(*sort_list) return self + # noinspection PyMethodOverriding @overload - def insert(self, doc: BaseArangoModel) -> Self: + def insert(self, doc: "ArangoModel") -> Self: ... @overload def insert( - self, doc: Union[dict, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression] + self, doc: Union[dict, "ObjectExpression", "VariableExpression"], collection: Union[str, CollectionExpression] ) -> Self: ... def insert( self, - doc: Union[dict, ObjectExpression, BaseArangoModel, VariableExpression], + doc: Union[dict, "ObjectExpression", BaseArangoModel, "VariableExpression"], collection: Optional[Union[str, CollectionExpression]] = None, ) -> Self: if isinstance(doc, (BaseArangoModel, LazyProxy)): @@ -182,7 +186,7 @@ def remove( # noqa: PyMethodOverriding @overload def remove( self, - expression: Union[dict, LiteralExpression, FieldExpression, VariableExpression, ObjectExpression, str], + expression: Union[dict, LiteralExpression, FieldExpression, "VariableExpression", "ObjectExpression", str], collection: Union[str, CollectionExpression], *, options: Optional[RemoveOptions] = None, @@ -192,7 +196,7 @@ def remove( def remove( self, expression: Union[ - BaseArangoModel, dict, LiteralExpression, FieldExpression, VariableExpression, ObjectExpression, str + BaseArangoModel, dict, LiteralExpression, FieldExpression, "VariableExpression", "ObjectExpression", str ], collection: Union[str, CollectionExpression, None] = None, *, @@ -206,7 +210,7 @@ def remove( return super().remove(expression, collection, options=options) - def bind_parameter(self, parameter: BindableExpression, override_var_name: Optional[str] = None) -> str: + def bind_parameter(self, parameter: "BindableExpression", override_var_name: Optional[str] = None) -> str: return super().bind_parameter(parameter) @overload @@ -249,8 +253,8 @@ def replace( # noqa: PyMethodOverriding @overload def replace( self, - key: Union[str, dict, ObjectExpression], - doc: Union[dict, ObjectExpression], + key: Union[str, dict, "ObjectExpression"], + doc: Union[dict, "ObjectExpression"], collection: Union[str, CollectionExpression], *, options: Optional[ReplaceOptions] = None, @@ -259,8 +263,8 @@ def replace( def replace( self, - key: Union[str, dict, ObjectExpression, BaseArangoModel], - doc: Union[dict, ObjectExpression, BaseArangoModel], + key: Union[str, dict, "ObjectExpression", BaseArangoModel], + doc: Union[dict, "ObjectExpression", BaseArangoModel], collection: Union[str, CollectionExpression, None] = None, *, options: Optional[ReplaceOptions] = None, @@ -283,9 +287,9 @@ def replace( def upsert( # noqa: PyMethodOverriding self, filter_: BaseArangoModel, - insert: Union[dict, BaseModel, ObjectExpression, BaseArangoModel, VariableExpression], + insert: Union[dict, BaseModel, "ObjectExpression", BaseArangoModel, "VariableExpression"], *, - replace: Union[dict, BaseModel, ObjectExpression, BaseArangoModel, VariableExpression], + replace: Union[dict, BaseModel, "ObjectExpression", BaseArangoModel, 
"VariableExpression"], options: Optional[UpsertOptions] = None, ) -> Self: ... @@ -294,9 +298,9 @@ def upsert( # noqa: PyMethodOverriding def upsert( # noqa: PyMethodOverriding self, filter_: BaseArangoModel, - insert: Union[dict, BaseModel, ObjectExpression, BaseArangoModel, VariableExpression], + insert: Union[dict, BaseModel, "ObjectExpression", BaseArangoModel, "VariableExpression"], *, - update: Union[dict, BaseModel, ObjectExpression, BaseArangoModel, VariableExpression], + update: Union[dict, BaseModel, "ObjectExpression", BaseArangoModel, "VariableExpression"], options: Optional[UpsertOptions] = None, ) -> Self: ... @@ -304,11 +308,11 @@ def upsert( # noqa: PyMethodOverriding @overload def upsert( # noqa: PyMethodOverriding self, - filter_: Union[dict, BaseModel, ObjectExpression, VariableExpression], - insert: Union[dict, BaseModel, ObjectExpression, VariableExpression], + filter_: Union[dict, BaseModel, "ObjectExpression", "VariableExpression"], + insert: Union[dict, BaseModel, "ObjectExpression", "VariableExpression"], collection: Union[str, CollectionExpression], *, - replace: Union[dict, BaseModel, ObjectExpression, VariableExpression], + replace: Union[dict, BaseModel, "ObjectExpression", "VariableExpression"], options: Optional[UpsertOptions] = None, ) -> Self: ... @@ -316,23 +320,23 @@ def upsert( # noqa: PyMethodOverriding @overload def upsert( # noqa: PyMethodOverriding self, - filter_: Union[dict, BaseModel, ObjectExpression, VariableExpression], - insert: Union[dict, BaseModel, ObjectExpression, VariableExpression], + filter_: Union[dict, BaseModel, "ObjectExpression", "VariableExpression"], + insert: Union[dict, BaseModel, "ObjectExpression", "VariableExpression"], collection: Union[str, CollectionExpression], *, - update: Union[dict, BaseModel, ObjectExpression, VariableExpression], + update: Union[dict, BaseModel, "ObjectExpression", "VariableExpression"], options: Optional[UpsertOptions] = None, ) -> Self: ... 
def upsert( self, - filter_: Union[dict, ObjectExpression, BaseModel, BaseArangoModel, VariableExpression], - insert: Union[dict, ObjectExpression, BaseModel, BaseArangoModel, VariableExpression], + filter_: Union[dict, "ObjectExpression", BaseModel, BaseArangoModel, "VariableExpression"], + insert: Union[dict, "ObjectExpression", BaseModel, BaseArangoModel, "VariableExpression"], collection: Union[str, CollectionExpression, None] = None, **kwargs, - # update: Union[dict,ObjectExpression, BaseModel, BaseArangoModel, None] = None, - # replace: Union[dict, ObjectExpression, BaseModel,BaseArangoModel, None] = None, + # update: Union[dict,"ObjectExpression", BaseModel, BaseArangoModel, None] = None, + # replace: Union[dict, "ObjectExpression", BaseModel,BaseArangoModel, None] = None, # options: Optional[UpsertOptions] = None, ) -> Self: update = kwargs.get("update") @@ -370,11 +374,11 @@ def upsert( super().upsert(filter_, insert, collection, **kwargs) return self - def return_(self, return_expr: Union[Type[BaseArangoModel], Aliased, ReturnableMixin, dict]) -> Self: + def return_(self, return_expr: Union[Type[BaseArangoModel], Aliased, "ReturnableMixin", dict]) -> Self: if isinstance(return_expr, type) and issubclass(return_expr, (BaseArangoModel,)): return_expr = self.orm_bound_vars[return_expr] elif isinstance(return_expr, Aliased): - return_expr = cast(ReturnableMixin, self.orm_bound_vars[return_expr]) + return_expr = cast("ReturnableMixin", self.orm_bound_vars[return_expr]) super().return_(return_expr) return self @@ -386,7 +390,7 @@ def traverse( self, iterators: TraverseIterators, edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], - start: Union["LiteralExpression", VariableExpression, FieldExpression, str], + start: Union["LiteralExpression", "VariableExpression", FieldExpression, str], depth: Union[RangeExpression, range, tuple[int, int]], direction: TraversalDirection, ): @@ -396,13 +400,13 @@ def traverse( def for_( collection_or_variable: ORMForParams, - in_: Optional[Union[IterableExpression, VariableExpression, list[VariableExpression], list]] = None, + in_: Optional[Union["IterableExpression", "VariableExpression", list["VariableExpression"], list]] = None, ) -> ORMQuery: return ORMQuery().for_(collection_or_variable, in_) def traverse( collection_or_variable: ORMForParams, - in_: Optional[Union[IterableExpression, VariableExpression, list[VariableExpression], list]] = None, + in_: Optional[Union["IterableExpression", "VariableExpression", list["VariableExpression"], list]] = None, ) -> ORMQuery: return ORMQuery().for_(collection_or_variable, in_) diff --git a/pydango/orm/relations.py b/pydango/orm/relations.py deleted file mode 100644 index 4e5ef5c..0000000 --- a/pydango/orm/relations.py +++ /dev/null @@ -1,34 +0,0 @@ -from enum import Enum - - -class LinkTypes(str, Enum): - DIRECT = "DIRECT" - OPTIONAL_DIRECT = "OPTIONAL_DIRECT" - LIST = "LIST" - OPTIONAL_LIST = "OPTIONAL_LIST" - EDGE = "EDGE" - OPTIONAL_EDGE = "OPTIONAL_EDGE" - EDGE_LIST = "EDGE_LIST" - OPTIONAL_EDGE_LIST = "OPTIONAL_EDGE_LIST" - - -EDGE_TYPES = ( - LinkTypes.EDGE, - LinkTypes.OPTIONAL_EDGE, - LinkTypes.OPTIONAL_EDGE_LIST, - LinkTypes.EDGE_LIST, -) - -LIST_TYPES = ( - LinkTypes.EDGE_LIST, - LinkTypes.OPTIONAL_EDGE_LIST, - LinkTypes.LIST, - LinkTypes.OPTIONAL_LIST, -) - -SINGLETON_TYPES = ( - LinkTypes.EDGE, - LinkTypes.DIRECT, - LinkTypes.OPTIONAL_EDGE, - LinkTypes.OPTIONAL_DIRECT, -) diff --git a/pydango/orm/types.py b/pydango/orm/types.py deleted file mode 100644 index 
ba69d92..0000000 --- a/pydango/orm/types.py +++ /dev/null @@ -1,5 +0,0 @@ -# from typing import TypeVar -# -# ArangoModel = TypeVar("ArangoModel", bound="BaseArangoModel") -# TEdge = TypeVar("TEdge", bound="EdgeModel") -# TVertexModel = TypeVar("TVertexModel", bound="VertexModel") diff --git a/pydango/query/operations.py b/pydango/query/operations.py index 0053b42..964b9c6 100644 --- a/pydango/query/operations.py +++ b/pydango/query/operations.py @@ -41,6 +41,11 @@ ) from pydango.query.utils import Compilable +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + if TYPE_CHECKING: from pydango.query.query import AQLQuery @@ -61,10 +66,6 @@ def compile(self, *args, **kwargs): CollectionExpression, # Aliased, ) -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias class ForOperation(Operation): diff --git a/pydango/query/query.py b/pydango/query/query.py index 220fee6..13b82f2 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -3,18 +3,9 @@ import sys from typing import Any, Dict, List, Optional, Sequence, Union, overload -# from pydango.orm.models import BaseArangoModel, save_dict - -if sys.version_info >= (3, 10): - from typing import Self, TypeAlias -else: - from typing_extensions import Self, TypeAlias - from aioarango.database import Database from pydango.orm.encoders import jsonable_encoder - -# if TYPE_CHECKING: from pydango.query.expressions import ( BindableExpression, CollectionExpression, @@ -62,6 +53,11 @@ UpsertOptions, ) +if sys.version_info >= (3, 10): + from typing import Self, TypeAlias +else: + from typing_extensions import Self, TypeAlias + logger = logging.getLogger(__name__) JsonType: TypeAlias = Union[None, int, str, bool, List["JsonType"], Dict[str, "JsonType"]] diff --git a/tests/session/test_cities.py b/tests/session/test_cities.py index b49c1d3..2dac687 100644 --- a/tests/session/test_cities.py +++ b/tests/session/test_cities.py @@ -9,14 +9,10 @@ from pydango.connection.session import PydangoSession from pydango.index import PersistentIndex -from pydango.orm.models import ( - BaseArangoModel, - EdgeCollectionConfig, - EdgeModel, - Relation, - VertexCollectionConfig, - VertexModel, -) +from pydango.orm.models import EdgeModel, VertexModel +from pydango.orm.models.base import BaseArangoModel, Relation +from pydango.orm.models.edge import EdgeCollectionConfig +from pydango.orm.models.vertex import VertexCollectionConfig from pydango.query.consts import ID # from tests.utils import find_dict_diffs, ANY_NOT_NONE diff --git a/tests/session/test_family.py b/tests/session/test_family.py index 65b322e..d58fae1 100644 --- a/tests/session/test_family.py +++ b/tests/session/test_family.py @@ -7,14 +7,10 @@ from pydango.connection.session import PydangoSession from pydango.index import PersistentIndex -from pydango.orm.models import ( - BaseArangoModel, - EdgeCollectionConfig, - EdgeModel, - Relation, - VertexCollectionConfig, - VertexModel, -) +from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel +from pydango.orm.models.base import Relation +from pydango.orm.models.edge import EdgeCollectionConfig +from pydango.orm.models.vertex import VertexCollectionConfig class Person(VertexModel): diff --git a/tests/session/test_social_network.py b/tests/session/test_social_network.py index 397e008..1f3f2d5 100644 --- a/tests/session/test_social_network.py +++ b/tests/session/test_social_network.py @@ -7,14 +7,10 @@ from pydiction import 
ANY_NOT_NONE, Contains, Matcher from pydango.connection.session import PydangoSession -from pydango.orm.models import ( - BaseArangoModel, - EdgeCollectionConfig, - EdgeModel, - Relation, - VertexCollectionConfig, - VertexModel, -) +from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel +from pydango.orm.models.base import Relation +from pydango.orm.models.edge import EdgeCollectionConfig +from pydango.orm.models.vertex import VertexCollectionConfig class Post(VertexModel): diff --git a/tests/test_orm_query.py b/tests/test_orm_query.py index 23d727d..c289785 100644 --- a/tests/test_orm_query.py +++ b/tests/test_orm_query.py @@ -1,6 +1,8 @@ import datetime -from pydango.orm.models import Aliased, VertexCollectionConfig, VertexModel +from pydango.orm.models import VertexModel +from pydango.orm.models.base import Aliased +from pydango.orm.models.vertex import VertexCollectionConfig from pydango.orm.query import ORMQuery from pydango.query.expressions import ( NEW, From 25eacb5279b928f87b926448ace3a547ea702bfb Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Mon, 25 Sep 2023 20:39:58 +0300 Subject: [PATCH 09/19] feat: fetch model --- .github/workflows/ci.yml | 173 +++++++++++++++++++++++++ .github/workflows/python-package.yml | 40 ------ pydango/__init__.py | 1 + pydango/connection/__init__.py | 2 +- pydango/connection/session.py | 17 ++- pydango/index.py | 7 +- pydango/orm/models/base.py | 106 ++++++++++----- pydango/orm/models/fields.py | 4 +- pydango/orm/models/sentinel.py | 12 +- pydango/orm/query.py | 17 +-- pydango/query/expressions.py | 26 +++- pydango/query/functions.py | 4 +- pydango/query/operations.py | 71 ++++------- pydango/query/query.py | 184 ++++++++++++++------------- pydango/utils.py | 0 pyproject.toml | 27 +++- tests/conftest.py | 2 +- tests/session/test_social_network.py | 22 ++++ 18 files changed, 476 insertions(+), 239 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/python-package.yml delete mode 100644 pydango/utils.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..b8e2664 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,173 @@ +--- +name: CI + +on: + push: + branches: + - main + pull_request: + branches: + - main + + +permissions: + checks: write + id-token: write + contents: write + +jobs: + pre-commit: + name: Pre-commit checks + runs-on: ubuntu-latest + steps: + - name: Checkout the code + uses: actions/checkout@v4 + + - name: Pre-commit checks + uses: pre-commit/action@v3.0.0 + env: + SKIP: no-commit-to-branch + + test: + name: Tests + runs-on: ubuntu-latest + outputs: + release-id: ${{ steps.generate-release-id.outputs.release-id }} + steps: + - name: Checkout the code + uses: actions/checkout@v4 + + - uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: cache poetry install + uses: actions/cache@v3 + with: + path: ~/.local + key: poetry-1.1.12-0 + + - name: Install and configure Poetry + uses: snok/install-poetry@v1 + with: + version: 1.4.0 + virtualenvs-create: true + virtualenvs-in-project: false + installer-parallel: true + + + - name: cache deps + id: cache-deps + uses: actions/cache@v3 + with: + path: .venv + key: pydeps-${{ hashFiles('**/poetry.lock') }} + + # Install dependencies. `--no-root` means "install all dependencies but not the project + # itself", which is what you want to avoid caching _your_ code. The `if` statement + # ensures this only runs on a cache miss. 
+ - run: poetry install --no-interaction --no-root + if: steps.cache-deps.outputs.cache-hit != 'true' + - run: poetry install --no-interaction + - name: test + run: | + poetry run pytest --cov=pydiction --cov-report=xml:coverage.xml --junitxml=test-results/test-results.xml tests + + + - name: Test Report + uses: mikepenz/action-junit-report@v4 + if: success() || failure() + with: + report_paths: '**/test-results/*.xml' + + + + - name: Coverage Report + uses: 5monkeys/cobertura-action@master + if: success() || failure() + with: + path: coverage.xml + minimum_coverage: 75 + fail_below_threshold: true + + + + release: + name: Release + if: github.ref == 'refs/heads/main' + needs: + - test + runs-on: ubuntu-latest + concurrency: release + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: cache deps + id: cache-deps + uses: actions/cache@v3 + with: + path: /semantic-release + key: semantic-release + + + - name: cache poetry install + uses: actions/cache@v3 + with: + path: ~/.local + key: poetry-1.1.12-0 + + - name: Install and configure Poetry + uses: snok/install-poetry@v1 + with: + version: 1.4.0 + virtualenvs-create: true + virtualenvs-in-project: false + installer-parallel: true + + - name: Python Semantic Release + id: semver + uses: python-semantic-release/python-semantic-release@v8.0.8 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + + - run: | + poetry build + + - name: Store the distribution packages + uses: actions/upload-artifact@v3 + if: steps.semver.outputs.released == 'true' + + with: + name: python-package-distributions + path: dist/ + + outputs: + released: ${{ steps.semver.outputs.released }} + + publish: + name: Publish + needs: + - pre-commit + - release + if: needs.release.outputs.released == 'true' + concurrency: release + runs-on: ubuntu-latest + + steps: + - name: Download all the dists + uses: actions/download-artifact@v3 + with: + name: python-package-distributions + path: dist/ + + - name: Publish package distributions to PyPI + + uses: pypa/gh-action-pypi-publish@release/v1 + + - name: Publish package distributions to GitHub Releases + uses: python-semantic-release/upload-to-gh-release@main + with: + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml deleted file mode 100644 index 60d9a6a..0000000 --- a/.github/workflows/python-package.yml +++ /dev/null @@ -1,40 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python - -name: Python package - -on: - push: - branches: [ "main" ] - pull_request: - branches: [ "main" ] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: [ "3.9", "3.10","3.11"] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install flake8 pytest - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Test with pytest - run: | - pytest diff --git a/pydango/__init__.py b/pydango/__init__.py index e69de29..6c8e6b9 100644 --- a/pydango/__init__.py +++ b/pydango/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0.0" diff --git a/pydango/connection/__init__.py b/pydango/connection/__init__.py index 6d1e6d5..7a78c14 100644 --- a/pydango/connection/__init__.py +++ b/pydango/connection/__init__.py @@ -1 +1 @@ -DALI_SESSION_KW = "__dali__session__" +DALI_SESSION_KW = "__session__" diff --git a/pydango/connection/session.py b/pydango/connection/session.py index dc769f0..1736716 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -305,7 +305,9 @@ def db_traverse( db_traverse(vertex_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) else: getattr(model.edges, field) - db_traverse(relation_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) + db_traverse( + cast(VertexModel, relation_doc), visited, result, model_fields_mapping, vertices_ids, edges_ids + ) else: # todo: insert join relation pass @@ -589,7 +591,7 @@ async def get( edges: Sequence[str] if fetch_edges: if isinstance(fetch_edges, set): - edges = cast(Sequence[str], fetch_edges) + edges = cast(Sequence[str], tuple(fetch_edges)) else: _edges = [] for i in model.__relationships__.values(): @@ -617,11 +619,16 @@ async def get( return_ = {"doc": doc, "edges": traversal_result} main_query.return_(return_) - # logger.debug(str(main_query)) + cursor = await main_query.execute(self.database) result = await cursor.next() + if not result: + raise DocumentNotFoundError(_id) + if fetch_edges and not result.get("doc"): + raise DocumentNotFoundError(_id) + if issubclass(model, VertexModel): - result, recursive = construct(result, model) + result, recursive = graph_to_documents(result, model) if return_raw: return result @@ -674,7 +681,7 @@ def traverse_model_and_map(pydantic_model: Type[BaseModel], variable: VariableEx return result -def construct(traversal_result: dict, model: Type[VertexModel]): +def graph_to_documents(traversal_result: dict, model: Type[VertexModel]): doc = traversal_result["doc"] # for relation in traversal_result["edges"]: diff --git a/pydango/index.py b/pydango/index.py index d2513ea..d21b58d 100644 --- a/pydango/index.py +++ b/pydango/index.py @@ -8,11 +8,10 @@ from typing_extensions import TypeAlias from aioarango.collection import Collection -from aioarango.result import Result -from aioarango.typings import Json if TYPE_CHECKING: - from aioarango.typings import Fields + from aioarango.result import Result + from aioarango.typings import Fields, Json @dataclass() @@ -75,7 +74,7 @@ class TTLIndex(Index): Indexes: TypeAlias = Union[GeoIndex, HashIndex, SkipListIndex, FullTextIndex, PersistentIndex, TTLIndex] -mapping: dict[Type[Indexes], Callable[..., Awaitable[Result[Json]]]] = { +mapping: dict[Type[Indexes], Callable[..., Awaitable["Result[Json]"]]] = { GeoIndex: Collection.add_geo_index, HashIndex: Collection.add_hash_index, SkipListIndex: Collection.add_skiplist_index, diff --git a/pydango/orm/models/base.py b/pydango/orm/models/base.py index 373c354..ea902c6 100644 --- a/pydango/orm/models/base.py +++ b/pydango/orm/models/base.py @@ -18,9 +18,9 @@ get_origin, ) -from pydantic import BaseConfig, ConfigError, Field, PrivateAttr -from pydantic.fields import SHAPE_SINGLETON, ModelField, Undefined -from pydantic.main import BaseModel, ModelMetaclass +from pydantic import BaseConfig, 
ConfigError, Field +from pydantic.fields import SHAPE_SINGLETON, ModelField, PrivateAttr, Undefined +from pydantic.main import BaseModel, ModelMetaclass, object_setattr from pydantic.typing import resolve_annotations from pydango.connection import DALI_SESSION_KW @@ -33,7 +33,7 @@ get_pydango_field, ) from pydango.orm.models.relations import Relationship -from pydango.orm.models.sentinel import NAO +from pydango.orm.models.sentinel import LazyFetch from pydango.orm.models.shapes import LIST_SHAPES from pydango.orm.utils import evaluate_forward_ref from pydango.query.consts import ID, KEY, REV @@ -44,7 +44,6 @@ from pydantic.typing import AbstractSet, DictStrAny, MappingIntStrAny from pydango.connection.session import PydangoSession - from pydango.orm.models.sentinel import NotAnObject from pydango.orm.models.types import RelationshipFields, Relationships from pydango.orm.models.vertex import TVertexModel @@ -136,12 +135,15 @@ def __init__(self, *args, **kwargs): class LazyProxy(Generic[ArangoModel]): _initialized: bool = False - __instance__: Union[ArangoModel, "NotAnObject"] + __instance__: Union[ArangoModel, "LazyFetch"] - def __init__(self, instance: Union[ArangoModel, "NotAnObject"], field, session: Optional["PydangoSession"]): + def __init__( + self, instance: Union[ArangoModel, "LazyFetch"], field, parent: ArangoModel, session: Optional["PydangoSession"] + ): + self.parent = parent self.session = session - self._field = field - if instance is not NAO: + self._relation_field = field + if not isinstance(instance, LazyFetch): self._initialized = True self.__instance__ = instance @@ -154,12 +156,11 @@ def __getattr__(self, item): if item in ["dict"]: return partial(jsonable_encoder, obj=self.__instance__) - # if item in getattr(getattr(self, '_instance'), item): attr = getattr(self.__instance__, item, None) if attr: return attr else: - return getattr(self._field.type_, item) + return getattr(self._relation_field.type_, item) def __repr__(self): return repr(self.__instance__) @@ -168,17 +169,31 @@ def __getitem__(self, item): if self: return self.__instance__[item] raise AttributeError( - "you are attempting to access " - f"{self._field.type_.__name__} via {self._field.name} which is not initialized use fetch" + f"you are attempting to access {self._relation_field.field.type_.__name__} via" + f" {self._relation_field.field.name} which is not initialized yet, use fetch" ) def __bool__(self): return self._initialized and bool(self.__instance__) - def fetch(self): - self.session.get( - self._field.type_, + async def fetch( + self, + ): + if not self.session: + raise "Kusomo" + + model = await self.session.get( + self.parent.__class__, + # self._relation_field.field.type_, + self.parent.key, + fetch_edges={self._relation_field.via_model.Collection.name}, + depth=range(1, 1), ) + model = getattr(model, self._relation_field.field.name) + setattr(self.parent, self._relation_field.field.name, model) + self.__instance__ = model + self._initialized = True + return model def compile(self, query_ref): return ObjectExpression(self.dict()).compile(query_ref) @@ -191,6 +206,7 @@ class DocFieldDescriptor(Generic[FieldType]): def __init__(self, field: ModelField, relation: Optional[Relationship] = None): self.relation = relation self.field = field + self._proxy: Optional[LazyProxy] = None def __set__(self, instance, value: FieldType): raise AssertionError() @@ -198,18 +214,23 @@ def __set__(self, instance, value: FieldType): def __get__( self, instance: Optional[ArangoModel], owner: Type["TVertexModel"] - 
) -> Union[LazyProxy[ArangoModel], ModelFieldExpression, None]: + ) -> Union[LazyProxy[ArangoModel], ModelFieldExpression, FieldType, None]: if not instance and self.field.name in owner.__fields__.keys(): return ModelFieldExpression(self.field.name, owner) - - field_value = instance.__dict__.get(self.field.name) - if field_value is not None: - return field_value - - if self.relation: - return LazyProxy[owner]( # type: ignore[valid-type] - field_value, self.field, getattr(instance, DALI_SESSION_KW, None) - ) + if instance: + field_value = instance.__dict__.get(self.field.name) + if self.field.name in instance.__fields_set__: + return field_value + + if self._proxy: + return self._proxy + + if self.relation: + session = getattr(instance, DALI_SESSION_KW, None) + self._proxy = LazyProxy(field_value, self.relation, instance, session) + return self._proxy # type: ignore[valid-type] + if not instance: + raise ValueError("something happened open an issue :(") return None def __set_name__(self, owner, name): @@ -311,7 +332,7 @@ class BaseArangoModel(BaseModel, metaclass=ArangoModelMeta): key: Optional[str] = Field(None, alias=KEY) rev: Optional[str] = Field(None, alias=REV) - __dali__session__: Optional["PydangoSession"] = PrivateAttr() + __session__: Optional["PydangoSession"] = PrivateAttr() if TYPE_CHECKING: __relationships__: Relationships = {} @@ -326,6 +347,14 @@ class Config(BaseConfig): class Collection(CollectionConfig): ... + def __init__(__pydantic_self__, **data: Any): + super().__init__(**data) + object_setattr(__pydantic_self__, "__session__", data.get(DALI_SESSION_KW)) + + # @property + # def session(self): + # return self._session + @classmethod def _decompose_class(cls: Type["Model"], obj: Any) -> Union["GetterDict", dict]: # type: ignore[override] if isinstance(obj, dict): @@ -354,21 +383,38 @@ def _calculate_keys( @classmethod def from_orm(cls: Type[ArangoModel], obj: Any, *, session=None) -> ArangoModel: + obj[DALI_SESSION_KW] = session for field_name, field in cls.__relationships_fields__.items(): - exists_in_orm = obj.get(field_name, None) + exists_in_orm = field_name in obj and obj.get(field_name, None) if exists_in_orm: + if isinstance(exists_in_orm, list): + for i, v in enumerate(exists_in_orm): + exists_in_orm[i][DALI_SESSION_KW] = session + else: + exists_in_orm[DALI_SESSION_KW] = session + obj[field_name] = exists_in_orm + continue if field.required: - obj[field_name] = NAO + obj[field_name] = LazyFetch(session, obj["_id"]) + else: + print("field not set", field_name) + try: obj = cast(Type[ArangoModel], super()).from_orm(obj) except ConfigError as e: raise e - obj.__dali__session__ = session + + # for field_name, field in cls.__relationships_fields__.items(): + # setattr( getattr(obj,field_name),'__dali_session__',session) # object_setattr(obj, DALI_SESSION_KW, session) return obj + # @classmethod + # def validate(cls: Type['Model'], value: Any) -> 'Model': + # return cls.from_orm(value) + @classmethod def update_forward_refs(cls, **localns: Any) -> None: super().update_forward_refs(**localns) diff --git a/pydango/orm/models/fields.py b/pydango/orm/models/fields.py index 2dc1acf..a5929cf 100644 --- a/pydango/orm/models/fields.py +++ b/pydango/orm/models/fields.py @@ -2,7 +2,7 @@ from pydantic.fields import ModelField -from pydango.orm.models.sentinel import NAO +from pydango.orm.models.sentinel import LazyFetch from pydango.query.expressions import ( Expression, FieldExpression, @@ -45,7 +45,7 @@ def validate( loc: "LocStr", cls: Optional["ModelOrDc"] = None, ) -> 
"ValidateReturn": - return super().validate(v, values, loc=loc, cls=cls) if v is not NAO else (v, None) + return super().validate(v, values, loc=loc, cls=cls) if not isinstance(v, LazyFetch) else (v, None) def get_pydango_field(field: ModelField, cls: Type[RelationModelField] = RelationModelField) -> RelationModelField: diff --git a/pydango/orm/models/sentinel.py b/pydango/orm/models/sentinel.py index 5d2ce29..667f948 100644 --- a/pydango/orm/models/sentinel.py +++ b/pydango/orm/models/sentinel.py @@ -1,6 +1,10 @@ -class NotAnObject: - def __repr__(self): - return "NAO" +import dataclasses +# NOT_SET = LazyFetched() -NAO = NotAnObject() + +@dataclasses.dataclass +class LazyFetch: + def __init__(self, session, instance): + self.instance = instance + self.session = session diff --git a/pydango/orm/query.py b/pydango/orm/query.py index 479257f..5b6f265 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -18,12 +18,6 @@ ReturnableMixin, SortExpression, ) -from pydango.query.operations import ( - ForParams, - RangeExpression, - SortParams, - TraversalDirection, -) from pydango.query.options import ( RemoveOptions, ReplaceOptions, @@ -46,11 +40,14 @@ Expression, IterableExpression, ObjectExpression, + RangeExpression, VariableExpression, ) + from pydango.query.operations import ForParams, SortParams, TraversalDirection + logger = logging.getLogger(__name__) -ORMForParams: TypeAlias = Union[ForParams, Type[BaseArangoModel], Aliased[BaseArangoModel]] +ORMForParams: TypeAlias = Union["ForParams", Type[BaseArangoModel], Aliased[BaseArangoModel]] IMPLICIT_COLLECTION_ERROR = "you must specify collection when the collection cannot be implicitly resolved" MULTIPLE_COLLECTIONS_RESOLVED = "multiple collections resolved" @@ -143,7 +140,7 @@ def filter(self, condition: Union["ConditionExpression", "BinaryLogicalExpressio super().filter(condition) return self - def sort(self, *sort_list: SortParams) -> Self: + def sort(self, *sort_list: "SortParams") -> Self: for i in range(len(sort_list)): sort = sort_list[i] if isinstance(sort, SortExpression) and isinstance(sort.field, ModelFieldExpression): @@ -391,8 +388,8 @@ def traverse( iterators: TraverseIterators, edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], start: Union["LiteralExpression", "VariableExpression", FieldExpression, str], - depth: Union[RangeExpression, range, tuple[int, int]], - direction: TraversalDirection, + depth: Union["RangeExpression", range, tuple[int, int]], + direction: "TraversalDirection", ): return super().traverse(iterators, edges, start, depth, direction) # return self diff --git a/pydango/query/expressions.py b/pydango/query/expressions.py index 42aa1c9..7d4a18b 100644 --- a/pydango/query/expressions.py +++ b/pydango/query/expressions.py @@ -231,6 +231,28 @@ class QueryExpression(Expression, ABC): sep = " " +class RangeExpression(IterableExpression): + def __init__(self, start, end): + super().__init__() + self.end = end + self.start = start + + def compile(self, query_ref: "AQLQuery"): + if isinstance(self.start, Expression): + start = self.start.compile(query_ref) + else: + start = self.start + if isinstance(self.end, Expression): + end = self.end.compile(query_ref) + else: + end = self.end + + return f"{start}..{end}" + + def __repr__(self): + return f"{self.start}..{self.end}" + + class AssignmentExpression(Expression): def __init__(self, variable: VariableExpression, expression: Expression): self.variable = variable @@ -431,7 +453,9 @@ class 
FigurativeExpression(BindableExpression, ReturnableMixin, ABC): pass -ListItems = Union[QueryExpression, LiteralExpression, FigurativeExpression, Mapping, Sequence, int, float, str, bool] +ListItems: TypeAlias = Union[ + QueryExpression, LiteralExpression, FigurativeExpression, Mapping, Sequence, int, float, str, bool +] ListValues: TypeAlias = Union[ tuple[ ListItems, diff --git a/pydango/query/functions.py b/pydango/query/functions.py index 78f0d6b..241a43c 100644 --- a/pydango/query/functions.py +++ b/pydango/query/functions.py @@ -10,11 +10,11 @@ ObjectExpression, QueryExpression, ReturnableMixin, - VariableExpression, ) if TYPE_CHECKING: from pydango.query import AQLQuery + from pydango.query.expressions import VariableExpression class BaseFunctionExpression(Expression): @@ -291,7 +291,7 @@ def __init__(self, array, start, count=None): class UnionArrays(FunctionExpression, IterableExpression, ArrayFunctionMixin): name = "UNION" - def __init__(self, *arrays: Union[ListExpression, VariableExpression]): + def __init__(self, *arrays: Union[ListExpression, "VariableExpression"]): super().__init__(*arrays) diff --git a/pydango/query/operations.py b/pydango/query/operations.py index 964b9c6..0a4a856 100644 --- a/pydango/query/operations.py +++ b/pydango/query/operations.py @@ -16,29 +16,18 @@ from pydango.query.expressions import ( AssignmentExpression, CollectionExpression, - Expression, FieldExpression, IterableExpression, IteratorExpression, ListExpression, LiteralExpression, - LogicalExpression, ObjectExpression, QueryExpression, - ReturnableMixin, + RangeExpression, SortDirection, SortExpression, VariableExpression, ) -from pydango.query.options import ( - BaseModificationOptions, - CollectOptions, - LoopOptions, - RemoveOptions, - ReplaceOptions, - UpdateOptions, - UpsertOptions, -) from pydango.query.utils import Compilable if sys.version_info >= (3, 10): @@ -47,6 +36,16 @@ from typing_extensions import TypeAlias if TYPE_CHECKING: + from pydango.query.expressions import Expression, LogicalExpression, ReturnableMixin + from pydango.query.options import ( + BaseModificationOptions, + CollectOptions, + LoopOptions, + RemoveOptions, + ReplaceOptions, + UpdateOptions, + UpsertOptions, + ) from pydango.query.query import AQLQuery @@ -75,7 +74,7 @@ def __init__( in_: Optional[Union[list, IterableExpression]] = None, *, query_ref: "AQLQuery", - options: Optional[LoopOptions] = None, + options: Optional["LoopOptions"] = None, ): super().__init__(query_ref) self.options = options @@ -152,28 +151,6 @@ def __repr__(self): return _repr -class RangeExpression(IterableExpression): - def __init__(self, start, end): - super().__init__() - self.end = end - self.start = start - - def compile(self, query_ref: "AQLQuery"): - if isinstance(self.start, Expression): - start = self.start.compile(query_ref) - else: - start = self.start - if isinstance(self.end, Expression): - end = self.end.compile(query_ref) - else: - end = self.end - - return f"{start}..{end}" - - def __repr__(self): - return f"{self.start}..{self.end}" - - class TraversalDirection(str, Enum): OUTBOUND = "OUTBOUND" INBOUND = "INBOUND" @@ -193,7 +170,7 @@ def __init__( ], edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], start: Union[str, "LiteralExpression", VariableExpression, FieldExpression], - depth: Union[range, tuple[int, int], RangeExpression], + depth: Union[range, tuple[int, int], "RangeExpression"], direction: TraversalDirection, query_ref: "AQLQuery", ): @@ -254,7 +231,7 @@ class 
LetOperation(Operation): def __init__( self, variable: Union[str, VariableExpression], - expression: Expression, + expression: "Expression", *, query_ref: "AQLQuery", ): @@ -326,7 +303,7 @@ def __repr__(self): class ReturnOperation(Operation): - def __init__(self, return_expr: Union[dict, ReturnableMixin], query_ref: "AQLQuery", *, distinct=None): + def __init__(self, return_expr: Union[dict, "ReturnableMixin"], query_ref: "AQLQuery", *, distinct=None): super().__init__(query_ref=query_ref) if isinstance(return_expr, CollectionExpression): @@ -408,7 +385,7 @@ def __init__( expression: Union[str, dict, LiteralExpression, FieldExpression, VariableExpression, ObjectExpression], collection: Union[str, CollectionExpression], *, - options: Optional[RemoveOptions] = None, + options: Optional["RemoveOptions"] = None, query_ref: "AQLQuery", ): super().__init__(query_ref=query_ref) @@ -450,7 +427,7 @@ def __init__( obj: Union[ObjectExpression, dict], collection: Union[CollectionExpression, str], *, - options: Optional[BaseModificationOptions], + options: Optional["BaseModificationOptions"], query_ref, ): super().__init__(query_ref=query_ref) @@ -489,7 +466,7 @@ def __init__( obj: Union[ObjectExpression, dict], collection: Union[CollectionExpression, str], *, - options: Optional[UpdateOptions], + options: Optional["UpdateOptions"], query_ref, ): super().__init__(key, obj, collection, options=options, query_ref=query_ref) @@ -504,7 +481,7 @@ def __init__( obj: Union[ObjectExpression, dict], collection: Union[CollectionExpression, str], *, - options: Optional[ReplaceOptions], + options: Optional["ReplaceOptions"], query_ref, ): super().__init__(key, obj, collection, options=options, query_ref=query_ref) @@ -520,7 +497,7 @@ def __init__( insert: Union[dict, ObjectExpression, VariableExpression], *, update: Union[dict, ObjectExpression, VariableExpression], - options: Optional[UpsertOptions] = None, + options: Optional["UpsertOptions"] = None, ): ... @@ -533,7 +510,7 @@ def __init__( insert: Union[dict, ObjectExpression, VariableExpression], *, replace: Union[dict, ObjectExpression, VariableExpression], - options: Optional[UpsertOptions] = None, + options: Optional["UpsertOptions"] = None, ): ... 
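Editor's note, for context on the UPSERT overloads touched in the surrounding hunks: a minimal usage sketch (illustration only, not part of the patch) of the `AQLQuery.upsert()` builder whose signature is reworked in `pydango/query/query.py` below. The `users` collection and the document fields are hypothetical; the sketch exercises the UPDATE branch rather than REPLACE.

```python
# Illustrative only: collection name and document fields are hypothetical.
from pydango.query.query import AQLQuery

query = AQLQuery().upsert(
    {"email": "alice@example.com"},               # filter_: match criteria
    {"email": "alice@example.com", "visits": 1},  # insert: used when nothing matches
    "users",                                      # target collection
    update={"visits": 2},                         # the UPDATE branch; replace= selects the other overload
)
print(query.compile())  # expected to render an UPSERT ... INSERT ... UPDATE statement
```

Passing `replace=` instead of `update=` mirrors the second `UpsertOperation` constructor above and emits `UPSERT ... REPLACE` instead.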
@@ -546,7 +523,7 @@ def __init__( *, update: Union[dict, ObjectExpression, VariableExpression, None] = None, replace: Union[dict, ObjectExpression, VariableExpression, None] = None, - options: Optional[UpsertOptions] = None, + options: Optional["UpsertOptions"] = None, ): super().__init__(query_ref) self.options = options @@ -601,7 +578,7 @@ def __repr__(self): return _repr -AssignmentParam: TypeAlias = Union[AssignmentExpression, tuple[VariableExpression, Expression]] +AssignmentParam: TypeAlias = Union[AssignmentExpression, tuple[VariableExpression, "Expression"]] AssignmentParams: TypeAlias = Union[AssignmentParam, Sequence[AssignmentParam]] IntoParam = Union[ VariableExpression, @@ -618,7 +595,7 @@ def __init__( into: Union[VariableExpression, AssignmentParam, None] = None, keep: Optional[VariableExpression] = None, with_count_into: Optional[VariableExpression] = None, - options: Optional[CollectOptions] = None, + options: Optional["CollectOptions"] = None, query_ref: "AQLQuery", ): if not any((collect, aggregate, with_count_into)): diff --git a/pydango/query/query.py b/pydango/query/query.py index 13b82f2..c878baf 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -1,40 +1,24 @@ import json import logging import sys -from typing import Any, Dict, List, Optional, Sequence, Union, overload +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, overload from aioarango.database import Database from pydango.orm.encoders import jsonable_encoder from pydango.query.expressions import ( - BindableExpression, - CollectionExpression, - ConditionExpression, - Expression, - FieldExpression, In, - IterableExpression, - IteratorExpression, - LiteralExpression, - ObjectExpression, QueryExpression, - ReturnableMixin, SubQueryExpression, - VariableExpression, VectorSubQueryExpression, ) from pydango.query.operations import ( - AssignmentParam, - AssignmentParams, CollectOperation, FilterOperation, ForOperation, - ForParams, InsertOperation, LetOperation, LimitOperation, - Operation, - RangeExpression, RemoveOperation, ReplaceOperation, ReturnOperation, @@ -45,13 +29,35 @@ UpdateOperation, UpsertOperation, ) -from pydango.query.options import ( - CollectOptions, - RemoveOptions, - ReplaceOptions, - UpdateOptions, - UpsertOptions, -) + +if TYPE_CHECKING: + from pydango.query.expressions import ( + BindableExpression, + CollectionExpression, + ConditionExpression, + Expression, + FieldExpression, + IterableExpression, + IteratorExpression, + LiteralExpression, + ObjectExpression, + RangeExpression, + ReturnableMixin, + VariableExpression, + ) + from pydango.query.operations import ( + AssignmentParam, + AssignmentParams, + ForParams, + Operation, + ) + from pydango.query.options import ( + CollectOptions, + RemoveOptions, + ReplaceOptions, + UpdateOptions, + UpsertOptions, + ) if sys.version_info >= (3, 10): from typing import Self, TypeAlias @@ -63,10 +69,10 @@ JsonType: TypeAlias = Union[None, int, str, bool, List["JsonType"], Dict[str, "JsonType"]] TraverseIterators: TypeAlias = Union[ - IteratorExpression, - tuple[IteratorExpression], - tuple[IteratorExpression, IteratorExpression], - tuple[IteratorExpression, IteratorExpression, IteratorExpression], + "IteratorExpression", + tuple["IteratorExpression"], + tuple["IteratorExpression", "IteratorExpression"], + tuple["IteratorExpression", "IteratorExpression", "IteratorExpression"], ] @@ -83,7 +89,7 @@ def __init__(self, parent: Optional["AQLQuery"] = None): self._var_counter = 0 self._param_counter = 0 
self.parent: Optional[AQLQuery] = parent - self._ops: list[Operation] = [] + self._ops: list["Operation"] = [] self._compiled = "" self.__is_modification_query__ = False @@ -115,8 +121,10 @@ def _get_param_var(self): def for_( self, - collection_or_variable: ForParams, - in_: Optional[Union[IterableExpression, list, VariableExpression, list[VariableExpression], "AQLQuery"]] = None, + collection_or_variable: "ForParams", + in_: Optional[ + Union["IterableExpression", list, "VariableExpression", list["VariableExpression"], "AQLQuery"] + ] = None, ) -> Self: if self == in_: raise ValueError("is not possible to loop over the same query") @@ -131,9 +139,9 @@ def for_( def traverse( self, iterators: TraverseIterators, - edges: Union[str, CollectionExpression, Sequence[Union[str, CollectionExpression]]], - start: Union["LiteralExpression", VariableExpression, FieldExpression, str], - depth: Union[RangeExpression, range, tuple[int, int]], + edges: Union[str, "CollectionExpression", Sequence[Union[str, "CollectionExpression"]]], + start: Union["LiteralExpression", "VariableExpression", "FieldExpression", str], + depth: Union["RangeExpression", range, tuple[int, int]], direction: TraversalDirection, ) -> Self: self._ops.append( @@ -148,7 +156,7 @@ def traverse( ) return self - def filter(self, condition: ConditionExpression) -> "AQLQuery": + def filter(self, condition: "ConditionExpression") -> "AQLQuery": if isinstance(condition, In) and isinstance(condition.right, AQLQuery): condition.right.parent = self condition.right = VectorSubQueryExpression(condition.right) @@ -161,23 +169,23 @@ def sort(self, *sort_list: SortParams) -> "AQLQuery": return self @overload - def let(self, variable: VariableExpression, expression: Expression) -> "AQLQuery": + def let(self, variable: "VariableExpression", expression: "Expression") -> "AQLQuery": ... @overload - def let(self, variable: str, expression: Expression) -> "VariableExpression": + def let(self, variable: str, expression: "Expression") -> "VariableExpression": ... 
def let( - self, variable: Union[VariableExpression, str], expression: Expression - ) -> Union["AQLQuery", VariableExpression]: + self, variable: Union["VariableExpression", str], expression: "Expression" + ) -> Union["AQLQuery", "VariableExpression"]: let_operation = LetOperation(variable, expression, query_ref=self) # type: ignore[arg-type] self._ops.append(let_operation) if isinstance(variable, str): return let_operation.expression.variable return self - def return_(self, return_expr: Union[ReturnableMixin, dict]) -> Self: + def return_(self, return_expr: Union["ReturnableMixin", dict]) -> Self: if isinstance(return_expr, AQLQuery): return_expr.parent = self return_expr = SubQueryExpression(return_expr) @@ -198,7 +206,7 @@ def compile(self, *args, **kwargs) -> str: def bind_variable(self) -> str: return self._get_var_name() - def bind_parameter(self, parameter: BindableExpression, override_var_name: Optional[str] = None) -> str: + def bind_parameter(self, parameter: "BindableExpression", override_var_name: Optional[str] = None) -> str: if self.parent: return self.parent.bind_parameter(parameter) is_hashable = False @@ -227,7 +235,7 @@ def limit(self, limit) -> Self: return self def insert( - self, doc: Union[dict, ObjectExpression, VariableExpression], collection: Union[str, CollectionExpression] + self, doc: Union[dict, "ObjectExpression", "VariableExpression"], collection: Union[str, "CollectionExpression"] ) -> Self: self.__is_modification_query__ = True self._ops.append(InsertOperation(doc, collection, self)) # type: ignore[arg-type] @@ -235,10 +243,10 @@ def insert( def remove( self, - expression: Union[dict, LiteralExpression, FieldExpression, VariableExpression, ObjectExpression, str], - collection: Union[str, CollectionExpression], + expression: Union[dict, "LiteralExpression", "FieldExpression", "VariableExpression", "ObjectExpression", str], + collection: Union[str, "CollectionExpression"], *, - options: Optional[RemoveOptions] = None, + options: Optional["RemoveOptions"] = None, ) -> Self: self.__is_modification_query__ = True self._ops.append( @@ -246,7 +254,7 @@ def remove( ) # type: ignore[arg-type] return self - def update(self, key, doc, coll, *, options: Optional[UpdateOptions] = None) -> Self: + def update(self, key, doc, coll, *, options: Optional["UpdateOptions"] = None) -> Self: self.__is_modification_query__ = True self._ops.append( UpdateOperation(key, doc, coll, query_ref=self, options=options), @@ -255,11 +263,11 @@ def update(self, key, doc, coll, *, options: Optional[UpdateOptions] = None) -> def replace( self, - key: Union[str, dict, ObjectExpression], - doc: Union[ObjectExpression, dict], - collection: Union[CollectionExpression, str], + key: Union[str, dict, "ObjectExpression"], + doc: Union["ObjectExpression", dict], + collection: Union["CollectionExpression", str], *, - options: Optional[ReplaceOptions] = None, + options: Optional["ReplaceOptions"] = None, ) -> Self: self.__is_modification_query__ = True self._ops.append( @@ -270,36 +278,36 @@ def replace( @overload def upsert( self, - filter_: Union[dict, ObjectExpression, VariableExpression], - insert: Union[dict, ObjectExpression, VariableExpression], - collection: Union[str, CollectionExpression], + filter_: Union[dict, "ObjectExpression", "VariableExpression"], + insert: Union[dict, "ObjectExpression", "VariableExpression"], + collection: Union[str, "CollectionExpression"], *, - replace: Union[dict, ObjectExpression, VariableExpression], - options: Optional[UpsertOptions] = None, + replace: 
Union[dict, "ObjectExpression", "VariableExpression"], + options: Optional["UpsertOptions"] = None, ) -> Self: ... @overload def upsert( self, - filter_: Union[dict, ObjectExpression, VariableExpression], - insert: Union[dict, ObjectExpression, VariableExpression], - collection: Union[str, CollectionExpression], + filter_: Union[dict, "ObjectExpression", "VariableExpression"], + insert: Union[dict, "ObjectExpression", "VariableExpression"], + collection: Union[str, "CollectionExpression"], *, - update: Union[dict, ObjectExpression, VariableExpression], - options: Optional[UpsertOptions] = None, + update: Union[dict, "ObjectExpression", "VariableExpression"], + options: Optional["UpsertOptions"] = None, ) -> Self: ... def upsert( self, - filter_: Union[dict, ObjectExpression, VariableExpression], - insert: Union[dict, ObjectExpression, VariableExpression], - collection: Union[str, CollectionExpression], + filter_: Union[dict, "ObjectExpression", "VariableExpression"], + insert: Union[dict, "ObjectExpression", "VariableExpression"], + collection: Union[str, "CollectionExpression"], *, - update: Union[dict, ObjectExpression, VariableExpression, None] = None, - replace: Union[dict, ObjectExpression, VariableExpression, None] = None, - options: Optional[UpsertOptions] = None, + update: Union[dict, "ObjectExpression", "VariableExpression", None] = None, + replace: Union[dict, "ObjectExpression", "VariableExpression", None] = None, + options: Optional["UpsertOptions"] = None, ) -> Self: self.__is_modification_query__ = True @@ -321,10 +329,10 @@ def upsert( def collect( self, *, - collect: Optional[AssignmentParams] = None, - into: Optional[Union[VariableExpression, AssignmentParam]] = None, - keep: Optional[VariableExpression] = None, - options: Optional[CollectOptions] = None, + collect: Optional["AssignmentParams"] = None, + into: Optional[Union["VariableExpression", "AssignmentParam"]] = None, + keep: Optional["VariableExpression"] = None, + options: Optional["CollectOptions"] = None, ): ... @@ -332,8 +340,8 @@ def collect( def collect( self, *, - with_count_into: Optional[VariableExpression] = None, - options: Optional[CollectOptions] = None, + with_count_into: Optional["VariableExpression"] = None, + options: Optional["CollectOptions"] = None, ): ... @@ -341,9 +349,9 @@ def collect( def collect( self, *, - collect: Optional[AssignmentParams] = None, - with_count_into: Optional[VariableExpression] = None, - options: Optional[CollectOptions] = None, + collect: Optional["AssignmentParams"] = None, + with_count_into: Optional["VariableExpression"] = None, + options: Optional["CollectOptions"] = None, ): ... @@ -351,9 +359,9 @@ def collect( def collect( self, *, - aggregate: Optional[AssignmentParams] = None, - into: Optional[Union[VariableExpression, AssignmentParam]] = None, - options: Optional[CollectOptions] = None, + aggregate: Optional["AssignmentParams"] = None, + into: Optional[Union["VariableExpression", "AssignmentParam"]] = None, + options: Optional["CollectOptions"] = None, ): ... @@ -361,22 +369,22 @@ def collect( def collect( self, *, - collect: Optional[AssignmentParams] = None, - aggregate: Optional[AssignmentParams] = None, - into: Optional[Union[VariableExpression, AssignmentParam]] = None, - options: Optional[CollectOptions] = None, + collect: Optional["AssignmentParams"] = None, + aggregate: Optional["AssignmentParams"] = None, + into: Optional[Union["VariableExpression", "AssignmentParam"]] = None, + options: Optional["CollectOptions"] = None, ): ... 
def collect( self, *, - collect: Optional[AssignmentParams] = None, - aggregate: Optional[AssignmentParams] = None, - into: Optional[Union[VariableExpression, AssignmentParam]] = None, - keep: Optional[VariableExpression] = None, - with_count_into: Optional[VariableExpression] = None, - options: Optional[CollectOptions] = None, + collect: Optional["AssignmentParams"] = None, + aggregate: Optional["AssignmentParams"] = None, + into: Optional[Union["VariableExpression", "AssignmentParam"]] = None, + keep: Optional["VariableExpression"] = None, + with_count_into: Optional["VariableExpression"] = None, + options: Optional["CollectOptions"] = None, ) -> Self: self._ops.append( CollectOperation( diff --git a/pydango/utils.py b/pydango/utils.py deleted file mode 100644 index e69de29..0000000 diff --git a/pyproject.toml b/pyproject.toml index bb94c8e..1430bfd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ name = "pydangorm" version = "0.1.0" description = "pydantic based ArangoDB ODM" -authors = ["nadobando"] +authors = ["nadobando <7695172+nadobando@users.noreply.github.com>"] readme = "README.md" [tool.poetry.dependencies] @@ -74,9 +74,9 @@ profile = "black" [tool.pytest.ini_options] addopts = "-ra" asyncio_mode = "auto" -#testpaths = [ -# "tests", -#] +testpaths = [ + "tests", +] [tool.bandit.assert_used] skips = ["tests/utils*.py", '**/test_*.py', '**/test_*.py'] @@ -106,3 +106,22 @@ module = [ "indexed" ] ignore_missing_imports = true + + +[tool.semantic_release] +match = "main" +prerelease = false +version_variables = [ + "pydango/__init__.py:__version__", +] +version_toml = [ + "pyproject.toml:tool.poetry.version", +] +[tool.semantic_release.changelog] +exclude_commit_patterns = [ + "^ci:.*", + "^(?!feat:|fix:|perf:|refactor:).*$" +] + +[tool.semantic_release.publish] +upload_to_vcs_release = false diff --git a/tests/conftest.py b/tests/conftest.py index 33bef2d..8eb1546 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -84,4 +84,4 @@ async def database(client: ArangoClient) -> AsyncFixture[StandardDatabase]: db = await get_or_create_db(client, "pydango") yield db - await (await client.db("_system")).delete_database("pydango") + # await (await client.db("_system")).delete_database("pydango") diff --git a/tests/session/test_social_network.py b/tests/session/test_social_network.py index 1f3f2d5..b8bd319 100644 --- a/tests/session/test_social_network.py +++ b/tests/session/test_social_network.py @@ -254,6 +254,7 @@ def expected_user_depth2(user: VertexModel): @pytest.mark.asyncio async def test_save(matcher: Matcher, session: PydangoSession, request: FixtureRequest, user: User): await session.save(user) + print(user.id) request.config.cache.set("user_key", user.key) # type: ignore[union-attr] matcher.assert_declarative_object(user.dict(by_alias=True, include_edges=True), expected_user_depth2(user)) @@ -271,6 +272,27 @@ async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRe ) +@pytest.mark.run(order=2) +async def test_get_lazy_proxy_fetch(matcher: Matcher, session: PydangoSession, request: FixtureRequest): + _id = request.config.cache.get("user_key", None) # type: ignore[union-attr] + result = await session.get(User, _id, fetch_edges=True, depth=range(1, 1)) + assert result + + await result.posts[0].comments.fetch() # type: ignore + + expected_user = expected_user_depth1(result) + + expected_posts_comments = [{"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"}] + 
matcher.assert_declarative_object(result.dict(by_alias=True)["posts"][0]["comments"], expected_posts_comments) + if result.posts: + result.posts[0].comments = None + matcher.assert_declarative_object( + result.dict(by_alias=True, include_edges=True), + expected_user, + check_order=False, + ) + + @pytest.mark.run(order=2) async def test_get2(matcher: Matcher, session: PydangoSession, request: FixtureRequest): _id = request.config.cache.get("user_key", None) # type: ignore[union-attr] From 6d3213f9bc2e8b3f94825ae0a776245170e2bbbb Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Mon, 25 Sep 2023 20:44:40 +0300 Subject: [PATCH 10/19] no message --- .pre-commit-config.yaml | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 28c17c7..4fd2f6a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,4 @@ +--- fail_fast: false default_stages: - commit @@ -48,12 +49,12 @@ repos: - id: python-check-blanket-noqa - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: 'v0.0.290' + rev: v0.0.290 hooks: - id: ruff args: - --config - - ./pyproject.toml # args: + - ./pyproject.toml - --fix - repo: https://github.com/PyCQA/bandit @@ -84,11 +85,7 @@ repos: - pytest-asyncio~=0.21.0 -# - repo: https://github.com/jendrikseipp/vulture -# rev: v2.7 -# hooks: -# - id: vulture -# pass_filenames: true + - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 @@ -107,10 +104,23 @@ repos: - repo: https://github.com/python-poetry/poetry - rev: '1.4.0' + rev: 1.4.0 hooks: - id: poetry-check - id: poetry-lock args: - --no-update # - id: poetry-export + + + - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt + rev: 0.2.3 + hooks: + - id: yamlfmt + args: + - --offset + - '2' + - --mapping + - '2' + - --sequence + - '4' From 78e582f560c92ca9f859618622c122e97e5884f4 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Mon, 25 Sep 2023 20:51:56 +0300 Subject: [PATCH 11/19] fixing ci --- .pre-commit-config.yaml | 2 +- poetry.lock | 76 ++++++++++++++++++++--------------------- poetry.toml | 5 --- pyproject.toml | 2 +- 4 files changed, 40 insertions(+), 45 deletions(-) delete mode 100644 poetry.toml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4fd2f6a..0693237 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -77,8 +77,8 @@ repos: args: - --show-traceback additional_dependencies: - - pydantic==1.10.10 - mypy-extensions + - pydantic==1.10.12 - pytest~=7.3.1 - httpx~=0.18.2 - pydiction~=0.1.0 diff --git a/poetry.lock b/poetry.lock index b367f1c..8b0fff9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -630,48 +630,48 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "1.10.10" +version = "1.10.12" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adad1ee4ab9888f12dac2529276704e719efcf472e38df7813f5284db699b4ec"}, - {file = "pydantic-1.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a7db03339893feef2092ff7b1afc9497beed15ebd4af84c3042a74abce02d48"}, - {file = "pydantic-1.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b3714b97ff84b2689654851c2426389bcabfac9080617bcf4306c69db606f6"}, - {file = 
"pydantic-1.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edfdf0a5abc5c9bf2052ebaec20e67abd52e92d257e4f2d30e02c354ed3e6030"}, - {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a3b30fd255eeeb63caa9483502ba96b7795ce5bf895c6a179b3d909d9f53a6"}, - {file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db4c7f7e60ca6f7d6c1785070f3e5771fcb9b2d88546e334d2f2c3934d949028"}, - {file = "pydantic-1.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:a2d5be50ac4a0976817144c7d653e34df2f9436d15555189f5b6f61161d64183"}, - {file = "pydantic-1.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:566a04ba755e8f701b074ffb134ddb4d429f75d5dced3fbd829a527aafe74c71"}, - {file = "pydantic-1.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f79db3652ed743309f116ba863dae0c974a41b688242482638b892246b7db21d"}, - {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62376890b819bebe3c717a9ac841a532988372b7e600e76f75c9f7c128219d5"}, - {file = "pydantic-1.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4870f13a4fafd5bc3e93cff3169222534fad867918b188e83ee0496452978437"}, - {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:990027e77cda6072a566e433b6962ca3b96b4f3ae8bd54748e9d62a58284d9d7"}, - {file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c40964596809eb616d94f9c7944511f620a1103d63d5510440ed2908fc410af"}, - {file = "pydantic-1.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:ea9eebc2ebcba3717e77cdeee3f6203ffc0e78db5f7482c68b1293e8cc156e5e"}, - {file = "pydantic-1.10.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:762aa598f79b4cac2f275d13336b2dd8662febee2a9c450a49a2ab3bec4b385f"}, - {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dab5219659f95e357d98d70577b361383057fb4414cfdb587014a5f5c595f7b"}, - {file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3d4ee957a727ccb5a36f1b0a6dbd9fad5dedd2a41eada99a8df55c12896e18d"}, - {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b69f9138dec566962ec65623c9d57bee44412d2fc71065a5f3ebb3820bdeee96"}, - {file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7aa75d1bd9cc275cf9782f50f60cddaf74cbaae19b6ada2a28e737edac420312"}, - {file = "pydantic-1.10.10-cp37-cp37m-win_amd64.whl", hash = "sha256:9f62a727f5c590c78c2d12fda302d1895141b767c6488fe623098f8792255fe5"}, - {file = "pydantic-1.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aac218feb4af73db8417ca7518fb3bade4534fcca6e3fb00f84966811dd94450"}, - {file = "pydantic-1.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88546dc10a40b5b52cae87d64666787aeb2878f9a9b37825aedc2f362e7ae1da"}, - {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c41bbaae89e32fc582448e71974de738c055aef5ab474fb25692981a08df808a"}, - {file = "pydantic-1.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b71bd504d1573b0b722ae536e8ffb796bedeef978979d076bf206e77dcc55a5"}, - {file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e088e3865a2270ecbc369924cd7d9fbc565667d9158e7f304e4097ebb9cf98dd"}, - {file = 
"pydantic-1.10.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3403a090db45d4027d2344859d86eb797484dfda0706cf87af79ace6a35274ef"}, - {file = "pydantic-1.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:e0014e29637125f4997c174dd6167407162d7af0da73414a9340461ea8573252"}, - {file = "pydantic-1.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9965e49c6905840e526e5429b09e4c154355b6ecc0a2f05492eda2928190311d"}, - {file = "pydantic-1.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:748d10ab6089c5d196e1c8be9de48274f71457b01e59736f7a09c9dc34f51887"}, - {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86936c383f7c38fd26d35107eb669c85d8f46dfceae873264d9bab46fe1c7dde"}, - {file = "pydantic-1.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a26841be620309a9697f5b1ffc47dce74909e350c5315ccdac7a853484d468a"}, - {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:409b810f387610cc7405ab2fa6f62bdf7ea485311845a242ebc0bd0496e7e5ac"}, - {file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ce937a2a2c020bcad1c9fde02892392a1123de6dda906ddba62bfe8f3e5989a2"}, - {file = "pydantic-1.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:37ebddef68370e6f26243acc94de56d291e01227a67b2ace26ea3543cf53dd5f"}, - {file = "pydantic-1.10.10-py3-none-any.whl", hash = "sha256:a5939ec826f7faec434e2d406ff5e4eaf1716eb1f247d68cd3d0b3612f7b4c8a"}, - {file = "pydantic-1.10.10.tar.gz", hash = "sha256:3b8d5bd97886f9eb59260594207c9f57dce14a6f869c6ceea90188715d29921a"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = 
"pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, + {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + 
{file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, ] [package.dependencies] @@ -979,4 +979,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "d7f5ad86c1566899c8641217554daa6f668ff060781525dcc3c3422650080105" +content-hash = "3d4ff30a553563a4f2e16385e00a09293fa520a68a71c27a5db8bf5446963cb7" diff --git a/poetry.toml b/poetry.toml deleted file mode 100644 index c0c666b..0000000 --- a/poetry.toml +++ /dev/null @@ -1,5 +0,0 @@ -[installer] -no-binary = ["pydantic"] - -#[installer] -#no-binary = [":all:"] diff --git a/pyproject.toml b/pyproject.toml index 1430bfd..010ddce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" [tool.poetry.dependencies] python = ">=3.9,<4.0" aioarango = "^1.0.0" -pydantic = "==1.10.10" +pydantic = "==1.10.12" urllib3 = "==1.26.15" indexed = "^1.3.0" From f58d0730f4c4f3e533de7de0dd8e19c8430c6704 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Mon, 25 Sep 2023 20:57:26 +0300 Subject: [PATCH 12/19] fixing ci --- poetry.lock | 24 +++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 8b0fff9..bb73580 100644 --- a/poetry.lock +++ b/poetry.lock @@ -295,6 +295,9 @@ files = [ {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + [package.extras] toml = ["tomli"] @@ -753,6 +756,25 @@ pytest = ">=7.0.0" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -979,4 +1001,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "3d4ff30a553563a4f2e16385e00a09293fa520a68a71c27a5db8bf5446963cb7" +content-hash = "f1b548490515af40ab0863aa00d025a9d2118d0f7918bc71b7483f3542cc6dad" diff --git a/pyproject.toml b/pyproject.toml index 010ddce..48bdaf6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,7 @@ pytest = "^7.3.1" black = "^23.3.0" pre-commit = "^3.3.1" pytest-asyncio = "^0.21.0" +pytest-cov = "^4.1.0" coverage = "^7.2.5" isort = "^5.12.0" mypy = "^1.5.1" From 106c122ecc688f91d7b551b2089f7d1c9f5972e7 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Mon, 25 Sep 2023 21:06:54 +0300 Subject: [PATCH 13/19] fixing ci --- .github/workflows/ci.yml | 10 ++++++++++ .pre-commit-config.yaml | 3 +-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b8e2664..1015d58 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,6 +31,16 @@ jobs: test: name: Tests runs-on: ubuntu-latest + services: + arango: + image: arangodb + ports: + - 8529:8529 + env: + ARANGO_NO_AUTH: '1' + + + outputs: release-id: ${{ steps.generate-release-id.outputs.release-id }} steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0693237..bb2d95b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -68,14 +68,13 @@ repos: - pyproject.toml - --quiet additional_dependencies: + - bandit[toml] - toml - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.5.1 hooks: - id: mypy - args: - - --show-traceback additional_dependencies: - mypy-extensions - pydantic==1.10.12 From 38fc007a981311e883a78e78cf4fc9f5b6e95f45 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Mon, 25 Sep 2023 21:13:50 +0300 Subject: [PATCH 14/19] fixing ci --- .github/workflows/ci.yml | 2 +- pyproject.toml | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1015d58..e9272c0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,7 +81,7 @@ jobs: - run: poetry install --no-interaction - name: test run: | - poetry run pytest --cov=pydiction --cov-report=xml:coverage.xml --junitxml=test-results/test-results.xml tests + poetry run pytest --cov=pydango --cov-report=xml:coverage.xml --junitxml=test-results/test-results.xml tests - name: Test Report diff --git a/pyproject.toml b/pyproject.toml index 48bdaf6..15026f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,9 +75,7 @@ profile = "black" [tool.pytest.ini_options] addopts = "-ra" asyncio_mode = "auto" -testpaths = [ - "tests", -] + [tool.bandit.assert_used] skips = ["tests/utils*.py", '**/test_*.py', '**/test_*.py'] From 0f8055d889568138fc9c3c4f57c65ed4086cbdb8 Mon 
Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Mon, 25 Sep 2023 23:56:20 +0300 Subject: [PATCH 15/19] fixing ci --- .github/workflows/ci.yml | 4 ++++ pydango/orm/query.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e9272c0..58689e8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,6 +29,7 @@ jobs: SKIP: no-commit-to-branch test: + permissions: write-all name: Tests runs-on: ubuntu-latest services: @@ -78,8 +79,11 @@ jobs: # ensures this only runs on a cache miss. - run: poetry install --no-interaction --no-root if: steps.cache-deps.outputs.cache-hit != 'true' + - run: poetry install --no-interaction + - name: test + run: | poetry run pytest --cov=pydango --cov-report=xml:coverage.xml --junitxml=test-results/test-results.xml tests diff --git a/pydango/orm/query.py b/pydango/orm/query.py index 5b6f265..33b7c38 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -27,7 +27,7 @@ from pydango.query.query import AQLQuery, TraverseIterators if sys.version_info >= (3, 10): - from typing import Self + from typing import Self, TypeAlias else: from typing_extensions import Self, TypeAlias From 345e70649ecb923141eb1c89f6b7ed7d9e9d5611 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Tue, 26 Sep 2023 00:00:20 +0300 Subject: [PATCH 16/19] fixing ci --- pydango/query/query.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pydango/query/query.py b/pydango/query/query.py index c878baf..17d39f5 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -59,10 +59,10 @@ UpsertOptions, ) -if sys.version_info >= (3, 10): - from typing import Self, TypeAlias -else: +if sys.version_info < (3, 10): from typing_extensions import Self, TypeAlias +else: + from typing import Self, TypeAlias logger = logging.getLogger(__name__) From b6174b25eed14dd7741e24d558b0bfcf822ac061 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Tue, 26 Sep 2023 00:17:53 +0300 Subject: [PATCH 17/19] fixing ci --- .pre-commit-config.yaml | 14 +++++++------- pydango/connection/session.py | 2 +- pydango/connection/utils.py | 6 ++---- pydango/orm/models/base.py | 6 ++---- pydango/orm/query.py | 36 ++++++++++++----------------------- pydango/query/operations.py | 6 ++---- pydango/query/query.py | 27 +++++++++----------------- pydango/query/utils.py | 3 +-- 8 files changed, 36 insertions(+), 64 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bb2d95b..3bc1612 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ default_stages: - commit repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.12.0 + rev: v3.13.0 hooks: - id: pyupgrade args: @@ -19,7 +19,7 @@ repos: - repo: https://github.com/psf/black-pre-commit-mirror - rev: 23.7.0 + rev: 23.9.1 hooks: - id: black args: @@ -37,10 +37,10 @@ repos: - --remove-duplicate-keys - --remove-unused-variables - - repo: https://github.com/pre-commit/mirrors-autopep8 - rev: v2.0.4 - hooks: - - id: autopep8 +# - repo: https://github.com/pre-commit/mirrors-autopep8 +# rev: v2.0.4 +# hooks: +# - id: autopep8 - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.10.0 @@ -49,7 +49,7 @@ repos: - id: python-check-blanket-noqa - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.0.290 + rev: v0.0.291 hooks: - id: ruff args: diff --git 
a/pydango/connection/session.py b/pydango/connection/session.py index 1736716..6c7426f 100644 --- a/pydango/connection/session.py +++ b/pydango/connection/session.py @@ -17,9 +17,9 @@ get_origin, ) -from aioarango import AQLQueryExecuteError from aioarango.collection import StandardCollection from aioarango.database import StandardDatabase +from aioarango.exceptions import AQLQueryExecuteError from indexed import IndexedOrderedDict # type: ignore[attr-defined] from pydantic import BaseModel diff --git a/pydango/connection/utils.py b/pydango/connection/utils.py index 5eb6a88..a9a7137 100644 --- a/pydango/connection/utils.py +++ b/pydango/connection/utils.py @@ -13,13 +13,11 @@ @overload async def get_or_create_collection( db: "StandardDatabase", model: Type["ArangoModel"], *, edge=None -) -> "StandardCollection": - ... +) -> "StandardCollection": ... @overload -async def get_or_create_collection(db: "StandardDatabase", model: str, *, edge=None) -> "StandardCollection": - ... +async def get_or_create_collection(db: "StandardDatabase", model: str, *, edge=None) -> "StandardCollection": ... async def get_or_create_collection( diff --git a/pydango/orm/models/base.py b/pydango/orm/models/base.py index ea902c6..074d9dd 100644 --- a/pydango/orm/models/base.py +++ b/pydango/orm/models/base.py @@ -344,8 +344,7 @@ class Config(BaseConfig): # getter_dict = dict allow_population_by_field_name = True - class Collection(CollectionConfig): - ... + class Collection(CollectionConfig): ... def __init__(__pydantic_self__, **data: Any): super().__init__(**data) @@ -439,8 +438,7 @@ def update_forward_refs(cls, **localns: Any) -> None: # cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs @abstractmethod - def save_dict(self) -> "DictStrAny": - ... + def save_dict(self) -> "DictStrAny": ... class Aliased(Generic[ArangoModel]): diff --git a/pydango/orm/query.py b/pydango/orm/query.py index 33b7c38..cf41b45 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -153,14 +153,12 @@ def sort(self, *sort_list: "SortParams") -> Self: # noinspection PyMethodOverriding @overload - def insert(self, doc: "ArangoModel") -> Self: - ... + def insert(self, doc: "ArangoModel") -> Self: ... @overload def insert( self, doc: Union[dict, "ObjectExpression", "VariableExpression"], collection: Union[str, CollectionExpression] - ) -> Self: - ... + ) -> Self: ... def insert( self, @@ -177,8 +175,7 @@ def insert( @overload def remove( # noqa: PyMethodOverriding self, expression: BaseArangoModel, *, options: Optional[RemoveOptions] = None - ): - ... + ): ... @overload def remove( @@ -187,8 +184,7 @@ def remove( collection: Union[str, CollectionExpression], *, options: Optional[RemoveOptions] = None, - ): - ... + ): ... def remove( self, @@ -215,8 +211,7 @@ def update(self, key, doc, *, options: Optional[UpdateOptions] = None) -> Self: ... @overload - def update(self, key, doc, coll, *, options: Optional[UpdateOptions] = None) -> Self: - ... + def update(self, key, doc, coll, *, options: Optional[UpdateOptions] = None) -> Self: ... def update(self, key, doc, coll=None, *, options: Optional[UpdateOptions] = None) -> Self: if isinstance(key, BaseArangoModel): @@ -234,8 +229,7 @@ def replace( # noqa: PyMethodOverriding doc: BaseArangoModel, *, options: Optional[ReplaceOptions] = None, - ) -> Self: - ... + ) -> Self: ... 
@overload def replace( # noqa: PyMethodOverriding @@ -244,8 +238,7 @@ def replace( # noqa: PyMethodOverriding doc: Union[dict, BaseArangoModel], *, options: Optional[ReplaceOptions] = None, - ) -> Self: - ... + ) -> Self: ... @overload def replace( @@ -255,8 +248,7 @@ def replace( collection: Union[str, CollectionExpression], *, options: Optional[ReplaceOptions] = None, - ) -> Self: - ... + ) -> Self: ... def replace( self, @@ -288,8 +280,7 @@ def upsert( # noqa: PyMethodOverriding *, replace: Union[dict, BaseModel, "ObjectExpression", BaseArangoModel, "VariableExpression"], options: Optional[UpsertOptions] = None, - ) -> Self: - ... + ) -> Self: ... @overload def upsert( # noqa: PyMethodOverriding @@ -299,8 +290,7 @@ def upsert( # noqa: PyMethodOverriding *, update: Union[dict, BaseModel, "ObjectExpression", BaseArangoModel, "VariableExpression"], options: Optional[UpsertOptions] = None, - ) -> Self: - ... + ) -> Self: ... @overload def upsert( # noqa: PyMethodOverriding @@ -311,8 +301,7 @@ def upsert( # noqa: PyMethodOverriding *, replace: Union[dict, BaseModel, "ObjectExpression", "VariableExpression"], options: Optional[UpsertOptions] = None, - ) -> Self: - ... + ) -> Self: ... @overload def upsert( # noqa: PyMethodOverriding @@ -323,8 +312,7 @@ def upsert( # noqa: PyMethodOverriding *, update: Union[dict, BaseModel, "ObjectExpression", "VariableExpression"], options: Optional[UpsertOptions] = None, - ) -> Self: - ... + ) -> Self: ... def upsert( self, diff --git a/pydango/query/operations.py b/pydango/query/operations.py index 0a4a856..d96ece3 100644 --- a/pydango/query/operations.py +++ b/pydango/query/operations.py @@ -498,8 +498,7 @@ def __init__( *, update: Union[dict, ObjectExpression, VariableExpression], options: Optional["UpsertOptions"] = None, - ): - ... + ): ... @overload def __init__( @@ -511,8 +510,7 @@ def __init__( *, replace: Union[dict, ObjectExpression, VariableExpression], options: Optional["UpsertOptions"] = None, - ): - ... + ): ... def __init__( self, diff --git a/pydango/query/query.py b/pydango/query/query.py index 17d39f5..5682de9 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -169,12 +169,10 @@ def sort(self, *sort_list: SortParams) -> "AQLQuery": return self @overload - def let(self, variable: "VariableExpression", expression: "Expression") -> "AQLQuery": - ... + def let(self, variable: "VariableExpression", expression: "Expression") -> "AQLQuery": ... @overload - def let(self, variable: str, expression: "Expression") -> "VariableExpression": - ... + def let(self, variable: str, expression: "Expression") -> "VariableExpression": ... def let( self, variable: Union["VariableExpression", str], expression: "Expression" @@ -284,8 +282,7 @@ def upsert( *, replace: Union[dict, "ObjectExpression", "VariableExpression"], options: Optional["UpsertOptions"] = None, - ) -> Self: - ... + ) -> Self: ... @overload def upsert( @@ -296,8 +293,7 @@ def upsert( *, update: Union[dict, "ObjectExpression", "VariableExpression"], options: Optional["UpsertOptions"] = None, - ) -> Self: - ... + ) -> Self: ... def upsert( self, @@ -333,8 +329,7 @@ def collect( into: Optional[Union["VariableExpression", "AssignmentParam"]] = None, keep: Optional["VariableExpression"] = None, options: Optional["CollectOptions"] = None, - ): - ... + ): ... @overload def collect( @@ -342,8 +337,7 @@ def collect( *, with_count_into: Optional["VariableExpression"] = None, options: Optional["CollectOptions"] = None, - ): - ... + ): ... 
@overload def collect( @@ -352,8 +346,7 @@ def collect( collect: Optional["AssignmentParams"] = None, with_count_into: Optional["VariableExpression"] = None, options: Optional["CollectOptions"] = None, - ): - ... + ): ... @overload def collect( @@ -362,8 +355,7 @@ def collect( aggregate: Optional["AssignmentParams"] = None, into: Optional[Union["VariableExpression", "AssignmentParam"]] = None, options: Optional["CollectOptions"] = None, - ): - ... + ): ... @overload def collect( @@ -373,8 +365,7 @@ def collect( aggregate: Optional["AssignmentParams"] = None, into: Optional[Union["VariableExpression", "AssignmentParam"]] = None, options: Optional["CollectOptions"] = None, - ): - ... + ): ... def collect( self, diff --git a/pydango/query/utils.py b/pydango/query/utils.py index f53fb2d..948ba96 100644 --- a/pydango/query/utils.py +++ b/pydango/query/utils.py @@ -5,8 +5,7 @@ class Compilable(Protocol): - def compile(self, *args, **kwargs) -> Union[str, None]: - ... + def compile(self, *args, **kwargs) -> Union[str, None]: ... T = TypeVar("T") From bc018b1a27ca138a20d6effa5d7667d03412ffcf Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Tue, 26 Sep 2023 00:20:18 +0300 Subject: [PATCH 18/19] fixing ci --- pydango/index.py | 3 +-- pydango/orm/models/vertex.py | 3 +-- pydango/query/expressions.py | 6 ++---- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/pydango/index.py b/pydango/index.py index d21b58d..71ac07c 100644 --- a/pydango/index.py +++ b/pydango/index.py @@ -15,8 +15,7 @@ @dataclass() -class Index: - ... +class Index: ... @dataclass() diff --git a/pydango/orm/models/vertex.py b/pydango/orm/models/vertex.py index 098edfc..2f39b18 100644 --- a/pydango/orm/models/vertex.py +++ b/pydango/orm/models/vertex.py @@ -118,8 +118,7 @@ class VertexModel(BaseArangoModel, Generic[TEdges], metaclass=VertexMeta): edges: TEdges __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] = {} - class Collection(VertexCollectionConfig): - ... + class Collection(VertexCollectionConfig): ... def __init__(self, **data: Any): if EDGES in data: diff --git a/pydango/query/expressions.py b/pydango/query/expressions.py index 7d4a18b..c0c042e 100644 --- a/pydango/query/expressions.py +++ b/pydango/query/expressions.py @@ -372,8 +372,7 @@ class BaseArithmeticExpression(Expression, ReturnableMixin, ABC): """ -class UnaryArithmeticExpression(UnaryExpression, BaseArithmeticExpression): - ... +class UnaryArithmeticExpression(UnaryExpression, BaseArithmeticExpression): ... class ArithmeticExpression(BinaryExpression, BaseArithmeticExpression): @@ -418,8 +417,7 @@ def compile(self, query_ref) -> str: return f"({self.query.compile(query_ref)})" -class ScalarSubQuery(SubQueryExpression): - ... +class ScalarSubQuery(SubQueryExpression): ... 
class VectorSubQueryExpression(SubQueryExpression, IterableExpression): From 8050d88b79c4711fbcf7514a4daf59d044c3ca96 Mon Sep 17 00:00:00 2001 From: nadobando <7695172+nadobando@users.noreply.github.com> Date: Tue, 26 Sep 2023 00:25:47 +0300 Subject: [PATCH 19/19] fixing ci --- pydango/orm/query.py | 2 +- pydango/query/query.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pydango/orm/query.py b/pydango/orm/query.py index cf41b45..5158ea8 100644 --- a/pydango/orm/query.py +++ b/pydango/orm/query.py @@ -26,7 +26,7 @@ ) from pydango.query.query import AQLQuery, TraverseIterators -if sys.version_info >= (3, 10): +if sys.version_info >= (3, 11): from typing import Self, TypeAlias else: from typing_extensions import Self, TypeAlias diff --git a/pydango/query/query.py b/pydango/query/query.py index 5682de9..d1a6150 100644 --- a/pydango/query/query.py +++ b/pydango/query/query.py @@ -59,10 +59,10 @@ UpsertOptions, ) -if sys.version_info < (3, 10): - from typing_extensions import Self, TypeAlias -else: +if sys.version_info >= (3, 11): from typing import Self, TypeAlias +else: + from typing_extensions import Self, TypeAlias logger = logging.getLogger(__name__)
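Note on the final typing-import change above: `typing.Self` was only added to the standard library in Python 3.11 (while `typing.TypeAlias` arrived in 3.10), so gating both imports on `sys.version_info >= (3, 11)` and otherwise falling back to `typing_extensions` keeps the modules importable on the 3.9/3.10 interpreters declared in pyproject.toml. The following is a minimal illustrative sketch of the pattern, not code from the patched modules; the `QueryRef` alias and `Builder` class are hypothetical and it assumes `typing_extensions` is installed on pre-3.11 interpreters:

    import sys

    if sys.version_info >= (3, 11):
        # Both names are in the standard library from 3.11 onward.
        from typing import Self, TypeAlias
    else:
        # On 3.9/3.10 fall back to the typing_extensions backport.
        from typing_extensions import Self, TypeAlias

    QueryRef: TypeAlias = str

    class Builder:
        def reset(self) -> Self:
            # Returning Self keeps the subclass type when chaining calls.
            return self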