diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index aa9323ff..a135735e 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -19,7 +19,7 @@ jobs: - name: Set up Python 🐍 uses: actions/setup-python@v3 with: - python-version: '3.11' + python-version: '3.12' - name: Install Hatch 🐣 run: pip install --upgrade pip setuptools wheel twine "hatch==1.7.0" diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index ea05e1f4..ac897ffd 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -30,6 +30,7 @@ jobs: - "3.9" - "3.10" - "3.11" + - "3.12" steps: - uses: actions/checkout@v3 @@ -39,7 +40,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install poetry run: | - python -m pip install --upgrade pip poetry==1.8.2 pre-commit + python -m pip install --upgrade pip poetry==1.8.4 pre-commit poetry config virtualenvs.create false --local - name: Install dependencies run: poetry install --all-extras @@ -55,6 +56,7 @@ jobs: - "3.9" - "3.10" - "3.11" + - "3.12" db-url: - "sqlite+aiosqlite:///./db.sqlite3" - "postgresql+asyncpg://user:passwd@localhost:5432/app" @@ -87,7 +89,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install poetry run: | - python -m pip install --upgrade pip poetry==1.8.2 pre-commit + python -m pip install --upgrade pip poetry==1.8.4 pre-commit poetry config virtualenvs.create false --local - name: Install dependencies run: poetry install --all-extras diff --git a/.gitignore b/.gitignore index 2c439271..abd11916 100644 --- a/.gitignore +++ b/.gitignore @@ -160,9 +160,6 @@ cython_debug/ .idea/ /.python-version -/examples/api_for_tortoise_orm/db.sqlite3 -/examples/api_for_tortoise_orm/db.sqlite3-shm -/examples/api_for_tortoise_orm/db.sqlite3-wal /db.sqlite3 /db.sqlite3-shm /db.sqlite3-wal diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f6090575..334c86fc 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,20 @@ repos: + - repo: https://github.com/pre-commit/mirrors-mypy + rev: 'bbc3dc1' + hooks: + - id: mypy + args: + - --check-untyped-defs + - --ignore-missing-imports + - --install-types + - --non-interactive + - --scripts-are-modules + - --warn-unused-ignores + stages: + - manual + - repo: https://github.com/pre-commit/pre-commit-hooks - rev: "v3.2.0" + rev: "v4.1.0" hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -8,14 +22,16 @@ repos: - id: check-added-large-files - id: mixed-line-ending - id: requirements-txt-fixer + - id: pretty-format-json + exclude: "docs/" - repo: https://github.com/psf/black - rev: "23.3.0" + rev: "25.1.0" hooks: - id: black - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.1.8" + rev: "v0.9.4" hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix, --unsafe-fixes] diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 51738072..42c3bb5a 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -8,7 +8,7 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "3.11" + python: "3.12" # You can also specify other tool versions: # nodejs: "20" # rust: "1.70" diff --git a/README.md b/README.md index ea43c69a..b25bef75 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,7 @@ [![📖 Docs (gh-pages)](https://github.com/mts-ai/FastAPI-JSONAPI/actions/workflows/documentation.yaml/badge.svg)](https://mts-ai.github.io/FastAPI-JSONAPI/) + # FastAPI-JSONAPI FastAPI-JSONAPI is a FastAPI extension for building REST APIs. 
@@ -30,177 +31,159 @@ pip install FastAPI-JSONAPI Create a test.py file and copy the following code into it ```python +import sys +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager from pathlib import Path -from typing import Any, ClassVar, Dict +from typing import Any, ClassVar, Optional +from typing import Union import uvicorn -from fastapi import APIRouter, Depends, FastAPI -from sqlalchemy import Column, Integer, Text +from fastapi import Depends, FastAPI +from fastapi.responses import ORJSONResponse as JSONResponse +from pydantic import ConfigDict +from sqlalchemy.engine import URL from sqlalchemy.engine import make_url -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column -from fastapi_jsonapi import RoutersJSONAPI, init -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric +from fastapi_jsonapi import ApplicationBuilder +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric from fastapi_jsonapi.schema_base import BaseModel -from fastapi_jsonapi.views.utils import HTTPMethod, HTTPMethodConfig -from fastapi_jsonapi.views.view_base import ViewBase - -CURRENT_FILE = Path(__file__).resolve() -CURRENT_DIR = CURRENT_FILE.parent -DB_URL = f"sqlite+aiosqlite:///{CURRENT_DIR}/db.sqlite3" - -Base = declarative_base() - - -class User(Base): - __tablename__ = "users" - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(Text, nullable=True) - - -class UserAttributesBaseSchema(BaseModel): - name: str - - class Config: - """Pydantic schema config.""" +from fastapi_jsonapi.views import ViewBase, Operation, OperationConfig + +CURRENT_DIR = Path(__file__).resolve().parent 
+sys.path.append(f"{CURRENT_DIR.parent.parent}") + + +class DB: + def __init__( + self, + url: Union[str, URL], + echo: bool = False, + echo_pool: bool = False, + ): + self.engine: AsyncEngine = create_async_engine( + url=url, + echo=echo, + echo_pool=echo_pool, + ) - orm_mode = True + self.session_maker: async_sessionmaker[AsyncSession] = async_sessionmaker( + autocommit=False, + bind=self.engine, + expire_on_commit=False, + ) + async def dispose(self): + await self.engine.dispose() -class UserSchema(UserAttributesBaseSchema): - """User base schema.""" + async def session(self) -> AsyncIterator[AsyncSession]: + async with self.session_maker() as session: + yield session -class UserPatchSchema(UserAttributesBaseSchema): - """User PATCH schema.""" +db = DB( + url=make_url(f"sqlite+aiosqlite:///{CURRENT_DIR}/db.sqlite3"), +) -class UserInSchema(UserAttributesBaseSchema): - """User input schema.""" +class Base(DeclarativeBase): + pass -def async_session() -> sessionmaker: - engine = create_async_engine(url=make_url(DB_URL)) - _async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session +class User(Base): + __tablename__ = "users" + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[Optional[str]] -class Connector: - @classmethod - async def get_session(cls): - """ - Get session as dependency - :return: - """ - sess = async_session() - async with sess() as db_session: # type: AsyncSession - yield db_session - await db_session.rollback() +class UserSchema(BaseModel): + """User base schema.""" + model_config = ConfigDict( + from_attributes=True, + ) -async def sqlalchemy_init() -> None: - engine = create_async_engine(url=make_url(DB_URL)) - async with engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) + name: str class SessionDependency(BaseModel): - session: AsyncSession = Depends(Connector.get_session) + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) - class Config: - 
arbitrary_types_allowed = True + session: AsyncSession = Depends(db.session) -def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> Dict[str, Any]: +def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> dict[str, Any]: return { "session": dto.session, } -class UserDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=session_dependency_handler, - ) - } - - -class UserListView(ListViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( +class UserView(ViewBaseGeneric): + operation_dependencies: ClassVar = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=session_dependency_handler, - ) + ), } def add_routes(app: FastAPI): - tags = [ - { - "name": "User", - "description": "", - }, - ] - - router: APIRouter = APIRouter() - RoutersJSONAPI( - router=router, + builder = ApplicationBuilder(app) + builder.add_resource( path="/users", tags=["User"], - class_detail=UserDetailView, - class_list=UserListView, + view=UserView, schema=UserSchema, - resource_type="user", - schema_in_patch=UserPatchSchema, - schema_in_post=UserInSchema, model=User, + resource_type="user", ) + builder.initialize() - app.include_router(router, prefix="") - return tags +# noinspection PyUnusedLocal +@asynccontextmanager +async def lifespan(app: FastAPI): + add_routes(app) -def create_app() -> FastAPI: - """ - Create app factory. 
+ async with db.engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield + + await db.dispose() - :return: app - """ - app = FastAPI( - title="FastAPI and SQLAlchemy", - debug=True, - openapi_url="/openapi.json", - docs_url="/docs", - ) - add_routes(app) - app.on_event("startup")(sqlalchemy_init) - init(app) - return app +app = FastAPI( + title="FastAPI and SQLAlchemy", + lifespan=lifespan, + debug=True, + default_response_class=JSONResponse, + docs_url="/docs", + openapi_url="/openapi.json", +) -app = create_app() if __name__ == "__main__": uvicorn.run( - "main:app", + app, host="0.0.0.0", port=8080, - reload=True, - app_dir=str(CURRENT_DIR), ) ``` This example provides the following API structure: -| URL | method | endpoint | Usage | -|-------------------|--------|-------------|---------------------------| -| `/users` | GET | user_list | Get a collection of users | -| `/users` | POST | user_list | Create a user | -| `/users` | DELETE | user_list | Delete users | -| `/users/{obj_id}` | GET | user_detail | Get user details | -| `/users/{obj_id}` | PATCH | user_detail | Update a user | -| `/users/{obj_id}` | DELETE | user_detail | Delete a user | +| URL | method | endpoint | Usage | +|--------------------|--------|-------------|-------------------------------| +| `/users/` | GET | user_list | Get a collection of users | +| `/users/` | POST | user_list | Create a user | +| `/users/` | DELETE | user_list | Delete users | +| `/users/{obj_id}/` | GET | user_detail | Get user details | +| `/users/{obj_id}/` | PATCH | user_detail | Update a user | +| `/users/{obj_id}/` | DELETE | user_detail | Delete a user | +| `/operations/` | POST | atomic | Create, update, delete users | diff --git a/docs/api_filtering_example.rst b/docs/api_filtering_example.rst index f8f94b80..05c032bc 100644 --- a/docs/api_filtering_example.rst +++ b/docs/api_filtering_example.rst @@ -6,14 +6,16 @@ Filtering API example -Filter by jsonb contains +Filter by jsonb contains. 
Before using the filter, you must define it and apply it +to the schema as shown here :ref:`custom_sql_filtering`. Some useful filters are +defined in module **fastapi_jsonapi.types_metadata.custom_filter_sql.py** .. code-block:: json [ { "name": "words", - "op": "jsonb_contains", + "op": "sqlite_json_contains", "val": {"location": "Moscow", "spam": "eggs"} } ] diff --git a/docs/api_limited_methods_example.rst b/docs/api_limited_methods_example.rst index 561b58d9..c8752353 100644 --- a/docs/api_limited_methods_example.rst +++ b/docs/api_limited_methods_example.rst @@ -8,23 +8,23 @@ For example, you want to create only GET, POST and GET LIST methods, so user can't update or delete any items. -Set ``methods`` on Routers registration: +Set ``operations`` on Routers registration: .. code-block:: python - RoutersJSONAPI( + builder = ApplicationBuilder(app) + builder.add_resource( router=router, path="/users", tags=["User"], - class_detail=UserDetailView, - class_list=UserListView, + view=UserView, schema=UserSchema, model=User, resource_type="user", - methods=[ - RoutersJSONAPI.Methods.GET_LIST, - RoutersJSONAPI.Methods.POST, - RoutersJSONAPI.Methods.GET, + operations=[ + Operation.GET_LIST, + Operation.POST, + Operation.GET, ], ) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8860c204..0a1450a1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -1,6 +1,39 @@ Changelog ######### +**3.0.0** +********* + +Backwards compatibility changes +=============================== +* Removed support pydantic v1 by `@NatalyaGrigoreva`_ +* Minimal fastapi version up to fastapi>=0.112.3 by `@NatalyaGrigoreva`_ +* Updated minimal pydantic version pydantic>=2.6.0 by `@NatalyaGrigoreva`_ +* Added required dependency orjson>=3.10.0 by `@NatalyaGrigoreva`_ +* Updated framework api by `@NatalyaGrigoreva`_, `@CosmoV`_ + +Features +======== +* Added support of pydantic v2 by `@NatalyaGrigoreva`_ +* Improved sqla orm query building by `@NatalyaGrigoreva`_ +* Updated logic of 
creation custom sql filters by `@mahenzon`_ +* Several bugfixes by `@NatalyaGrigoreva`_ + +Performance improvements +======================== + +* Updated ViewBase logic of response building by `@CosmoV`_ +* Added storages for application lifetime entities by `@CosmoV`_ +* Updated "fields" feature logic by `@CosmoV`_ + +Authors +""""""" + +* `@CosmoV`_ +* `@NatalyaGrigoreva`_ +* `@mahenzon`_ + + **2.8.0** ********* @@ -69,7 +102,7 @@ Fix relationships filtering, refactor alchemy helpers * Fix filter by relationships by `@CosmoV`_ in `#52 `_ * Add Codecov by `@mahenzon`_ in `#72 `_ -* Set RoutersJSONAPI class on AtomicViewHandler by `@mahenzon`_ in `#7b2557f `_ +* Set ApplicationBuilder class on AtomicViewHandler by `@mahenzon`_ in `#7b2557f `_ Authors """"""" @@ -316,3 +349,4 @@ Enhancements and bug fixes .. _`@mahenzon`: https://github.com/mahenzon .. _`@CosmoV`: https://github.com/CosmoV .. _`@tpynio`: https://github.com/tpynio +.. _`@NatalyaGrigoreva`: https://github.com/NatalyaGrigoreva diff --git a/docs/conf.py b/docs/conf.py index f08ddf4a..0dabfb00 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -253,7 +253,7 @@ # -- Options for LaTeX output --------------------------------------------- -latex_elements = { +latex_elements: dict = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', diff --git a/docs/configuration.rst b/docs/configuration.rst index de2431f6..e7b8827c 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -9,4 +9,3 @@ You have access to 5 configuration keys: * MAX_PAGE_SIZE: the maximum page size. If you specify a page size greater than this value you will receive a 400 Bad Request response. 
* MAX_INCLUDE_DEPTH: the maximum length of an include through schema relationships * ALLOW_DISABLE_PAGINATION: if you want to disallow to disable pagination you can set this configuration key to False -* CATCH_EXCEPTIONS: if you want fastapi_jsonapi to catch all exceptions and return them as JsonApiException (default is True) diff --git a/docs/custom_sql_filtering.rst b/docs/custom_sql_filtering.rst index cfab4ad9..aa62dd42 100644 --- a/docs/custom_sql_filtering.rst +++ b/docs/custom_sql_filtering.rst @@ -11,13 +11,10 @@ You can define new filtering rules as in this example: -Prepare pydantic schema which is used in RoutersJSONAPI as schema +Prepare pydantic schema which is used in ApplicationBuilder as schema ----------------------------------------------------------------- - -``schemas/picture.py``: - -.. literalinclude:: ../examples/custom_filter_example.py +.. literalinclude:: ../examples/misc/custom_filter_example.py :language: python @@ -35,7 +32,7 @@ Request: .. sourcecode:: http - GET /pictures?filter=[{"name":"picture.meta","op":"jsonb_contains","val":{"location":"Moscow"}}] HTTP/1.1 + GET /pictures?filter=[{"name":"picture","op":"sqlite_json_ilike","val":["meta", "Moscow"]}] HTTP/1.1 Accept: application/vnd.api+json @@ -45,10 +42,8 @@ Filter value has to be a valid JSON: [ { - "name":"picture.meta", - "op":"jsonb_contains", - "val":{ - "location":"Moscow" - } + "name":"picture", + "op":"sqlite_json_ilike", + "val":["meta", "Moscow"] } ] diff --git a/docs/fastapi-jsonapi.rst b/docs/fastapi-jsonapi.rst index c3e7d171..8222a5fb 100644 --- a/docs/fastapi-jsonapi.rst +++ b/docs/fastapi-jsonapi.rst @@ -1,14 +1,6 @@ Package fastapi_jsonapi index ============================= -fastapi_jsonapi.data_layers.fields.enum module ----------------------------------------------- - -.. 
automodule:: fastapi_jsonapi.data_layers.fields.enum - :members: - :undoc-members: - :show-inheritance: - fastapi_jsonapi.data_layers.fields.mixins module ------------------------------------------------ @@ -17,42 +9,35 @@ fastapi_jsonapi.data_layers.fields.mixins module :undoc-members: :show-inheritance: -fastapi_jsonapi.data_layers.filtering.sqlalchemy module -------------------------------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.filtering.sqlalchemy - :members: - :undoc-members: - :show-inheritance: - -fastapi_jsonapi.data_layers.filtering.tortoise_operation module ---------------------------------------------------------------- +fastapi_jsonapi.data_layers.sqla module +------------------------------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.filtering.tortoise_operation +.. automodule:: fastapi_jsonapi.data_layers.sqla :members: :undoc-members: :show-inheritance: -fastapi_jsonapi.data_layers.filtering.tortoise_orm module ---------------------------------------------------------- +fastapi_jsonapi.data_layers.sqla.base_model module +------------------------------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.filtering.tortoise_orm +.. automodule:: fastapi_jsonapi.data_layers.sqla.base_model :members: :undoc-members: :show-inheritance: -fastapi_jsonapi.data_layers.sorting.sqlalchemy module ------------------------------------------------------ +fastapi_jsonapi.data_layers.sqla.orm module +------------------------------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.sorting.sqlalchemy +.. automodule:: fastapi_jsonapi.data_layers.sqla.orm :members: :undoc-members: :show-inheritance: -fastapi_jsonapi.data_layers.sorting.tortoise_orm module +fastapi_jsonapi.data_layers.sqla.query_building module ------------------------------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.sorting.tortoise_orm +.. 
automodule:: fastapi_jsonapi.data_layers.sqla.query_building :members: :undoc-members: :show-inheritance: @@ -73,38 +58,48 @@ fastapi_jsonapi.data_typing module :undoc-members: :show-inheritance: -fastapi_jsonapi.data_layers.orm module --------------------------------------- +fastapi_jsonapi.misc.sqla.generics.base module +-------------------------------------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.orm +.. automodule:: fastapi_jsonapi.misc.sqla.generics.base :members: :undoc-members: :show-inheritance: -fastapi_jsonapi.data_layers.shared module ------------------------------------------ +fastapi_jsonapi.storages module +------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.shared +.. automodule:: fastapi_jsonapi.storages :members: :undoc-members: :show-inheritance: -fastapi_jsonapi.data_layers.sqla_orm module -------------------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.sqla_orm +fastapi_jsonapi.storages.models_storage module +------------------------------------------------------- + +.. automodule:: fastapi_jsonapi.storages.models_storage :members: :undoc-members: :show-inheritance: -fastapi_jsonapi.data_layers.tortoise_orm module +fastapi_jsonapi.storages.schemas_storage module ----------------------------------------------- -.. automodule:: fastapi_jsonapi.data_layers.tortoise_orm +.. automodule:: fastapi_jsonapi.storages.schemas_storage :members: :undoc-members: :show-inheritance: +fastapi_jsonapi.storages.views_storage module +--------------------------------------------- + +.. 
automodule:: fastapi_jsonapi.storages.views_storage + :members: + :undoc-members: + :show-inheritance: + + fastapi_jsonapi.api module -------------------------- @@ -113,10 +108,26 @@ fastapi_jsonapi.api module :undoc-members: :show-inheritance: -fastapi_jsonapi.jsonapi_typing module -------------------------------------- +fastapi_jsonapi.api.application_builder module +---------------------------------------------- -.. automodule:: fastapi_jsonapi.jsonapi_typing +.. automodule:: fastapi_jsonapi.api.application_builder + :members: + :undoc-members: + :show-inheritance: + +fastapi_jsonapi.api.endpoint_builder module +------------------------------------------- + +.. automodule:: fastapi_jsonapi.api.endpoint_builder + :members: + :undoc-members: + :show-inheritance: + +fastapi_jsonapi.api.schemas module +---------------------------------- + +.. automodule:: fastapi_jsonapi.api.schemas :members: :undoc-members: :show-inheritance: @@ -137,18 +148,26 @@ fastapi_jsonapi.schema module :undoc-members: :show-inheritance: -fastapi_jsonapi.signature module --------------------------------- +fastapi_jsonapi.schema_base module +---------------------------------- -.. automodule:: fastapi_jsonapi.signature +.. automodule:: fastapi_jsonapi.schema_base :members: :undoc-members: :show-inheritance: -fastapi_jsonapi.splitter module -------------------------------- +fastapi_jsonapi.schema_builder module +------------------------------------- -.. automodule:: fastapi_jsonapi.splitter +.. automodule:: fastapi_jsonapi.schema_builder :members: :undoc-members: :show-inheritance: + +fastapi_jsonapi.signature module +-------------------------------- + +.. 
automodule:: fastapi_jsonapi.signature + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/http_snippets/snippets/example_atomic_five__mixed_actions_result b/docs/http_snippets/snippets/example_atomic_five__mixed_actions_result index 29c5986b..0cf44659 100644 --- a/docs/http_snippets/snippets/example_atomic_five__mixed_actions_result +++ b/docs/http_snippets/snippets/example_atomic_five__mixed_actions_result @@ -18,7 +18,6 @@ Content-Type: application/json "attributes": { "birth_city": "Saint Petersburg", "favourite_movies": "\"The Good, the Bad and the Ugly\", \"Once Upon a Time in America\"", - "keys_to_ids_list": null, }, "id": "2", "type": "user_bio" diff --git a/docs/http_snippets/snippets/relationship_api__get_user_with_computers_as_relationship b/docs/http_snippets/snippets/relationship_api__get_user_with_computers_as_relationship new file mode 100644 index 00000000..775eb743 --- /dev/null +++ b/docs/http_snippets/snippets/relationship_api__get_user_with_computers_as_relationship @@ -0,0 +1,2 @@ +GET /users/1/relationships/computers HTTP/1.1 +Content-Type: application/vnd.api+json diff --git a/docs/http_snippets/snippets/relationship_api__get_user_with_computers_as_relationship_result b/docs/http_snippets/snippets/relationship_api__get_user_with_computers_as_relationship_result new file mode 100644 index 00000000..a3e45189 --- /dev/null +++ b/docs/http_snippets/snippets/relationship_api__get_user_with_computers_as_relationship_result @@ -0,0 +1,29 @@ +HTTP/1.1 200 OK +Content-Type: application/vnd.api+json + +{ + "data": { + "attributes": { + "serial": "Amstrad" + }, + "id": "1", + "links": { + "self": "/computers/1" + }, + "relationships": { + "owner": { + "links": { + "related": "/computers/1/owner", + "self": "/computers/1/relationships/owner" + } + } + }, + "type": "computer" + }, + "jsonapi": { + "version": "1.0" + }, + "links": { + "self": "/computers/1" + } +} diff --git a/docs/http_snippets/update_snippets_with_responses.py 
b/docs/http_snippets/update_snippets_with_responses.py index 483f5f16..456b7946 100644 --- a/docs/http_snippets/update_snippets_with_responses.py +++ b/docs/http_snippets/update_snippets_with_responses.py @@ -3,9 +3,9 @@ import logging from http import HTTPStatus -import requests -import simplejson import argparse +import orjson as json +import requests parser = argparse.ArgumentParser() parser.add_argument("prefix", help="Snippets prefix to process. Like 'minimal_api', 'relationship_', etc") @@ -14,7 +14,6 @@ log = logging.getLogger(__name__) SNIPPETS_DIR = "snippets" -SORT_KEYS_ON_DUMP = True SNIPPET_RESULT_POSTFIX = "_result" REMOVE_PYTHON_SNIPPET = True @@ -51,10 +50,10 @@ def __lt__(self, other): def run_request_for_module(module_name: str): - log.info("Start processing %r", module_name) + log.info("Start processing %s", module_name) module_full_name = ".".join((SNIPPETS_DIR, module_name)) - log.debug("import module %s", module_full_name) + log.debug("Import module %s", module_full_name) module = importlib.import_module(module_full_name) log.info("Process module %s", module) @@ -69,25 +68,20 @@ def run_request_for_module(module_name: str): http_response_text.append( # "HTTP/1.1 201 Created" - "{} {} {}".format( - "HTTP/1.1", - response.status_code, - response_reason, - ) + f"HTTP/1.1 {response.status_code} {response_reason}" ) if ct := response.headers.get("content-type"): - http_response_text.append("{}: {}".format("Content-Type", ct)) + http_response_text.append(f"Content-Type: {ct}") http_response_text.append("") if response.content: # TODO: handle non-json response? 
http_response_text.append( - simplejson.dumps( + json.dumps( response.json(), - sort_keys=SORT_KEYS_ON_DUMP, - indent=2, - ), + option=json.OPT_INDENT_2 | json.OPT_SORT_KEYS, + ).decode(), ) http_response_text.append("") @@ -98,9 +92,9 @@ def run_request_for_module(module_name: str): result_file_name = "/".join((SNIPPETS_DIR, module_name + SNIPPET_RESULT_POSTFIX)) with open(result_file_name, "w") as f: res = f.write(result_text) - log.info("Wrote text (%s) to %r", res, result_file_name) + log.info("Wrote text (%s) to %s", res, result_file_name) - log.info("Processed %r", module_name) + log.info("Processed %s", module_name) def add_help_lines(lines: list, module_name: str) -> None: @@ -147,13 +141,13 @@ def main(): log.warning("Starting") available_modules = os.listdir(SNIPPETS_DIR) - log.debug("all available snippets: %s", available_modules) + log.debug("All available snippets: %s", available_modules) modules_to_process = list( # exclude unknown filter(lambda name: name.startswith(args.prefix), available_modules) ) modules_to_process.sort(key=StrOrderCRUD) - log.warning("modules to process (with order): %s", modules_to_process) + log.warning("Modules to process (with order): %s", modules_to_process) result_help_text = [] result_help_text.append("=" * 30) @@ -164,7 +158,7 @@ def main(): try: run_request_for_module(module_name) except Exception: - log.exception("Could not process module %r, skipping", module_file) + log.exception("Could not process module %s, skipping", module_file) else: if REMOVE_PYTHON_SNIPPET: os.unlink("/".join((SNIPPETS_DIR, module_file))) diff --git a/docs/logical_data_abstraction.rst b/docs/logical_data_abstraction.rst index 51023a59..806d81a8 100644 --- a/docs/logical_data_abstraction.rst +++ b/docs/logical_data_abstraction.rst @@ -20,55 +20,69 @@ In this example, let's assume that we have two legacy models, User and Computer, .. 
code-block:: python from sqlalchemy import Column, String, Integer, ForeignKey - from sqlalchemy.orm import relationship, backref + from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship from sqlalchemy.ext.declarative import declarative_base - Base = declarative_base() + class Base(DeclarativeBase): + pass class User(Base): - id = Column(Integer, primary_key=True) - name = Column(String) - email = Column(String) - birth_date = Column(String) - password = Column(String) + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str] + email: Mapped[str] + birth_date: Mapped[str] + password: Mapped[str] + + computers: Mapped[list[Computer]] = relationship(back_populates="user") class Computer(Base): - computer_id = Column(Integer, primary_key=True) - serial = Column(String) - user_id = Column(Integer, ForeignKey('user.id')) - user = relationship('User', backref=backref('computers')) + computer_id: Mapped[int] = mapped_column(primary_key=True) + serial: Mapped[str] + + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id")) + user: Mapped[User] = relationship(back_populates="computers") + Now let's create the logical abstraction to illustrate this concept. .. 
code-block:: python - from pydantic import ( - BaseModel, - Field, - ) - from typing import List + from pydantic import BaseModel, ConfigDict, Field from datetime import datetime class UserSchema(BaseModel): - class Config: - orm_mode = True + model_config = ConfigDict( + from_attributes=True, + ) id: int name: str email: str birth_date: datetime - computers: List['ComputerSchema'] + computers: Annotated[ + Optional[list["ComputerSchema"]], + RelationshipInfo( + resource_type="computer", + many=True, + ), + ] = None class ComputerSchema(BaseModel): - class Config: - orm_mode = True + model_config = ConfigDict( + from_attributes=True, + ) id: int serial: str - owner: UserSchema + user: Annotated[ + Optional[UserSchema], + RelationshipInfo( + resource_type="user", + ), + ] = None You can see several differences between models and schemas exposed by the API. diff --git a/docs/python_snippets/client_generated_id/schematic_example.py b/docs/python_snippets/client_generated_id/schematic_example.py index 5c0b1aae..64270a03 100644 --- a/docs/python_snippets/client_generated_id/schematic_example.py +++ b/docs/python_snippets/client_generated_id/schematic_example.py @@ -1,42 +1,47 @@ import sys +from contextlib import asynccontextmanager from pathlib import Path -from typing import ClassVar +from typing import ClassVar, Annotated, Optional import uvicorn from fastapi import APIRouter, Depends, FastAPI -from fastapi_jsonapi.schema_base import Field, BaseModel as PydanticBaseModel -from sqlalchemy import Column, Integer, Text +from fastapi.responses import ORJSONResponse as JSONResponse +from pydantic import ConfigDict from sqlalchemy.engine import make_url -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column -from fastapi_jsonapi import 
RoutersJSONAPI, init -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric -from fastapi_jsonapi.views.utils import HTTPMethod, HTTPMethodConfig -from fastapi_jsonapi.views.view_base import ViewBase +from examples.api_for_sqlalchemy.models.db import DB +from fastapi_jsonapi import ApplicationBuilder +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import ClientCanSetId +from fastapi_jsonapi.views import ViewBase, Operation, OperationConfig CURRENT_FILE = Path(__file__).resolve() CURRENT_DIR = CURRENT_FILE.parent -PROJECT_DIR = CURRENT_DIR.parent.parent -DB_URL = f"sqlite+aiosqlite:///{CURRENT_DIR.absolute()}/db.sqlite3" -sys.path.append(str(PROJECT_DIR)) +sys.path.append(f"{CURRENT_DIR.parent.parent}") +db = DB( + url=make_url(f"sqlite+aiosqlite:///{CURRENT_DIR.absolute()}/db.sqlite3"), +) -Base = declarative_base() + +class Base(DeclarativeBase): + pass class User(Base): __tablename__ = "users" - id = Column(Integer, primary_key=True, autoincrement=False) - name = Column(Text, nullable=True) - -class BaseModel(PydanticBaseModel): - class Config: - orm_mode = True + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[Optional[str]] class UserAttributesBaseSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + name: str @@ -51,49 +56,26 @@ class UserPatchSchema(UserAttributesBaseSchema): class UserInSchema(UserAttributesBaseSchema): """User input schema.""" - id: int = Field(client_can_set_id=True) - - -async def get_session(): - sess = sessionmaker( - bind=create_async_engine(url=make_url(DB_URL)), - class_=AsyncSession, - expire_on_commit=False, - ) - async with sess() as db_session: # type: AsyncSession - yield db_session - await db_session.rollback() - - -async def sqlalchemy_init() -> None: - engine = create_async_engine(url=make_url(DB_URL)) - async with engine.begin() as conn: 
- await conn.run_sync(Base.metadata.create_all) + id: Annotated[int, ClientCanSetId()] class SessionDependency(BaseModel): - session: AsyncSession = Depends(get_session) + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) - class Config: - arbitrary_types_allowed = True + session: AsyncSession = Depends(db.session) def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> dict: - return {"session": dto.session} - - -class UserDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=session_dependency_handler, - ) + return { + "session": dto.session, } -class UserListView(ListViewBaseGeneric): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( +class UserView(ViewBaseGeneric): + operation_dependencies: ClassVar = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=session_dependency_handler, ) @@ -101,57 +83,48 @@ class UserListView(ListViewBaseGeneric): def add_routes(app: FastAPI): - tags = [ - { - "name": "User", - "description": "", - }, - ] - - router: APIRouter = APIRouter() - RoutersJSONAPI( - router=router, + builder = ApplicationBuilder(app) + builder.add_resource( path="/users", tags=["User"], - class_detail=UserDetailView, - class_list=UserListView, + view=UserView, schema=UserSchema, resource_type="user", schema_in_patch=UserPatchSchema, schema_in_post=UserInSchema, model=User, ) + builder.initialize() - app.include_router(router, prefix="") - return tags +# noinspection PyUnusedLocal +@asynccontextmanager +async def lifespan(app: FastAPI): + add_routes(app) -def create_app() -> FastAPI: - """ - Create app factory. 
+ async with db.engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield + + await db.dispose() - :return: app - """ - app = FastAPI( - title="FastAPI and SQLAlchemy", - debug=True, - openapi_url="/openapi.json", - docs_url="/docs", - ) - add_routes(app) - app.on_event("startup")(sqlalchemy_init) - init(app) - return app +app = FastAPI( + title="FastAPI and SQLAlchemy", + lifespan=lifespan, + debug=True, + default_response_class=JSONResponse, + docs_url="/docs", + openapi_url="/openapi.json", +) -app = create_app() if __name__ == "__main__": - current_file_name = CURRENT_FILE.name.replace(CURRENT_FILE.suffix, "") uvicorn.run( - f"{current_file_name}:app", + f"{CURRENT_FILE.name.replace(CURRENT_FILE.suffix, '')}:app", host="0.0.0.0", port=8084, reload=True, - app_dir=str(CURRENT_DIR), + app_dir=f"{CURRENT_DIR}", ) diff --git a/docs/python_snippets/data_layer/custom_data_layer.py b/docs/python_snippets/data_layer/custom_data_layer.py index 04729221..5e67d0ed 100644 --- a/docs/python_snippets/data_layer/custom_data_layer.py +++ b/docs/python_snippets/data_layer/custom_data_layer.py @@ -1,17 +1,14 @@ from fastapi import FastAPI -from fastapi_jsonapi import RoutersJSONAPI +from fastapi_jsonapi import ApplicationBuilder from fastapi_jsonapi.data_layers.base import BaseDataLayer -from fastapi_jsonapi.data_layers.sqla_orm import SqlalchemyDataLayer -from fastapi_jsonapi.views.detail_view import DetailViewBase -from fastapi_jsonapi.views.list_view import ListViewBase +from fastapi_jsonapi.data_layers.sqla.orm import SqlalchemyDataLayer +from fastapi_jsonapi.views import ViewBase class MyCustomDataLayer(BaseDataLayer): """Overload abstract methods here""" - ... 
- class MyCustomSqlaDataLayer(SqlalchemyDataLayer): """Overload any methods here""" @@ -20,19 +17,14 @@ async def before_delete_objects(self, objects: list, view_kwargs: dict): raise Exception("not allowed to delete objects") -class UserDetailView(DetailViewBase): +class UserView(ViewBase): data_layer_cls = MyCustomDataLayer -class UserListView(ListViewBase): - data_layer_cls = MyCustomSqlaDataLayer - - app = FastAPI() -RoutersJSONAPI( - app, +builder = ApplicationBuilder(app) +builder.add_resource( # ... - class_detail=UserDetailView, - class_list=UserListView, + view=UserView, # ... ) diff --git a/docs/python_snippets/relationships/models.py b/docs/python_snippets/relationships/models.py index 39c3351c..552b6618 100644 --- a/docs/python_snippets/relationships/models.py +++ b/docs/python_snippets/relationships/models.py @@ -1,33 +1,36 @@ -from sqlalchemy import Column, Integer, String, ForeignKey -from sqlalchemy.orm import relationship +from __future__ import annotations -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from typing import Optional +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship, Mapped, mapped_column -class User(Base, BaseModelMixin): +from examples.api_for_sqlalchemy.models.base import Base + + +class User(Base): __tablename__ = "users" - id = Column(Integer, primary_key=True, autoincrement=True) - name: str = Column(String) - posts = relationship("Post", back_populates="user", uselist=True) - bio = relationship("UserBio", back_populates="user", uselist=False) - computers = relationship("Computer", back_populates="user", uselist=True) + name: Mapped[str] + + bio: Mapped[UserBio] = relationship(back_populates="user") + computers: Mapped[list[Computer]] = relationship(back_populates="user") -class Computer(Base, BaseModelMixin): +class Computer(Base): __tablename__ = "computers" - id = Column(Integer, 
primary_key=True, autoincrement=True) - name = Column(String, nullable=False) - user_id = Column(Integer, ForeignKey("users.id"), nullable=True) - user = relationship("User", back_populates="computers") + name: Mapped[str] + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id")) + user: Mapped[User] = relationship(back_populates="computers") -class UserBio(Base, BaseModelMixin): + +class UserBio(Base): __tablename__ = "user_bio" - id = Column(Integer, primary_key=True, autoincrement=True) - birth_city: str = Column(String, nullable=False, default="", server_default="") - favourite_movies: str = Column(String, nullable=False, default="", server_default="") - user_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=True) - user = relationship("User", back_populates="bio", uselist=False) + + birth_city: Mapped[str] = mapped_column(default="", server_default="") + favourite_movies: Mapped[str] = mapped_column(default="", server_default="") + + user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), unique=True) + user: Mapped[User] = relationship(back_populates="bio") diff --git a/docs/python_snippets/relationships/relationships_info_example.py b/docs/python_snippets/relationships/relationships_info_example.py index 4e393a50..514ef393 100644 --- a/docs/python_snippets/relationships/relationships_info_example.py +++ b/docs/python_snippets/relationships/relationships_info_example.py @@ -1,29 +1,33 @@ -from typing import Optional +from __future__ import annotations -from pydantic import BaseModel as PydanticBaseModel +from typing import Optional, Annotated -from fastapi_jsonapi.schema_base import Field, RelationshipInfo +from pydantic import BaseModel, ConfigDict - -class BaseModel(PydanticBaseModel): - class Config: - orm_mode = True +from fastapi_jsonapi.types_metadata import RelationshipInfo class UserBaseSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + id: int name: str - bio: Optional["UserBioSchema"] 
= Field( - relationship=RelationshipInfo( + + bio: Annotated[ + Optional[UserBioBaseSchema], + RelationshipInfo( resource_type="user_bio", ), - ) - computers: Optional["ComputerSchema"] = Field( - relationship=RelationshipInfo( + ] = None + computers: Annotated[ + Optional[list[ComputerBaseSchema]], + RelationshipInfo( resource_type="computer", many=True, ), - ) + ] = None class UserSchema(BaseModel): @@ -34,20 +38,22 @@ class UserSchema(BaseModel): class UserBioBaseSchema(BaseModel): birth_city: str favourite_movies: str - keys_to_ids_list: dict[str, list[int]] = None - user: "UserSchema" = Field( - relationship=RelationshipInfo( + user: Annotated[ + Optional[UserSchema], + RelationshipInfo( resource_type="user", ), - ) + ] = None class ComputerBaseSchema(BaseModel): id: int name: str - user: Optional["UserSchema"] = Field( - relationship=RelationshipInfo( + + user: Annotated[ + Optional[UserSchema], + RelationshipInfo( resource_type="user", ), - ) + ] = None diff --git a/docs/python_snippets/routing/router.py b/docs/python_snippets/routing/router.py index 0577d6c1..e24e6c2f 100644 --- a/docs/python_snippets/routing/router.py +++ b/docs/python_snippets/routing/router.py @@ -1,30 +1,17 @@ -from fastapi import APIRouter, FastAPI +from fastapi import FastAPI from examples.api_for_sqlalchemy.models import User -from examples.api_for_sqlalchemy.models.schemas import ( - UserInSchema, - UserPatchSchema, - UserSchema, -) -from fastapi_jsonapi import RoutersJSONAPI -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBase, ListViewBase +from examples.api_for_sqlalchemy.schemas import UserInSchema, UserPatchSchema, UserSchema +from examples.api_for_sqlalchemy.urls import ViewBase +from fastapi_jsonapi import ApplicationBuilder def add_routes(app: FastAPI): - tags = [ - { - "name": "User", - "description": "Users API", - }, - ] - - router: APIRouter = APIRouter() - RoutersJSONAPI( - router=router, + builder = ApplicationBuilder(app) + builder.add_resource( 
path="/users", tags=["User"], - class_detail=DetailViewBase, - class_list=ListViewBase, + view=ViewBase, model=User, schema=UserSchema, resource_type="user", @@ -32,9 +19,6 @@ def add_routes(app: FastAPI): schema_in_post=UserInSchema, ) - app.include_router(router, prefix="") - return tags - app = FastAPI() add_routes(app) diff --git a/docs/python_snippets/view_dependencies/main_example.py b/docs/python_snippets/view_dependencies/main_example.py index 0fa6ff9f..c684e7eb 100644 --- a/docs/python_snippets/view_dependencies/main_example.py +++ b/docs/python_snippets/view_dependencies/main_example.py @@ -1,86 +1,50 @@ -from __future__ import annotations - -from typing import ClassVar, Dict +from typing import Optional, ClassVar from fastapi import Depends, Header -from pydantic import BaseModel -from sqlalchemy.engine import make_url +from pydantic import BaseModel, ConfigDict from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.ext.asyncio import create_async_engine -from sqlalchemy.orm import sessionmaker from typing_extensions import Annotated +from examples.api_for_sqlalchemy.models.db import DB from fastapi_jsonapi.exceptions import Forbidden -from fastapi_jsonapi.misc.sqla.generics.base import ( - DetailViewBaseGeneric, - ListViewBaseGeneric, -) -from fastapi_jsonapi.views.utils import ( - HTTPMethod, - HTTPMethodConfig, -) -from fastapi_jsonapi.views.view_base import ViewBase - - -def get_async_sessionmaker() -> sessionmaker: - _async_session = sessionmaker( - bind=create_async_engine( - url=make_url( - f"sqlite+aiosqlite:///tmp/db.sqlite3", - ) - ), - class_=AsyncSession, - expire_on_commit=False, - ) - return _async_session - - -async def async_session_dependency(): - """ - Get session as dependency +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric +from fastapi_jsonapi.views import ViewBase, Operation, OperationConfig - :return: - """ - session_maker = get_async_sessionmaker() - async with session_maker() as db_session: # type: 
AsyncSession - yield db_session - await db_session.rollback() +db = DB( + url="sqlite+aiosqlite:///tmp/db.sqlite3", +) class SessionDependency(BaseModel): - session: AsyncSession = Depends(async_session_dependency) + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) - class Config: - arbitrary_types_allowed = True + session: AsyncSession = Depends(db.session) async def common_handler(view: ViewBase, dto: SessionDependency) -> dict: - return {"session": dto.session} + return { + "session": dto.session, + } async def check_that_user_is_admin(x_auth: Annotated[str, Header()]): if x_auth != "admin": - raise Forbidden(detail="Only admin user have permissions to this endpoint") + raise Forbidden(detail="Only admin user have permissions to this endpoint.") class AdminOnlyPermission(BaseModel): - is_admin: bool | None = Depends(check_that_user_is_admin) + is_admin: Optional[bool] = Depends(check_that_user_is_admin) -class DetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( +class View(ViewBaseGeneric): + operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, ), - } - - -class ListView(ListViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.GET: HTTPMethodConfig(dependencies=AdminOnlyPermission), - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=common_handler, + Operation.GET: OperationConfig( + dependencies=AdminOnlyPermission, ), } diff --git a/docs/python_snippets/view_dependencies/several_dependencies.py b/docs/python_snippets/view_dependencies/several_dependencies.py index 72d858f7..f5df2656 100644 --- a/docs/python_snippets/view_dependencies/several_dependencies.py +++ b/docs/python_snippets/view_dependencies/several_dependencies.py @@ -3,9 +3,8 
@@ from fastapi import Depends from pydantic import BaseModel -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric -from fastapi_jsonapi.views.utils import HTTPMethod, HTTPMethodConfig -from fastapi_jsonapi.views.view_base import ViewBase +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric +from fastapi_jsonapi.views import ViewBase, Operation, OperationConfig def one(): @@ -36,13 +35,13 @@ def get_handler(view: ViewBase, dto: DependencyMix): return {"key_2": dto.key_2} -class DetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( +class View(ViewBaseGeneric): + operation_dependencies: ClassVar = { + Operation.ALL: OperationConfig( dependencies=CommonDependency, prepare_data_layer_kwargs=common_handler, ), - HTTPMethod.GET: HTTPMethodConfig( + Operation.GET: OperationConfig( dependencies=GetDependency, prepare_data_layer_kwargs=get_handler, ), diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 9b88bf2a..49a11b93 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -230,6 +230,19 @@ Response: :language: HTTP +If you want retrieve relationships only you can use the relationship endpoint associated with target resource. + +Request: + +.. literalinclude:: ./http_snippets/snippets/relationship_api__get_user_with_computers_as_relationship + :language: HTTP + +Response: + +.. 
literalinclude:: ./http_snippets/snippets/relationship_api__get_user_with_computers_as_relationship_result + :language: HTTP + + Check user's computers without loading actual user ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/requirements.txt b/docs/requirements.txt index c13c65a9..cdd2a78c 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,6 @@ -fastapi<0.100.0 -pydantic<2 -simplejson>=3.17.6 +fastapi>=0.112.3 +orjson>=3.2.1 +pydantic>=2.6.0 sphinx sphinx_rtd_theme -sqlalchemy<2 -tortoise-orm>=0.19.3 +sqlalchemy>=2.0.26 diff --git a/docs/view_dependencies.rst b/docs/view_dependencies.rst index d382cd1a..409c4745 100644 --- a/docs/view_dependencies.rst +++ b/docs/view_dependencies.rst @@ -7,7 +7,7 @@ View Dependencies As you already know, in the process of its work, FastAPI-JSONAPI interacts between application layers. Sometimes there are things that are necessary to process requests but are only computable at runtime. -In order for ResourceManager and DataLayer to use such things, there is a mechanism called **method_dependencies**. +In order for ResourceManager and DataLayer to use such things, there is a mechanism called **operation_dependencies**. The most common cases of such things are database session and access handling. The example below demonstrates some simple implementation of these ideas using sqlalchemy. @@ -17,8 +17,8 @@ Example: .. literalinclude:: ./python_snippets/view_dependencies/main_example.py :language: python -In this example, the focus should be on the **HTTPMethod** and **HTTPMethodConfig** entities. -By setting the **method_dependencies** attribute, you can set FastAPI dependencies for endpoints, +In this example, the focus should be on the **Operation** and **OperationConfig** entities. +By setting the **operation_dependencies** attribute, you can set FastAPI dependencies for endpoints, as well as manage the creation of additional kwargs needed to initialize the DataLayer. 
Dependencies can be any Pydantic model containing Depends as default values. @@ -73,33 +73,33 @@ Handlers -------- As noted above, dependencies can be used to create a kwargs for a DataLayer. -To do this, you need to define **prepare_data_layer_kwargs** in **HTTPMethodConfig**. +To do this, you need to define **prepare_data_layer_kwargs** in **OperationConfig**. This is a callable object which can be synchronous or asynchronous. Its signature should look like this .. code-block:: python - async def my_handler(view: ViewBase, dto: BaseModel) -> Dict[str, Any]: + async def my_handler(view: ViewBase, dto: BaseModel) -> dict[str, Any]: pass or this .. code-block:: python - async def my_handler(view: ViewBase) -> Dict[str, Any]: + async def my_handler(view: ViewBase) -> dict[str, Any]: pass In the case of dto, it is an instance of the class corresponds to what -is in **HTTPMethodConfig.dependencies** and should only be present in the function +is in **OperationConfig.dependencies** and should only be present in the function signature if dependencies is not None. -The **HTTPMethodConfig.ALL** method has special behavior. When declared, +The **OperationConfig.ALL** method has special behavior. When declared, its dependencies will be passed to each endpoint regardless of the existence of other configs. 
-Explaining with a specific example, in the case when **HTTPMethod.ALL** is declared and -it has dependencies, and also a method such as **HTTPMethod.GET** also has dependencies, -the signature for the **HTTPMethod.GET** handler will be a union of dependencies +Explaining with a specific example, in the case when **Operation.ALL** is declared and +it has dependencies, and also a method such as **Operation.GET** also has dependencies, +the signature for the **Operation.GET** handler will be a union of dependencies Example: diff --git a/examples/api_for_sqlalchemy/api/views_base.py b/examples/api_for_sqlalchemy/api/views_base.py index 68b85920..d3d74768 100644 --- a/examples/api_for_sqlalchemy/api/views_base.py +++ b/examples/api_for_sqlalchemy/api/views_base.py @@ -1,51 +1,44 @@ -from typing import ClassVar, Dict +from typing import ClassVar from fastapi import Depends -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict +from sqlalchemy.engine import make_url from sqlalchemy.ext.asyncio import AsyncSession -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Connector -from fastapi_jsonapi.data_layers.sqla_orm import SqlalchemyDataLayer -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric -from fastapi_jsonapi.views.utils import HTTPMethod, HTTPMethodConfig -from fastapi_jsonapi.views.view_base import ViewBase +from examples.api_for_sqlalchemy import config +from examples.api_for_sqlalchemy.models.db import DB +from fastapi_jsonapi.data_layers.sqla.orm import SqlalchemyDataLayer +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric +from fastapi_jsonapi.views import Operation, OperationConfig, ViewBase +db = DB( + url=make_url(config.SQLA_URI), +) -class SessionDependency(BaseModel): - session: AsyncSession = Depends(Connector.get_session) - - class Config: - arbitrary_types_allowed = True +class SessionDependency(BaseModel): + model_config = ConfigDict( + 
arbitrary_types_allowed=True, + ) -def handler(view: ViewBase, dto: SessionDependency) -> Dict: - return {"session": dto.session} - + session: AsyncSession = Depends(db.session) -class DetailViewBase(DetailViewBaseGeneric): - """ - Generic view base (detail) - """ - data_layer_cls = SqlalchemyDataLayer - - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=handler, - ), +def handler(view: ViewBase, dto: SessionDependency) -> dict: + return { + "session": dto.session, } -class ListViewBase(ListViewBaseGeneric): +class ViewBase(ViewBaseGeneric): """ - Generic view base (list) + Generic view base (detail) """ data_layer_cls = SqlalchemyDataLayer - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( + operation_dependencies: ClassVar = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=handler, ), diff --git a/examples/api_for_sqlalchemy/asgi.py b/examples/api_for_sqlalchemy/asgi.py deleted file mode 100644 index 95967617..00000000 --- a/examples/api_for_sqlalchemy/asgi.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Factory call module.""" - -from examples.api_for_sqlalchemy.main import create_app - -app = create_app() diff --git a/examples/api_for_sqlalchemy/extensions/__init__.py b/examples/api_for_sqlalchemy/enums/__init__.py similarity index 100% rename from examples/api_for_sqlalchemy/extensions/__init__.py rename to examples/api_for_sqlalchemy/enums/__init__.py diff --git a/examples/api_for_sqlalchemy/utils/sqlalchemy/fields/enum.py b/examples/api_for_sqlalchemy/enums/enums.py similarity index 87% rename from examples/api_for_sqlalchemy/utils/sqlalchemy/fields/enum.py rename to examples/api_for_sqlalchemy/enums/enums.py index 1cfd8f37..ff8d6c24 100644 --- a/examples/api_for_sqlalchemy/utils/sqlalchemy/fields/enum.py +++ b/examples/api_for_sqlalchemy/enums/enums.py @@ -10,10 +10,6 @@ class EnumColumn(types.TypeDecorator): - """ - 
Обычный Enum из python сохраняет в БД значение, а не ключ, как делает Enum sqlalchemy - """ - impl = types.Text cache_ok = True diff --git a/examples/api_for_tortoise_orm/models/enums.py b/examples/api_for_sqlalchemy/enums/user.py similarity index 69% rename from examples/api_for_tortoise_orm/models/enums.py rename to examples/api_for_sqlalchemy/enums/user.py index c37423fc..caec0e56 100644 --- a/examples/api_for_tortoise_orm/models/enums.py +++ b/examples/api_for_sqlalchemy/enums/user.py @@ -1,4 +1,4 @@ -from fastapi_jsonapi.data_layers.fields.enum import Enum +from fastapi_jsonapi.data_layers.fields.enums import Enum class UserStatusEnum(str, Enum): diff --git a/examples/api_for_sqlalchemy/extensions/sqlalchemy.py b/examples/api_for_sqlalchemy/extensions/sqlalchemy.py deleted file mode 100644 index 26926a21..00000000 --- a/examples/api_for_sqlalchemy/extensions/sqlalchemy.py +++ /dev/null @@ -1,28 +0,0 @@ -from sqlalchemy.engine import make_url -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker - -from examples.api_for_sqlalchemy import config - -Base = declarative_base() - - -def async_session() -> sessionmaker: - engine = create_async_engine(url=make_url(config.SQLA_URI), echo=config.SQLA_ECHO) - _async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session - - -class Connector: - @classmethod - async def get_session(cls): - """ - Get session as dependency - - :return: - """ - sess = async_session() - async with sess() as db_session: # type: AsyncSession - yield db_session - await db_session.rollback() diff --git a/examples/api_for_sqlalchemy/helpers/__init__.py b/examples/api_for_sqlalchemy/helpers/__init__.py deleted file mode 100644 index 092d160b..00000000 --- a/examples/api_for_sqlalchemy/helpers/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Helpers for service package. 
Contains factories and updaters packages.""" diff --git a/examples/api_for_sqlalchemy/main.py b/examples/api_for_sqlalchemy/main.py index 92256676..dc90e708 100644 --- a/examples/api_for_sqlalchemy/main.py +++ b/examples/api_for_sqlalchemy/main.py @@ -3,58 +3,52 @@ In module placed db initialization functions, app factory. """ + import sys +from contextlib import asynccontextmanager from pathlib import Path import uvicorn from fastapi import FastAPI -from sqlalchemy.engine import make_url -from sqlalchemy.ext.asyncio import create_async_engine +from fastapi.responses import ORJSONResponse as JSONResponse -from examples.api_for_sqlalchemy import config -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base +from examples.api_for_sqlalchemy.api.views_base import db +from examples.api_for_sqlalchemy.models.base import Base from examples.api_for_sqlalchemy.urls import add_routes -from fastapi_jsonapi import init -CURRENT_FILE = Path(__file__).resolve() -CURRENT_DIR = CURRENT_FILE.parent -PROJECT_DIR = CURRENT_DIR.parent.parent +CURRENT_DIR = Path(__file__).resolve().parent +sys.path.append(f"{CURRENT_DIR.parent.parent}") -sys.path.append(str(PROJECT_DIR)) +# noinspection PyUnusedLocal +@asynccontextmanager +async def lifespan(app: FastAPI): + app.config = {"MAX_INCLUDE_DEPTH": 5} + add_routes(app) -async def sqlalchemy_init() -> None: - engine = create_async_engine(url=make_url(config.SQLA_URI), echo=config.SQLA_ECHO) - async with engine.begin() as conn: - # We don't want to drop tables on each app restart! - # await conn.run_sync(Base.metadata.drop_all) + async with db.engine.begin() as conn: await conn.run_sync(Base.metadata.create_all) + yield -def create_app() -> FastAPI: - """ - Create app factory. 
+ await db.engine.dispose() - :return: app - """ - app = FastAPI( - title="FastAPI and SQLAlchemy", - debug=True, - openapi_url="/openapi.json", - docs_url="/docs", - ) - app.config = {"MAX_INCLUDE_DEPTH": 5} - add_routes(app) - app.on_event("startup")(sqlalchemy_init) - init(app) - return app + +app = FastAPI( + title="FastAPI and SQLAlchemy", + lifespan=lifespan, + debug=True, + default_response_class=JSONResponse, + docs_url="/docs", + openapi_url="/openapi.json", +) if __name__ == "__main__": uvicorn.run( - "asgi:app", + "main:app", host="0.0.0.0", port=8082, reload=True, - app_dir=str(CURRENT_DIR), + app_dir=f"{CURRENT_DIR}", ) diff --git a/examples/api_for_sqlalchemy/models/__init__.py b/examples/api_for_sqlalchemy/models/__init__.py index 5331e31d..5891ad73 100644 --- a/examples/api_for_sqlalchemy/models/__init__.py +++ b/examples/api_for_sqlalchemy/models/__init__.py @@ -1,19 +1,21 @@ from examples.api_for_sqlalchemy.models.child import Child from examples.api_for_sqlalchemy.models.computer import Computer from examples.api_for_sqlalchemy.models.parent import Parent -from examples.api_for_sqlalchemy.models.parent_child_association import ParentToChildAssociation +from examples.api_for_sqlalchemy.models.parent_to_child_association import ParentToChildAssociation from examples.api_for_sqlalchemy.models.post import Post from examples.api_for_sqlalchemy.models.post_comment import PostComment from examples.api_for_sqlalchemy.models.user import User from examples.api_for_sqlalchemy.models.user_bio import UserBio +from examples.api_for_sqlalchemy.models.workplace import Workplace __all__ = ( - "User", - "Post", - "UserBio", - "PostComment", - "Parent", - "Computer", "Child", + "Computer", + "Parent", "ParentToChildAssociation", + "Post", + "PostComment", + "User", + "UserBio", + "Workplace", ) diff --git a/examples/api_for_sqlalchemy/models/base.py b/examples/api_for_sqlalchemy/models/base.py new file mode 100644 index 00000000..55e8499e --- /dev/null +++ 
b/examples/api_for_sqlalchemy/models/base.py @@ -0,0 +1,11 @@ +from typing import Any, ClassVar + +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + + +class Base(DeclarativeBase): + __table_args__: ClassVar[dict[str, Any]] = { + "extend_existing": True, + } + + id: Mapped[int] = mapped_column(primary_key=True) diff --git a/examples/api_for_sqlalchemy/models/child.py b/examples/api_for_sqlalchemy/models/child.py index ccf86d4a..8ce9bba6 100644 --- a/examples/api_for_sqlalchemy/models/child.py +++ b/examples/api_for_sqlalchemy/models/child.py @@ -1,16 +1,12 @@ -from sqlalchemy import Column, Integer, String -from sqlalchemy.orm import relationship +from sqlalchemy.orm import Mapped, relationship -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from .base import Base +from .parent_to_child_association import ParentToChildAssociation -class Child(Base, BaseModelMixin): +class Child(Base): __tablename__ = "right_table_children" - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(String, nullable=False) - parents = relationship( - "ParentToChildAssociation", - back_populates="child", - ) + name: Mapped[str] + + parents: Mapped[list[ParentToChildAssociation]] = relationship(back_populates="child", cascade="delete") diff --git a/examples/api_for_sqlalchemy/models/computer.py b/examples/api_for_sqlalchemy/models/computer.py index 44ecdab3..d2dfb94d 100644 --- a/examples/api_for_sqlalchemy/models/computer.py +++ b/examples/api_for_sqlalchemy/models/computer.py @@ -1,17 +1,20 @@ -from sqlalchemy import Column, ForeignKey, Integer, String -from sqlalchemy.orm import relationship +from __future__ import annotations -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from typing import TYPE_CHECKING, Optional +from 
sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship -class Computer(Base, BaseModelMixin): +from .base import Base + +if TYPE_CHECKING: + from .user import User + + +class Computer(Base): __tablename__ = "computers" - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(String, nullable=False) - user_id = Column(Integer, ForeignKey("users.id"), nullable=True) - user = relationship("User", back_populates="computers") + name: Mapped[str] - def __repr__(self): - return f"{self.__class__.__name__}(id={self.id}, name={self.name!r}, user_id={self.user_id})" + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id")) + user: Mapped[User] = relationship(back_populates="computers") diff --git a/examples/api_for_sqlalchemy/models/db.py b/examples/api_for_sqlalchemy/models/db.py new file mode 100644 index 00000000..15b8c888 --- /dev/null +++ b/examples/api_for_sqlalchemy/models/db.py @@ -0,0 +1,32 @@ +from collections.abc import AsyncIterator +from typing import Union + +from sqlalchemy.engine import URL +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine + + +class DB: + def __init__( + self, + url: Union[str, URL], + echo: bool = False, + echo_pool: bool = False, + ): + self.engine: AsyncEngine = create_async_engine( + url=url, + echo=echo, + echo_pool=echo_pool, + ) + + self.session_maker: async_sessionmaker[AsyncSession] = async_sessionmaker( + autocommit=False, + bind=self.engine, + expire_on_commit=False, + ) + + async def dispose(self): + await self.engine.dispose() + + async def session(self) -> AsyncIterator[AsyncSession]: + async with self.session_maker() as session: + yield session diff --git a/examples/api_for_sqlalchemy/models/enums.py b/examples/api_for_sqlalchemy/models/enums.py deleted file mode 100644 index c37423fc..00000000 --- a/examples/api_for_sqlalchemy/models/enums.py +++ /dev/null @@ -1,11 +0,0 @@ -from 
fastapi_jsonapi.data_layers.fields.enum import Enum - - -class UserStatusEnum(str, Enum): - """ - Status user. - """ - - active = "active" - archive = "archive" - block = "block" diff --git a/examples/api_for_sqlalchemy/models/parent.py b/examples/api_for_sqlalchemy/models/parent.py index 81fdc3da..a445d284 100644 --- a/examples/api_for_sqlalchemy/models/parent.py +++ b/examples/api_for_sqlalchemy/models/parent.py @@ -1,16 +1,12 @@ -from sqlalchemy import Column, Integer, String -from sqlalchemy.orm import relationship +from sqlalchemy.orm import Mapped, relationship -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from .base import Base +from .parent_to_child_association import ParentToChildAssociation -class Parent(Base, BaseModelMixin): +class Parent(Base): __tablename__ = "left_table_parents" - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(String, nullable=False) - children = relationship( - "ParentToChildAssociation", - back_populates="parent", - ) + name: Mapped[str] + + children: Mapped[list[ParentToChildAssociation]] = relationship(back_populates="parent", cascade="delete") diff --git a/examples/api_for_sqlalchemy/models/parent_child_association.py b/examples/api_for_sqlalchemy/models/parent_child_association.py deleted file mode 100644 index 85c48ea7..00000000 --- a/examples/api_for_sqlalchemy/models/parent_child_association.py +++ /dev/null @@ -1,43 +0,0 @@ -from sqlalchemy import Column, ForeignKey, Index, Integer, String -from sqlalchemy.orm import relationship - -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin - - -class ParentToChildAssociation(Base, BaseModelMixin): - __table_args__ = ( - # JSON:API requires `id` field on any model, - # so we can't create a composite PK here - # that's why we need to create 
this index - Index( - "ix_parent_child_association_unique", - "parent_left_id", - "child_right_id", - unique=True, - ), - ) - - __tablename__ = "parent_to_child_association_table" - - id = Column(Integer, primary_key=True, autoincrement=True) - - parent_left_id = Column( - ForeignKey("left_table_parents.id"), - nullable=False, - ) - child_right_id = Column( - ForeignKey("right_table_children.id"), - nullable=False, - ) - extra_data = Column(String(50)) - parent = relationship( - "Parent", - back_populates="children", - # primaryjoin="ParentToChildAssociation.parent_left_id == Parent.id", - ) - child = relationship( - "Child", - back_populates="parents", - # primaryjoin="ParentToChildAssociation.child_right_id == Child.id", - ) diff --git a/examples/api_for_sqlalchemy/models/parent_to_child_association.py b/examples/api_for_sqlalchemy/models/parent_to_child_association.py new file mode 100644 index 00000000..07242b67 --- /dev/null +++ b/examples/api_for_sqlalchemy/models/parent_to_child_association.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey, Index, String +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from .base import Base + +if TYPE_CHECKING: + from .child import Child + from .parent import Parent + + +class ParentToChildAssociation(Base): + __table_args__ = ( + # JSON:API requires `id` field on any model, + # so we can't create a composite PK here + # that's why we need to create this index + Index( + "ix_parent_child_association_unique", + "parent_left_id", + "child_right_id", + unique=True, + ), + ) + + __tablename__ = "parent_to_child_association_table" + + extra_data: Mapped[str] = mapped_column(String(50)) + + child_right_id: Mapped[int] = mapped_column(ForeignKey("right_table_children.id")) + child: Mapped[Child] = relationship(back_populates="parents") + parent_left_id: Mapped[int] = mapped_column(ForeignKey("left_table_parents.id")) + parent: 
Mapped[Parent] = relationship(back_populates="children") diff --git a/examples/api_for_sqlalchemy/models/post.py b/examples/api_for_sqlalchemy/models/post.py index 7295beb3..0581a5e5 100644 --- a/examples/api_for_sqlalchemy/models/post.py +++ b/examples/api_for_sqlalchemy/models/post.py @@ -1,22 +1,23 @@ -"""Post model.""" +from __future__ import annotations -from sqlalchemy import Column, ForeignKey, Integer, String, Text -from sqlalchemy.orm import relationship +from typing import TYPE_CHECKING -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from .base import Base +from .post_comment import PostComment + +if TYPE_CHECKING: + from .user import User -class Post(Base, BaseModelMixin): - __tablename__ = "posts" - id = Column(Integer, primary_key=True, autoincrement=True) - title = Column(String, nullable=False) - body = Column(Text, nullable=False, default="", server_default="") - user_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=False) - user = relationship("User", back_populates="posts", uselist=False) +class Post(Base): + __tablename__ = "posts" - comments = relationship("PostComment", back_populates="post", uselist=True) + body: Mapped[str] = mapped_column(default="", server_default="") + title: Mapped[str] - def __repr__(self): - return f"{self.__class__.__name__}(id={self.id} title={self.title!r} user_id={self.user_id})" + comments: Mapped[list[PostComment]] = relationship(back_populates="post", cascade="delete") + user_id: Mapped[int] = mapped_column(ForeignKey("users.id")) + user: Mapped[User] = relationship(back_populates="posts") diff --git a/examples/api_for_sqlalchemy/models/post_comment.py b/examples/api_for_sqlalchemy/models/post_comment.py index ca509ae2..69754634 100644 --- 
a/examples/api_for_sqlalchemy/models/post_comment.py +++ b/examples/api_for_sqlalchemy/models/post_comment.py @@ -1,29 +1,23 @@ -"""Post Comment model.""" +from __future__ import annotations -from sqlalchemy import Column, ForeignKey, Integer, String -from sqlalchemy.orm import relationship +from typing import TYPE_CHECKING -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from .base import Base -class PostComment(Base, BaseModelMixin): +if TYPE_CHECKING: + from .post import Post + from .user import User + + +class PostComment(Base): __tablename__ = "post_comments" - id = Column(Integer, primary_key=True, autoincrement=True) - text: str = Column(String, nullable=False, default="", server_default="") - - post_id = Column(Integer, ForeignKey("posts.id"), nullable=False, unique=False) - post = relationship("Post", back_populates="comments", uselist=False) - - author_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=False) - author = relationship("User", back_populates="comments", uselist=False) - - def __repr__(self): - return ( - f"{self.__class__.__name__}(" - f"id={self.id}," - f" text={self.text!r}," - f" author_id={self.author_id}," - f" post_id={self.post_id}" - ")" - ) + + text: Mapped[str] = mapped_column(default="", server_default="") + + user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), unique=False) + user: Mapped[User] = relationship(back_populates="comments") + post_id: Mapped[int] = mapped_column(ForeignKey("posts.id"), unique=False) + post: Mapped[Post] = relationship(back_populates="comments") diff --git a/examples/api_for_sqlalchemy/models/schemas/child.py b/examples/api_for_sqlalchemy/models/schemas/child.py deleted file mode 100644 index a053f7b1..00000000 --- a/examples/api_for_sqlalchemy/models/schemas/child.py +++ 
/dev/null @@ -1,37 +0,0 @@ -from typing import TYPE_CHECKING, List - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - -if TYPE_CHECKING: - from .parent_child_association import ParentToChildAssociationSchema - - -class ChildBaseSchema(BaseModel): - """Child base schema.""" - - class Config: - orm_mode = True - - name: str - - parents: List["ParentToChildAssociationSchema"] = Field( - default=None, - relationship=RelationshipInfo( - resource_type="parent_child_association", - many=True, - ), - ) - - -class ChildPatchSchema(ChildBaseSchema): - """Child PATCH schema.""" - - -class ChildInSchema(ChildBaseSchema): - """Child input schema.""" - - -class ChildSchema(ChildInSchema): - """Child item schema.""" - - id: int diff --git a/examples/api_for_sqlalchemy/models/schemas/computer.py b/examples/api_for_sqlalchemy/models/schemas/computer.py deleted file mode 100644 index 0ca4b5f4..00000000 --- a/examples/api_for_sqlalchemy/models/schemas/computer.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Computer schemas module.""" - -from typing import TYPE_CHECKING, Optional - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - -if TYPE_CHECKING: - from .user import UserSchema - - -class ComputerBaseSchema(BaseModel): - """Computer base schema.""" - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - name: str - user: Optional["UserSchema"] = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - - -class ComputerPatchSchema(ComputerBaseSchema): - """Computer PATCH schema.""" - - -class ComputerInSchema(ComputerBaseSchema): - """Computer input schema.""" - - -class ComputerSchema(ComputerInSchema): - """Computer item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - - id: int diff --git a/examples/api_for_sqlalchemy/models/schemas/parent.py b/examples/api_for_sqlalchemy/models/schemas/parent.py deleted file mode 100644 index 5e798d05..00000000 --- 
a/examples/api_for_sqlalchemy/models/schemas/parent.py +++ /dev/null @@ -1,37 +0,0 @@ -from typing import TYPE_CHECKING, List - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - -if TYPE_CHECKING: - from .parent_child_association import ParentToChildAssociationSchema - - -class ParentBaseSchema(BaseModel): - """Parent base schema.""" - - class Config: - orm_mode = True - - name: str - - children: List["ParentToChildAssociationSchema"] = Field( - default=None, - relationship=RelationshipInfo( - resource_type="parent_child_association", - many=True, - ), - ) - - -class ParentPatchSchema(ParentBaseSchema): - """Parent PATCH schema.""" - - -class ParentInSchema(ParentBaseSchema): - """Parent input schema.""" - - -class ParentSchema(ParentInSchema): - """Parent item schema.""" - - id: int diff --git a/examples/api_for_sqlalchemy/models/schemas/parent_child_association.py b/examples/api_for_sqlalchemy/models/schemas/parent_child_association.py deleted file mode 100644 index 3062a710..00000000 --- a/examples/api_for_sqlalchemy/models/schemas/parent_child_association.py +++ /dev/null @@ -1,26 +0,0 @@ -from typing import TYPE_CHECKING - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - -if TYPE_CHECKING: - from .child import ChildSchema - from .parent import ParentSchema - - -class ParentToChildAssociationSchema(BaseModel): - id: int - extra_data: str - - parent: "ParentSchema" = Field( - default=None, - relationship=RelationshipInfo( - resource_type="parent", - ), - ) - - child: "ChildSchema" = Field( - default=None, - relationship=RelationshipInfo( - resource_type="child", - ), - ) diff --git a/examples/api_for_sqlalchemy/models/schemas/post.py b/examples/api_for_sqlalchemy/models/schemas/post.py deleted file mode 100644 index 11d0fe61..00000000 --- a/examples/api_for_sqlalchemy/models/schemas/post.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Post schemas module.""" - -from datetime import datetime -from typing import 
TYPE_CHECKING, List - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - -if TYPE_CHECKING: - from .post_comment import PostCommentSchema - from .user import UserSchema - - -class PostBaseSchema(BaseModel): - """Post base schema.""" - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - title: str - body: str - - user: "UserSchema" = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - - comments: List["PostCommentSchema"] = Field( - relationship=RelationshipInfo( - resource_type="post_comment", - many=True, - ), - ) - - -class PostPatchSchema(PostBaseSchema): - """Post PATCH schema.""" - - -class PostInSchema(PostBaseSchema): - """Post input schema.""" - - -class PostSchema(PostInSchema): - """Post item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - - id: int - created_at: datetime = Field(description="Create datetime") - modified_at: datetime = Field(description="Update datetime") diff --git a/examples/api_for_sqlalchemy/models/schemas/post_comment.py b/examples/api_for_sqlalchemy/models/schemas/post_comment.py deleted file mode 100644 index f9dc908f..00000000 --- a/examples/api_for_sqlalchemy/models/schemas/post_comment.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Post Comment schemas module.""" -from datetime import datetime -from typing import TYPE_CHECKING - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - -if TYPE_CHECKING: - from .post import PostSchema - from .user import UserSchema - - -class PostCommentBaseSchema(BaseModel): - """PostComment base schema.""" - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - text: str - created_at: datetime = Field(description="Create datetime") - modified_at: datetime = Field(description="Update datetime") - - post: "PostSchema" = Field( - relationship=RelationshipInfo( - resource_type="post", - ), - ) - author: "UserSchema" = Field( - relationship=RelationshipInfo( - 
resource_type="user", - ), - ) - - -class PostCommentPatchSchema(PostCommentBaseSchema): - """PostComment PATCH schema.""" - - -class PostCommentInSchema(PostCommentBaseSchema): - """PostComment input schema.""" - - -class PostCommentSchema(PostCommentInSchema): - """PostComment item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - - id: int diff --git a/examples/api_for_sqlalchemy/models/schemas/user.py b/examples/api_for_sqlalchemy/models/schemas/user.py deleted file mode 100644 index 80ecbdf2..00000000 --- a/examples/api_for_sqlalchemy/models/schemas/user.py +++ /dev/null @@ -1,74 +0,0 @@ -"""User schemas module.""" -from __future__ import annotations - -from datetime import datetime -from typing import TYPE_CHECKING, List, Optional - -from examples.api_for_sqlalchemy.models.enums import UserStatusEnum -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - -if TYPE_CHECKING: - from .computer import ComputerSchema - from .post import PostSchema - from .user_bio import UserBioSchema - - -class UserBaseSchema(BaseModel): - """User base schema.""" - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - class Enum: - """User enums.""" - - status = UserStatusEnum - - first_name: Optional[str] = None - last_name: Optional[str] = None - age: Optional[int] = None - status: UserStatusEnum = Field(default=UserStatusEnum.active) - email: str | None = None - - posts: Optional[List["PostSchema"]] = Field( - relationship=RelationshipInfo( - resource_type="post", - many=True, - ), - ) - - bio: Optional["UserBioSchema"] = Field( - relationship=RelationshipInfo( - resource_type="user_bio", - ), - ) - - computers: Optional[List["ComputerSchema"]] = Field( - relationship=RelationshipInfo( - resource_type="computer", - many=True, - ), - ) - - -class UserPatchSchema(UserBaseSchema): - """User PATCH schema.""" - - -class UserInSchema(UserBaseSchema): - """User input schema.""" - - -class 
UserSchema(UserInSchema): - """User item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - - id: int - created_at: datetime = Field(description="Create datetime") - modified_at: datetime = Field(description="Update datetime") diff --git a/examples/api_for_sqlalchemy/models/schemas/user_bio.py b/examples/api_for_sqlalchemy/models/schemas/user_bio.py deleted file mode 100644 index 7b3bc035..00000000 --- a/examples/api_for_sqlalchemy/models/schemas/user_bio.py +++ /dev/null @@ -1,49 +0,0 @@ -"""User Bio schemas module.""" - -from datetime import datetime -from typing import TYPE_CHECKING, Dict, List - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - -if TYPE_CHECKING: - from .user import UserSchema - - -class UserBioBaseSchema(BaseModel): - """UserBio base schema.""" - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - birth_city: str - favourite_movies: str - keys_to_ids_list: Dict[str, List[int]] = None - - user: "UserSchema" = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - - -class UserBioPatchSchema(UserBioBaseSchema): - """UserBio PATCH schema.""" - - -class UserBioInSchema(UserBioBaseSchema): - """UserBio input schema.""" - - -class UserBioSchema(UserBioInSchema): - """UserBio item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - - id: int - created_at: datetime = Field(description="Create datetime") - modified_at: datetime = Field(description="Update datetime") diff --git a/examples/api_for_sqlalchemy/models/user.py b/examples/api_for_sqlalchemy/models/user.py index bd088bfe..565c5d89 100644 --- a/examples/api_for_sqlalchemy/models/user.py +++ b/examples/api_for_sqlalchemy/models/user.py @@ -1,37 +1,34 @@ -"""User model.""" -from __future__ import annotations +from typing import Optional -from sqlalchemy import Column, Integer, String -from sqlalchemy.orm import relationship +from sqlalchemy.orm import Mapped, 
mapped_column, relationship -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.models.enums import UserStatusEnum -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin -from examples.api_for_sqlalchemy.utils.sqlalchemy.fields.enum import EnumColumn +from examples.api_for_sqlalchemy.enums.enums import EnumColumn +from examples.api_for_sqlalchemy.enums.user import UserStatusEnum +from .base import Base +from .computer import Computer +from .post import Post +from .post_comment import PostComment +from .user_bio import UserBio +from .workplace import Workplace -class User(Base, BaseModelMixin): + +class User(Base): __tablename__ = "users" - id = Column(Integer, primary_key=True, autoincrement=True) - first_name: str = Column(String, nullable=True) - last_name: str = Column(String, nullable=True) - age: int = Column(Integer, nullable=True) - status = Column(EnumColumn(UserStatusEnum), nullable=False, default=UserStatusEnum.active) - email: str | None = Column(String, nullable=True) - - posts = relationship("Post", back_populates="user", uselist=True) - bio = relationship("UserBio", back_populates="user", uselist=False) - comments = relationship("PostComment", back_populates="author", uselist=True) - computers = relationship("Computer", back_populates="user", uselist=True) - - def __repr__(self): - return ( - f"{self.__class__.__name__}(" - f"id={self.id}," - f" first_name={self.first_name!r}," - f" last_name={self.last_name!r}" - ")" - ) + + age: Mapped[Optional[int]] + email: Mapped[Optional[str]] + name: Mapped[Optional[str]] = mapped_column(unique=True) + status: Mapped[UserStatusEnum] = mapped_column( + EnumColumn(UserStatusEnum), + default=UserStatusEnum.active, + ) + + bio: Mapped[UserBio] = relationship(back_populates="user", cascade="delete") + comments: Mapped[list[PostComment]] = relationship(back_populates="user", cascade="delete") + computers: Mapped[list[Computer]] = 
relationship(back_populates="user") + posts: Mapped[list[Post]] = relationship(back_populates="user", cascade="delete") + workplace: Mapped[Workplace] = relationship(back_populates="user") class Enum: Status = UserStatusEnum diff --git a/examples/api_for_sqlalchemy/models/user_bio.py b/examples/api_for_sqlalchemy/models/user_bio.py index 7fd458f5..8a67e8db 100644 --- a/examples/api_for_sqlalchemy/models/user_bio.py +++ b/examples/api_for_sqlalchemy/models/user_bio.py @@ -1,29 +1,21 @@ -"""User Bio model.""" -from typing import Dict, List +from __future__ import annotations -from sqlalchemy import JSON, Column, ForeignKey, Integer, String -from sqlalchemy.orm import relationship +from typing import TYPE_CHECKING -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base -from examples.api_for_sqlalchemy.utils.sqlalchemy.base_model_mixin import BaseModelMixin +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from .base import Base -class UserBio(Base, BaseModelMixin): +if TYPE_CHECKING: + from .user import User + + +class UserBio(Base): __tablename__ = "user_bio" - id = Column(Integer, primary_key=True, autoincrement=True) - birth_city: str = Column(String, nullable=False, default="", server_default="") - favourite_movies: str = Column(String, nullable=False, default="", server_default="") - keys_to_ids_list: Dict[str, List[int]] = Column(JSON) - - user_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=True) - user = relationship("User", back_populates="bio", uselist=False) - - def __repr__(self): - return ( - f"{self.__class__.__name__}(" - f"id={self.id}," - f" birth_city={self.birth_city!r}," - f" favourite_movies={self.favourite_movies!r}," - f" user_id={self.user_id}" - ")" - ) + + birth_city: Mapped[str] = mapped_column(default="", server_default="") + favourite_movies: Mapped[str] = mapped_column(default="", server_default="") + + user_id: Mapped[int] = 
mapped_column(ForeignKey("users.id"), unique=True) + user: Mapped[User] = relationship(back_populates="bio") diff --git a/examples/api_for_sqlalchemy/models/workplace.py b/examples/api_for_sqlalchemy/models/workplace.py new file mode 100644 index 00000000..b23c2e18 --- /dev/null +++ b/examples/api_for_sqlalchemy/models/workplace.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from .base import Base + +if TYPE_CHECKING: + from .user import User + + +class Workplace(Base): + __tablename__ = "workplaces" + + name: Mapped[str] + + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id")) + user: Mapped[User] = relationship(back_populates="workplace") diff --git a/examples/api_for_sqlalchemy/models/schemas/__init__.py b/examples/api_for_sqlalchemy/schemas/__init__.py old mode 100644 new mode 100755 similarity index 51% rename from examples/api_for_sqlalchemy/models/schemas/__init__.py rename to examples/api_for_sqlalchemy/schemas/__init__.py index a04a683b..14c5276a --- a/examples/api_for_sqlalchemy/models/schemas/__init__.py +++ b/examples/api_for_sqlalchemy/schemas/__init__.py @@ -1,66 +1,87 @@ -"""schemas package.""" - - from .child import ( + ChildAttributesSchema, ChildInSchema, ChildPatchSchema, ChildSchema, ) from .computer import ( + ComputerAttributesBaseSchema, ComputerInSchema, ComputerPatchSchema, ComputerSchema, ) from .parent import ( + ParentAttributesSchema, ParentInSchema, ParentPatchSchema, ParentSchema, ) -from .parent_child_association import ( +from .parent_to_child_association import ( + ParentToChildAssociationAttributesSchema, ParentToChildAssociationSchema, ) from .post import ( + PostAttributesBaseSchema, PostInSchema, PostPatchSchema, PostSchema, ) from .post_comment import ( - PostCommentInSchema, - PostCommentPatchSchema, + PostCommentAttributesBaseSchema, 
PostCommentSchema, ) from .user import ( + CustomUserAttributesSchema, + UserAttributesBaseSchema, UserInSchema, + UserInSchemaAllowIdOnPost, UserPatchSchema, UserSchema, ) from .user_bio import ( + UserBioAttributesBaseSchema, + UserBioBaseSchema, UserBioInSchema, UserBioPatchSchema, - UserBioSchema, +) +from .workplace import ( + WorkplaceInSchema, + WorkplacePatchSchema, + WorkplaceSchema, ) -__all__ = [ - "UserSchema", - "UserInSchema", - "UserPatchSchema", - "PostSchema", - "PostInSchema", - "PostPatchSchema", - "UserBioSchema", - "UserBioInSchema", - "UserBioPatchSchema", - "PostCommentSchema", - "PostCommentInSchema", - "PostCommentPatchSchema", - "ParentInSchema", - "ParentPatchSchema", - "ParentSchema", - "ComputerSchema", - "ComputerInSchema", - "ComputerPatchSchema", +__all__ = ( + "ChildAttributesSchema", "ChildInSchema", "ChildPatchSchema", "ChildSchema", + "ComputerAttributesBaseSchema", + "ComputerInSchema", + "ComputerPatchSchema", + "ComputerSchema", + "CustomUserAttributesSchema", + "ParentAttributesSchema", + "ParentInSchema", + "ParentPatchSchema", + "ParentSchema", + "ParentToChildAssociationAttributesSchema", "ParentToChildAssociationSchema", -] + "PostAttributesBaseSchema", + "PostCommentAttributesBaseSchema", + "PostCommentSchema", + "PostInSchema", + "PostPatchSchema", + "PostSchema", + "UserAttributesBaseSchema", + "UserBioAttributesBaseSchema", + "UserBioBaseSchema", + "UserBioInSchema", + "UserBioPatchSchema", + "UserInSchema", + "UserInSchemaAllowIdOnPost", + "UserPatchSchema", + "UserSchema", + "WorkplaceInSchema", + "WorkplacePatchSchema", + "WorkplaceSchema", +) diff --git a/examples/api_for_sqlalchemy/schemas/child.py b/examples/api_for_sqlalchemy/schemas/child.py new file mode 100755 index 00000000..ab83f752 --- /dev/null +++ b/examples/api_for_sqlalchemy/schemas/child.py @@ -0,0 +1,42 @@ +from typing import Annotated, Optional + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from 
fastapi_jsonapi.types_metadata import RelationshipInfo + +from .parent_to_child_association import ParentToChildAssociationSchema + + +class ChildAttributesSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + name: str + + +class ChildBaseSchema(ChildAttributesSchema): + """Child base schema.""" + + parents: Annotated[ + Optional[list[ParentToChildAssociationSchema]], + RelationshipInfo( + resource_type="parent_child_association", + many=True, + ), + ] = None + + +class ChildPatchSchema(ChildBaseSchema): + """Child PATCH schema.""" + + +class ChildInSchema(ChildBaseSchema): + """Child input schema.""" + + +class ChildSchema(ChildInSchema): + """Child item schema.""" + + id: int diff --git a/examples/api_for_sqlalchemy/schemas/computer.py b/examples/api_for_sqlalchemy/schemas/computer.py new file mode 100755 index 00000000..9e67651b --- /dev/null +++ b/examples/api_for_sqlalchemy/schemas/computer.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .user import UserSchema + + +class ComputerAttributesBaseSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + name: str + + +class ComputerBaseSchema(ComputerAttributesBaseSchema): + """Computer base schema.""" + + user: Annotated[ + Optional[UserSchema], + RelationshipInfo( + resource_type="user", + ), + ] = None + + +class ComputerPatchSchema(ComputerBaseSchema): + """Computer PATCH schema.""" + + +class ComputerInSchema(ComputerBaseSchema): + """Computer input schema.""" + + +class ComputerSchema(ComputerInSchema): + """Computer item schema.""" + + id: int diff --git a/examples/api_for_sqlalchemy/schemas/parent.py b/examples/api_for_sqlalchemy/schemas/parent.py new file mode 100755 index 00000000..46983d91 --- /dev/null +++ 
b/examples/api_for_sqlalchemy/schemas/parent.py @@ -0,0 +1,42 @@ +from typing import Annotated, Optional + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +from .parent_to_child_association import ParentToChildAssociationSchema + + +class ParentAttributesSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + name: str + + +class ParentBaseSchema(ParentAttributesSchema): + """Parent base schema.""" + + children: Annotated[ + Optional[list[ParentToChildAssociationSchema]], + RelationshipInfo( + resource_type="parent_child_association", + many=True, + ), + ] = None + + +class ParentPatchSchema(ParentBaseSchema): + """Parent PATCH schema.""" + + +class ParentInSchema(ParentBaseSchema): + """Parent input schema.""" + + +class ParentSchema(ParentInSchema): + """Parent item schema.""" + + id: int diff --git a/examples/api_for_sqlalchemy/schemas/parent_to_child_association.py b/examples/api_for_sqlalchemy/schemas/parent_to_child_association.py new file mode 100755 index 00000000..dca6715e --- /dev/null +++ b/examples/api_for_sqlalchemy/schemas/parent_to_child_association.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .child import ChildSchema + from .parent import ParentSchema + + +class ParentToChildAssociationAttributesSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + extra_data: str + + +class ParentToChildAssociationSchema(ParentToChildAssociationAttributesSchema): + parent: Annotated[ + Optional[ParentSchema], + RelationshipInfo( + resource_type="parent", + ), + ] = None + child: Annotated[ + Optional[ChildSchema], + RelationshipInfo( + resource_type="child", + ), + ] = None 
diff --git a/examples/api_for_sqlalchemy/schemas/post.py b/examples/api_for_sqlalchemy/schemas/post.py new file mode 100755 index 00000000..6a8c7392 --- /dev/null +++ b/examples/api_for_sqlalchemy/schemas/post.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +from .post_comment import PostCommentSchema + +if TYPE_CHECKING: + from .user import UserSchema + + +class PostAttributesBaseSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + body: str + title: str + + +class PostBaseSchema(PostAttributesBaseSchema): + """Post base schema.""" + + user: Annotated[ + Optional[UserSchema], + RelationshipInfo( + resource_type="user", + ), + ] = None + comments: Annotated[ + Optional[list[PostCommentSchema]], + RelationshipInfo( + resource_type="post_comment", + many=True, + ), + ] = None + + +class PostPatchSchema(PostBaseSchema): + """Post PATCH schema.""" + + +class PostInSchema(PostBaseSchema): + """Post input schema.""" + + +class PostSchema(PostInSchema): + """Post item schema.""" + + id: int diff --git a/examples/api_for_sqlalchemy/schemas/post_comment.py b/examples/api_for_sqlalchemy/schemas/post_comment.py new file mode 100755 index 00000000..db899ea7 --- /dev/null +++ b/examples/api_for_sqlalchemy/schemas/post_comment.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .post import PostSchema + from .user import UserSchema + + +class PostCommentAttributesBaseSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + text: str + + +class 
PostCommentBaseSchema(PostCommentAttributesBaseSchema): + """PostComment base schema.""" + + post: Annotated[ + PostSchema, + RelationshipInfo( + resource_type="post", + ), + ] + user: Annotated[ + UserSchema, + RelationshipInfo( + resource_type="user", + ), + ] + + +class PostCommentSchema(PostCommentBaseSchema): + """PostComment item schema.""" + + id: int diff --git a/examples/api_for_sqlalchemy/schemas/user.py b/examples/api_for_sqlalchemy/schemas/user.py new file mode 100755 index 00000000..248c229a --- /dev/null +++ b/examples/api_for_sqlalchemy/schemas/user.py @@ -0,0 +1,84 @@ +from typing import Annotated, Optional + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import ClientCanSetId, RelationshipInfo + +from .computer import ComputerSchema +from .post import PostSchema +from .post_comment import PostCommentSchema +from .user_bio import UserBioBaseSchema +from .workplace import WorkplaceSchema + + +class UserAttributesBaseSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + name: str + + age: Optional[int] = None + email: Optional[str] = None + + +class UserBaseSchema(UserAttributesBaseSchema): + """User base schema.""" + + bio: Annotated[ + Optional[UserBioBaseSchema], + RelationshipInfo( + resource_type="user_bio", + ), + ] = None + comments: Annotated[ + Optional[list[PostCommentSchema]], + RelationshipInfo( + resource_type="post_comment", + many=True, + ), + ] = None + computers: Annotated[ + Optional[list[ComputerSchema]], + RelationshipInfo( + resource_type="computer", + many=True, + ), + ] = None + posts: Annotated[ + Optional[list[PostSchema]], + RelationshipInfo( + resource_type="post", + many=True, + ), + ] = None + workplace: Annotated[ + Optional[WorkplaceSchema], + RelationshipInfo( + resource_type="workplace", + ), + ] = None + + +class UserPatchSchema(UserBaseSchema): + """User PATCH schema.""" + + +class UserInSchema(UserBaseSchema): + 
"""User input schema.""" + + +class UserInSchemaAllowIdOnPost(UserBaseSchema): + id: Annotated[str, ClientCanSetId()] + + +class UserSchema(UserInSchema): + """User item schema.""" + + id: int + + +class CustomUserAttributesSchema(UserBaseSchema): + spam: str + eggs: str diff --git a/examples/api_for_sqlalchemy/schemas/user_bio.py b/examples/api_for_sqlalchemy/schemas/user_bio.py new file mode 100755 index 00000000..18db3170 --- /dev/null +++ b/examples/api_for_sqlalchemy/schemas/user_bio.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .user import UserSchema + + +class UserBioAttributesBaseSchema(BaseModel): + """UserBio base schema.""" + + model_config = ConfigDict( + from_attributes=True, + ) + + birth_city: str + favourite_movies: str + + +class UserBioBaseSchema(UserBioAttributesBaseSchema): + """UserBio item schema.""" + + user: Annotated[ + Optional[UserSchema], + RelationshipInfo( + resource_type="user", + ), + ] = None + + +class UserBioPatchSchema(UserBioBaseSchema): + """UserBio PATCH schema.""" + + +class UserBioInSchema(UserBioBaseSchema): + """UserBio input schema.""" + + +class UserBioSchema(UserBioInSchema): + """UserBio item schema.""" + + id: int diff --git a/examples/api_for_sqlalchemy/schemas/workplace.py b/examples/api_for_sqlalchemy/schemas/workplace.py new file mode 100644 index 00000000..206fa2a3 --- /dev/null +++ b/examples/api_for_sqlalchemy/schemas/workplace.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .user import UserSchema + + +class 
WorkplaceBaseSchema(BaseModel): + """Workplace base schema.""" + + model_config = ConfigDict( + from_attributes=True, + ) + + name: str + + user: Annotated[ + Optional[UserSchema], + RelationshipInfo( + resource_type="user", + ), + ] = None + + +class WorkplacePatchSchema(WorkplaceBaseSchema): + """Workplace PATCH schema.""" + + +class WorkplaceInSchema(WorkplaceBaseSchema): + """Workplace input schema.""" + + +class WorkplaceSchema(WorkplaceInSchema): + """Workplace item schema.""" + + id: int diff --git a/examples/api_for_sqlalchemy/urls.py b/examples/api_for_sqlalchemy/urls.py index 55793c01..f933283e 100644 --- a/examples/api_for_sqlalchemy/urls.py +++ b/examples/api_for_sqlalchemy/urls.py @@ -1,17 +1,12 @@ """Route creator""" -from typing import ( - Any, - Dict, - List, -) +from fastapi import APIRouter, FastAPI -from fastapi import ( - APIRouter, - FastAPI, -) +from fastapi_jsonapi import ApplicationBuilder +from fastapi_jsonapi.atomic import AtomicOperations -from examples.api_for_sqlalchemy.models import ( +from .api.views_base import ViewBase +from .models import ( Child, Computer, Parent, @@ -19,12 +14,9 @@ Post, User, UserBio, + Workplace, ) -from fastapi_jsonapi import RoutersJSONAPI -from fastapi_jsonapi.atomic import AtomicOperations - -from .api.views_base import DetailViewBase, ListViewBase -from .models.schemas import ( +from .schemas import ( ChildInSchema, ChildPatchSchema, ChildSchema, @@ -38,119 +30,110 @@ PostInSchema, PostPatchSchema, PostSchema, + UserBioBaseSchema, UserBioInSchema, UserBioPatchSchema, - UserBioSchema, UserInSchema, UserPatchSchema, UserSchema, + WorkplaceInSchema, + WorkplacePatchSchema, + WorkplaceSchema, ) -def add_routes(app: FastAPI) -> List[Dict[str, Any]]: - tags = [ - { - "name": "User", - "description": "Users API", - }, - { - "name": "Post", - "description": "Posts API", - }, - ] - +def add_routes(app: FastAPI): router: APIRouter = APIRouter() - RoutersJSONAPI( + builder = ApplicationBuilder(app) + 
builder.add_resource( router=router, - path="/users", - tags=["User"], - class_detail=DetailViewBase, - class_list=ListViewBase, - model=User, - schema=UserSchema, - resource_type="user", - schema_in_patch=UserPatchSchema, - schema_in_post=UserInSchema, + path="/children", + tags=["Child"], + view=ViewBase, + model=Child, + schema=ChildSchema, + resource_type="child", + schema_in_patch=ChildPatchSchema, + schema_in_post=ChildInSchema, ) - - RoutersJSONAPI( + builder.add_resource( + router=router, + path="/computers", + tags=["Computer"], + view=ViewBase, + model=Computer, + schema=ComputerSchema, + resource_type="computer", + schema_in_patch=ComputerPatchSchema, + schema_in_post=ComputerInSchema, + ) + builder.add_resource( + router=router, + path="/parent-to-child-association", + tags=["Parent To Child Association"], + view=ViewBase, + schema=ParentToChildAssociationSchema, + resource_type="parent-to-child-association", + model=ParentToChildAssociation, + ) + builder.add_resource( + router=router, + path="/parents", + tags=["Parent"], + view=ViewBase, + model=Parent, + schema=ParentSchema, + resource_type="parent", + schema_in_patch=ParentPatchSchema, + schema_in_post=ParentInSchema, + ) + builder.add_resource( router=router, path="/posts", tags=["Post"], - class_detail=DetailViewBase, - class_list=ListViewBase, + view=ViewBase, model=Post, schema=PostSchema, resource_type="post", schema_in_patch=PostPatchSchema, schema_in_post=PostInSchema, ) - - RoutersJSONAPI( + builder.add_resource( router=router, path="/user-bio", tags=["Bio"], - class_detail=DetailViewBase, - class_list=ListViewBase, + view=ViewBase, model=UserBio, - schema=UserBioSchema, + schema=UserBioBaseSchema, resource_type="user_bio", schema_in_patch=UserBioPatchSchema, schema_in_post=UserBioInSchema, ) - - RoutersJSONAPI( - router=router, - path="/parents", - tags=["Parent"], - class_detail=DetailViewBase, - class_list=ListViewBase, - model=Parent, - schema=ParentSchema, - resource_type="parent", - 
schema_in_patch=ParentPatchSchema, - schema_in_post=ParentInSchema, - ) - - RoutersJSONAPI( - router=router, - path="/children", - tags=["Child"], - class_detail=DetailViewBase, - class_list=ListViewBase, - model=Child, - schema=ChildSchema, - resource_type="child", - schema_in_patch=ChildPatchSchema, - schema_in_post=ChildInSchema, - ) - - RoutersJSONAPI( + builder.add_resource( router=router, - path="/parent-to-child-association", - tags=["Parent To Child Association"], - class_detail=DetailViewBase, - class_list=ListViewBase, - schema=ParentToChildAssociationSchema, - resource_type="parent-to-child-association", - model=ParentToChildAssociation, + path="/users", + tags=["User"], + view=ViewBase, + model=User, + schema=UserSchema, + resource_type="user", + schema_in_patch=UserPatchSchema, + schema_in_post=UserInSchema, ) - - RoutersJSONAPI( + builder.add_resource( router=router, - path="/computers", - tags=["Computer"], - class_detail=DetailViewBase, - class_list=ListViewBase, - model=Computer, - schema=ComputerSchema, - resource_type="computer", - schema_in_patch=ComputerPatchSchema, - schema_in_post=ComputerInSchema, + path="/workplaces", + tags=["Workplace"], + view=ViewBase, + model=Workplace, + schema=WorkplaceSchema, + resource_type="workplace", + schema_in_patch=WorkplacePatchSchema, + schema_in_post=WorkplaceInSchema, ) + builder.initialize() atomic = AtomicOperations() app.include_router(router, prefix="") app.include_router(atomic.router, prefix="") - return tags diff --git a/examples/api_for_sqlalchemy/utils/sqlalchemy/base_model_mixin.py b/examples/api_for_sqlalchemy/utils/sqlalchemy/base_model_mixin.py deleted file mode 100644 index ba155621..00000000 --- a/examples/api_for_sqlalchemy/utils/sqlalchemy/base_model_mixin.py +++ /dev/null @@ -1,85 +0,0 @@ -from datetime import datetime -from typing import Generic, List, TypeVar - -from sqlalchemy import ( - Column, - DateTime, - delete, - func, - inspect, - select, -) -from sqlalchemy.engine import 
Result -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.ext.declarative import declared_attr - -from examples.api_for_sqlalchemy.extensions.sqlalchemy import Base - -TypeBase = TypeVar("TypeBase", bound="Base") -Model = TypeVar("Model", Base, Base) - - -class BaseModelMixin(Generic[Model]): - id: int - - @declared_attr - def created_at(cls) -> Column: - """Дата создания записи""" - return Column( - "created_at", - DateTime, - default=datetime.utcnow, - server_default=func.now(), - ) - - @declared_attr - def modified_at(cls) -> Column: - """Дата изменения записи""" - return Column( - "modified_at", - DateTime, - default=datetime.utcnow, - onupdate=datetime.utcnow, - server_onupdate=func.now(), - ) - - def __repr__(self) -> str: - return "<{}, pk: {}>".format( - self.__class__.__name__, - ", ".join(str(getattr(self, key.name)) for key in inspect(self.__class__).primary_key), - ) - - async def save(self, session: AsyncSession, commit: bool = True, flush: bool = False) -> "BaseModelMixin[Model]": - has_pk: bool = all(getattr(self, key.name) for key in inspect(self.__class__).primary_key) - if has_pk: - await session.merge(self) - else: - session.add(self) - if commit: - await session.commit() - elif flush: - await session.flush() - return self - - async def delete(self, session: AsyncSession, commit: bool = True) -> "BaseModelMixin[Model]": - await session.execute(delete(self)) - if commit: - await session.commit() - return self - - @classmethod - async def get_all(cls, session: AsyncSession) -> List[Model]: - result = await session.execute(select(Model)) - return result.scalars().all() - - @classmethod - async def get_by_id(cls, id_: int, session: AsyncSession) -> Model: - stmt = select(cls).where(cls.id == id_) - result: Result = await session.execute(stmt) - return result.scalar_one() - - @classmethod - async def get_or_none(cls, id_: int, session: AsyncSession) -> Model: - stmt = select(cls).where(cls.id == id_) - result: Result = await 
session.execute(stmt) - return result.scalar_one_or_none() diff --git a/examples/api_for_tortoise_orm/README.md b/examples/api_for_tortoise_orm/README.md deleted file mode 100644 index 9a090dbb..00000000 --- a/examples/api_for_tortoise_orm/README.md +++ /dev/null @@ -1,10 +0,0 @@ -## App API-FOR-TORTOISE-ORM - -### Start app -```shell -# in dir fastapi-rest-jsonapi - -export PYTHOPATH="${PYTHONPATH}:./" -python examples/api_for_tortoise_orm/main.py -``` -http://0.0.0.0:8080/docs diff --git a/examples/api_for_tortoise_orm/__init__.py b/examples/api_for_tortoise_orm/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/api_for_tortoise_orm/api/__init__.py b/examples/api_for_tortoise_orm/api/__init__.py deleted file mode 100644 index 9e8d7b1c..00000000 --- a/examples/api_for_tortoise_orm/api/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -"""service API package.""" - -from .user import UserFactory - -__all__ = [ - "UserFactory", -] diff --git a/examples/api_for_tortoise_orm/api/user.py b/examples/api_for_tortoise_orm/api/user.py deleted file mode 100644 index b8cccc1c..00000000 --- a/examples/api_for_tortoise_orm/api/user.py +++ /dev/null @@ -1,96 +0,0 @@ -from http import HTTPStatus -from typing import ( - List, - Union, -) - -from tortoise.exceptions import DoesNotExist -from tortoise.queryset import QuerySet - -from examples.api_for_tortoise_orm.helpers.factories.meta_base import FactoryUseMode -from examples.api_for_tortoise_orm.helpers.factories.user import UserFactory, ErrorCreateUserObject -from examples.api_for_tortoise_orm.helpers.updaters.exceptions import ObjectNotFound -from examples.api_for_tortoise_orm.helpers.updaters.update_user import UpdateUser, ErrorUpdateUserObject -from examples.api_for_tortoise_orm.models.pydantic import UserSchema, UserPatchSchema -from examples.api_for_tortoise_orm.models.pydantic.user import UserInSchema -from examples.api_for_tortoise_orm.models.tortoise import User -from 
fastapi_jsonapi.data_layers.tortoise_orm import TortoiseDataLayer -from fastapi_jsonapi.exceptions import ( - BadRequest, - HTTPException, -) -from fastapi_jsonapi.querystring import QueryStringManager -from fastapi_jsonapi.schema import JSONAPIResultListSchema - - -class UserDetail: - @classmethod - async def get_user(cls, user_id: int, query_params: QueryStringManager) -> User: - """ - Get user by id from ORM. - - :param user_id: int - :param query_params: QueryStringManager - :return: User model. - :raises HTTPException: if user not found. - """ - user: User - try: - user = await User.get(id=user_id) - except DoesNotExist: - raise HTTPException( - status_code=HTTPStatus.FORBIDDEN, - detail="User with id {id} not found".format(id=user_id), - ) - - return user - - @classmethod - async def get(cls, obj_id: int, query_params: QueryStringManager) -> UserSchema: - user: User = await cls.get_user(user_id=obj_id, query_params=query_params) - return UserSchema.from_orm(user) - - @classmethod - async def patch(cls, obj_id: int, data: UserPatchSchema, query_params: QueryStringManager) -> UserSchema: - user_obj: User - try: - user_obj = await UpdateUser.update( - obj_id, - data.dict(exclude_unset=True), - query_params.headers, - ) - except ErrorUpdateUserObject as ex: - raise BadRequest(ex.description, ex.field) - except ObjectNotFound as ex: - raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=ex.description) - - user = UserSchema.from_orm(user_obj) - return user - - -class UserList: - @classmethod - async def get(cls, query_params: QueryStringManager) -> Union[QuerySet, JSONAPIResultListSchema]: - user_query = User.filter().order_by("-id") - dl = TortoiseDataLayer(query=user_query, schema=UserSchema, model=User) - count, users_db = await dl.get_collection(qs=query_params) - total_pages = count // query_params.pagination.size + (count % query_params.pagination.size and 1) - users: List[UserSchema] = [UserSchema.from_orm(i_user) for i_user in users_db] - return 
JSONAPIResultListSchema( - meta={"count": count, "totalPages": total_pages}, - data=[{"id": i_obj.id, "attributes": i_obj.dict(), "type": "user"} for i_obj in users], - ) - - @classmethod - async def post(cls, data: UserInSchema, query_params: QueryStringManager) -> UserSchema: - try: - user_obj = await UserFactory.create( - data=data.dict(), - mode=FactoryUseMode.production, - header=query_params.headers, - ) - except ErrorCreateUserObject as ex: - raise BadRequest(ex.description, ex.field) - - user = UserSchema.from_orm(user_obj) - return user diff --git a/examples/api_for_tortoise_orm/asgi.py b/examples/api_for_tortoise_orm/asgi.py deleted file mode 100644 index 27a05831..00000000 --- a/examples/api_for_tortoise_orm/asgi.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Factory call module.""" - -from examples.api_for_tortoise_orm.main import create_app - -app = create_app() diff --git a/examples/api_for_tortoise_orm/helpers/__init__.py b/examples/api_for_tortoise_orm/helpers/__init__.py deleted file mode 100644 index 092d160b..00000000 --- a/examples/api_for_tortoise_orm/helpers/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Helpers for service package. Contains factories and updaters packages.""" diff --git a/examples/api_for_tortoise_orm/helpers/factories/__init__.py b/examples/api_for_tortoise_orm/helpers/factories/__init__.py deleted file mode 100644 index a5efa93c..00000000 --- a/examples/api_for_tortoise_orm/helpers/factories/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -factories package. - -Contains factories for creating user models. 
-""" diff --git a/examples/api_for_tortoise_orm/helpers/factories/exceptions.py b/examples/api_for_tortoise_orm/helpers/factories/exceptions.py deleted file mode 100644 index fac4c8ed..00000000 --- a/examples/api_for_tortoise_orm/helpers/factories/exceptions.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Create exceptions module.""" - -from typing import Type - -from tortoise import models - - -class ErrorCreateObject(Exception): - """Base create object exception.""" - - def __init__(self, model: Type[models.Model], description: str, field: str = ""): - """For a custom exception, you can define the model and error description.""" - self.model = model - self.message = description - self.field = field - self.description = description - - -class ExceptionBeforeCreate(Exception): - """The exception thrown before the object was created by the factory.""" - - pass - - -class ExceptionNotFactory(Exception): - """The exception that is thrown when there is no factory for a given model in the store.""" - - pass - - -class ExceptionAfterCommit(Exception): - """The exception thrown after the object was created by the factory.""" - - pass diff --git a/examples/api_for_tortoise_orm/helpers/factories/faker.py b/examples/api_for_tortoise_orm/helpers/factories/faker.py deleted file mode 100644 index afc55fb8..00000000 --- a/examples/api_for_tortoise_orm/helpers/factories/faker.py +++ /dev/null @@ -1,9 +0,0 @@ -"""When you'll need to patch faker, do it here.""" - -from faker import Faker -from faker.providers import lorem - -fake = Faker() -fake.add_provider(lorem) - -__all__ = ["fake"] diff --git a/examples/api_for_tortoise_orm/helpers/factories/meta_base.py b/examples/api_for_tortoise_orm/helpers/factories/meta_base.py deleted file mode 100644 index 40f51908..00000000 --- a/examples/api_for_tortoise_orm/helpers/factories/meta_base.py +++ /dev/null @@ -1,227 +0,0 @@ -"""Base factory module.""" - -from typing import ( - Any, - Callable, - Dict, - Generic, - List, - Optional, - Tuple, - Type, 
- TypeVar, - Union, -) - -from tortoise import models - -from fastapi_jsonapi.data_layers.fields.enum import Enum -from .exceptions import ( - ExceptionAfterCommit, - ExceptionBeforeCreate, - ExceptionNotFactory, -) -from fastapi_jsonapi.querystring import HeadersQueryStringManager - - -TYPE_VAR = TypeVar("TYPE_VAR") -TYPE_MODEL = TypeVar("TypeModel", bound=models.Model) - - -class FactoryUseMode(Enum): - """Effects the creation of an object in a factory. In test mode data generated randomly.""" - - test = 1 # for tests, that is, data is generated randomly (unless specified explicitly) - production = 2 # working version, you can not allow random data generation - - -class _BaseFactory(Generic[TYPE_MODEL]): - class Meta(object): - model: Any - - data: Dict[str, Callable] = {} - """simple data like text, dict and etc.""" - awaitable_data: Dict[str, Tuple[Callable, List, Dict]] = {} - """awaitable with arguments (like another factory) - Usage: - awaitable_data = { - 'attribute_name': (lambda: Factories.get("example_factory").create, [], {}), - } - Warning!!! lambda function is required! - """ - - @classmethod - async def _get_data( - cls, - data: Optional[Dict[str, Any]] = None, - mode: FactoryUseMode = FactoryUseMode.test, - ) -> Dict: - new_kwargs = dict() - if data: - new_kwargs.update(data) - - if mode is FactoryUseMode.test: - for name, val in cls.data.items(): - if name not in new_kwargs: - new_kwargs[name] = val() - for name, awaitable_pack in cls.awaitable_data.items(): - if name not in new_kwargs: - lambda_func, f_args, f_kwargs = awaitable_pack - new_kwargs[name] = await lambda_func()(*f_args, **f_kwargs) - return new_kwargs - - @classmethod - async def create_batch( - cls, - count: int = 1, - data: Optional[Dict[str, Any]] = None, - save: bool = True, - mode: FactoryUseMode = FactoryUseMode.test, - ) -> List[models.MODEL]: - """ - Create objects. 
- - :param cls: factory - :param count: you can pass an optional parameter - the number of instances, default = 1 - :param data: named parameters for the factory - :param save: flag save model to db or not (save by default) - :param mode: what is the factory used for - :return: new object. - """ - result_data = [] - for step in range(1, count + 1): - new_kwargs = await cls._get_data(data=data, mode=mode) - try: - new_kwargs = await cls.before_create(many=True, mode=mode, model_kwargs=new_kwargs) - except ExceptionBeforeCreate: - pass - new_object = cls.Meta.model(**new_kwargs) - if save: - await new_object.save() - result_data.append(new_object) - - try: - await cls.after_create(result_data=result_data, many=True, saved=save, mode=mode) - except ExceptionAfterCommit: - pass - - return result_data - - @classmethod - async def create( - cls, - data: Optional[Dict[str, Any]] = None, - header: Union[HeadersQueryStringManager, None] = None, - save: bool = True, - mode: FactoryUseMode = FactoryUseMode.test, - ) -> models.MODEL: - """ - Create objects. - - :param cls: factory - :param data: named parameters for the factory - :param header: header - :param save: flag save model to db or not (save by default) - :param mode: what is the factory used for - :return: created model. - """ - new_kwargs = await cls._get_data(data=data, mode=mode) - - try: - new_kwargs = await cls.before_create(many=False, mode=mode, model_kwargs=new_kwargs, header=header) - except ExceptionBeforeCreate: - pass - - result_data = cls.Meta.model(**new_kwargs) - if save: - await result_data.save() - - try: - await cls.after_create(result_data=result_data, many=False, saved=save, mode=mode, header=header) - except ExceptionAfterCommit: - pass - - return result_data - - @classmethod - async def before_create( - cls, - many: bool, - mode: FactoryUseMode, - model_kwargs: Dict, - header: Union[HeadersQueryStringManager, None] = None, - ) -> Dict: - """ - Perform logic before the factory starts. 
- - :param many: boolean flag: bulk save or not - :param mode: Factory mode - :param model_kwargs: argument which pass to fabric - :param header: header - :return: named parameters to create an object - :raises ExceptionBeforeCreate: if 'before_create' has failed. - """ - raise ExceptionBeforeCreate - - @classmethod - async def after_create( - cls, - result_data: Union[List[TYPE_MODEL], TYPE_MODEL], - many: bool, - saved: bool, - mode: FactoryUseMode, - header: Union[HeadersQueryStringManager, None] = None, - ) -> None: - """ - Perform logic after data. - - :param result_data: created object - :param many: boolean flag: bulk save or not - :param saved: boolean flag: model saved to db or not - :param mode: Factory mode - :param header: header - :raises ExceptionAfterCommit: if 'after_create' has failed. - """ - raise ExceptionAfterCommit - - -class Factories(object): - """Хранилище фабрик.""" - - _factories: Dict[str, Type["_BaseFactory"]] = dict() - - @classmethod - def get(cls, name_model: str) -> Type["_BaseFactory"]: - """ - Get factory for model. - - :param name_model: str. - :return: factory for model. - :raises ExceptionNotFactory: if no factory is found for this model. - """ - factory = cls._factories.get(name_model) - if factory is None: - raise ExceptionNotFactory("Not found factory={model}".format(model=name_model)) - return factory - - @classmethod - def add(cls, name_factory: str, factory: Type["_BaseFactory"]) -> None: - """Add new factory to storage.""" - cls._factories[name_factory] = factory - - -class MetaFactory(type): - """Factory meta class.""" - - def __new__(cls, name, bases, attrs): - """Add new factory to factories storage.""" - factory = super().__new__(cls, name, bases, attrs) - if issubclass(factory, _BaseFactory): - Factories.add(name, factory) - return factory - - -class BaseFactory(_BaseFactory, metaclass=MetaFactory): - """Base factory.""" - - ... 
diff --git a/examples/api_for_tortoise_orm/helpers/factories/user.py b/examples/api_for_tortoise_orm/helpers/factories/user.py deleted file mode 100644 index 38e20453..00000000 --- a/examples/api_for_tortoise_orm/helpers/factories/user.py +++ /dev/null @@ -1,66 +0,0 @@ -from typing import ( - Any, - Dict, - Union, -) - -from examples.api_for_tortoise_orm.models.enums import UserStatusEnum - -from .exceptions import ErrorCreateObject -from .faker import fake -from .meta_base import ( - BaseFactory, - FactoryUseMode, -) -from fastapi_jsonapi.querystring import HeadersQueryStringManager -from ...models.tortoise import User - - -class ErrorCreateUserObject(ErrorCreateObject): - def __init__(self, description, field: str = ""): - """Initialize constructor for exception while creating object.""" - super().__init__(User, description, field) - - -class UserFactory(BaseFactory): - class Meta(object): - model = User - - data = { - "first_name": lambda: fake.word(), - "last_name": lambda: fake.word(), - "status": lambda: UserStatusEnum.active, - } - - @classmethod - async def before_create( - cls, - many: bool, - mode: FactoryUseMode, - model_kwargs: Dict, - header: Union[HeadersQueryStringManager, None] = None, - ) -> Dict: - data_for_create_user: Dict[str, Any] = dict() - cls._set_first_name(data_for_create_user, model_kwargs) - cls._set_last_name(data_for_create_user, model_kwargs) - cls._set_status(data_for_create_user, model_kwargs) - return data_for_create_user - - @classmethod - def _set_first_name(cls, data_for_create_user: Dict, kwargs: Dict): - """ - Set first name. - """ - data_for_create_user["first_name"] = kwargs.get("first_name", "First name") - - @classmethod - def _set_last_name(cls, data_for_create_user: Dict, kwargs: Dict): - """ - Set first name. 
- """ - data_for_create_user["last_name"] = kwargs.get("last_name", "Last name") - - @classmethod - def _set_status(cls, data_for_create_user: Dict, kwargs: Dict): - """Status setter.""" - data_for_create_user["status"] = UserStatusEnum.active diff --git a/examples/api_for_tortoise_orm/helpers/updaters/__init__.py b/examples/api_for_tortoise_orm/helpers/updaters/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/api_for_tortoise_orm/helpers/updaters/exceptions.py b/examples/api_for_tortoise_orm/helpers/updaters/exceptions.py deleted file mode 100644 index be08776b..00000000 --- a/examples/api_for_tortoise_orm/helpers/updaters/exceptions.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Base updater exception module.""" - - -class ErrorUpdateObject(Exception): - """Base updater exception.""" - - def __init__(self, model, description, field: str = ""): - """When creating an exception object for an updater, you must specify the model and error description.""" - self.model = model - self.message = description - self.field = field - self.description = description - - -class ExceptionBeforeUpdate(Exception): - """The exception thrown before the object was updated by the updater.""" - - pass - - -class ObjectNotFound(ErrorUpdateObject): - """The exception if the object was not found.""" - - pass - - -class ExceptionNotUpdater(Exception): - """Raise exception if updater not found in storage.""" - - pass diff --git a/examples/api_for_tortoise_orm/helpers/updaters/meta_base.py b/examples/api_for_tortoise_orm/helpers/updaters/meta_base.py deleted file mode 100644 index 6c5b0df2..00000000 --- a/examples/api_for_tortoise_orm/helpers/updaters/meta_base.py +++ /dev/null @@ -1,136 +0,0 @@ -"""Base updaters module.""" - -from typing import ( - Any, - Dict, - Generic, - Iterable, - Optional, - Type, - TypeVar, - Union, -) - -from tortoise import models -from tortoise.exceptions import DoesNotExist - -from fastapi_jsonapi.querystring import 
HeadersQueryStringManager - -from .exceptions import ( - ExceptionBeforeUpdate, - ExceptionNotUpdater, - ObjectNotFound, -) - -TYPE_VAR = TypeVar("TYPE_VAR") -TYPE_MODEL = TypeVar("TypeModel", bound=models.Model) - - -class _BaseUpdater(Generic[TYPE_MODEL]): - class Meta(object): - model: Any - - @classmethod - async def update( - cls, - model_or_id: Union[TYPE_MODEL, int], - new_data: Dict[str, Any], - header: Union[HeadersQueryStringManager, None] = None, - save: bool = True, - update_fields: Optional[Iterable[str]] = None, - ) -> TYPE_MODEL: - """ - Create objects. - - :param cls: updater - :param new_data: named parameters for the updater - :param model_or_id: object or id - :param header: header - :param save: boolean flag: model saved to db or not - :return: created model. - """ - model_obj = await cls._preload_model(model_or_id) - old_data = await model_obj.clone(pk=model_obj.id) # type: ignore - - try: - model_obj = await cls.before_update(obj=model_obj, new_data=new_data, header=header) - except ExceptionBeforeUpdate: - pass - - if save: - await model_obj.save(update_fields=update_fields) - - return model_obj - - @classmethod - async def _preload_model(cls, model_or_id: Union[TYPE_MODEL, int]) -> TYPE_MODEL: - """ - Preload model method. - - If updater initialize with int id - load from database with this id. - :return: Model. Returns model from initialization or preloaded model. - :raises ObjectNotFound: if object does not found. - """ - if isinstance(model_or_id, int): - try: - model = await cls.Meta.model.get(id=model_or_id) - except DoesNotExist: - raise ObjectNotFound(cls.Meta.model, description="Object does not exist") - - return model - else: - return model_or_id - - @classmethod - async def before_update( - cls, - obj: TYPE_MODEL, - new_data: Dict[Any, Any], - header: Union[HeadersQueryStringManager, None] = None, - ) -> TYPE_MODEL: - """ - Perform logic before the updater starts. 
- - :param obj: argument with preloaded model, - :param new_data: argument with new data - :param header: header - :return: named parameters to update an object - :raises ExceptionBeforeUpdate: if 'before_update' has failed. - """ - raise ExceptionBeforeUpdate - - -class Updaters(object): - """Updaters factory.""" - - _updaters: Dict[str, Type["_BaseUpdater"]] = dict() - - @classmethod - def get(cls, name_model: str) -> Type["_BaseUpdater"]: - """Get updater from storage.""" - try: - return cls._updaters[name_model] - except KeyError: - raise ExceptionNotUpdater("Not found updater={model}".format(model=name_model)) - - @classmethod - def add(cls, name_updater: str, updater: Type["_BaseUpdater"]) -> None: - """Add to storage method.""" - cls._updaters[name_updater] = updater - - -class MetaUpdater(type): - """Metaclass for updater.""" - - def __new__(cls, name, bases, attrs): - """Create updater instance and add it to storage.""" - updater = super().__new__(cls, name, bases, attrs) - if issubclass(updater, _BaseUpdater): - Updaters.add(name, updater) - return updater - - -class BaseUpdater(_BaseUpdater, metaclass=MetaUpdater): - """Base updater.""" - - ... 
diff --git a/examples/api_for_tortoise_orm/helpers/updaters/update_user.py b/examples/api_for_tortoise_orm/helpers/updaters/update_user.py deleted file mode 100644 index c6553df5..00000000 --- a/examples/api_for_tortoise_orm/helpers/updaters/update_user.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Update user helper.""" - -from typing import ( - Any, - Dict, - Optional, - Union, -) - -from fastapi_jsonapi.querystring import HeadersQueryStringManager -from .exceptions import ErrorUpdateObject -from .meta_base import ( - BaseUpdater, -) -from ...models.enums import UserStatusEnum -from ...models.tortoise import User - - -class ErrorUpdateUserObject(ErrorUpdateObject): - """Exception class for user update helper.""" - - def __init__(self, description, field: str = ""): - """Initialize constructor for exception while updating object.""" - super().__init__(User, description, field) - - -class UpdateUser(BaseUpdater): - """User update helper.""" - - class Meta(object): - """Type of model.""" - - model = User - - @classmethod - async def before_update( - cls, - obj: User, - new_data: Dict[str, Any], - header: Union[HeadersQueryStringManager, None] = None, - ) -> User: - cls._update_first_name(obj, new_data) - cls._update_last_name(obj, new_data) - cls._update_status(obj, new_data) - return obj - - @classmethod - def _update_first_name(cls, obj: User, new_data: Dict[str, Any]) -> None: - first_name: Optional[str] = new_data.get("first_name") - if first_name is not None and first_name != obj.first_name: - obj.first_name = first_name - - @classmethod - def _update_last_name(cls, obj: User, new_data: Dict[str, Any]) -> None: - last_name: Optional[str] = new_data.get("last_name") - if last_name is not None and last_name != obj.last_name: - obj.last_name = last_name - - @classmethod - def _update_status( - cls, - obj: User, - new_data: Dict[str, Any], - ) -> None: - new_status: Optional[UserStatusEnum] = new_data.get("status") - if new_status is None or new_status == obj.status: - 
return None - - if new_status is User.Enum.status.block and obj.status is not User.Enum.status.active: - obj.status = new_status diff --git a/examples/api_for_tortoise_orm/main.py b/examples/api_for_tortoise_orm/main.py deleted file mode 100644 index a87eb383..00000000 --- a/examples/api_for_tortoise_orm/main.py +++ /dev/null @@ -1,59 +0,0 @@ -""" -Main module for w_mount service. - -In module placed db initialization functions, app factory. -""" -import sys -from pathlib import Path - -CURRENT_FILE = Path(__file__).resolve() -CURRENT_DIR = CURRENT_FILE.parent -PROJECT_DIR = CURRENT_DIR.parent.parent - -sys.path.append(str(PROJECT_DIR)) - -import uvicorn -from fastapi import FastAPI -from tortoise import Tortoise - -from examples.api_for_tortoise_orm.urls import add_routes - - -async def tortoise_init() -> None: - # Here we create a SQLite DB using file "db.sqlite3" - # also specify the app name of "models" - # which contain models from "app.models" - await Tortoise.init( - db_url="sqlite://db.sqlite3", - modules={"models": ["models.tortoise"]}, - ) - # Generate the schema - await Tortoise.generate_schemas() - - -def create_app() -> FastAPI: - """ - Create app factory. - - :return: app - """ - app = FastAPI( - title="FastAPI and Tortoise ORM", - debug=True, - openapi_url="/openapi.json", - docs_url="/docs", - ) - add_routes(app) - app.on_event("startup")(tortoise_init) - return app - - -if __name__ == "__main__": - uvicorn.run( - "asgi:app", - host="0.0.0.0", - port=8080, - debug=True, - reload=True, - app_dir=str(CURRENT_DIR), - ) diff --git a/examples/api_for_tortoise_orm/models/__init__.py b/examples/api_for_tortoise_orm/models/__init__.py deleted file mode 100644 index ed194fee..00000000 --- a/examples/api_for_tortoise_orm/models/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Models package. 
Contains schemas and tortoise models.""" diff --git a/examples/api_for_tortoise_orm/models/pydantic/__init__.py b/examples/api_for_tortoise_orm/models/pydantic/__init__.py deleted file mode 100644 index 3a2443b8..00000000 --- a/examples/api_for_tortoise_orm/models/pydantic/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""W-mount schemas package.""" - - -from .user import ( - UserPatchSchema, - UserSchema, -) - -__all__ = [ - "UserSchema", - "UserPatchSchema", - "UserSchema", -] diff --git a/examples/api_for_tortoise_orm/models/pydantic/user.py b/examples/api_for_tortoise_orm/models/pydantic/user.py deleted file mode 100644 index 5964f92f..00000000 --- a/examples/api_for_tortoise_orm/models/pydantic/user.py +++ /dev/null @@ -1,51 +0,0 @@ -"""User base schemas module.""" - -from datetime import datetime -from typing import Optional - -from pydantic import ( - BaseModel, - Field, -) - -from examples.api_for_tortoise_orm.models.enums import UserStatusEnum - - -class UserBaseSchema(BaseModel): - """User base schema.""" - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - class Enum: - """User enums.""" - - status = UserStatusEnum - - first_name: Optional[str] = None - last_name: Optional[str] = None - status: UserStatusEnum = Field(default=UserStatusEnum.active) - - -class UserPatchSchema(UserBaseSchema): - """User PATCH schema.""" - - -class UserInSchema(UserBaseSchema): - """User input schema.""" - - -class UserSchema(UserInSchema): - """User item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - model = "users" - - id: int - created_at: datetime = Field(description="Время создания данных") - modified_at: datetime = Field(description="Время изменения данных") diff --git a/examples/api_for_tortoise_orm/models/tortoise/__init__.py b/examples/api_for_tortoise_orm/models/tortoise/__init__.py deleted file mode 100644 index 6104814a..00000000 --- a/examples/api_for_tortoise_orm/models/tortoise/__init__.py +++ /dev/null @@ 
-1,7 +0,0 @@ -"""Tortoise models package.""" - -from examples.api_for_tortoise_orm.models.tortoise.user import User - -__all__ = [ - "User", -] diff --git a/examples/api_for_tortoise_orm/models/tortoise/user.py b/examples/api_for_tortoise_orm/models/tortoise/user.py deleted file mode 100644 index 211f813c..00000000 --- a/examples/api_for_tortoise_orm/models/tortoise/user.py +++ /dev/null @@ -1,29 +0,0 @@ -"""User model.""" - - -from tortoise import ( - fields, - models, -) - -from examples.api_for_tortoise_orm.models.enums import UserStatusEnum - -MAX_LEN_NAME = 100 -NOMENCLATURE_NUMBER_FIELD_LENGTH = 100 - - -class User(models.Model): - """The user model.""" - - class Enum: - status = UserStatusEnum - - id: int = fields.IntField(pk=True) - first_name: str = fields.CharField(max_length=MAX_LEN_NAME) - last_name: str = fields.CharField(max_length=MAX_LEN_NAME) - status: UserStatusEnum = fields.CharEnumField(UserStatusEnum) - created_at = fields.DatetimeField(null=True, auto_now_add=True) - modified_at = fields.DatetimeField(null=True, auto_now=True) - - class Meta: - table = "users" diff --git a/examples/api_for_tortoise_orm/urls.py b/examples/api_for_tortoise_orm/urls.py deleted file mode 100644 index f35f8bd1..00000000 --- a/examples/api_for_tortoise_orm/urls.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Route creator for w_mount service.""" - -from typing import ( - Any, - Dict, - List, -) - -from fastapi import ( - APIRouter, - FastAPI, -) - -from fastapi_jsonapi import RoutersJSONAPI -from .models.pydantic import UserPatchSchema -from .models.pydantic.user import ( - UserSchema, - UserInSchema, -) -from .api.user import ( - UserDetail, - UserList, -) -from .models.tortoise import User - - -def add_routes(app: FastAPI) -> List[Dict[str, Any]]: - tags = [ - { - "name": "User", - "description": "", - }, - ] - - routers: APIRouter = APIRouter() - # TODO: fix example - RoutersJSONAPI( - router=routers, - path="/users", - tags=["User"], - class_detail=UserDetail, - 
class_list=UserList, - schema=UserSchema, - resource_type="user", - schema_in_patch=UserPatchSchema, - schema_in_post=UserInSchema, - model=User, - ) - - app.include_router(routers, prefix="") - return tags diff --git a/examples/api_limited_methods.py b/examples/api_limited_methods.py deleted file mode 100644 index ec46a5db..00000000 --- a/examples/api_limited_methods.py +++ /dev/null @@ -1,156 +0,0 @@ -import sys -from pathlib import Path -from typing import Any, ClassVar, Dict - -import uvicorn -from fastapi import APIRouter, Depends, FastAPI -from sqlalchemy import Column, Integer, Text -from sqlalchemy.engine import make_url -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker - -from fastapi_jsonapi import RoutersJSONAPI, init -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric -from fastapi_jsonapi.schema_base import BaseModel -from fastapi_jsonapi.views.utils import HTTPMethod, HTTPMethodConfig -from fastapi_jsonapi.views.view_base import ViewBase - -CURRENT_FILE = Path(__file__).resolve() -CURRENT_DIR = CURRENT_FILE.parent -PROJECT_DIR = CURRENT_DIR.parent.parent -DB_URL = f"sqlite+aiosqlite:///{CURRENT_DIR}/db.sqlite3" -sys.path.append(str(PROJECT_DIR)) - -Base = declarative_base() - - -class User(Base): - __tablename__ = "users" - id = Column(Integer, primary_key=True) - name = Column(Text, nullable=True) - - -class UserAttributesBaseSchema(BaseModel): - name: str - - class Config: - orm_mode = True - - -class UserSchema(UserAttributesBaseSchema): - """User base schema.""" - - -def async_session() -> sessionmaker: - engine = create_async_engine(url=make_url(DB_URL)) - _async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session - - -class Connector: - @classmethod - async def get_session(cls): - """ - Get session as dependency - - :return: - """ - 
sess = async_session() - async with sess() as db_session: # type: AsyncSession - yield db_session - await db_session.rollback() - - -async def sqlalchemy_init() -> None: - engine = create_async_engine(url=make_url(DB_URL)) - async with engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - -class SessionDependency(BaseModel): - session: AsyncSession = Depends(Connector.get_session) - - class Config: - arbitrary_types_allowed = True - - -def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> Dict[str, Any]: - return { - "session": dto.session, - } - - -class UserDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=session_dependency_handler, - ), - } - - -class UserListView(ListViewBaseGeneric): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=session_dependency_handler, - ), - } - - -def add_routes(app: FastAPI): - tags = [ - { - "name": "User", - "description": "", - }, - ] - - router: APIRouter = APIRouter() - RoutersJSONAPI( - router=router, - path="/users", - tags=["User"], - class_detail=UserDetailView, - class_list=UserListView, - schema=UserSchema, - model=User, - resource_type="user", - methods=[ - RoutersJSONAPI.Methods.GET_LIST, - RoutersJSONAPI.Methods.POST, - RoutersJSONAPI.Methods.GET, - ], - ) - - app.include_router(router, prefix="") - return tags - - -def create_app() -> FastAPI: - """ - Create app factory. 
- - :return: app - """ - app = FastAPI( - title="FastAPI app with limited methods", - debug=True, - openapi_url="/openapi.json", - docs_url="/docs", - ) - add_routes(app) - app.on_event("startup")(sqlalchemy_init) - init(app) - return app - - -app = create_app() - -if __name__ == "__main__": - uvicorn.run( - app, - host="0.0.0.0", - port=8080, - ) diff --git a/examples/api_minimal.py b/examples/api_minimal.py index e8d563f2..e4ce8607 100644 --- a/examples/api_minimal.py +++ b/examples/api_minimal.py @@ -1,98 +1,67 @@ import sys +from contextlib import asynccontextmanager from pathlib import Path -from typing import Any, ClassVar, Dict +from typing import Any, ClassVar, Optional import uvicorn -from fastapi import APIRouter, Depends, FastAPI -from sqlalchemy import Column, Integer, Text +from fastapi import Depends, FastAPI +from fastapi.responses import ORJSONResponse as JSONResponse +from pydantic import ConfigDict from sqlalchemy.engine import make_url -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column -from fastapi_jsonapi import RoutersJSONAPI, init -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric +from examples.api_for_sqlalchemy.models.db import DB +from fastapi_jsonapi import ApplicationBuilder +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric from fastapi_jsonapi.schema_base import BaseModel -from fastapi_jsonapi.views.utils import HTTPMethod, HTTPMethodConfig -from fastapi_jsonapi.views.view_base import ViewBase +from fastapi_jsonapi.views import Operation, OperationConfig, ViewBase -CURRENT_FILE = Path(__file__).resolve() -CURRENT_DIR = CURRENT_FILE.parent -PROJECT_DIR = CURRENT_DIR.parent.parent -DB_URL = f"sqlite+aiosqlite:///{CURRENT_DIR}/db.sqlite3" 
-sys.path.append(str(PROJECT_DIR)) +CURRENT_DIR = Path(__file__).resolve().parent +sys.path.append(f"{CURRENT_DIR.parent.parent}") +db = DB( + url=make_url(f"sqlite+aiosqlite:///{CURRENT_DIR}/db.sqlite3"), +) -Base = declarative_base() + +class Base(DeclarativeBase): + pass class User(Base): __tablename__ = "users" - id = Column(Integer, primary_key=True) - name = Column(Text, nullable=True) - - -class UserAttributesBaseSchema(BaseModel): - name: str - class Config: - orm_mode = True + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[Optional[str]] -class UserSchema(UserAttributesBaseSchema): +class UserSchema(BaseModel): """User base schema.""" + model_config = ConfigDict( + from_attributes=True, + ) -def async_session() -> sessionmaker: - engine = create_async_engine(url=make_url(DB_URL)) - _async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session - - -class Connector: - @classmethod - async def get_session(cls): - """ - Get session as dependency - - :return: - """ - sess = async_session() - async with sess() as db_session: # type: AsyncSession - yield db_session - await db_session.rollback() - - -async def sqlalchemy_init() -> None: - engine = create_async_engine(url=make_url(DB_URL)) - async with engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) + name: str class SessionDependency(BaseModel): - session: AsyncSession = Depends(Connector.get_session) + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) - class Config: - arbitrary_types_allowed = True + session: AsyncSession = Depends(db.session) -def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> Dict[str, Any]: +def session_dependency_handler(view: ViewBase, dto: SessionDependency) -> dict[str, Any]: return { "session": dto.session, } -class UserDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - 
prepare_data_layer_kwargs=session_dependency_handler, - ), - } - - -class UserListView(ListViewBaseGeneric): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( +class UserView(ViewBaseGeneric): + operation_dependencies: ClassVar = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=session_dependency_handler, ), @@ -100,48 +69,40 @@ class UserListView(ListViewBaseGeneric): def add_routes(app: FastAPI): - tags = [ - { - "name": "User", - "description": "", - }, - ] - - router: APIRouter = APIRouter() - RoutersJSONAPI( - router=router, + builder = ApplicationBuilder(app) + builder.add_resource( path="/users", tags=["User"], - class_detail=UserDetailView, - class_list=UserListView, + view=UserView, schema=UserSchema, model=User, resource_type="user", ) + builder.initialize() - app.include_router(router, prefix="") - return tags +# noinspection PyUnusedLocal +@asynccontextmanager +async def lifespan(app: FastAPI): + add_routes(app) -def create_app() -> FastAPI: - """ - Create app factory. 
+ async with db.engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield + + await db.dispose() - :return: app - """ - app = FastAPI( - title="FastAPI and SQLAlchemy", - debug=True, - openapi_url="/openapi.json", - docs_url="/docs", - ) - add_routes(app) - app.on_event("startup")(sqlalchemy_init) - init(app) - return app +app = FastAPI( + title="FastAPI and SQLAlchemy", + lifespan=lifespan, + debug=True, + default_response_class=JSONResponse, + docs_url="/docs", + openapi_url="/openapi.json", +) -app = create_app() if __name__ == "__main__": uvicorn.run( diff --git a/examples/custom_filter_example.py b/examples/custom_filter_example.py deleted file mode 100644 index 4277ae5c..00000000 --- a/examples/custom_filter_example.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Any, Union - -from pydantic.fields import Field, ModelField -from sqlalchemy.orm import InstrumentedAttribute -from sqlalchemy.sql.elements import BinaryExpression, BooleanClauseList - -from fastapi_jsonapi.schema_base import BaseModel - - -def jsonb_contains_sql_filter( - schema_field: ModelField, - model_column: InstrumentedAttribute, - value: dict[Any, Any], - operator: str, -) -> Union[BinaryExpression, BooleanClauseList]: - """ - Any SQLA (or Tortoise) magic here - - :param schema_field: - :param model_column: - :param value: any dict - :param operator: value 'jsonb_contains' - :return: one sqla filter expression - """ - return model_column.op("@>")(value) - - -class PictureSchema(BaseModel): - """ - Now you can use `jsonb_contains` sql filter for this resource - """ - - name: str - meta: dict[Any, Any] = Field( - default_factory=dict, - description="Any additional info in JSON format.", - example={"location": "Moscow", "spam": "eggs"}, - _jsonb_contains_sql_filter_=jsonb_contains_sql_filter, - ) diff --git a/examples/api_for_sqlalchemy/utils/__init__.py b/examples/misc/__init__.py similarity index 100% rename from examples/api_for_sqlalchemy/utils/__init__.py 
rename to examples/misc/__init__.py diff --git a/examples/misc/custom_filter_example.py b/examples/misc/custom_filter_example.py new file mode 100644 index 00000000..59b84127 --- /dev/null +++ b/examples/misc/custom_filter_example.py @@ -0,0 +1,58 @@ +from typing import Annotated, Optional + +import orjson as json +from pydantic import BaseModel, Field +from pydantic.fields import FieldInfo +from sqlalchemy.orm import InstrumentedAttribute +from sqlalchemy.sql.expression import BinaryExpression + +from fastapi_jsonapi.exceptions import InvalidFilters +from fastapi_jsonapi.types_metadata.custom_filter_sql import CustomFilterSQLA + + +def _get_sqlite_json_ilike_expression( + model_column: InstrumentedAttribute, + value: list, + operator: str, +) -> BinaryExpression: + try: + target_field, regex = value + except ValueError: + msg = f'The "value" field has to be list of two values for op `{operator}`' + raise InvalidFilters(msg) + + if isinstance(regex, (list, dict)): + regex = json.dumps(regex).decode() + elif isinstance(regex, bool): + return model_column.op("->>")(target_field).is_(regex) + else: + regex = f"{regex}" + + return model_column.op("->>")(target_field).ilike(regex) + + +class SQLiteJSONIlikeFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + value: list[str], + operator: str, + ) -> BinaryExpression: + return _get_sqlite_json_ilike_expression(model_column, value, operator) + + +sql_filter_sqlite_json_ilike = SQLiteJSONIlikeFilterSQL(op="sqlite_json_ilike") + + +class PictureSchema(BaseModel): + """ + Now you can use `jsonb_contains` sql filter for this resource + """ + + name: str + meta: Annotated[Optional[dict], sql_filter_sqlite_json_ilike] = Field( + default_factory=dict, + description="Any additional info in JSON format.", + example={"location": "Moscow", "spam": "eggs"}, + ) diff --git a/fastapi_jsonapi/VERSION b/fastapi_jsonapi/VERSION index 834f2629..4a36342f 100644 --- 
a/fastapi_jsonapi/VERSION +++ b/fastapi_jsonapi/VERSION @@ -1 +1 @@ -2.8.0 +3.0.0 diff --git a/fastapi_jsonapi/__init__.py b/fastapi_jsonapi/__init__.py index a9d73a18..37f5e771 100644 --- a/fastapi_jsonapi/__init__.py +++ b/fastapi_jsonapi/__init__.py @@ -1,32 +1,18 @@ """JSON API utils package.""" -from pathlib import Path -from fastapi import FastAPI +from pathlib import Path -from fastapi_jsonapi.api import RoutersJSONAPI from fastapi_jsonapi.exceptions import BadRequest -from fastapi_jsonapi.exceptions.handlers import base_exception_handler from fastapi_jsonapi.exceptions.json_api import HTTPException from fastapi_jsonapi.querystring import QueryStringManager +from fastapi_jsonapi.api.application_builder import ApplicationBuilder # isort: skip + __version__ = Path(__file__).parent.joinpath("VERSION").read_text().strip() __all__ = [ - "init", + "ApplicationBuilder", "BadRequest", + "HTTPException", "QueryStringManager", - "RoutersJSONAPI", ] - - -def init(app: FastAPI): - """ - Init the app. - - Processes the application by setting the entities necessary for work. - - Action list: - - Registers default exception handlers for exceptions defined - in "fastapi_jsonapi.exceptions" module. 
- """ - app.add_exception_handler(HTTPException, base_exception_handler) diff --git a/fastapi_jsonapi/api.py b/fastapi_jsonapi/api.py deleted file mode 100644 index 9fbdfd40..00000000 --- a/fastapi_jsonapi/api.py +++ /dev/null @@ -1,704 +0,0 @@ -"""JSON API router class.""" -from enum import Enum, auto -from inspect import Parameter, Signature, signature -from typing import ( - TYPE_CHECKING, - Any, - Callable, - ClassVar, - Dict, - Iterable, - List, - Literal, - Optional, - Type, - TypeVar, - Union, -) - -from fastapi import APIRouter, Body, Path, Query, Request, status -from pydantic import BaseModel as PydanticBaseModel - -from fastapi_jsonapi.data_typing import TypeModel -from fastapi_jsonapi.exceptions import ExceptionResponseSchema -from fastapi_jsonapi.schema_base import BaseModel -from fastapi_jsonapi.schema_builder import SchemaBuilder -from fastapi_jsonapi.signature import create_additional_query_params -from fastapi_jsonapi.utils.dependency_helper import DependencyHelper -from fastapi_jsonapi.views.utils import ( - HTTPMethod, - HTTPMethodConfig, -) - -if TYPE_CHECKING: - from fastapi_jsonapi.views.detail_view import DetailViewBase - from fastapi_jsonapi.views.list_view import ListViewBase - from fastapi_jsonapi.views.view_base import ViewBase - -JSON_API_RESPONSE_TYPE = Dict[Union[int, str], Dict[str, Any]] - -JSONAPIObjectSchemaType = TypeVar("JSONAPIObjectSchemaType", bound=PydanticBaseModel) - -not_passed = object() - - -class ViewMethods(str, Enum): - GET_LIST = auto() - POST = auto() - DELETE_LIST = auto() - GET = auto() - DELETE = auto() - PATCH = auto() - - -class RoutersJSONAPI: - """ - API Router interface for JSON API endpoints in web-services. - """ - - # xxx: store in app, not in routers! 
- all_jsonapi_routers: ClassVar[Dict[str, "RoutersJSONAPI"]] = {} - Methods = ViewMethods - DEFAULT_METHODS = tuple(str(method) for method in ViewMethods) - - def __init__( - self, - router: APIRouter, - path: Union[str, List[str]], - tags: List[str], - class_list: Type["ListViewBase"], - class_detail: Type["DetailViewBase"], - model: Type[TypeModel], - schema: Type[BaseModel], - resource_type: str, - schema_in_post: Optional[Type[BaseModel]] = None, - schema_in_patch: Optional[Type[BaseModel]] = None, - pagination_default_size: Optional[int] = 25, - pagination_default_number: Optional[int] = 1, - pagination_default_offset: Optional[int] = None, - pagination_default_limit: Optional[int] = None, - methods: Iterable[str] = (), - max_cache_size: int = 0, - ) -> None: - """ - Initialize router items. - - :param router: APIRouter from FastAPI - :param path: path prefix, for example `/users` - :param tags: swagger tags - :param class_detail: detail view class - :param class_list: list view class - :param model: SQLA / Tortoise / any other ORM model - :param schema: full object schema for this resource - :param resource_type: `resource type` (JSON:API required) - :param schema_in_post: schema for POST - custom schema to use instead of `schema` - :param schema_in_patch: schema for PATCH - custom schema to use instead of `schema` - - # default pagination params for swagger - :param pagination_default_size: `page[size]` - default swagger param. page/size pagination, used with `page[number]` - :param pagination_default_number: `page[number]` - default swagger param. page/size pagination, used with `page[size]` - :param pagination_default_offset: `page[offset]` - default swagger param. limit/offset pagination, used with `page[limit]` - :param pagination_default_limit: `page[limit]` - default swagger param. 
limit/offset pagination, used with `page[offset]` - """ - self._router: APIRouter = router - self._path: Union[str, List[str]] = path - self._tags: List[str] = tags - self.detail_views = None - self.list_views = None - self.detail_view_resource: Type["DetailViewBase"] = class_detail - self.list_view_resource: Type["ListViewBase"] = class_list - self.type_: str = resource_type - self._schema: Type[BaseModel] = schema - self.schema_list: Type[BaseModel] = schema - self.model: Type[TypeModel] = model - self.schema_detail = schema - # tuple and not set, so ordering is persisted - self.methods = tuple(methods) or self.DEFAULT_METHODS - - if self.type_ in self.all_jsonapi_routers: - msg = f"Resource type {self.type_!r} already registered" - raise ValueError(msg) - self.all_jsonapi_routers[self.type_] = self - - self.pagination_default_size: Optional[int] = pagination_default_size - self.pagination_default_number: Optional[int] = pagination_default_number - self.pagination_default_offset: Optional[int] = pagination_default_offset - self.pagination_default_limit: Optional[int] = pagination_default_limit - self.schema_builder = SchemaBuilder(resource_type=resource_type, max_cache_size=max_cache_size) - - dto = self.schema_builder.create_schemas( - schema=schema, - schema_in_post=schema_in_post, - schema_in_patch=schema_in_patch, - ) - # we need to save post_data and patch_data - # and set dependency `data` as `embed=True` - # because if there's more than one Body dependency, - # FastAPI makes them all `embed=True` and validation breaks! 
- # doc url - # https://fastapi.tiangolo.com/tutorial/body-multiple-params/#embed-a-single-body-parameter - # code: - # https://github.com/tiangolo/fastapi/blob/831b5d5402a65ee9f415670f4116522c8e874ed3/fastapi/dependencies/utils.py#L768 - self.schema_in_post = dto.schema_in_post - self.schema_in_post_data = dto.schema_in_post_data - self.schema_in_patch = dto.schema_in_patch - self.schema_in_patch_data = dto.schema_in_patch_data - self.detail_response_schema = dto.detail_response_schema - self.list_response_schema = dto.list_response_schema - - self._prepare_responses() - self._create_and_register_generic_views() - - def _prepare_responses(self): - self.default_error_responses: JSON_API_RESPONSE_TYPE = { - status.HTTP_400_BAD_REQUEST: {"model": ExceptionResponseSchema}, - status.HTTP_401_UNAUTHORIZED: {"model": ExceptionResponseSchema}, - status.HTTP_404_NOT_FOUND: {"model": ExceptionResponseSchema}, - status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ExceptionResponseSchema}, - } - - def _create_and_register_generic_views(self): - if isinstance(self._path, Iterable) and not isinstance(self._path, (str, bytes)): - for i_path in self._path: - self._register_views(i_path) - else: - self._register_views(self._path) - - def get_endpoint_name( - self, - action: Literal["get", "create", "update", "delete"], - kind: Literal["list", "detail"], - ): - """ - Generate view name - - :param action - :param kind: list / detail - :return: - """ - return f"{action}_{self.type_}_{kind}" - - def _register_get_resource_list(self, path: str): - list_response_example = { - status.HTTP_200_OK: {"model": self.list_response_schema}, - } - self._router.add_api_route( - path=path, - tags=self._tags, - responses=list_response_example | self.default_error_responses, - methods=["GET"], - summary=f"Get list of `{self.type_}` objects", - endpoint=self._create_get_resource_list_view(), - name=self.get_endpoint_name("get", "list"), - ) - - def _register_post_resource_list(self, path: str): - 
create_resource_response_example = { - status.HTTP_201_CREATED: {"model": self.detail_response_schema}, - } - self._router.add_api_route( - path=path, - tags=self._tags, - responses=create_resource_response_example | self.default_error_responses, - methods=["POST"], - summary=f"Create object `{self.type_}`", - status_code=status.HTTP_201_CREATED, - endpoint=self._create_post_resource_list_view(), - name=self.get_endpoint_name("create", "list"), - ) - - def _register_delete_resource_list(self, path: str): - detail_response_example = { - status.HTTP_200_OK: {"model": self.detail_response_schema}, - } - self._router.add_api_route( - path=path, - tags=self._tags, - responses=detail_response_example | self.default_error_responses, - methods=["DELETE"], - summary=f"Delete objects `{self.type_}` by filters", - endpoint=self._create_delete_resource_list_view(), - name=self.get_endpoint_name("delete", "list"), - ) - - def _register_get_resource_detail(self, path: str): - detail_response_example = { - status.HTTP_200_OK: {"model": self.detail_response_schema}, - } - self._router.add_api_route( - # TODO: variable path param name (set default name on DetailView class) - # TODO: trailing slash (optional) - path=path + "/{obj_id}", - tags=self._tags, - responses=detail_response_example | self.default_error_responses, - methods=["GET"], - summary=f"Get object `{self.type_}` by id", - endpoint=self._create_get_resource_detail_view(), - name=self.get_endpoint_name("get", "detail"), - ) - - def _register_patch_resource_detail(self, path: str): - update_response_example = { - status.HTTP_200_OK: {"model": self.detail_response_schema}, - } - self._router.add_api_route( - # TODO: variable path param name (set default name on DetailView class) - # TODO: trailing slash (optional) - path=path + "/{obj_id}", - tags=self._tags, - responses=update_response_example | self.default_error_responses, - methods=["PATCH"], - summary=f"Patch object `{self.type_}` by id", - 
endpoint=self._create_patch_resource_detail_view(), - name=self.get_endpoint_name("update", "detail"), - ) - - def _register_delete_resource_detail(self, path: str): - delete_response_example = { - status.HTTP_204_NO_CONTENT: { - "description": "If a server is able to delete the resource," - " the server MUST return a result with no data", - }, - } - self._router.add_api_route( - # TODO: variable path param name (set default name on DetailView class) - # TODO: trailing slash (optional) - path=path + "/{obj_id}", - tags=self._tags, - responses=delete_response_example | self.default_error_responses, - methods=["DELETE"], - summary=f"Delete object `{self.type_}` by id", - endpoint=self._create_delete_resource_detail_view(), - name=self.get_endpoint_name("delete", "detail"), - status_code=status.HTTP_204_NO_CONTENT, - ) - - def _create_pagination_query_params(self) -> List[Parameter]: - size = Query(self.pagination_default_size, alias="page[size]", title="pagination_page_size") - number = Query(self.pagination_default_number, alias="page[number]", title="pagination_page_number") - offset = Query(self.pagination_default_offset, alias="page[offset]", title="pagination_page_offset") - limit = Query(self.pagination_default_limit, alias="page[limit]", title="pagination_page_limit") - - params = [] - - for q_param in ( - size, - number, - offset, - limit, - ): - params.append( - Parameter( - # name doesn't really matter here - name=q_param.title, - kind=Parameter.POSITIONAL_OR_KEYWORD, - annotation=Optional[int], - default=q_param, - ), - ) - - return params - - @classmethod - def _create_filters_query_dependency_param(cls): - filters_list = Query( - None, - alias="filter", - description="[Filtering docs](https://fastapi-jsonapi.readthedocs.io/en/latest/filtering.html)" - "\nExamples:\n* filter for timestamp interval: " - '`[{"name": "timestamp", "op": "ge", "val": "2020-07-16T11:35:33.383"},' - '{"name": "timestamp", "op": "le", "val": "2020-07-21T11:35:33.383"}]`', - ) - 
return Parameter( - name="filters_list", - kind=Parameter.POSITIONAL_OR_KEYWORD, - annotation=Optional[str], - default=filters_list, - ) - - @classmethod - def _create_sort_query_dependency_param(cls): - sort = Query( - None, - alias="sort", - description="[Sorting docs](https://fastapi-jsonapi.readthedocs.io/en/latest/sorting.html)" - "\nExamples:\n* `email` - sort by email ASC\n* `-email` - sort by email DESC" - "\n* `created_at,-email` - sort by created_at ASC and by email DESC", - ) - return Parameter( - name="sort", - kind=Parameter.POSITIONAL_OR_KEYWORD, - annotation=Optional[str], - default=sort, - ) - - @classmethod - def _get_separated_params(cls, sig: Signature): - """ - Separate params, tail params, skip **kwargs - - :param sig: - :return: - """ - params = [] - tail_params = [] - - for name, param in sig.parameters.items(): - if param.kind is Parameter.VAR_KEYWORD: - # skip **kwargs for spec - continue - - if param.kind is Parameter.KEYWORD_ONLY: - tail_params.append(param) - else: - params.append(param) - - return params, tail_params - - def _update_signature_for_resource_list_view( - self, - wrapper: Callable[..., Any], - additional_dependency_params: Iterable[Parameter] = (), - ) -> Signature: - sig = signature(wrapper) - params, tail_params = self._get_separated_params(sig) - - filter_params, include_params = create_additional_query_params(schema=self.schema_detail) - - extra_params = [] - extra_params.extend(self._create_pagination_query_params()) - extra_params.extend(filter_params) - extra_params.append(self._create_filters_query_dependency_param()) - extra_params.append(self._create_sort_query_dependency_param()) - extra_params.extend(include_params) - - return sig.replace(parameters=params + extra_params + list(additional_dependency_params) + tail_params) - - def _update_signature_for_resource_detail_view( - self, - wrapper: Callable[..., Any], - additional_dependency_params: Iterable[Parameter] = (), - ) -> Signature: - sig = signature(wrapper) 
- params, tail_params = self._get_separated_params(sig) - - _, include_params = create_additional_query_params(schema=self.schema_detail) - - return sig.replace(parameters=params + include_params + list(additional_dependency_params) + tail_params) - - def _create_dependency_params_from_pydantic_model(self, model_class: Type[BaseModel]) -> List[Parameter]: - return [ - Parameter( - name=field_name, - kind=Parameter.POSITIONAL_OR_KEYWORD, - annotation=field_info.outer_type_, - default=field_info.default, - ) - for field_name, field_info in model_class.__fields__.items() - ] - - def _update_method_config(self, view: Type["ViewBase"], method: HTTPMethod) -> HTTPMethodConfig: - target_config = view.method_dependencies.get(method) or HTTPMethodConfig() - common_config = view.method_dependencies.get(HTTPMethod.ALL) or HTTPMethodConfig() - - dependencies_model = target_config.dependencies or common_config.dependencies - - same_type = target_config.dependencies is common_config.dependencies - if not same_type and all([target_config.dependencies, common_config.dependencies]): - dependencies_model = type( - f"{view.__name__}{method.name.title()}MethodDependencyModel", - ( - common_config.dependencies, - target_config.dependencies, - ), - {}, - ) - - new_method_config = HTTPMethodConfig( - dependencies=dependencies_model, - prepare_data_layer_kwargs=target_config.handler or common_config.handler, - ) - view.method_dependencies[method] = new_method_config - - return new_method_config - - def _update_method_config_and_get_dependency_params( - self, - view: Type["ViewBase"], - method: HTTPMethod, - ) -> List[Parameter]: - method_config = self._update_method_config(view, method) - - if method_config.dependencies is None: - return [] - - return self._create_dependency_params_from_pydantic_model(method_config.dependencies) - - def prepare_dependencies_handler_signature( - self, - custom_handler: Callable[..., Any], - method_config: HTTPMethodConfig, - ) -> Signature: - sig = 
signature(custom_handler) - - additional_dependency_params = [] - if method_config.dependencies is not None: - additional_dependency_params = self._create_dependency_params_from_pydantic_model( - model_class=method_config.dependencies, - ) - - params, tail_params = self._get_separated_params(sig) - - return sig.replace(parameters=params + list(additional_dependency_params) + tail_params) - - async def handle_view_dependencies( - self, - request: Request, - view_cls: Type["ViewBase"], - method: HTTPMethod, - ) -> Dict[str, Any]: - """ - Combines all dependencies (prepared) and returns them as list - - Consider method config is already prepared for generic views - Reuse the same config for atomic operations - - :param request: - :param view_cls: - :param method: - :return: - """ - method_config: HTTPMethodConfig = view_cls.method_dependencies[method] - - def handle_dependencies(**dep_kwargs): - return dep_kwargs - - handle_dependencies.__signature__ = self.prepare_dependencies_handler_signature( - custom_handler=handle_dependencies, - method_config=method_config, - ) - - dep_helper = DependencyHelper(request=request) - dependencies_result: Dict[str, Any] = await dep_helper.run(handle_dependencies) - return dependencies_result - - def _create_get_resource_list_view(self): - """ - Create wrapper for GET list (get objects list) - - :return: - """ - - async def wrapper(request: Request, **extra_view_deps): - resource = self.list_view_resource( - request=request, - jsonapi=self, - ) - - response = await resource.handle_get_resource_list(**extra_view_deps) - return response - - additional_dependency_params = self._update_method_config_and_get_dependency_params( - self.list_view_resource, - HTTPMethod.GET, - ) - wrapper.__signature__ = self._update_signature_for_resource_list_view( - wrapper, - additional_dependency_params=additional_dependency_params, - ) - return wrapper - - def _create_post_resource_list_view(self): - """ - Create wrapper for POST list (create a new 
object) - - :return: - """ - # `data` as embed Body param - schema_in = self.schema_in_post_data - - async def wrapper( - request: Request, - data: schema_in = Body(embed=True), - **extra_view_deps, - ): - resource = self.list_view_resource( - request=request, - jsonapi=self, - ) - - response = await resource.handle_post_resource_list( - data_create=data, - **extra_view_deps, - ) - return response - - additional_dependency_params = self._update_method_config_and_get_dependency_params( - self.list_view_resource, - HTTPMethod.POST, - ) - - # POST request returns result as for detail view - wrapper.__signature__ = self._update_signature_for_resource_detail_view( - wrapper, - additional_dependency_params=additional_dependency_params, - ) - return wrapper - - def _create_delete_resource_list_view(self): - """ - Create wrapper for DELETE list (delete objects) - - :return: - """ - - async def wrapper(request: Request, **extra_view_deps): - resource = self.list_view_resource( - request=request, - jsonapi=self, - ) - - response = await resource.handle_delete_resource_list(**extra_view_deps) - return response - - additional_dependency_params = self._update_method_config_and_get_dependency_params( - self.list_view_resource, - HTTPMethod.DELETE, - ) - - wrapper.__signature__ = self._update_signature_for_resource_list_view( - wrapper, - additional_dependency_params=additional_dependency_params, - ) - return wrapper - - def _create_get_resource_detail_view(self): - """ - Create wrapper for GET detail (get object by id) - - :return: - """ - - # TODO: - # - custom path param name (set default name on DetailView class) - # - custom type for obj id (get type from DetailView class) - async def wrapper(request: Request, obj_id: str = Path(...), **extra_view_deps): - resource = self.detail_view_resource( - request=request, - jsonapi=self, - ) - - # TODO: pass obj_id as kwarg (get name from DetailView class) - response = await resource.handle_get_resource_detail(obj_id, 
**extra_view_deps) - return response - - additional_dependency_params = self._update_method_config_and_get_dependency_params( - self.detail_view_resource, - HTTPMethod.GET, - ) - - wrapper.__signature__ = self._update_signature_for_resource_detail_view( - wrapper, - additional_dependency_params=additional_dependency_params, - ) - return wrapper - - def _create_patch_resource_detail_view(self): - """ - Create wrapper for PATCH detail (patch object by id) - - :return: - """ - # `data` as embed Body param - schema_in = self.schema_in_patch_data - - async def wrapper( - request: Request, - data: schema_in = Body(embed=True), - obj_id: str = Path(...), - **extra_view_deps, - ): - resource = self.detail_view_resource( - request=request, - jsonapi=self, - ) - - # TODO: pass obj_id as kwarg (get name from DetailView class) - response = await resource.handle_update_resource( - obj_id=obj_id, - data_update=data, - **extra_view_deps, - ) - return response - - additional_dependency_params = self._update_method_config_and_get_dependency_params( - self.detail_view_resource, - HTTPMethod.PATCH, - ) - - wrapper.__signature__ = self._update_signature_for_resource_detail_view( - wrapper, - additional_dependency_params=additional_dependency_params, - ) - return wrapper - - def _create_delete_resource_detail_view(self): - """ - Create wrapper for DELETE detail (delete object by id) - - :return: - """ - - async def wrapper( - request: Request, - obj_id: str = Path(...), - **extra_view_deps, - ): - resource = self.detail_view_resource( - request=request, - jsonapi=self, - ) - - # TODO: pass obj_id as kwarg (get name from DetailView class) - response = await resource.handle_delete_resource(obj_id=obj_id, **extra_view_deps) - return response - - additional_dependency_params = self._update_method_config_and_get_dependency_params( - self.detail_view_resource, - HTTPMethod.DELETE, - ) - - wrapper.__signature__ = self._update_signature_for_resource_detail_view( - wrapper, - 
additional_dependency_params=additional_dependency_params, - ) - - return wrapper - - def _register_views(self, path: str): - """ - Register wrapper views - - :param path: - :return: - """ - methods_map: Dict[Union[str, ViewMethods], Callable[[str], None]] = { - ViewMethods.GET_LIST: self._register_get_resource_list, - ViewMethods.POST: self._register_post_resource_list, - ViewMethods.DELETE_LIST: self._register_delete_resource_list, - ViewMethods.GET: self._register_get_resource_detail, - ViewMethods.PATCH: self._register_patch_resource_detail, - ViewMethods.DELETE: self._register_delete_resource_detail, - } - # patch for Python < 3.11 - for key, value in list(methods_map.items()): - methods_map[str(key)] = value - - for method in self.methods: - # `to str` so Python < 3.11 is supported - register = methods_map[str(method)] - register(path) diff --git a/examples/api_for_sqlalchemy/utils/sqlalchemy/__init__.py b/fastapi_jsonapi/api/__init__.py similarity index 100% rename from examples/api_for_sqlalchemy/utils/sqlalchemy/__init__.py rename to fastapi_jsonapi/api/__init__.py diff --git a/fastapi_jsonapi/api/application_builder.py b/fastapi_jsonapi/api/application_builder.py new file mode 100644 index 00000000..3a38da61 --- /dev/null +++ b/fastapi_jsonapi/api/application_builder.py @@ -0,0 +1,361 @@ +from itertools import product +from queue import Queue +from typing import Callable, Iterable, Optional, Type + +from fastapi import APIRouter, FastAPI, status +from pydantic import BaseModel + +from fastapi_jsonapi.api.endpoint_builder import EndpointsBuilder +from fastapi_jsonapi.api.schemas import ResourceData +from fastapi_jsonapi.atomic import AtomicOperations +from fastapi_jsonapi.data_typing import TypeModel +from fastapi_jsonapi.exceptions import ExceptionResponseSchema, HTTPException +from fastapi_jsonapi.exceptions.handlers import base_exception_handler +from fastapi_jsonapi.schema import get_schema_from_field_annotation +from fastapi_jsonapi.schema_builder 
import SchemaBuilder +from fastapi_jsonapi.storages.models_storage import models_storage +from fastapi_jsonapi.storages.schemas_storage import schemas_storage +from fastapi_jsonapi.storages.views_storage import views_storage +from fastapi_jsonapi.views import Operation, ViewBase + + +class ApplicationBuilderError(Exception): ... + + +class ApplicationBuilder: + def __init__( + self, + app: FastAPI, + base_router: Optional[APIRouter] = None, + exception_handler: Optional[Callable] = None, + **base_router_include_kwargs, + ): + self._app: FastAPI = app + self._base_router: APIRouter = base_router or APIRouter() + self._base_router_include_kwargs: dict = base_router_include_kwargs + self._routers: dict[str, APIRouter] = {} + self._router_include_kwargs: dict[str, dict] = {} + self._paths = set() + self._resource_data: dict[str, ResourceData] = {} + self._exception_handler: Callable = base_exception_handler + self._initialized: bool = False + + if exception_handler is not None: + self._exception_handler = exception_handler + + def add_resource( + self, + path: str, + tags: Iterable[str], + resource_type: str, + view: Type[ViewBase], + model: Type[TypeModel], + schema: Type[BaseModel], + router: Optional[APIRouter] = None, + schema_in_post: Optional[Type[BaseModel]] = None, + schema_in_patch: Optional[Type[BaseModel]] = None, + pagination_default_size: Optional[int] = 25, + pagination_default_number: Optional[int] = 1, + pagination_default_offset: Optional[int] = None, + pagination_default_limit: Optional[int] = None, + operations: Iterable[str] = (), + ending_slash: bool = True, + model_id_field_name: str = "id", + include_router_kwargs: Optional[dict] = None, + ): + if self._initialized: + msg = "Can't add resource after app initialization" + raise ApplicationBuilderError(msg) + + if resource_type in self._resource_data: + msg = f"Resource {resource_type!r} already registered" + raise ApplicationBuilderError(msg) + + if include_router_kwargs is not None and router is 
None: + msg = "The argument 'include_router_kwargs' forbidden if 'router' not passed" + raise ApplicationBuilderError(msg) + + models_storage.add_model(resource_type, model, model_id_field_name) + views_storage.add_view(resource_type, view) + dto = SchemaBuilder(resource_type).create_schemas( + schema=schema, + schema_in_post=schema_in_post, + schema_in_patch=schema_in_patch, + ) + + resource_operations = [] + for operation in operations: + if operation == Operation.ALL: + resource_operations = Operation.real_operations() + break + + resource_operations.append(operation) + + resource_operations = resource_operations or Operation.real_operations() + + resource_data = ResourceData( + path=path, + router=router, + tags=list(tags), + view=view, + model=model, + source_schema=schema, + schema_in_post=schema_in_post, + schema_in_post_data=dto.schema_in_post_data, + schema_in_patch=schema_in_patch, + schema_in_patch_data=dto.schema_in_patch_data, + detail_response_schema=dto.detail_response_schema, + list_response_schema=dto.list_response_schema, + pagination_default_size=pagination_default_size, + pagination_default_number=pagination_default_number, + pagination_default_offset=pagination_default_offset, + pagination_default_limit=pagination_default_limit, + operations=resource_operations, + ending_slash=ending_slash, + ) + self._resource_data[resource_type] = resource_data + + router = router or self._base_router + self._routers[resource_type] = router + + if router is not None: + self._router_include_kwargs[resource_type] = include_router_kwargs or {} + + def initialize(self) -> FastAPI: + if self._initialized: + msg = "Application already initialized" + raise Exception(msg) + + self._initialized = True + self._traverse_relationship_schemas() + self._app.add_exception_handler(HTTPException, self._exception_handler) + + status_codes = self._get_status_codes() + summaries = self._get_summary_pattern_strings() + + for resource_type, data in self._resource_data.items(): + 
example_responses = self._get_example_responses(data) + endpoints_builder = EndpointsBuilder(resource_type, data) + + for operation in data.operations: + name, endpoint = endpoints_builder.create_common_fastapi_endpoint(operation) + http_method = operation.http_method() + path = self._create_path( + path=data.path, + ending_slash=data.ending_slash, + include_object_id=( + operation + in { + Operation.GET, + Operation.UPDATE, + Operation.DELETE, + } + ), + ) + + self._routers[resource_type].add_api_route( + path=path, + tags=data.tags, + responses=example_responses[operation] | self._get_default_error_responses(), + methods=[http_method], + summary=summaries[operation].format(resource_type), + status_code=status_codes[operation], + endpoint=endpoint, + name=name, + ) + + relationships_info = schemas_storage.get_relationships_info( + resource_type=resource_type, + operation_type="get", + ) + + for relationship_name, info in relationships_info.items(): + if not views_storage.has_view(info.resource_type): + continue + + operation = Operation.GET_LIST if info.many else Operation.GET + path = self._create_path( + path=data.path, + ending_slash=data.ending_slash, + include_object_id=True, + relationship_name=relationship_name, + ) + name, endpoint = endpoints_builder.create_relationship_endpoint( + resource_type=info.resource_type, + relationship_name=relationship_name, + parent_resource_type=resource_type, + model=models_storage.get_model(info.resource_type), + view=views_storage.get_view(info.resource_type), + source_schema=schemas_storage.get_source_schema(info.resource_type), + operation=operation, + ) + self._routers[resource_type].add_api_route( + path=path, + tags=data.tags, + responses=example_responses[operation] | self._get_default_error_responses(), + methods=[operation.http_method()], + summary=summaries[operation].format(resource_type), + status_code=status_codes[operation], + endpoint=endpoint, + name=name, + ) + + registered_routers = set() + for 
resource_type, router in self._routers.items(): + if id(router) in registered_routers: + continue + + include_kwargs = self._router_include_kwargs.get(resource_type, {}) + if router is self._base_router: + include_kwargs = self._base_router_include_kwargs + + self._app.include_router(router, **include_kwargs) + registered_routers.add(id(router)) + + atomic = AtomicOperations() + self._app.include_router(atomic.router) + + return self._app + + @staticmethod + def _get_status_codes() -> dict[Operation, int]: + return { + Operation.GET: status.HTTP_200_OK, + Operation.CREATE: status.HTTP_201_CREATED, + Operation.UPDATE: status.HTTP_200_OK, + Operation.DELETE: status.HTTP_204_NO_CONTENT, + Operation.GET_LIST: status.HTTP_200_OK, + Operation.DELETE_LIST: status.HTTP_200_OK, + } + + @staticmethod + def _get_example_responses(data: ResourceData) -> dict[Operation, dict]: + return { + Operation.GET: { + status.HTTP_200_OK: {"model": data.detail_response_schema}, + }, + Operation.CREATE: { + status.HTTP_201_CREATED: {"model": data.detail_response_schema}, + }, + Operation.UPDATE: { + status.HTTP_200_OK: {"model": data.detail_response_schema}, + }, + Operation.DELETE: { + status.HTTP_204_NO_CONTENT: { + "description": "If a server is able to delete the resource," + " the server MUST return a result with no data", + }, + }, + Operation.GET_LIST: { + status.HTTP_200_OK: {"model": data.list_response_schema}, + }, + Operation.DELETE_LIST: { + status.HTTP_200_OK: {"model": data.detail_response_schema}, + }, + } + + @staticmethod + def _get_default_error_responses() -> dict: + return { + status.HTTP_400_BAD_REQUEST: {"model": ExceptionResponseSchema}, + status.HTTP_401_UNAUTHORIZED: {"model": ExceptionResponseSchema}, + status.HTTP_404_NOT_FOUND: {"model": ExceptionResponseSchema}, + status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ExceptionResponseSchema}, + } + + @staticmethod + def _get_summary_pattern_strings() -> dict[Operation, str]: + return { + Operation.GET: "Get object 
`{}` by id", + Operation.CREATE: "Create object `{}`", + Operation.UPDATE: "Update object `{}` by id", + Operation.DELETE: "Delete object `{}` by id", + Operation.GET_LIST: "Get list of `{}` objects", + Operation.DELETE_LIST: "Delete objects `{}` by filters", + } + + @staticmethod + def _create_path( + path: str, + ending_slash: bool, + include_object_id: bool, + relationship_name: str = "", + ) -> str: + path = path.removesuffix("/") + suffix = "/" if ending_slash else "" + + if include_object_id: + obj_id = "{obj_id}" + path = f"{path}/{obj_id}" + + if relationship_name: + path = f"{path}/relationships/{relationship_name.replace('_', '-')}" + + return f"{path}{suffix}" + + def _traverse_relationship_schemas(self): + # User can have relationship resources without having CRUD operations for these resource types. + # So the SchemaStorage will not be filled with schemas without passing through the relationships. + + operations = Queue() + handled_operations = set() + + for item in product(self._resource_data, ("create", "update", "get")): + operations.put(item) + + while not operations.empty(): + if (operation := operations.get()) in handled_operations: + continue + + handled_operations.add(operation) + resource_type, operation_type = operation + + if not schemas_storage.has_operation(resource_type, operation_type): + continue + + parent_model = models_storage.get_model(resource_type) + relationships_info = schemas_storage.get_relationships_info(resource_type, operation_type).items() + + for relationship_name, info in relationships_info: + if schemas_storage.has_operation(info.resource_type, operation_type="get"): + continue + + field = schemas_storage.get_source_relationship_pydantic_field( + resource_type=resource_type, + operation_type=operation_type, + field_name=relationship_name, + ) + + relationship_source_schema = get_schema_from_field_annotation(field) + relationship_model = models_storage.search_relationship_model( + resource_type=resource_type, + 
model=parent_model, + field_name=relationship_name, + ) + models_storage.add_model(info.resource_type, relationship_model, info.id_field_name) + + builder = SchemaBuilder(resource_type=resource_type) + dto = builder.get_info_from_schema_for_building( + base_name=f"{info.resource_type}_hidden_generation", + schema=relationship_source_schema, + operation_type="get", + ) + data_schema = builder.build_jsonapi_object( + base_name=f"{info.resource_type}_hidden_generation_ObjectJSONAPI", + resource_type=info.resource_type, + dto=dto, + with_relationships=False, + id_field_required=True, + ) + schemas_storage.add_resource( + builder=builder, + resource_type=info.resource_type, + operation_type="get", + source_schema=relationship_source_schema, + data_schema=data_schema, + attributes_schema=dto.attributes_schema, + field_schemas=dto.field_schemas, + relationships_info=dto.relationships_info, + model_validators=dto.model_validators, + ) + operations.put((info.resource_type, "get")) diff --git a/fastapi_jsonapi/api/endpoint_builder.py b/fastapi_jsonapi/api/endpoint_builder.py new file mode 100644 index 00000000..b1d1ae7d --- /dev/null +++ b/fastapi_jsonapi/api/endpoint_builder.py @@ -0,0 +1,554 @@ +from inspect import Parameter, Signature, signature +from typing import Any, Callable, Iterable, Literal, Optional, Type + +from fastapi import Body, Path, Query, Request + +from fastapi_jsonapi.api.schemas import ResourceData +from fastapi_jsonapi.data_typing import TypeModel, TypeSchema +from fastapi_jsonapi.signature import ( + create_additional_query_params, + create_dependency_params_from_pydantic_model, + get_separated_params, +) +from fastapi_jsonapi.views import Operation, OperationConfig, ViewBase + + +class OperationAlreadyHandled: ... 
+ + +class EndpointsBuilder: + def __init__(self, resource_type: str, data: ResourceData): + self._resource_type: str = resource_type + self._data: ResourceData = data + self._operation_to_action: dict[Operation, Literal["get", "create", "update", "delete"]] = { + Operation.CREATE: "create", + Operation.GET: "get", + Operation.GET_LIST: "get", + Operation.DELETE: "delete", + Operation.DELETE_LIST: "delete", + Operation.UPDATE: "update", + } + self._operation_to_creation_method: dict[Operation, Callable] = { + Operation.CREATE: self.create_post_fastapi_endpoint, + Operation.DELETE: self.create_delete_fastapi_endpoint, + Operation.DELETE_LIST: self.create_delete_list_fastapi_endpoint, + Operation.GET: self.create_get_fastapi_endpoint, + Operation.GET_LIST: self.create_get_list_fastapi_endpoint, + Operation.UPDATE: self.create_update_fastapi_endpoint, + } + + @classmethod + def _update_operation_config(cls, view: Type[ViewBase], operation: Operation) -> OperationConfig: + """ + Merge two pydantic schemas into one. 
+ """ + target_config = view.operation_dependencies.get(operation) or OperationConfig() + common_config = view.operation_dependencies.get(Operation.ALL) or OperationConfig() + + # in case of relationship fetch endpoints + if target_config.dependencies and issubclass(target_config.dependencies, OperationAlreadyHandled): + return view.operation_dependencies[operation] + + dependencies_model = target_config.dependencies or common_config.dependencies + + same_type = target_config.dependencies is common_config.dependencies + if not same_type and all([target_config.dependencies, common_config.dependencies]): + dependencies_model = type( + f"{view.__name__}{operation.name.title()}MethodDependencyModel", + ( + common_config.dependencies, + target_config.dependencies, + OperationAlreadyHandled, + ), + {}, + ) + + new_method_config = OperationConfig( + dependencies=dependencies_model, + prepare_data_layer_kwargs=target_config.handler or common_config.handler, + ) + view.operation_dependencies[operation] = new_method_config + + return new_method_config + + def _create_pagination_query_params(self) -> list[Parameter]: + size = Query(self._data.pagination_default_size, alias="page[size]", title="pagination_page_size") + number = Query(self._data.pagination_default_number, alias="page[number]", title="pagination_page_number") + offset = Query(self._data.pagination_default_offset, alias="page[offset]", title="pagination_page_offset") + limit = Query(self._data.pagination_default_limit, alias="page[limit]", title="pagination_page_limit") + + return [ + Parameter( + # name doesn't really matter here + name=q_param.title, + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=Optional[int], + default=q_param, + ) + for q_param in ( + size, + number, + offset, + limit, + ) + ] + + @classmethod + def _create_filters_query_dependency_param(cls): + filters_list = Query( + None, + alias="filter", + description="[Filtering 
docs](https://fastapi-jsonapi.readthedocs.io/en/latest/filtering.html)" + "\nExamples:\n* filter for timestamp interval: " + '`[{"name": "timestamp", "op": "ge", "val": "2020-07-16T11:35:33.383"},' + '{"name": "timestamp", "op": "le", "val": "2020-07-21T11:35:33.383"}]`', + ) + return Parameter( + name="filters_list", + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=Optional[str], + default=filters_list, + ) + + @classmethod + def _create_sort_query_dependency_param(cls): + sort = Query( + None, + alias="sort", + description="[Sorting docs](https://fastapi-jsonapi.readthedocs.io/en/latest/sorting.html)" + "\nExamples:\n* `email` - sort by email ASC\n* `-email` - sort by email DESC" + "\n* `created_at,-email` - sort by created_at ASC and by email DESC", + ) + return Parameter( + name="sort", + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=Optional[str], + default=sort, + ) + + def _update_signature_for_resource_list_view( + self, + wrapper: Callable[..., Any], + context_schema: Type[TypeSchema], + additional_dependency_params: Iterable[Parameter] = (), + ) -> Signature: + sig = signature(wrapper) + params, tail_params = get_separated_params(sig) + + filter_params, include_params = create_additional_query_params(schema=context_schema) + + extra_params = [] + extra_params.extend(self._create_pagination_query_params()) + extra_params.extend(filter_params) + extra_params.append(self._create_filters_query_dependency_param()) + extra_params.append(self._create_sort_query_dependency_param()) + extra_params.extend(include_params) + + return sig.replace(parameters=params + extra_params + list(additional_dependency_params) + tail_params) + + @staticmethod + def _update_signature_for_resource_detail_view( + wrapper: Callable[..., Any], + context_schema: Type[TypeSchema], + additional_dependency_params: Iterable[Parameter] = (), + ) -> Signature: + sig = signature(wrapper) + params, tail_params = get_separated_params(sig) + + _, include_params = 
create_additional_query_params(schema=context_schema) + + return sig.replace(parameters=params + include_params + list(additional_dependency_params) + tail_params) + + @classmethod + def _update_method_config_and_get_dependency_params( + cls, + view: Type[ViewBase], + operation: Operation, + ) -> list[Parameter]: + method_config = cls._update_operation_config(view, operation) + + if method_config.dependencies is None: + return [] + + return create_dependency_params_from_pydantic_model(method_config.dependencies) + + def _update_signature( + self, + wrapper: Callable[..., Any], + view: Type[ViewBase], + operation: Operation, + context_schema: Type[TypeSchema], + is_list_endpoint: bool = False, + ): + additional_dependency_params = self._update_method_config_and_get_dependency_params( + view=view, + operation=operation, + ) + + if is_list_endpoint: + return self._update_signature_for_resource_list_view( + wrapper=wrapper, + context_schema=context_schema, + additional_dependency_params=additional_dependency_params, + ) + + return self._update_signature_for_resource_detail_view( + wrapper=wrapper, + context_schema=context_schema, + additional_dependency_params=additional_dependency_params, + ) + + def create_post_fastapi_endpoint( + self, + resource_type: str, + operation: Operation, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + schema_in_post_data: Type[TypeSchema], + **view_options, + ): + async def wrapper( + request: Request, + data: schema_in_post_data = Body(embed=True), + **extra_view_deps, + ): + view_instance = view( + request=request, + resource_type=resource_type, + operation=operation, + model=model, + schema=source_schema, + **view_options, + ) + return await view_instance.handle_post_resource_list(data_create=data, **extra_view_deps) + + wrapper.__signature__ = self._update_signature( + wrapper=wrapper, + view=view, + operation=operation, + context_schema=self._data.source_schema, + ) + return wrapper + + def 
create_update_fastapi_endpoint( + self, + resource_type: str, + operation: Operation, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + schema_in_patch_data: Type[TypeSchema], + **view_options, + ): + async def wrapper( + request: Request, + data: schema_in_patch_data = Body(embed=True), + obj_id: str = Path(...), + **extra_view_deps, + ): + view_instance = view( + request=request, + resource_type=resource_type, + operation=operation, + model=model, + schema=source_schema, + **view_options, + ) + return await view_instance.handle_update_resource(obj_id=obj_id, data_update=data, **extra_view_deps) + + wrapper.__signature__ = self._update_signature( + wrapper=wrapper, + view=view, + operation=operation, + context_schema=self._data.source_schema, + ) + return wrapper + + def create_delete_fastapi_endpoint( + self, + resource_type: str, + operation: Operation, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + **view_options, + ): + async def wrapper( + request: Request, + obj_id: str = Path(...), + **extra_view_deps, + ): + view_instance = view( + request=request, + resource_type=resource_type, + operation=operation, + model=model, + schema=source_schema, + **view_options, + ) + return await view_instance.handle_delete_resource(obj_id=obj_id, **extra_view_deps) + + wrapper.__signature__ = self._update_signature( + wrapper=wrapper, + view=view, + operation=operation, + context_schema=self._data.source_schema, + ) + return wrapper + + def create_get_fastapi_endpoint( + self, + resource_type: str, + operation: Operation, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + **view_options, + ): + async def wrapper( + request: Request, + obj_id: str = Path(...), + **extra_view_deps, + ): + view_instance = view( + request=request, + resource_type=resource_type, + operation=operation, + model=model, + schema=source_schema, + **view_options, + ) + return await 
view_instance.handle_get_resource_detail(obj_id=obj_id, **extra_view_deps) + + wrapper.__signature__ = self._update_signature( + wrapper=wrapper, + view=view, + operation=operation, + context_schema=self._data.source_schema, + ) + return wrapper + + def create_get_relationship_fastapi_endpoint( + self, + resource_type: str, + relationship_name: str, + parent_resource_type: str, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + **view_options, + ): + async def wrapper( + request: Request, + obj_id: str = Path(...), + **extra_view_deps, + ): + view_instance = view( + request=request, + resource_type=resource_type, + operation=Operation.GET, + model=model, + schema=source_schema, + **view_options, + ) + return await view.handle_get_resource_relationship( + view_instance, + obj_id=obj_id, + relationship_name=relationship_name, + parent_resource_type=parent_resource_type, + **extra_view_deps, + ) + + wrapper.__signature__ = self._update_signature( + wrapper=wrapper, + view=view, + operation=Operation.GET, + context_schema=source_schema, + ) + return wrapper + + def create_get_relationship_list_fastapi_endpoint( + self, + resource_type: str, + relationship_name: str, + parent_resource_type: str, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + **view_options, + ): + async def wrapper( + request: Request, + obj_id: str = Path(...), + **extra_view_deps, + ): + view_instance = view( + request=request, + resource_type=resource_type, + operation=Operation.GET_LIST, + model=model, + schema=source_schema, + **view_options, + ) + return await view.handle_get_resource_relationship_list( + view_instance, + obj_id=obj_id, + relationship_name=relationship_name, + parent_resource_type=parent_resource_type, + **extra_view_deps, + ) + + wrapper.__signature__ = self._update_signature( + wrapper=wrapper, + view=view, + operation=Operation.GET_LIST, + context_schema=source_schema, + ) + return wrapper + + def 
create_get_list_fastapi_endpoint( + self, + resource_type: str, + operation: Operation, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + **view_options, + ): + async def wrapper( + request: Request, + **extra_view_deps, + ): + view_instance = view( + request=request, + resource_type=resource_type, + operation=operation, + model=model, + schema=source_schema, + **view_options, + ) + return await view_instance.handle_get_resource_list(**extra_view_deps) + + wrapper.__signature__ = self._update_signature( + wrapper=wrapper, + view=view, + operation=operation, + is_list_endpoint=True, + context_schema=self._data.source_schema, + ) + return wrapper + + def create_delete_list_fastapi_endpoint( + self, + resource_type: str, + operation: Operation, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + **view_options, + ): + async def wrapper( + request: Request, + **extra_view_deps, + ): + view_instance = view( + request=request, + resource_type=resource_type, + operation=operation, + model=model, + schema=source_schema, + **view_options, + ) + return await view_instance.handle_delete_resource_list(**extra_view_deps) + + wrapper.__signature__ = self._update_signature( + wrapper=wrapper, + view=view, + operation=operation, + is_list_endpoint=True, + context_schema=self._data.source_schema, + ) + return wrapper + + def create_common_fastapi_endpoint( + self, + operation: Operation, + **view_options, + ) -> tuple[str, Callable]: + name = self.get_common_endpoint_name(self._resource_type, operation) + kwargs = { + "resource_type": self._resource_type, + "operation": operation, + "view": self._data.view, + "model": self._data.model, + "source_schema": self._data.source_schema, + } + + if operation == Operation.CREATE: + kwargs["schema_in_post_data"] = self._data.schema_in_post_data + + if operation == Operation.UPDATE: + kwargs["schema_in_patch_data"] = self._data.schema_in_patch_data + + endpoint = 
self._operation_to_creation_method[operation](**kwargs, **view_options) + return name, endpoint + + def create_relationship_endpoint( + self, + resource_type: str, + operation: Operation, + relationship_name: str, + parent_resource_type: str, + view: Type[ViewBase], + model: Type[TypeModel], + source_schema: Type[TypeSchema], + ) -> tuple[str, Callable]: + name = self.get_relationship_endpoint_name(parent_resource_type, relationship_name, operation) + + if operation == Operation.GET: + creation_method = self.create_get_relationship_fastapi_endpoint + elif operation == Operation.GET_LIST: + creation_method = self.create_get_relationship_list_fastapi_endpoint + else: + msg = f"The operation {operation} is not supported on relationship endpoint creation" + raise Exception(msg) + + endpoint = creation_method( + resource_type=resource_type, + relationship_name=relationship_name, + parent_resource_type=parent_resource_type, + view=view, + model=model, + source_schema=source_schema, + ) + return name, endpoint + + @staticmethod + def get_endpoint_kind(operation: Operation) -> str: + kind = "detail" + if operation in {Operation.GET_LIST, Operation.DELETE_LIST, Operation.CREATE}: + kind = "list" + + return kind + + def get_common_endpoint_name( + self, + resource_type: str, + operation: Operation, + ): + """ + Generate endpoint name + """ + action = self._operation_to_action[operation] + kind = self.get_endpoint_kind(operation) + return f"{action}_{resource_type}_{kind}" + + def get_relationship_endpoint_name( + self, + resource_type: str, + relationship_name: str, + operation: Operation, + ): + kind = self.get_endpoint_kind(operation) + return f"fetch_{resource_type}_{relationship_name}_{kind}" diff --git a/fastapi_jsonapi/api/schemas.py b/fastapi_jsonapi/api/schemas.py new file mode 100644 index 00000000..41728e0e --- /dev/null +++ b/fastapi_jsonapi/api/schemas.py @@ -0,0 +1,26 @@ +from typing import Iterable, Optional, Type, Union + +from pydantic import BaseModel + 
+from fastapi_jsonapi.data_typing import TypeModel, TypeSchema +from fastapi_jsonapi.views import Operation, ViewBase + + +class ResourceData(BaseModel): + path: Union[str, list[str]] + tags: list[str] + view: Type[ViewBase] + model: Type[TypeModel] + source_schema: Type[TypeSchema] + schema_in_post: Optional[Type[BaseModel]] + schema_in_post_data: Type[BaseModel] + schema_in_patch: Optional[Type[BaseModel]] + schema_in_patch_data: Type[BaseModel] + detail_response_schema: Type[BaseModel] + list_response_schema: Type[BaseModel] + pagination_default_size: Optional[int] = 25 + pagination_default_number: Optional[int] = 1 + pagination_default_offset: Optional[int] = None + pagination_default_limit: Optional[int] = None + operations: Iterable[Operation] = () + ending_slash: bool = True diff --git a/fastapi_jsonapi/atomic/__init__.py b/fastapi_jsonapi/atomic/__init__.py index 2343eb28..531cad3f 100644 --- a/fastapi_jsonapi/atomic/__init__.py +++ b/fastapi_jsonapi/atomic/__init__.py @@ -1,7 +1,7 @@ +from .atomic import AtomicOperations +from .atomic_handler import current_atomic_operation + __all__ = ( "AtomicOperations", "current_atomic_operation", ) - -from .atomic import AtomicOperations -from .atomic_handler import current_atomic_operation diff --git a/fastapi_jsonapi/atomic/atomic.py b/fastapi_jsonapi/atomic/atomic.py index bcb6a8cc..33bf5728 100644 --- a/fastapi_jsonapi/atomic/atomic.py +++ b/fastapi_jsonapi/atomic/atomic.py @@ -1,15 +1,9 @@ -from typing import ( - Optional, - Type, -) +from typing import Optional, Type from fastapi import APIRouter, Request, Response, status from fastapi_jsonapi.atomic.atomic_handler import AtomicViewHandler -from fastapi_jsonapi.atomic.schemas import ( - AtomicOperationRequest, - AtomicResultResponse, -) +from fastapi_jsonapi.atomic.schemas import AtomicOperationRequest, AtomicResultResponse class AtomicOperations: diff --git a/fastapi_jsonapi/atomic/atomic_handler.py b/fastapi_jsonapi/atomic/atomic_handler.py index 
788ee3c8..f580889f 100644 --- a/fastapi_jsonapi/atomic/atomic_handler.py +++ b/fastapi_jsonapi/atomic/atomic_handler.py @@ -4,32 +4,22 @@ from collections import defaultdict from contextvars import ContextVar from functools import wraps -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - List, - Optional, - Type, - TypedDict, - Union, -) +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, TypedDict, Union from fastapi import HTTPException, status +from fastapi.requests import Request from pydantic import ValidationError -from starlette.requests import Request -from fastapi_jsonapi import RoutersJSONAPI from fastapi_jsonapi.atomic.prepared_atomic_operation import LocalIdsType, OperationBase from fastapi_jsonapi.atomic.schemas import AtomicOperation, AtomicOperationRequest, AtomicResultResponse +from fastapi_jsonapi.storages.schemas_storage import schemas_storage if TYPE_CHECKING: from fastapi_jsonapi.data_layers.base import BaseDataLayer + from fastapi_jsonapi.data_typing import TypeSchema log = logging.getLogger(__name__) -AtomicResponseDict = TypedDict("AtomicResponseDict", {"atomic:results": List[Any]}) - +AtomicResponseDict = TypedDict("AtomicResponseDict", {"atomic:results": list[Any]}) current_atomic_operation: ContextVar[OperationBase] = ContextVar("current_atomic_operation") @@ -47,12 +37,12 @@ async def wrapper(*a, operation: OperationBase, **kw): errors_details = { "message": f"Validation error on operation {operation.op_type}", "ref": operation.ref, - "data": operation.data.dict(), + "data": operation.data.model_dump(), } if isinstance(ex, ValidationError): errors_details.update(errors=ex.errors()) elif isinstance(ex, ValueError): - errors_details.update(error=str(ex)) + errors_details.update(error=f"{ex}") else: raise # TODO: json:api exception @@ -65,7 +55,6 @@ async def wrapper(*a, operation: OperationBase, **kw): class AtomicViewHandler: - jsonapi_routers_cls: Type[RoutersJSONAPI] = RoutersJSONAPI def __init__( 
self, @@ -83,24 +72,21 @@ async def prepare_one_operation(self, operation: AtomicOperation): :param operation: :return: """ - operation_type = operation.ref and operation.ref.type or operation.data and operation.data.type - assert operation_type - if operation_type not in self.jsonapi_routers_cls.all_jsonapi_routers: - msg = f"Unknown resource type {operation_type!r}. Register it via RoutersJSONAPI" + resource_type = (operation.ref and operation.ref.type) or (operation.data and operation.data.type) + if not schemas_storage.has_resource(resource_type): + msg = f"Unknown resource type {resource_type!r}." raise ValueError(msg) - jsonapi = self.jsonapi_routers_cls.all_jsonapi_routers[operation_type] - one_operation = OperationBase.prepare( + return OperationBase.prepare( action=operation.op, request=self.request, - jsonapi=jsonapi, + resource_type=resource_type, ref=operation.ref, data=operation.data, ) - return one_operation - async def prepare_operations(self) -> List[OperationBase]: - prepared_operations: List[OperationBase] = [] + async def prepare_operations(self) -> list[OperationBase]: + prepared_operations: list[OperationBase] = [] for operation in self.operations_request.operations: one_operation = await self.prepare_one_operation(operation) @@ -117,6 +103,29 @@ async def process_one_operation( operation.update_relationships_with_lid(local_ids=self.local_ids_cache) return await operation.handle(dl=dl) + async def process_next_operation( + self, + operation: OperationBase, + previous_dl: Optional[BaseDataLayer], + ) -> tuple[Optional[TypeSchema], BaseDataLayer]: + dl = await operation.get_data_layer() + await dl.atomic_start( + previous_dl=previous_dl, + ) + try: + response = await self.process_one_operation( + dl=dl, + operation=operation, + ) + except HTTPException as ex: + await dl.atomic_end( + success=False, + exception=ex, + ) + raise ex + + return response, dl + async def handle(self) -> Union[AtomicResponseDict, AtomicResultResponse, None]: 
prepared_operations = await self.prepare_operations() results = [] @@ -127,12 +136,7 @@ async def handle(self) -> Union[AtomicResponseDict, AtomicResultResponse, None]: # set context var ctx_var_token = current_atomic_operation.set(operation) - dl: BaseDataLayer = await operation.get_data_layer() - await dl.atomic_start(previous_dl=previous_dl) - response = await self.process_one_operation( - dl=dl, - operation=operation, - ) + response, dl = await self.process_next_operation(operation, previous_dl) previous_dl = dl # response.data.id @@ -143,9 +147,14 @@ async def handle(self) -> Union[AtomicResponseDict, AtomicResultResponse, None]: results.append({}) continue only_empty_responses = False - results.append({"data": response.data}) - if operation.data.lid and response.data: - self.local_ids_cache[operation.data.type][operation.data.lid] = response.data.id + + data = response["data"] + results.append( + {"data": data}, + ) + + if operation.data.lid and data: + self.local_ids_cache[operation.data.type][operation.data.lid] = data["id"] # reset context var current_atomic_operation.reset(ctx_var_token) diff --git a/fastapi_jsonapi/atomic/prepared_atomic_operation.py b/fastapi_jsonapi/atomic/prepared_atomic_operation.py index cbdf0cc2..2483ceb4 100644 --- a/fastapi_jsonapi/atomic/prepared_atomic_operation.py +++ b/fastapi_jsonapi/atomic/prepared_atomic_operation.py @@ -1,45 +1,41 @@ from __future__ import annotations from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Dict, Optional, Type +from inspect import Signature, signature +from typing import Any, Callable, Optional, Type from fastapi import Request -from fastapi_jsonapi import RoutersJSONAPI from fastapi_jsonapi.atomic.schemas import AtomicOperationAction, AtomicOperationRef, OperationDataType -from fastapi_jsonapi.views.utils import HTTPMethod +from fastapi_jsonapi.data_layers.base import BaseDataLayer +from fastapi_jsonapi.data_typing import TypeSchema +from fastapi_jsonapi.signature 
import create_dependency_params_from_pydantic_model, get_separated_params +from fastapi_jsonapi.storages import models_storage, schemas_storage, views_storage +from fastapi_jsonapi.utils.dependency_helper import DependencyHelper +from fastapi_jsonapi.views import Operation, OperationConfig, ViewBase -if TYPE_CHECKING: - from fastapi_jsonapi.data_layers.base import BaseDataLayer - from fastapi_jsonapi.views.detail_view import DetailViewBase - from fastapi_jsonapi.views.list_view import ListViewBase - from fastapi_jsonapi.views.view_base import ViewBase - -LocalIdsType = Dict[str, Dict[str, str]] +LocalIdsType = dict[str, dict[str, str]] +atomic_dependency_handlers: dict[(str, Operation), Callable] = {} @dataclass class OperationBase: - jsonapi: RoutersJSONAPI view: ViewBase ref: Optional[AtomicOperationRef] data: OperationDataType op_type: str - - @property - def http_method(self) -> HTTPMethod: - raise NotImplementedError + resource_type: str @classmethod def prepare( cls, action: str, request: Request, - jsonapi: RoutersJSONAPI, + resource_type: str, ref: Optional[AtomicOperationRef], data: OperationDataType, - ) -> "OperationBase": - view_cls: Type[ViewBase] = jsonapi.detail_view_resource + ) -> OperationBase: + view_cls: Type[ViewBase] = views_storage.get_view(resource_type) if hasattr(action, "value"): # convert to str if enum @@ -47,34 +43,99 @@ def prepare( if action == AtomicOperationAction.add: operation_cls = OperationAdd - view_cls = jsonapi.list_view_resource + view_operation = Operation.CREATE elif action == AtomicOperationAction.update: operation_cls = OperationUpdate + view_operation = Operation.UPDATE elif action == AtomicOperationAction.remove: operation_cls = OperationRemove + view_operation = Operation.DELETE else: msg = f"Unknown operation {action!r}" raise ValueError(msg) - view = view_cls(request=request, jsonapi=jsonapi) + view = view_cls( + request=request, + resource_type=resource_type, + operation=view_operation, + 
model=models_storage.get_model(resource_type), + schema=schemas_storage.get_source_schema(resource_type), + ) return operation_cls( - jsonapi=jsonapi, view=view, ref=ref, data=data, op_type=action, + resource_type=resource_type, ) + @staticmethod + def prepare_dependencies_handler_signature( + custom_handler: Callable[..., Any], + method_config: OperationConfig, + ) -> Signature: + sig = signature(custom_handler) + + additional_dependency_params = [] + if method_config.dependencies is not None: + additional_dependency_params = create_dependency_params_from_pydantic_model( + model_class=method_config.dependencies, + ) + + params, tail_params = get_separated_params(sig) + + return sig.replace(parameters=params + list(additional_dependency_params) + tail_params) + + @classmethod + async def handle_view_dependencies( + cls, + request: Request, + view_cls: Type[ViewBase], + resource_type: str, + operation: Operation, + ) -> dict[str, Any]: + """ + Combines all dependencies (prepared) and returns them as list + + Consider method config is already prepared for generic views + Reuse the same config for atomic operations + + :param request: + :param view_cls: + :param resource_type: + :param operation: + :return: + """ + handler_key = (resource_type, operation) + + if handler_key in atomic_dependency_handlers: + handle_dependencies = atomic_dependency_handlers[handler_key] + else: + method_config: OperationConfig = view_cls.operation_dependencies[operation] + + def handle_dependencies(**dep_kwargs): + return dep_kwargs + + handle_dependencies.__signature__ = cls.prepare_dependencies_handler_signature( + custom_handler=handle_dependencies, + method_config=method_config, + ) + atomic_dependency_handlers[handler_key] = handle_dependencies + + dep_helper = DependencyHelper(request=request) + return await dep_helper.run(handle_dependencies) + async def get_data_layer(self) -> BaseDataLayer: - data_layer_view_dependencies: Dict[str, Any] = await 
self.jsonapi.handle_view_dependencies( + data_layer_view_dependencies: dict[str, Any] = await self.handle_view_dependencies( request=self.view.request, view_cls=self.view.__class__, - method=self.http_method, + resource_type=self.resource_type, + operation=self.view.operation, ) return await self.view.get_data_layer(data_layer_view_dependencies) - async def handle(self, dl: BaseDataLayer): + async def handle(self, dl: BaseDataLayer) -> Optional[TypeSchema]: raise NotImplementedError @classmethod @@ -113,7 +174,7 @@ def upd_one_relationship_with_local_id(cls, relationship_info: dict, local_ids: def update_relationships_with_lid(self, local_ids: LocalIdsType): if not (self.data and self.data.relationships): return - for relationship_name, relationship_value in self.data.relationships.items(): + for relationship_value in self.data.relationships.values(): relationship_data = relationship_value["data"] if isinstance(relationship_data, list): for data in relationship_data: @@ -125,33 +186,23 @@ def update_relationships_with_lid(self, local_ids: LocalIdsType): raise ValueError(msg) -class ListOperationBase(OperationBase): - view: ListViewBase - - -class DetailOperationBase(OperationBase): - view: DetailViewBase - - -class OperationAdd(ListOperationBase): - http_method = HTTPMethod.POST +class OperationAdd(OperationBase): - async def handle(self, dl: BaseDataLayer): + async def handle(self, dl: BaseDataLayer) -> dict: # use outer schema wrapper because we need this error path: # `{'loc': ['data', 'attributes', 'name']` # and not `{'loc': ['attributes', 'name']` - data_in = self.jsonapi.schema_in_post(data=self.data) - response = await self.view.process_create_object( + schema_in_create = schemas_storage.get_schema_in(self.resource_type, operation_type="create") + data_in = schema_in_create(data=self.data.model_dump(exclude_unset=True)) + return await self.view.process_create_object( dl=dl, data_create=data_in.data, ) - return response -class 
OperationUpdate(DetailOperationBase): - http_method = HTTPMethod.PATCH +class OperationUpdate(OperationBase): - async def handle(self, dl: BaseDataLayer): + async def handle(self, dl: BaseDataLayer) -> dict: if self.data is None: # TODO: clear to-one relationships pass @@ -160,18 +211,17 @@ async def handle(self, dl: BaseDataLayer): # use outer schema wrapper because we need this error path: # `{'loc': ['data', 'attributes', 'name']` # and not `{'loc': ['attributes', 'name']` - data_in = self.jsonapi.schema_in_patch(data=self.data) - obj_id = self.ref and self.ref.id or self.data and self.data.id - response = await self.view.process_update_object( + schema_in_update = schemas_storage.get_schema_in(self.resource_type, operation_type="update") + data_in = schema_in_update(data=self.data.model_dump(exclude_unset=True)) + obj_id = (self.ref and self.ref.id) or (self.data and self.data.id) + return await self.view.process_update_object( dl=dl, obj_id=obj_id, data_update=data_in.data, ) - return response -class OperationRemove(DetailOperationBase): - http_method = HTTPMethod.DELETE +class OperationRemove(OperationBase): async def handle( self, diff --git a/fastapi_jsonapi/atomic/schemas.py b/fastapi_jsonapi/atomic/schemas.py index a9fca72e..976eddf6 100644 --- a/fastapi_jsonapi/atomic/schemas.py +++ b/fastapi_jsonapi/atomic/schemas.py @@ -1,10 +1,7 @@ -from __future__ import annotations - from enum import Enum -from typing import List, Optional, Union +from typing import Any, Optional, Union -from pydantic import BaseModel, Field, root_validator -from starlette.datastructures import URLPath +from pydantic import BaseModel, Field, model_validator class OperationRelationshipSchema(BaseModel): @@ -20,8 +17,8 @@ class OperationItemInSchema(BaseModel): type: str = Field(default=..., description="Resource type") id: Optional[str] = Field(default=None, description="Resource object ID") lid: Optional[str] = Field(default=None, description="Resource object local ID") -
attributes: Optional[dict] = Field(None, description="Resource object attributes") - relationships: Optional[dict] = Field(None, description="Resource object relationships") + attributes: Optional[dict] = Field(default=None, description="Resource object attributes") + relationships: Optional[dict] = Field(default=None, description="Resource object relationships") OperationDataType = Union[ @@ -29,7 +26,7 @@ class OperationItemInSchema(BaseModel): # any object creation OperationItemInSchema, # to-many relationship - List[OperationRelationshipSchema], + list[OperationRelationshipSchema], # to-one relationship OperationRelationshipSchema, # not required @@ -55,7 +52,7 @@ class AtomicOperationRef(BaseModel): lid: Optional[str] = Field(default=None) relationship: Optional[str] = Field(default=None) - @root_validator + @model_validator(mode="before") def validate_atomic_operation_ref(cls, values: dict): """ type is required on schema, so id or lid has to be present @@ -106,8 +103,10 @@ class AtomicOperation(BaseModel): default=..., description="an operation code, expressed as a string, that indicates the type of operation to perform.", ) - ref: Optional[AtomicOperationRef] = Field(default=None) - href: Optional[URLPath] = Field( + ref: Optional[AtomicOperationRef] = Field( + default=None, + ) + href: Optional[str] = Field( default=None, description="a string that contains a URI-reference that identifies the target of the operation.", ) @@ -151,7 +150,13 @@ def _validate_one_of_ref_or_href(cls, values: dict): # TODO: pydantic V2 raise ValueError(msg) - @root_validator + @classmethod + def _get_value_from_dict_or_obj(cls, obj: Any, key: str): + if isinstance(obj, dict): + return obj.get(key) + return getattr(obj, key, None) + + @model_validator(mode="before") def validate_operation(cls, values: dict): """ Make sure atomic operation request conforms the spec @@ -164,16 +169,23 @@ def validate_operation(cls, values: dict): ref: Optional[AtomicOperationRef] = 
values.get("ref") if op == AtomicOperationAction.remove: if not ref: - msg = f"ref should be present for action {op.value!r}" + msg = f"ref should be present for action {op!r}" raise ValueError(msg) + # when updating / removing item, ref [l]id has to be present - if not (ref.id or ref.lid): - msg = f"id or local id has to be present for action {op.value!r}" + if not cls._get_value_from_dict_or_obj(ref, "id") and not cls._get_value_from_dict_or_obj(ref, "lid"): + msg = f"id or local id has to be present for action {op!r}" raise ValueError(msg) data: OperationDataType = values.get("data") - operation_type = ref and ref.type or data and data.type - if not operation_type: + + operation_type = None + if data is not None: + operation_type = cls._get_value_from_dict_or_obj(data, "type") + elif ref is not None: + operation_type = cls._get_value_from_dict_or_obj(ref, "type") + + if operation_type is None: msg = "Operation has to be in ref or in data" raise ValueError(msg) @@ -181,9 +193,9 @@ def validate_operation(cls, values: dict): class AtomicOperationRequest(BaseModel): - operations: List[AtomicOperation] = Field( + operations: list[AtomicOperation] = Field( alias="atomic:operations", - min_items=1, + min_length=1, ) @@ -203,7 +215,7 @@ class AtomicResultResponse(BaseModel): https://jsonapi.org/ext/atomic/#auto-id-responses-4 """ - results: List[AtomicResult] = Field( + results: list[AtomicResult] = Field( alias="atomic:results", - min_items=1, + min_length=1, ) diff --git a/fastapi_jsonapi/common.py b/fastapi_jsonapi/common.py new file mode 100644 index 00000000..b84da41c --- /dev/null +++ b/fastapi_jsonapi/common.py @@ -0,0 +1,18 @@ +from typing import Optional + +# noinspection PyProtectedMember +from pydantic.fields import FieldInfo + +from fastapi_jsonapi.types_metadata import ClientCanSetId, CustomFilterSQL, CustomSortSQL, RelationshipInfo +from fastapi_jsonapi.utils.metadata_instance_search import MetadataInstanceSearch + +search_client_can_set_id = 
MetadataInstanceSearch[ClientCanSetId](ClientCanSetId) +search_relationship_info = MetadataInstanceSearch[RelationshipInfo](RelationshipInfo) +search_custom_filter_sql = MetadataInstanceSearch[CustomFilterSQL](CustomFilterSQL) +search_custom_sort_sql = MetadataInstanceSearch[CustomSortSQL](CustomSortSQL) + + +def get_relationship_info_from_field_metadata( + field: FieldInfo, +) -> Optional[RelationshipInfo]: + return search_relationship_info.first(field) diff --git a/fastapi_jsonapi/data_layers/base.py b/fastapi_jsonapi/data_layers/base.py index e4a33625..b70c952b 100644 --- a/fastapi_jsonapi/data_layers/base.py +++ b/fastapi_jsonapi/data_layers/base.py @@ -5,14 +5,16 @@ you must inherit from this base class """ -from typing import Dict, List, Optional, Tuple, Type +from typing import Optional, Type from fastapi import Request +from pydantic import TypeAdapter +from fastapi_jsonapi.common import search_client_can_set_id from fastapi_jsonapi.data_typing import TypeModel, TypeSchema from fastapi_jsonapi.querystring import QueryStringManager from fastapi_jsonapi.schema import BaseJSONAPIItemInSchema -from fastapi_jsonapi.schema_builder import FieldConfig, TransferSaveWrapper +from fastapi_jsonapi.views import RelationshipRequestInfo class BaseDataLayer: @@ -21,13 +23,12 @@ class BaseDataLayer: def __init__( self, request: Request, - schema: Type[TypeSchema], model: Type[TypeModel], - url_id_field: str, - id_name_field: Optional[str] = None, + schema: Type[TypeSchema], + resource_type: str, + url_id_field: str = "id", disable_collection_count: bool = False, default_collection_count: int = -1, - type_: str = "", **kwargs, ): """ @@ -37,38 +38,29 @@ def __init__( :param schema: :param model: :param url_id_field: - :param id_name_field: :param disable_collection_count: :param default_collection_count: - :param type_: resource type + :param resource_type: resource type :param kwargs: """ - self.request = request - self.schema = schema - self.model = model - 
self.url_id_field = url_id_field - self.id_name_field = id_name_field + self.request: Request = request + self.schema: Type[TypeSchema] = schema + self.model: Type[TypeModel] = model + self.resource_type: str = resource_type + self.url_id_field: str = url_id_field self.disable_collection_count: bool = disable_collection_count self.default_collection_count: int = default_collection_count self.is_atomic = False - self.type_ = type_ async def atomic_start(self, previous_dl: Optional["BaseDataLayer"] = None): self.is_atomic = True - async def atomic_end(self, success: bool = True): + async def atomic_end(self, success: bool = True, exception: Optional[Exception] = None): raise NotImplementedError - def _unwrap_field_config(self, extra: Dict): - field_config_wrapper: Optional[TransferSaveWrapper] = extra.get("field_config") - - if field_config_wrapper: - return field_config_wrapper.get_field_config() - - return FieldConfig() - + @classmethod def _apply_client_generated_id( - self, + cls, data_create: BaseJSONAPIItemInSchema, model_kwargs: dict, ): @@ -81,14 +73,11 @@ def _apply_client_generated_id( if data_create.id is None: return model_kwargs - extra = data_create.__fields__["id"].field_info.extra - if extra.get("client_can_set_id"): + field = data_create.model_fields["id"] + if can_set_id := search_client_can_set_id.first(field): id_value = data_create.id - field_config = self._unwrap_field_config(extra) - - if field_config.cast_type: - id_value = field_config.cast_type(id_value) - + if can_set_id.cast_type: + id_value = TypeAdapter(can_set_id.cast_type).validate_python(id_value) model_kwargs["id"] = id_value return model_kwargs @@ -103,42 +92,34 @@ async def create_object(self, data_create: BaseJSONAPIItemInSchema, view_kwargs: """ raise NotImplementedError - def get_object_id_field_name(self): - """ - compound key may cause errors - - :return: - """ - return self.id_name_field - - def get_object_id_field(self): - id_name_field = self.get_object_id_field_name() - 
try: - return getattr(self.model, id_name_field) - except AttributeError: - msg = f"{self.model.__name__} has no attribute {id_name_field}" - # TODO: any custom exception type? - raise Exception(msg) - - def get_object_id(self, obj: TypeModel): - return getattr(obj, self.get_object_id_field_name()) - - async def get_object(self, view_kwargs: dict, qs: Optional[QueryStringManager] = None) -> TypeModel: + async def get_object( + self, + view_kwargs: dict, + qs: Optional[QueryStringManager] = None, + relationship_request_info: Optional[RelationshipRequestInfo] = None, + ) -> TypeModel: """ Retrieve an object :param view_kwargs: kwargs from the resource view :param qs: + :param relationship_request_info: :return DeclarativeMeta: an object """ raise NotImplementedError - async def get_collection(self, qs: QueryStringManager, view_kwargs: Optional[dict] = None) -> Tuple[int, list]: + async def get_collection( + self, + qs: QueryStringManager, + view_kwargs: Optional[dict] = None, + relationship_request_info: Optional[RelationshipRequestInfo] = None, + ) -> tuple[int, list]: """ Retrieve a collection of objects :param qs: a querystring manager to retrieve information from url :param view_kwargs: kwargs from the resource view + :param relationship_request_info: :return tuple: the number of object and the list of objects """ raise NotImplementedError @@ -235,68 +216,7 @@ async def delete_relationship( """ raise NotImplementedError - def get_related_model_query_base( - self, - related_model: Type[TypeModel], - ): - """ - Prepare query for the related model - - :param related_model: Related ORM model class (not instance) - :return: - """ - raise NotImplementedError - - def get_related_object_query( - self, - related_model: Type[TypeModel], - related_id_field: str, - id_value: str, - ): - """ - Prepare query to get related object - - :param related_model: - :param related_id_field: - :param id_value: - :return: - """ - raise NotImplementedError - - def 
get_related_objects_list_query( - self, - related_model: Type[TypeModel], - related_id_field: str, - ids: list[str], - ): - """ - Prepare query to get related objects list - - :param related_model: - :param related_id_field: - :param ids: - :return: - """ - raise NotImplementedError - - # async def get_related_object_query(self): - async def get_related_object( - self, - related_model: Type[TypeModel], - related_id_field: str, - id_value: str, - ) -> TypeModel: - """ - Get related object. - - :param related_model: Related ORM model class (not instance) - :param related_id_field: id field of the related model (usually it's `id`) - :param id_value: related object id value - :return: an ORM object - """ - raise NotImplementedError - - async def get_related_objects_list( + async def get_related_objects( self, related_model: Type[TypeModel], related_id_field: str, @@ -312,14 +232,6 @@ async def get_related_objects_list( """ raise NotImplementedError - def query(self, view_kwargs): - """ - Construct the base query to retrieve wanted data - - :param view_kwargs: kwargs from the resource view - """ - raise NotImplementedError - async def before_create_object(self, data, view_kwargs): """ Provide additional data before object creation @@ -413,27 +325,25 @@ async def after_delete_object(self, obj: TypeModel, view_kwargs): """ raise NotImplementedError - async def delete_objects(self, objects: List[TypeModel], view_kwargs): + async def delete_objects(self, objects: list[TypeModel], view_kwargs): # TODO: doc raise NotImplementedError - async def before_delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def before_delete_objects(self, objects: list[TypeModel], view_kwargs: dict): """ Make checks before deleting objects. :param objects: an object from data layer. :param view_kwargs: kwargs from the resource view. 
""" - pass - async def after_delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def after_delete_objects(self, objects: list[TypeModel], view_kwargs: dict): """ Any action after deleting objects. :param objects: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass async def before_create_relationship( self, diff --git a/fastapi_jsonapi/data_layers/fields/enum.py b/fastapi_jsonapi/data_layers/fields/enum.py deleted file mode 100644 index b7c0b9de..00000000 --- a/fastapi_jsonapi/data_layers/fields/enum.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Base enum module.""" - -from fastapi_jsonapi.data_layers.fields.mixins import ( - MixinEnum, - MixinIntEnum, -) - - -class Enum(MixinEnum): - """ - Base enum class. - - All used non-integer enumerations must inherit from this class. - """ - - pass - - -class IntEnum(MixinIntEnum): - """ - Base IntEnum class. - - All used integer enumerations must inherit from this class. - """ - - pass diff --git a/fastapi_jsonapi/data_layers/fields/enums.py b/fastapi_jsonapi/data_layers/fields/enums.py new file mode 100644 index 00000000..b6fc1ebb --- /dev/null +++ b/fastapi_jsonapi/data_layers/fields/enums.py @@ -0,0 +1,11 @@ +"""Base enum module.""" + +from fastapi_jsonapi.data_layers.fields.mixins import MixinEnum + + +class Enum(MixinEnum): + """ + Base enum class. + + All used non-integer enumerations must inherit from this class. 
+ """ diff --git a/fastapi_jsonapi/data_layers/fields/mixins.py b/fastapi_jsonapi/data_layers/fields/mixins.py index 22a14530..d45ce823 100644 --- a/fastapi_jsonapi/data_layers/fields/mixins.py +++ b/fastapi_jsonapi/data_layers/fields/mixins.py @@ -1,9 +1,6 @@ """Enum mixin module.""" -from enum import ( - Enum, - IntEnum, -) +from enum import Enum class MixinEnum(Enum): @@ -34,48 +31,3 @@ def value_to_enum(cls, value): """Convert value to enum.""" val_to_enum = {value.value: value for _, value in cls._member_map_.items()} return val_to_enum.get(value) - - -class MixinIntEnum(IntEnum): - """ - Здесь пришлось дублировать код, чтобы обеспечить совместимость с FastAPI и Pydantic. - - Основная проблема - данные либы определяют валидаторы для стандартной библиотеки enum, используя вызов issubclass. - И для стандартного IntEnum есть отдельная ветка issubclass(IntEnum), в которой происходят - специальные преобразования, например, аргументы из запроса конвертируются в тип int. - Поэтому OurEnum(int, Enum) не срабатывает по условию issubclass(obj, IntEnum) и выбираются - неверные валидаторы и конверторы. 
- А код ниже пришлось задублировать, так как у стандартного Enum есть метакласс, который разрешает только - такую цепочку наследования: - NewEnum(клас_тип, миксин_без_типа_1, ..., миксин_без_типа_n, Enum) - По этому правилу нельзя построить наследование, добавляющее миксин без типа к стандартному IntEnum: - NewEnum(our_mixin, IntEnum), так как IntEnum = (int, Enum) - Поэтому пока остается такое решение до каких-либо исправлений со стороны разработчиков либы, - либо появления более гениальных идей - """ - - @classmethod - def names(cls): - """Get all field names.""" - return ",".join(field.name for field in cls) - - @classmethod - def values(cls): - """Get all values from Enum.""" - return [value for _, value in cls._member_map_.items()] - - @classmethod - def keys(cls): - """Get all field keys from Enum.""" - return [key for key, _ in cls._member_map_.items()] - - @classmethod - def inverse(cls): - """Return all inverted items sequence.""" - return {value: key for key, value in cls._member_map_.items()} - - @classmethod - def value_to_enum(cls, value): - """Convert value to enum.""" - val_to_enum = {value.value: value for _, value in cls._member_map_.items()} - return val_to_enum.get(value) diff --git a/fastapi_jsonapi/data_layers/filtering/__init__.py b/fastapi_jsonapi/data_layers/filtering/__init__.py deleted file mode 100644 index 0e31b2a7..00000000 --- a/fastapi_jsonapi/data_layers/filtering/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Base filtering functions package.""" diff --git a/fastapi_jsonapi/data_layers/filtering/sqlalchemy.py b/fastapi_jsonapi/data_layers/filtering/sqlalchemy.py deleted file mode 100644 index a768f5bb..00000000 --- a/fastapi_jsonapi/data_layers/filtering/sqlalchemy.py +++ /dev/null @@ -1,572 +0,0 @@ -"""Helper to create sqlalchemy filters according to filter querystring parameter""" -import inspect -import logging -from collections.abc import Sequence -from typing import ( - Any, - Callable, - Dict, - List, - Optional, - Set, - Tuple, 
- Type, - Union, -) - -from pydantic import BaseConfig, BaseModel -from pydantic.fields import ModelField -from pydantic.validators import _VALIDATORS, find_validators -from sqlalchemy import and_, false, not_, or_ -from sqlalchemy.orm import aliased -from sqlalchemy.orm.attributes import InstrumentedAttribute -from sqlalchemy.orm.util import AliasedClass -from sqlalchemy.sql.elements import BinaryExpression, BooleanClauseList - -from fastapi_jsonapi.data_typing import TypeModel, TypeSchema -from fastapi_jsonapi.exceptions import InvalidFilters, InvalidType -from fastapi_jsonapi.exceptions.json_api import HTTPException -from fastapi_jsonapi.schema import JSONAPISchemaIntrospectionError, get_model_field, get_relationships - -log = logging.getLogger(__name__) - -RELATIONSHIP_SPLITTER = "." - -# The mapping with validators using by to cast raw value to instance of target type -REGISTERED_PYDANTIC_TYPES: Dict[Type, List[Callable]] = dict(_VALIDATORS) - -cast_failed = object() - -RelationshipPath = str - - -class RelationshipFilteringInfo(BaseModel): - target_schema: Type[TypeSchema] - model: Type[TypeModel] - aliased_model: AliasedClass - join_column: InstrumentedAttribute - - class Config: - arbitrary_types_allowed = True - - -def check_can_be_none(fields: list[ModelField]) -> bool: - """ - Return True if None is possible value for target field - """ - return any(field_item.allow_none for field_item in fields) - - -def separate_types(types: List[Type]) -> Tuple[List[Type], List[Type]]: - """ - Separates the types into two kinds. - - The first are those for which there are already validators - defined by pydantic - str, int, datetime and some other built-in types. 
- The second are all other types for which the `arbitrary_types_allowed` - config is applied when defining the pydantic model - """ - pydantic_types = [ - # skip format - type_ - for type_ in types - if type_ in REGISTERED_PYDANTIC_TYPES - ] - userspace_types = [ - # skip format - type_ - for type_ in types - if type_ not in REGISTERED_PYDANTIC_TYPES - ] - return pydantic_types, userspace_types - - -def validator_requires_model_field(validator: Callable) -> bool: - """ - Check if validator accepts the `field` param - - :param validator: - :return: - """ - signature = inspect.signature(validator) - parameters = signature.parameters - - if "field" not in parameters: - return False - - field_param = parameters["field"] - field_type = field_param.annotation - - return field_type == "ModelField" or field_type is ModelField - - -def cast_value_with_pydantic( - types: List[Type], - value: Any, - schema_field: ModelField, -) -> Tuple[Optional[Any], List[str]]: - result_value, errors = None, [] - - for type_to_cast in types: - for validator in find_validators(type_to_cast, BaseConfig): - args = [value] - # TODO: some other way to get all the validator's dependencies? 
- if validator_requires_model_field(validator): - args.append(schema_field) - try: - result_value = validator(*args) - except Exception as ex: - errors.append(str(ex)) - else: - return result_value, errors - - return None, errors - - -def cast_iterable_with_pydantic( - types: List[Type], - values: List, - schema_field: ModelField, -) -> Tuple[List, List[str]]: - type_cast_failed = False - failed_values = [] - - result_values: List[Any] = [] - errors: List[str] = [] - - for value in values: - casted_value, cast_errors = cast_value_with_pydantic( - types, - value, - schema_field, - ) - errors.extend(cast_errors) - - if casted_value is None: - type_cast_failed = True - failed_values.append(value) - - continue - - result_values.append(casted_value) - - if type_cast_failed: - msg = f"Can't parse items {failed_values} of value {values}" - raise InvalidFilters(msg, pointer=schema_field.name) - - return result_values, errors - - -def cast_value_with_scheme(field_types: List[Type], value: Any) -> Tuple[Any, List[str]]: - errors: List[str] = [] - casted_value = cast_failed - - for field_type in field_types: - try: - if isinstance(value, list): # noqa: SIM108 - casted_value = [field_type(item) for item in value] - else: - casted_value = field_type(value) - except (TypeError, ValueError) as ex: - errors.append(str(ex)) - else: - return casted_value, errors - - return casted_value, errors - - -def build_filter_expression( - schema_field: ModelField, - model_column: InstrumentedAttribute, - operator: str, - value: Any, -) -> BinaryExpression: - """ - Builds sqlalchemy filter expression, like YourModel.some_field == value - - Custom sqlalchemy filtering logic can be created in a schemas field for any operator - To implement a new filtering logic (override existing or create a new one) - create a method inside a field following this pattern: `__sql_filter_` - - :param schema_field: schemas field instance - :param model_column: sqlalchemy column instance - :param operator: your 
operator, for example: "eq", "in", "ilike_str_array", ... - :param value: filtering value - - """ - fields = [schema_field] - - # for Union annotations - if schema_field.sub_fields: - fields = list(schema_field.sub_fields) - - can_be_none = check_can_be_none(fields) - - if value is None: - if can_be_none: - return getattr(model_column, operator)(value) - - raise InvalidFilters(detail=f"The field `{schema_field.name}` can't be null") - - types = [i.type_ for i in fields] - casted_value = None - errors: List[str] = [] - - pydantic_types, userspace_types = separate_types(types) - - if pydantic_types: - func = cast_value_with_pydantic - if isinstance(value, list): - func = cast_iterable_with_pydantic - casted_value, errors = func(pydantic_types, value, schema_field) - - if casted_value is None and userspace_types: - log.warning("Filtering by user type values is not properly tested yet. Use this on your own risk.") - - casted_value, errors = cast_value_with_scheme(types, value) - - if casted_value is cast_failed: - raise InvalidType( - detail=f"Can't cast filter value `{value}` to arbitrary type.", - errors=[HTTPException(status_code=InvalidType.status_code, detail=str(err)) for err in errors], - ) - - if casted_value is None and not can_be_none: - raise InvalidType( - detail=", ".join(errors), - pointer=schema_field.name, - ) - - return getattr(model_column, operator)(casted_value) - - -def is_terminal_node(filter_item: dict) -> bool: - """ - If node shape is: - - { - "name: ..., - "op: ..., - "val: ..., - } - """ - terminal_node_keys = {"name", "op", "val"} - return set(filter_item.keys()) == terminal_node_keys - - -def is_relationship_filter(name: str) -> bool: - return RELATIONSHIP_SPLITTER in name - - -def gather_relationship_paths(filter_item: Union[dict, list]) -> Set[str]: - """ - Extracts relationship paths from query filter - """ - names = set() - - if isinstance(filter_item, list): - for sub_item in filter_item: - 
names.update(gather_relationship_paths(sub_item)) - - elif is_terminal_node(filter_item): - name = filter_item["name"] - - if RELATIONSHIP_SPLITTER not in name: - return set() - - return {RELATIONSHIP_SPLITTER.join(name.split(RELATIONSHIP_SPLITTER)[:-1])} - - else: - for sub_item in filter_item.values(): - names.update(gather_relationship_paths(sub_item)) - - return names - - -def get_model_column( - model: Type[TypeModel], - schema: Type[TypeSchema], - field_name: str, -) -> InstrumentedAttribute: - try: - model_field = get_model_field(schema, field_name) - except JSONAPISchemaIntrospectionError as e: - raise InvalidFilters(str(e)) - - try: - return getattr(model, model_field) - except AttributeError: - msg = "{} has no attribute {}".format(model.__name__, model_field) - raise InvalidFilters(msg) - - -def get_operator(model_column: InstrumentedAttribute, operator_name: str) -> str: - """ - Get the function operator from his name - - :return callable: a callable to make operation on a column - """ - operators = ( - f"__{operator_name}__", - f"{operator_name}_", - operator_name, - ) - - for op in operators: - if hasattr(model_column, op): - return op - - msg = "{} has no operator {}".format(model_column.key, operator_name) - raise InvalidFilters(msg) - - -def get_custom_filter_expression_callable(schema_field, operator: str) -> Callable: - return schema_field.field_info.extra.get( - f"_{operator}_sql_filter_", - ) - - -def gather_relationships_info( - model: Type[TypeModel], - schema: Type[TypeSchema], - relationship_path: List[str], - collected_info: dict[RelationshipPath, RelationshipFilteringInfo], - target_relationship_idx: int = 0, - prev_aliased_model: Optional[Any] = None, -) -> dict[RelationshipPath, RelationshipFilteringInfo]: - is_last_relationship = target_relationship_idx == len(relationship_path) - 1 - target_relationship_path = RELATIONSHIP_SPLITTER.join( - relationship_path[: target_relationship_idx + 1], - ) - target_relationship_name = 
relationship_path[target_relationship_idx] - - if target_relationship_name not in set(get_relationships(schema)): - msg = f"There are no relationship '{target_relationship_name}' defined in schema {schema.__name__}" - raise InvalidFilters(msg) - - target_schema = schema.__fields__[target_relationship_name].type_ - target_model = getattr(model, target_relationship_name).property.mapper.class_ - - if prev_aliased_model: - join_column = get_model_column( - model=prev_aliased_model, - schema=schema, - field_name=target_relationship_name, - ) - else: - join_column = get_model_column( - model, - schema, - target_relationship_name, - ) - - aliased_model = aliased(target_model) - collected_info[target_relationship_path] = RelationshipFilteringInfo( - target_schema=target_schema, - model=target_model, - aliased_model=aliased_model, - join_column=join_column, - ) - - if not is_last_relationship: - return gather_relationships_info( - model=target_model, - schema=target_schema, - relationship_path=relationship_path, - collected_info=collected_info, - target_relationship_idx=target_relationship_idx + 1, - prev_aliased_model=aliased_model, - ) - - return collected_info - - -def gather_relationships( - entrypoint_model: Type[TypeModel], - schema: Type[TypeSchema], - relationship_paths: Set[str], -) -> dict[RelationshipPath, RelationshipFilteringInfo]: - collected_info = {} - for relationship_path in sorted(relationship_paths): - gather_relationships_info( - model=entrypoint_model, - schema=schema, - relationship_path=relationship_path.split(RELATIONSHIP_SPLITTER), - collected_info=collected_info, - ) - - return collected_info - - -def prepare_relationships_info( - model: Type[TypeModel], - schema: Type[TypeSchema], - filter_info: list, -): - # TODO: do this on application startup or use the cache - relationship_paths = gather_relationship_paths(filter_info) - return gather_relationships( - entrypoint_model=model, - schema=schema, - relationship_paths=relationship_paths, - ) - - 
-def build_terminal_node_filter_expressions( - filter_item: Dict, - target_schema: Type[TypeSchema], - target_model: Type[TypeModel], - relationships_info: Dict[RelationshipPath, RelationshipFilteringInfo], -): - name: str = filter_item["name"] - if is_relationship_filter(name): - *relationship_path, field_name = name.split(RELATIONSHIP_SPLITTER) - relationship_info: RelationshipFilteringInfo = relationships_info[ - RELATIONSHIP_SPLITTER.join(relationship_path) - ] - model_column = get_model_column( - model=relationship_info.aliased_model, - schema=relationship_info.target_schema, - field_name=field_name, - ) - target_schema = relationship_info.target_schema - else: - field_name = name - model_column = get_model_column( - model=target_model, - schema=target_schema, - field_name=field_name, - ) - - schema_field = target_schema.__fields__[field_name] - - filter_operator = filter_item["op"] - custom_filter_expression: Callable = get_custom_filter_expression_callable( - schema_field=schema_field, - operator=filter_operator, - ) - if custom_filter_expression is None: - return build_filter_expression( - schema_field=schema_field, - model_column=model_column, - operator=get_operator( - model_column=model_column, - operator_name=filter_operator, - ), - value=filter_item["val"], - ) - - custom_call_result = custom_filter_expression( - schema_field=schema_field, - model_column=model_column, - value=filter_item["val"], - operator=filter_operator, - ) - if isinstance(custom_call_result, Sequence): - expected_len = 2 - if len(custom_call_result) != expected_len: - log.error( - "Invalid filter, returned sequence length is not %s: %s, len=%s", - expected_len, - custom_call_result, - len(custom_call_result), - ) - raise InvalidFilters(detail="Custom sql filter backend error.") - log.warning( - "Custom filter result of `[expr, [joins]]` is deprecated." - " Please return only filter expression from now on. 
" - "(triggered on schema field %s for filter operator %s on column %s)", - schema_field, - filter_operator, - model_column, - ) - custom_call_result = custom_call_result[0] - return custom_call_result - - -def build_filter_expressions( - filter_item: Dict, - target_schema: Type[TypeSchema], - target_model: Type[TypeModel], - relationships_info: Dict[RelationshipPath, RelationshipFilteringInfo], -) -> Union[BinaryExpression, BooleanClauseList]: - """ - Return sqla expressions. - - Builds sqlalchemy expression which can be use - in where condition: query(Model).where(build_filter_expressions(...)) - """ - if is_terminal_node(filter_item): - return build_terminal_node_filter_expressions( - filter_item=filter_item, - target_schema=target_schema, - target_model=target_model, - relationships_info=relationships_info, - ) - - if not isinstance(filter_item, dict): - log.warning("Could not build filtering expressions %s", locals()) - # dirty. refactor. - return not_(false()) - - sqla_logic_operators = { - "or": or_, - "and": and_, - "not": not_, - } - - if len(logic_operators := set(filter_item.keys())) > 1: - msg = ( - f"In each logic node expected one of operators: {set(sqla_logic_operators.keys())} " - f"but got {len(logic_operators)}: {logic_operators}" - ) - raise InvalidFilters(msg) - - if (logic_operator := logic_operators.pop()) not in set(sqla_logic_operators.keys()): - msg = f"Not found logic operator {logic_operator} expected one of {set(sqla_logic_operators.keys())}" - raise InvalidFilters(msg) - - op = sqla_logic_operators[logic_operator] - - if logic_operator == "not": - return op( - build_filter_expressions( - filter_item=filter_item[logic_operator], - target_schema=target_schema, - target_model=target_model, - relationships_info=relationships_info, - ), - ) - - expressions = [] - for filter_sub_item in filter_item[logic_operator]: - expressions.append( - build_filter_expressions( - filter_item=filter_sub_item, - target_schema=target_schema, - 
target_model=target_model, - relationships_info=relationships_info, - ), - ) - - return op(*expressions) - - -def create_filters_and_joins( - filter_info: list, - model: Type[TypeModel], - schema: Type[TypeSchema], -): - relationships_info = prepare_relationships_info( - model=model, - schema=schema, - filter_info=filter_info, - ) - expressions = build_filter_expressions( - filter_item={"and": filter_info}, - target_model=model, - target_schema=schema, - relationships_info=relationships_info, - ) - joins = [(info.aliased_model, info.join_column) for info in relationships_info.values()] - return expressions, joins diff --git a/fastapi_jsonapi/data_layers/filtering/tortoise_operation.py b/fastapi_jsonapi/data_layers/filtering/tortoise_operation.py deleted file mode 100644 index 83266c1b..00000000 --- a/fastapi_jsonapi/data_layers/filtering/tortoise_operation.py +++ /dev/null @@ -1,340 +0,0 @@ -""" -Previously used: '__' -""" -from typing import Protocol - - -def add_suffix(field_name: str, suffix: str, sep: str = "__") -> str: - """ - joins str - - :param field_name: - :param suffix: - :param sep: - :return: - """ - return "".join((field_name, sep, suffix)) - - -def type_op_any(field_name: str, type_op: str) -> str: - """ - used to filter on to many relationships - - :param field_name: - :param type_op: - :return: - """ - return field_name - - -def type_op_between(field_name: str, type_op: str) -> str: - """ - used to filter a field between two values - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "range") - - -def type_op_endswith(field_name: str, type_op: str) -> str: - """ - check if field ends with a string - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "endswith") - - -def type_op_iendswith(field_name: str, type_op: str) -> str: - """ - check if field ends with a string - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "iendswith") - - 
-def type_op_eq(field_name: str, type_op: str) -> str: - """ - check if field is equal to something - - :param field_name: - :param type_op: - :return: - """ - return field_name - - -def type_op_ge(field_name: str, type_op: str) -> str: - """ - check if field is greater than or equal to something - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "gte") - - -def type_op_gt(field_name: str, type_op: str) -> str: - """ - check if field is greater than to something - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "gt") - - -def type_op_has(field_name: str, type_op: str) -> str: - """ - used to filter on to one relationship - - :param field_name: - :param type_op: - :return: - """ - return field_name - - -def type_op_in_(field_name: str, type_op: str) -> str: - """ - check if field is in a list of values - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "in") - - -def type_op_is_(field_name: str, type_op: str) -> str: - """ - check if field is null. wtf - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "isnull") - - -def type_op_isnot(field_name: str, type_op: str) -> str: - """ - check if field is not null. 
wtf - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "not_isnull") - - -def type_op_le(field_name: str, type_op: str) -> str: - """ - check if field is less than or equal to something - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "lte") - - -def type_op_lt(field_name: str, type_op: str) -> str: - """ - check if field is less than to something - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "lt") - - -def type_op_match(field_name: str, type_op: str) -> str: - """ - check if field match against a string or pattern - - :param field_name: - :param type_op: - :return: - """ - return field_name - - -def type_op_ne(field_name: str, type_op: str) -> str: - """ - check if field is not equal to something - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "not") - - -def type_op_notilike(field_name: str, type_op: str) -> str: - """ - check if field does not contains a string (case insensitive) - - :param field_name: - :param type_op: - :return: - """ - return field_name - - -def type_op_notin_(field_name: str, type_op: str) -> str: - """ - check if field is not in a list of values - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "not_in") - - -def type_op_notlike(field_name: str, type_op: str) -> str: - """ - check if field does not contains a string - - :param field_name: - :param type_op: - :return: - """ - return field_name - - -def type_op_startswith(field_name: str, type_op: str) -> str: - """ - check if field starts with value - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "startswith") - - -def type_op_istartswith(field_name: str, type_op: str) -> str: - """ - check if field starts with a string (case insensitive) - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "istartswith") - - 
-def type_op_iequals(field_name: str, type_op: str) -> str: - """ - case insensitive equals - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "iexact") - - -def type_op_contains(field_name: str, type_op: str) -> str: - """ - field contains specified substring - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "contains") - - -def type_op_like(field_name: str, type_op: str) -> str: - """ - field contains specified substring - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "contains") - - -def type_op_icontains(field_name: str, type_op: str) -> str: - """ - case insensitive contains - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "icontains") - - -def type_op_ilike(field_name: str, type_op: str) -> str: - """ - case insensitive contains - - :param field_name: - :param type_op: - :return: - """ - return add_suffix(field_name, "icontains") - - -class ProcessTypeOperationFieldName(Protocol): - def __call__(self, field_name: str, type_op: str) -> str: - ... - - -filters_dict: dict[str, ProcessTypeOperationFieldName] = { - "any": type_op_any, - "between": type_op_between, - "endswith": type_op_endswith, - "iendswith": type_op_iendswith, - "eq": type_op_eq, - "ge": type_op_ge, - "gt": type_op_gt, - "has": type_op_has, - "in_": type_op_in_, - "is_": type_op_is_, - "isnot": type_op_isnot, - "le": type_op_le, - "lt": type_op_lt, - "match": type_op_match, - "ne": type_op_ne, - "notilike": type_op_notilike, - "notin_": type_op_notin_, - "notlike": type_op_notlike, - "startswith": type_op_startswith, - "istartswith": type_op_istartswith, - "iequals": type_op_iequals, - "contains": type_op_contains, - "like": type_op_like, - "icontains": type_op_icontains, - "ilike": type_op_ilike, -} - - -def prepare_field_name_for_filtering(field_name: str, type_op: str) -> str: - """ - Prepare fields for use in ORM. 
- - :param field_name: name of the field by which the filtering will be performed. - :param type_op: operation type. - :return: prepared name field. - """ - func = filters_dict.get(type_op) - if func: - field_name = func(field_name=field_name, type_op=type_op) - - return field_name diff --git a/fastapi_jsonapi/data_layers/filtering/tortoise_orm.py b/fastapi_jsonapi/data_layers/filtering/tortoise_orm.py deleted file mode 100644 index 2dd3ef4f..00000000 --- a/fastapi_jsonapi/data_layers/filtering/tortoise_orm.py +++ /dev/null @@ -1,174 +0,0 @@ -"""Tortoise filters creator.""" - -from typing import ( - Any, - Dict, - List, - Optional, - Tuple, - Type, - Union, -) - -from pydantic import BaseModel -from pydantic.fields import ModelField -from tortoise.expressions import Q -from tortoise.queryset import QuerySet - -from fastapi_jsonapi.data_layers.fields.enum import Enum -from fastapi_jsonapi.data_layers.filtering.tortoise_operation import prepare_field_name_for_filtering -from fastapi_jsonapi.data_layers.orm import DBORMOperandType -from fastapi_jsonapi.data_typing import TypeModel -from fastapi_jsonapi.exceptions import InvalidFilters, QueryError -from fastapi_jsonapi.jsonapi_typing import Filters -from fastapi_jsonapi.querystring import QueryStringManager - - -def prepare_filter_pair(field: Type[ModelField], field_name: str, type_op: str, value: Any) -> Tuple: - """Prepare filter.""" - name_field_q: str = prepare_field_name_for_filtering(field_name, type_op) - return name_field_q, value - - -class FilterTortoiseORM: - def __init__(self, model: TypeModel): - self.model = model - - def create_query(self, filter_q: Union[tuple, Q]) -> Q: - """Tortoise filter creation.""" - if isinstance(filter_q, tuple): - return Q(**{filter_q[0]: filter_q[1]}) - else: - return Q(filter_q) - - def orm_and_or( - self, - op: DBORMOperandType, - filters: list, - ) -> Union[None, QuerySet, Dict[str, Union[QuerySet, List[QuerySet]]]]: - """Filter for query to ORM.""" - if not filters: - 
return None - if op is DBORMOperandType.or_: - result_filter = None - for i_filter in filters: - i_filter = i_filter[0] if isinstance(i_filter, list) else i_filter # noqa: PLW2901 - if result_filter is None: - result_filter = self.create_query(i_filter) - else: - result_filter |= self.create_query(i_filter) - return result_filter - if op is DBORMOperandType.and_: - result_filter = None - for i_filter in filters: - i_filter = i_filter[0] if isinstance(i_filter, list) else i_filter # noqa: PLW2901 - if result_filter is None: - result_filter = self.create_query(i_filter) - else: - result_filter &= self.create_query(i_filter) - return result_filter - if op is DBORMOperandType.not_: - return ~Q(**{filters[0][0][0]: filters[0][0][1]}) - return None - - def filter_converter( - self, - schema: Type[BaseModel], - filters: Filters, - ) -> List: - """ - Make a list with filters, which can be used in the tortoise filter. - - :param schema: schemas schema of object. - :param filters: list of JSON API filters. - :return: list of filters, prepared for use in tortoise model. - :raises InvalidFilters: if the filter was created with an error. 
- """ - converted_filters: List = [] - for i_filter in filters: - if "or" in i_filter: - result = self.filter_converter(schema, i_filter["or"]) - converted_filters.append(self.orm_and_or(DBORMOperandType.or_, result)) - continue - elif "and" in i_filter: - result = self.filter_converter(schema, i_filter["and"]) - converted_filters.append(self.orm_and_or(DBORMOperandType.and_, result)) - continue - elif "not" in i_filter: - result = self.filter_converter(schema, [i_filter["not"]]) - converted_filters.append(self.orm_and_or(DBORMOperandType.not_, result)) - continue - model_fields = i_filter["name"].split(".") - name_field: str = model_fields[0] - if len(model_fields) > 1: - result = self.filter_converter( - schema.__fields__[name_field].type_, - [ - { - "name": ".".join(model_fields[1:]), - "op": i_filter["op"], - "val": i_filter["val"], - }, - ], - ) - converted_filters.append(result) - else: - val: Union[List[Any], Any] - field: ModelField = schema.__fields__[name_field] - if isinstance(i_filter["val"], list) and field.type_ is not list: - val = self._validate(i_filter, field) - else: - val, errors = field.validate(i_filter["val"], {}, loc=field.alias) - val = self.val_to_query(val) - if errors: - raise InvalidFilters(str(errors), parameter=field.alias) - - converted_filters.append(prepare_filter_pair(field, name_field, i_filter["op"], val)) - return converted_filters - - async def json_api_filter( - self, - query, - schema: Type[BaseModel], - query_params: QueryStringManager, - ) -> QuerySet: - """Make queries with filtering from request.""" - filters = self.filter_converter( - schema=schema, - filters=query_params.filters, - ) - for i_filter in filters: - query = query.filter(**{i_filter[0]: i_filter[1]}) - return query - - def val_to_query(self, val: Any) -> Any: - """Value to query.""" - if isinstance(val, Enum): - val = val.value - return val - - def _validate(self, json_api_filter: Dict[str, List[str]], model_filed: ModelField) -> List: - val = [] - for i_v 
in json_api_filter["val"]: - i_val, errors = model_filed.validate(i_v, {}, loc=model_filed.alias) - if errors: - raise InvalidFilters(str(errors), parameter=model_filed.alias) - i_val = self.val_to_query(i_val) - val.append(i_val) - return val - - def validate(self, filter_q: Union[None, Q, Dict[str, Union[Q, List[Q]]]]) -> Optional[Q]: - """ - Tortoise filter validation. - - :param filter_q: dict with filter body. - :return: validated filter. - :raises QueryError: if the field in the filter does not match the field in tortoise. - """ - if isinstance(filter_q, Q): - return Q(filter_q) - elif filter_q is None: - return None - else: - msg = "An unexpected argument for Q (result_filter={type})".format(type=type(filter_q)) - raise QueryError(msg) diff --git a/fastapi_jsonapi/data_layers/orm.py b/fastapi_jsonapi/data_layers/orm.py deleted file mode 100644 index 0ffee4bd..00000000 --- a/fastapi_jsonapi/data_layers/orm.py +++ /dev/null @@ -1,9 +0,0 @@ -"""ORM types enums.""" - -from fastapi_jsonapi.data_layers.fields.enum import Enum - - -class DBORMOperandType(str, Enum): - or_ = "or" - and_ = "and" - not_ = "not" diff --git a/fastapi_jsonapi/data_layers/shared.py b/fastapi_jsonapi/data_layers/shared.py deleted file mode 100644 index 113359f4..00000000 --- a/fastapi_jsonapi/data_layers/shared.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import TYPE_CHECKING, Tuple, Type, Union - -from fastapi_jsonapi.data_typing import TypeModel, TypeSchema - -if TYPE_CHECKING: - from fastapi_jsonapi.data_layers.filtering.sqlalchemy import Node as NodeSQLAlchemy - - -def create_filters_or_sorts( - model: Type[TypeModel], - filter_or_sort_info: Union[list, dict], - class_node: Union[Type["NodeSQLAlchemy"]], - schema: Type[TypeSchema], -) -> Tuple: - """ - Apply filters / sorts from filters / sorts information to base query - - :param model: the model of the node - :param filter_or_sort_info: current node filter_or_sort information - :param class_node: - :param schema: the resource - """ 
- filters_or_sorts = [] - joins = [] - for filter_or_sort in filter_or_sort_info: - filters_or_sort, join = class_node(model, filter_or_sort, schema).resolve() - filters_or_sorts.append(filters_or_sort) - joins.extend(join) - - return filters_or_sorts, joins diff --git a/fastapi_jsonapi/data_layers/sorting/__init__.py b/fastapi_jsonapi/data_layers/sorting/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/fastapi_jsonapi/data_layers/sorting/sqlalchemy.py b/fastapi_jsonapi/data_layers/sorting/sqlalchemy.py deleted file mode 100644 index 63632480..00000000 --- a/fastapi_jsonapi/data_layers/sorting/sqlalchemy.py +++ /dev/null @@ -1,172 +0,0 @@ -"""Helper to create sqlalchemy sortings according to filter querystring parameter""" -from typing import Any, List, Tuple, Type, Union - -from pydantic.fields import ModelField -from sqlalchemy.orm import DeclarativeMeta, InstrumentedAttribute, aliased -from sqlalchemy.sql.elements import BinaryExpression - -from fastapi_jsonapi.data_layers.shared import create_filters_or_sorts -from fastapi_jsonapi.data_typing import TypeModel, TypeSchema -from fastapi_jsonapi.exceptions import InvalidFilters, InvalidSort -from fastapi_jsonapi.schema import get_model_field, get_relationships -from fastapi_jsonapi.splitter import SPLIT_REL -from fastapi_jsonapi.utils.sqla import get_related_model_cls - -Sort = BinaryExpression -Join = List[Any] - -SortAndJoins = Tuple[ - Sort, - List[Join], -] - - -def create_sorts(model: Type[TypeModel], filter_info: Union[list, dict], schema: Type[TypeSchema]): - """ - Apply filters from filters information to base query. - - :params model: the model of the node. - :params filter_info: current node filter information. - :params schema: the resource. 
- """ - return create_filters_or_sorts(model, filter_info, Node, schema) - - -class Node(object): - """Helper to recursively create sorts with sqlalchemy according to sort querystring parameter""" - - def __init__(self, model: Type[TypeModel], sort_: dict, schema: Type[TypeSchema]): - """ - Initialize an instance of a filter node. - - :params model: an sqlalchemy model. - :params sort_: sorts information of the current node and deeper nodes. - :param schema: the serializer of the resource. - """ - self.model = model - self.sort_ = sort_ - self.schema = schema - - @classmethod - def create_sort(cls, schema_field: ModelField, model_column, order: str): - """ - Create sqlalchemy sort. - - :params schema_field: - :params model_column: column sqlalchemy - :params order: desc | asc (or custom) - :return: - """ - """ - Custom sqlachemy sorting logic can be created in a marshmallow field for any field - You can override existing ('asc', 'desc') or create new - then follow this pattern: - `__sql_sort_`. This method has to accept following params: - * marshmallow_field - marshmallow field instance - * model_column - sqlalchemy column instance - """ - try: - f = getattr(schema_field, f"_{order}_sql_sort_") - except AttributeError: - pass - else: - return f( - schema_field=schema_field, - model_column=model_column, - ) - return getattr(model_column, order)() - - def resolve(self) -> SortAndJoins: - """ - Create sort for a particular node of the sort tree. 
- """ - field = self.sort_.get("field", "") - if not hasattr(self.model, field) and SPLIT_REL not in field: - msg = "{} has no attribute {}".format(self.model.__name__, field) - raise InvalidSort(msg) - - if SPLIT_REL in field: - value = {"field": SPLIT_REL.join(field.split(SPLIT_REL)[1:]), "order": self.sort_["order"]} - alias = aliased(self.related_model) - joins = [[alias, self.column]] - node = Node(alias, value, self.related_schema) - filters, new_joins = node.resolve() - joins.extend(new_joins) - return filters, joins - - return ( - self.create_sort( - schema_field=self.schema.__fields__[self.name].type_, - model_column=self.column, - order=self.sort_["order"], - ), - [], - ) - - @property - def name(self) -> str: - """ - Return the name of the node or raise a BadRequest exception - - :return str: the name of the sort to sort on - """ - name = self.sort_.get("field") - - if name is None: - msg = "Can't find name of a sort" - raise InvalidFilters(msg) - - if SPLIT_REL in name: - name = name.split(SPLIT_REL)[0] - - if name not in self.schema.__fields__: - msg = "{} has no attribute {}".format(self.schema.__name__, name) - raise InvalidFilters(msg) - - return name - - @property - def column(self) -> InstrumentedAttribute: - """ - Get the column object. - - :return: the column to filter on - """ - field = self.name - - model_field = get_model_field(self.schema, field) - - try: - return getattr(self.model, model_field) - except AttributeError: - msg = "{} has no attribute {}".format(self.model.__name__, model_field) - raise InvalidFilters(msg) - - @property - def related_model(self) -> DeclarativeMeta: - """ - Get the related model of a relationship field. - - :return: the related model. 
- """ - relationship_field = self.name - - if relationship_field not in get_relationships(self.schema): - msg = "{} has no relationship attribute {}".format(self.schema.__name__, relationship_field) - raise InvalidFilters(msg) - - return get_related_model_cls(self.model, get_model_field(self.schema, relationship_field)) - - @property - def related_schema(self) -> Type[TypeSchema]: - """ - Get the related schema of a relationship field. - - :return: the related schema - """ - relationship_field = self.name - - if relationship_field not in get_relationships(self.schema): - msg = "{} has no relationship attribute {}".format(self.schema.__name__, relationship_field) - raise InvalidFilters(msg) - - return self.schema.__fields__[relationship_field].type_ diff --git a/fastapi_jsonapi/data_layers/sorting/tortoise_orm.py b/fastapi_jsonapi/data_layers/sorting/tortoise_orm.py deleted file mode 100644 index 59f43f7c..00000000 --- a/fastapi_jsonapi/data_layers/sorting/tortoise_orm.py +++ /dev/null @@ -1,28 +0,0 @@ -from typing import Dict, List - -from tortoise.queryset import QuerySet - - -class SortTortoiseORM: - @classmethod - def sort( - cls, - query: QuerySet, - query_params_sorting: List[Dict[str, str]], - default_sort: str = "", - ) -> QuerySet: - """ - Реализация динамической сортировки для query. 
- - :param query: запрос - :param query_params_sorting: параметры от клиента - :param default_sort: дефолтная сортировка, например "-id" или `sort=-id,created_at` - """ - if query_params_sorting: - for i_sort in query_params_sorting: - i_order = "" if i_sort["order"] == "asc" else "-" - i_field = "{order}{field}".format(order=i_order, field=i_sort["field"]) - query = query.order_by(i_field) - elif default_sort: - query = query.order_by(default_sort) - return query diff --git a/examples/api_for_sqlalchemy/utils/sqlalchemy/fields/__init__.py b/fastapi_jsonapi/data_layers/sqla/__init__.py similarity index 100% rename from examples/api_for_sqlalchemy/utils/sqlalchemy/fields/__init__.py rename to fastapi_jsonapi/data_layers/sqla/__init__.py diff --git a/fastapi_jsonapi/data_layers/sqla/base_model.py b/fastapi_jsonapi/data_layers/sqla/base_model.py new file mode 100644 index 00000000..a8eaeac2 --- /dev/null +++ b/fastapi_jsonapi/data_layers/sqla/base_model.py @@ -0,0 +1,240 @@ +import logging +from typing import Any, Iterable, Literal, Optional, Type, Union + +from sqlalchemy import and_, delete, func, select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.sql import Select, column, distinct +from sqlalchemy.sql.elements import UnaryExpression +from sqlalchemy.sql.expression import BinaryExpression + +from fastapi_jsonapi.data_layers.sqla.query_building import RelationshipInfo +from fastapi_jsonapi.data_typing import TypeModel +from fastapi_jsonapi.exceptions import BadRequest, InternalServerError, ObjectNotFound + +log = logging.getLogger(__name__) + + +class BaseSQLA: + @classmethod + def _check_field_exists( + cls, + model: TypeModel, + key: str, + ) -> None: + try: + getattr(model, key) + except AttributeError as ex: + err_message = f"No fields `{key}` on `{type(model).__name__}`. Make sure schema conforms model." 
+ log.exception(err_message, exc_info=ex) + raise InternalServerError( + detail=err_message, + pointer="/data", + ) + + @classmethod + def _fill( + cls, + model: TypeModel, + **kwargs, + ) -> None: + for key, value in kwargs.items(): + cls._check_field_exists(model, key) + setattr(model, key, value) + + @classmethod + async def _save( + cls, + session: AsyncSession, + model: TypeModel, + action_trigger: Literal["update", "create", "delete"], + resource_type: str, + commit: bool = True, + id_: Optional[str] = None, + **kwargs, + ) -> TypeModel: + try: + if not commit: + await session.flush() + return model + + await session.commit() + return model + except IntegrityError as ex: + err_message = f"Could not {action_trigger} object" + log.exception("%s with data %s", err_message, kwargs, exc_info=ex) + raise BadRequest( + detail=err_message, + pointer="/data", + meta={ + "type": resource_type, + "id": id_, + }, + ) + except Exception as ex: + err_message = f"Got an error {ex.__class__.__name__} during updating obj {kwargs} data in DB" + log.exception(err_message, exc_info=ex) + await session.rollback() + raise InternalServerError( + detail=err_message, + pointer="/data", + meta={ + "type": resource_type, + "id": id_, + }, + ) + + @classmethod + async def all( + cls, + session: AsyncSession, + stmt: Select, + ) -> Union[Type[TypeModel], Any]: + return (await session.execute(stmt)).unique().scalars().all() + + @classmethod + async def count( + cls, + session: AsyncSession, + stmt: Select, + ) -> int: + stmt = select(func.count(distinct(column("id")))).select_from(stmt.subquery()) + return (await session.execute(stmt)).scalar_one() + + @classmethod + async def create( + cls, + session: AsyncSession, + model: TypeModel, + resource_type: str, + commit: bool = True, + id_: Optional[str] = None, + **kwargs, + ) -> TypeModel: + cls._fill(model, **kwargs) + session.add(model) + return await cls._save( + session=session, + model=model, + action_trigger="create", + 
resource_type=resource_type, + commit=commit, + id_=id_, + **kwargs, + ) + + @classmethod + async def delete( + cls, + session: AsyncSession, + model: TypeModel, + filters: list[Union[BinaryExpression, bool]], + resource_type: str, + commit: bool = True, + id_: Optional[str] = None, + **kwargs, + ) -> None: + await session.execute(delete(model).where(*filters)) + await cls._save( + session=session, + model=model, + action_trigger="delete", + resource_type=resource_type, + commit=commit, + id_=id_, + **kwargs, + ) + + @classmethod + async def one_or_raise( + cls, + session: AsyncSession, + model: TypeModel, + filters: list[Union[BinaryExpression, bool]], + stmt: Select, + ) -> Union[TypeModel, Any]: + result = (await session.execute(stmt)).scalar_one_or_none() + if result is None: + compiled_conditions = and_(*filters).compile( + dialect=session.bind.dialect, + compile_kwargs={"literal_binds": True}, + ) + raise ObjectNotFound( + detail=f"Resource {model.__name__} `{compiled_conditions}` not found", + ) + return result + + @classmethod + def query( + cls, + model: TypeModel, + fields: Optional[list] = None, + select_from: Optional[TypeModel] = None, + distinct_: bool = False, + filters: Optional[list[Union[BinaryExpression, bool]]] = None, + for_update: Optional[dict] = None, + join: Optional[list[tuple[TypeModel, Any]]] = None, + jsonapi_join: Optional[list[RelationshipInfo]] = None, + number: Optional[int] = None, + options: Iterable = (), + order: Optional[Union[str, UnaryExpression]] = None, + size: Optional[int] = None, + stmt: Optional[Select] = None, + ) -> Select: + if stmt is None: + stmt = select(model) if fields is None else select(*fields) + + if select_from is not None: + stmt = stmt.select_from(select_from) + + if filters is not None: + stmt = stmt.where(*filters) + + if options: + stmt = stmt.options(*options) + + if for_update is not None: + stmt = stmt.with_for_update(**for_update) + + if order is not None: + stmt = stmt.order_by(*order) + + if 
jsonapi_join: + for relationship_info in jsonapi_join: + stmt = stmt.join(relationship_info.aliased_model, relationship_info.join_column) + + if size not in [0, None]: + stmt = stmt.limit(size) + number = number or 1 + stmt = stmt.offset((number - 1) * size) + + if distinct_: + stmt = stmt.distinct() + + if join is not None: + for join_model, predicate in join: + stmt = stmt.join(join_model, predicate) + + return stmt + + @classmethod + async def update( + cls, + session: AsyncSession, + model: TypeModel, + resource_type: str, + commit: bool = True, + id_: Optional[str] = None, + **kwargs, + ) -> TypeModel: + cls._fill(model, **kwargs) + session.add(model) + return await cls._save( + session=session, + model=model, + action_trigger="update", + resource_type=resource_type, + commit=commit, + id_=id_, + **kwargs, + ) diff --git a/fastapi_jsonapi/data_layers/sqla_orm.py b/fastapi_jsonapi/data_layers/sqla/orm.py similarity index 50% rename from fastapi_jsonapi/data_layers/sqla_orm.py rename to fastapi_jsonapi/data_layers/sqla/orm.py index 3854605c..783b9c6c 100644 --- a/fastapi_jsonapi/data_layers/sqla_orm.py +++ b/fastapi_jsonapi/data_layers/sqla/orm.py @@ -1,74 +1,66 @@ """This module is a CRUD interface between resource managers and the sqlalchemy ORM""" + +from __future__ import annotations + import logging -from typing import TYPE_CHECKING, Any, Iterable, List, Literal, Optional, Tuple, Type, Union +from typing import Any, Iterable, Literal, Optional, Type -from sqlalchemy import delete, func, select -from sqlalchemy.exc import DBAPIError, IntegrityError, MissingGreenlet, NoResultFound +from pydantic import BaseModel +from sqlalchemy.exc import MissingGreenlet from sqlalchemy.ext.asyncio import AsyncSession, AsyncSessionTransaction -from sqlalchemy.inspection import inspect -from sqlalchemy.orm import joinedload, selectinload +from sqlalchemy.orm import joinedload, load_only, selectinload from sqlalchemy.orm.attributes import InstrumentedAttribute from 
sqlalchemy.orm.collections import InstrumentedList -from sqlalchemy.sql import column, distinct +from sqlalchemy.sql import Select +from sqlalchemy.sql.expression import BinaryExpression -from fastapi_jsonapi import BadRequest from fastapi_jsonapi.data_layers.base import BaseDataLayer -from fastapi_jsonapi.data_layers.filtering.sqlalchemy import ( - create_filters_and_joins, +from fastapi_jsonapi.data_layers.sqla.base_model import BaseSQLA +from fastapi_jsonapi.data_layers.sqla.query_building import ( + build_filter_expressions, + build_sort_expressions, + prepare_relationships_info, + relationships_info_storage, ) -from fastapi_jsonapi.data_layers.sorting.sqlalchemy import create_sorts -from fastapi_jsonapi.data_typing import TypeModel, TypeSchema +from fastapi_jsonapi.data_typing import TypeModel from fastapi_jsonapi.exceptions import ( - HTTPException, InternalServerError, InvalidInclude, ObjectNotFound, RelatedObjectNotFound, RelationNotFound, ) -from fastapi_jsonapi.querystring import PaginationQueryStringManager, QueryStringManager +from fastapi_jsonapi.querystring import QueryStringManager from fastapi_jsonapi.schema import ( BaseJSONAPIItemInSchema, - BaseJSONAPIRelationshipDataToManySchema, - BaseJSONAPIRelationshipDataToOneSchema, - get_model_field, - get_related_schema, ) -from fastapi_jsonapi.schema_base import RelationshipInfo -from fastapi_jsonapi.splitter import SPLIT_REL -from fastapi_jsonapi.utils.sqla import get_related_model_cls - -if TYPE_CHECKING: - from pydantic import BaseModel as PydanticBaseModel - from sqlalchemy.sql import Select +from fastapi_jsonapi.storages.models_storage import models_storage +from fastapi_jsonapi.storages.schemas_storage import schemas_storage +from fastapi_jsonapi.views import RelationshipRequestInfo log = logging.getLogger(__name__) -ModelTypeOneOrMany = Union[TypeModel, list[TypeModel]] -ActionTrigger = Literal["create", "update"] - class SqlalchemyDataLayer(BaseDataLayer): """Sqlalchemy data layer""" def 
__init__( self, - schema: Type[TypeSchema], model: Type[TypeModel], session: AsyncSession, + resource_type: str, disable_collection_count: bool = False, default_collection_count: int = -1, id_name_field: Optional[str] = None, url_id_field: str = "id", eagerload_includes: bool = True, - query: Optional["Select"] = None, + query: Optional[Select] = None, auto_convert_id_to_column_type: bool = True, **kwargs: Any, ): """ Initialize an instance of SqlalchemyDataLayer. - :param schema: :param model: :param disable_collection_count: :param default_collection_count: @@ -80,8 +72,8 @@ def __init__( :param kwargs: initialization parameters of an SqlalchemyDataLayer instance """ super().__init__( - schema=schema, model=model, + resource_type=resource_type, url_id_field=url_id_field, id_name_field=id_name_field, disable_collection_count=disable_collection_count, @@ -89,36 +81,43 @@ def __init__( **kwargs, ) + self._base_sql = BaseSQLA() + self._query = query + self.session = session self.eagerload_includes_ = eagerload_includes - self._query = query self.auto_convert_id_to_column_type = auto_convert_id_to_column_type self.transaction: Optional[AsyncSessionTransaction] = None - async def atomic_start(self, previous_dl: Optional["SqlalchemyDataLayer"] = None): + async def atomic_start( + self, + previous_dl: Optional[SqlalchemyDataLayer] = None, + ): self.is_atomic = True if previous_dl: self.session = previous_dl.session if previous_dl.transaction: self.transaction = previous_dl.transaction - return + return None self.transaction = self.session.begin() await self.transaction.start() - async def atomic_end(self, success: bool = True): + async def atomic_end( + self, + success: bool = True, + exception: Optional[Exception] = None, + ): if success: await self.transaction.commit() else: await self.transaction.rollback() - async def save(self): - if self.is_atomic: - await self.session.flush() - else: - await self.session.commit() - - def prepare_id_value(self, col: 
InstrumentedAttribute, value: Any) -> Any: + def prepare_id_value( + self, + col: InstrumentedAttribute, + value: Any, + ) -> Any: """ Convert value to the required python type. @@ -137,25 +136,12 @@ def prepare_id_value(self, col: InstrumentedAttribute, value: Any) -> Any: return value - async def link_relationship_object( - self, + @classmethod + async def check_object_has_relationship_or_raise( + cls, obj: TypeModel, relation_name: str, - related_data: Optional[ModelTypeOneOrMany], - action_trigger: ActionTrigger, ): - """ - Links target object with relationship object or objects - - :param obj: - :param relation_name: - :param related_data: - :param action_trigger: indicates which one operation triggered relationships applying - """ - # todo: relation name may be different? - setattr(obj, relation_name, related_data) - - async def check_object_has_relationship_or_raise(self, obj: TypeModel, relation_name: str): """ Checks that there is relationship with relation_name in obj @@ -173,46 +159,12 @@ async def check_object_has_relationship_or_raise(self, obj: TypeModel, relation_ parameter="include", ) - async def get_related_data_to_link( - self, - related_model: TypeModel, - relationship_info: RelationshipInfo, - relationship_in: Union[ - BaseJSONAPIRelationshipDataToOneSchema, - BaseJSONAPIRelationshipDataToManySchema, - ], - ) -> Optional[ModelTypeOneOrMany]: - """ - Retrieves object or objects to link from database - - :param related_model: - :param relationship_info: - :param relationship_in: - """ - if not relationship_in.data: - return [] if relationship_info.many else None - - if relationship_info.many: - assert isinstance(relationship_in, BaseJSONAPIRelationshipDataToManySchema) - return await self.get_related_objects_list( - related_model=related_model, - related_id_field=relationship_info.id_field_name, - ids=[r.id for r in relationship_in.data], - ) - - assert isinstance(relationship_in, BaseJSONAPIRelationshipDataToOneSchema) - return await 
self.get_related_object( - related_model=related_model, - related_id_field=relationship_info.id_field_name, - id_value=relationship_in.data.id, - ) - async def apply_relationships( self, obj: TypeModel, data_create: BaseJSONAPIItemInSchema, - action_trigger: ActionTrigger, - ) -> None: + action_trigger: Literal["create", "update"], + ) -> tuple[dict[str, Optional[TypeModel]], dict[str, list[TypeModel]]]: """ Handles relationships passed in request @@ -221,42 +173,49 @@ async def apply_relationships( :param action_trigger: indicates which one operation triggered relationships applying :return: """ - relationships: "PydanticBaseModel" = data_create.relationships + to_one, to_many = {}, {} + relationships: BaseModel = data_create.relationships if relationships is None: - return + return to_one, to_many - schema_fields = self.schema.__fields__ or {} for relation_name, relationship_in in relationships: if relationship_in is None: continue - field = schema_fields.get(relation_name) - if field is None: - # should not happen if schema is built properly - # there may be an error if schema and schema_in are different - log.warning("field for %s in schema %s not found", relation_name, self.schema.__name__) + relationship_info = schemas_storage.get_relationship_info( + resource_type=self.resource_type, + operation_type=action_trigger, + field_name=relation_name, + ) + if relationship_info is None: + log.warning("Not found relationship %s for resource_type %s", relation_name, self.resource_type) continue - if "relationship" not in field.field_info.extra: - log.warning( - "relationship info for %s in schema %s extra not found", - relation_name, - self.schema.__name__, + related_model = models_storage.get_model(relationship_info.resource_type) + related_data = [] + if relationship_in.data: + related_data = await self.get_related_objects( + related_model=related_model, + related_id_field=relationship_info.id_field_name, + ids=[r.id for r in relationship_in.data] if 
relationship_info.many else [relationship_in.data.id], ) - continue - - relationship_info: RelationshipInfo = field.field_info.extra["relationship"] - related_model = get_related_model_cls(type(obj), relation_name) - related_data = await self.get_related_data_to_link( - related_model=related_model, - relationship_info=relationship_info, - relationship_in=relationship_in, - ) await self.check_object_has_relationship_or_raise(obj, relation_name) - await self.link_relationship_object(obj, relation_name, related_data, action_trigger) - async def create_object(self, data_create: BaseJSONAPIItemInSchema, view_kwargs: dict) -> TypeModel: + if relationship_info.many: + to_many[relation_name] = related_data + elif related_data: + related_data, *_ = related_data + to_one[relation_name] = related_data + else: + to_one[relation_name] = None + return to_one, to_many + + async def create_object( + self, + data_create: BaseJSONAPIItemInSchema, + view_kwargs: dict, + ) -> TypeModel: """ Create an object through sqlalchemy. 
@@ -265,120 +224,213 @@ async def create_object(self, data_create: BaseJSONAPIItemInSchema, view_kwargs: :return: """ log.debug("Create object with data %s", data_create) - # todo: pydantic v2 model_dump() - model_kwargs = data_create.attributes.dict() - model_kwargs = self._apply_client_generated_id(data_create, model_kwargs=model_kwargs) - await self.before_create_object(model_kwargs=model_kwargs, view_kwargs=view_kwargs) + model_kwargs = self._apply_client_generated_id(data_create, data_create.attributes.model_dump()) + await self.before_create_object(model_kwargs, view_kwargs) obj = self.model(**model_kwargs) - await self.apply_relationships(obj, data_create, action_trigger="create") - - self.session.add(obj) - try: - await self.save() - except IntegrityError: - log.exception("Could not create object with data create %s", data_create) - msg = "Object creation error" - raise BadRequest(msg, pointer="/data") - except DBAPIError: - log.exception("Could not create object with data create %s", data_create) - msg = "Object creation error" - raise HTTPException(msg, pointer="/data") - except Exception as e: - log.exception("Error creating object with data create %s", data_create) - await self.session.rollback() - msg = f"Object creation error: {e}" - raise HTTPException(msg, pointer="/data") - - await self.after_create_object(obj=obj, model_kwargs=model_kwargs, view_kwargs=view_kwargs) + to_one, to_many = await self.apply_relationships(obj, data_create, "create") + model_kwargs.update({**to_one, **to_many}) + obj = await self._base_sql.create( + session=self.session, + model=obj, + resource_type=self.resource_type, + commit=not self.is_atomic, + id_=view_kwargs.get(self.url_id_field), + **model_kwargs, + ) + await self.after_create_object(obj, model_kwargs, view_kwargs) return obj - def get_object_id_field_name(self): - """ - compound key may cause errors + def get_fields_options( + self, + resource_type: str, + qs: QueryStringManager, + required_to_load: 
Optional[set] = None, + ) -> set: + required_to_load = required_to_load or set() + + if resource_type not in qs.fields: + return set() + + # empty str means skip all attributes + if "" not in qs.fields[resource_type]: + required_to_load.update(field_name for field_name in qs.fields[resource_type]) + + return self.get_load_only_options( + resource_type=resource_type, + field_names=required_to_load, + ) - :return: - """ - return self.id_name_field or inspect(self.model).primary_key[0].key + @staticmethod + def get_load_only_options( + resource_type: str, + field_names: Iterable[str], + ) -> set: + model = models_storage.get_model(resource_type) + options = { + load_only( + getattr( + model, + models_storage.get_model_id_field_name(resource_type), + ), + ), + } - async def get_object(self, view_kwargs: dict, qs: Optional[QueryStringManager] = None) -> TypeModel: + for field_name in field_names: + options.add(load_only(getattr(model, field_name))) + + return options + + def get_relationship_request_filters( + self, + model_id_field: InstrumentedAttribute, + parent_obj_id: Any, + parent_resource_type: str, + relationship_name: str, + ) -> list[BinaryExpression]: + parent_model = models_storage.get_model(parent_resource_type) + parent_id_field = models_storage.get_object_id_field(parent_resource_type) + parent_relationship_field = getattr(parent_model, relationship_name) + info = schemas_storage.get_relationship_info( + resource_type=parent_resource_type, + operation_type="get", + field_name=relationship_name, + ) + stmt = self._base_sql.query( + model=self.model, + fields=[model_id_field], + select_from=parent_model, + filters=[parent_id_field == parent_obj_id], + size=None if info.many else 1, + join=[(self.model, parent_relationship_field)], + ) + return [model_id_field.in_(stmt)] + + async def get_object( + self, + view_kwargs: dict, + qs: Optional[QueryStringManager] = None, + relationship_request_info: Optional[RelationshipRequestInfo] = None, + ) -> TypeModel: """ 
Retrieve an object through sqlalchemy. :param view_kwargs: kwargs from the resource view :param qs: + :param relationship_request_info: :return DeclarativeMeta: an object from sqlalchemy """ await self.before_get_object(view_kwargs) - filter_field = self.get_object_id_field() - filter_value = view_kwargs[self.url_id_field] - - query = self.retrieve_object_query(view_kwargs, filter_field, filter_value) + model_id_field = models_storage.get_object_id_field(self.resource_type) + filter_value = self.prepare_id_value(model_id_field, view_kwargs[self.url_id_field]) + options = set() if qs is not None: - query = self.eagerload_includes(query, qs) + options.update(self.eagerload_includes(qs)) + options.update( + self.get_fields_options( + resource_type=self.resource_type, + qs=qs, + required_to_load=set(view_kwargs.get("required_to_load", set())), + ), + ) - try: - obj = (await self.session.execute(query)).scalar_one() - except NoResultFound: - msg = f"Resource {self.model.__name__} `{filter_value}` not found" - raise ObjectNotFound( - msg, - parameter=self.url_id_field, + if relationship_request_info is None: + filters = [model_id_field == filter_value] + else: + filters = self.get_relationship_request_filters( + model_id_field=model_id_field, + parent_obj_id=filter_value, + parent_resource_type=relationship_request_info.parent_resource_type, + relationship_name=relationship_request_info.relationship_name, ) - await self.after_get_object(obj, view_kwargs) + query = self._base_sql.query( + model=self.model, + filters=filters, + options=options, + stmt=self._query, + ) + obj = await self._base_sql.one_or_raise( + session=self.session, + model=self.model, + filters=[model_id_field == filter_value], + stmt=query, + ) + await self.after_get_object(obj, view_kwargs) return obj - async def get_collection_count(self, query: "Select", qs: QueryStringManager, view_kwargs: dict) -> int: - """ - Returns number of elements for this collection - - :param query: SQLAlchemy query - 
:param qs: QueryString - :param view_kwargs: view kwargs - :return: - """ - if self.disable_collection_count is True: - return self.default_collection_count - - count_query = select(func.count(distinct(column("id")))).select_from(query.subquery()) - return (await self.session.execute(count_query)).scalar_one() - - async def get_collection(self, qs: QueryStringManager, view_kwargs: Optional[dict] = None) -> Tuple[int, list]: + async def get_collection( + self, + qs: QueryStringManager, + view_kwargs: Optional[dict] = None, + relationship_request_info: Optional[RelationshipRequestInfo] = None, + ) -> tuple[int, list]: """ Retrieve a collection of objects through sqlalchemy. :param qs: a querystring manager to retrieve information from url. :param view_kwargs: kwargs from the resource view. + :param relationship_request_info: indicates that method was called in fetch relationship request and + contains some related data :return: the number of object and the list of objects. """ view_kwargs = view_kwargs or {} - await self.before_get_collection(qs, view_kwargs) + relationship_paths = prepare_relationships_info( + model=self.model, + schema=self.schema, + resource_type=self.resource_type, + filter_info=qs.filters, + sorting_info=qs.sorts, + ) + relationships_info = [ + relationships_info_storage.get_info(self.resource_type, relationship_path) + for relationship_path in relationship_paths + ] - query = self.query(view_kwargs) - - if filters_qs := qs.filters: - query = self.filter_query(query, filters_qs) - - if sorts := qs.get_sorts(schema=self.schema): - query = self.sort_query(query, sorts) - - objects_count = await self.get_collection_count(query, qs, view_kwargs) - + options = self.get_fields_options(self.resource_type, qs) if self.eagerload_includes_: - query = self.eagerload_includes(query, qs) + options.update(self.eagerload_includes(qs)) + + filters = self.get_filter_expressions(qs) or [] + if relationship_request_info is not None: + model_id_field = 
models_storage.get_object_id_field(self.resource_type) + filters.extend( + self.get_relationship_request_filters( + model_id_field=model_id_field, + parent_obj_id=self.prepare_id_value(model_id_field, relationship_request_info.parent_obj_id), + parent_resource_type=relationship_request_info.parent_resource_type, + relationship_name=relationship_request_info.relationship_name, + ), + ) - query = self.paginate_query(query, qs.pagination) + query = self._base_sql.query( + model=self.model, + filters=filters, + jsonapi_join=relationships_info, + number=qs.pagination.number, + options=options, + order=self.get_sort_expressions(qs), + size=qs.pagination.size, + stmt=self._query, + ) + collection = await self._base_sql.all( + session=self.session, + stmt=query, + ) - collection = (await self.session.execute(query)).unique().scalars().all() + objects_count = self.default_collection_count + if not self.disable_collection_count: + objects_count = await self._base_sql.count( + session=self.session, + stmt=query, + ) collection = await self.after_get_collection(collection, qs, view_kwargs) - return objects_count, list(collection) async def update_object( @@ -395,58 +447,28 @@ async def update_object( :param view_kwargs: kwargs from the resource view. :return: True if object have changed else False. """ - new_data = data_update.attributes.dict(exclude_unset=True) - - await self.apply_relationships(obj, data_update, action_trigger="update") - - await self.before_update_object(obj, model_kwargs=new_data, view_kwargs=view_kwargs) - - missing = object() - - has_updated = False - for field_name, new_value in new_data.items(): - # TODO: get field alias (if present) and get attribute by alias (rarely used, but required) - - if (old_value := getattr(obj, field_name, missing)) is missing: - log.warning("No field %r on %s. 
Make sure schema conforms model.", field_name, type(obj)) - continue - - if old_value != new_value: - setattr(obj, field_name, new_value) - has_updated = True - try: - await self.save() - except IntegrityError: - log.exception("Could not update object with data update %s", data_update) - msg = "Object update error" - raise BadRequest( - msg, - pointer="/data", - meta={ - "type": self.type_, - "id": view_kwargs.get(self.url_id_field), - }, - ) - except DBAPIError as e: - await self.session.rollback() - - err_message = f"Got an error {e.__class__.__name__} during updating obj {view_kwargs} data in DB" - log.error(err_message, exc_info=e) - - raise InternalServerError( - detail=err_message, - pointer="/data", - meta={ - "type": self.type_, - "id": view_kwargs.get(self.url_id_field), - }, - ) - - await self.after_update_object(obj=obj, model_kwargs=new_data, view_kwargs=view_kwargs) + new_data = data_update.attributes.model_dump(exclude_unset=True) + to_one, to_many = await self.apply_relationships(obj, data_update, "update") + await self.before_update_object(obj, new_data, view_kwargs) + + new_data.update({**to_one, **to_many}) + obj = await self._base_sql.update( + session=self.session, + model=obj, + resource_type=self.resource_type, + commit=not self.is_atomic, + id_=view_kwargs.get(self.url_id_field), + **new_data, + ) - return has_updated + await self.after_update_object(obj, new_data, view_kwargs) + return obj - async def delete_object(self, obj: TypeModel, view_kwargs: dict): + async def delete_object( + self, + obj: TypeModel, + view_kwargs: dict, + ): """ Delete an object through sqlalchemy. @@ -454,40 +476,35 @@ async def delete_object(self, obj: TypeModel, view_kwargs: dict): :param view_kwargs: kwargs from the resource view. 
""" await self.before_delete_object(obj, view_kwargs) - stmt = delete(self.model).where(self.model.id == obj.id) - try: - await self.session.execute(stmt) - await self.save() - except DBAPIError as e: - await self.session.rollback() - - err_message = f"Got an error {e.__class__.__name__} deleting object {view_kwargs}" - log.error(err_message, exc_info=e) - - raise InternalServerError( - detail=err_message, - pointer="/data", - meta={ - "type": self.type_, - "id": view_kwargs.get(self.url_id_field), - }, - ) + await self._base_sql.delete( + session=self.session, + model=self.model, + filters=[self.model.id == obj.id], + resource_type=self.resource_type, + commit=not self.is_atomic, + id_=view_kwargs.get(self.url_id_field), + **view_kwargs, + ) await self.after_delete_object(obj, view_kwargs) - async def delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def delete_objects( + self, + objects: list[TypeModel], + view_kwargs: dict, + ): await self.before_delete_objects(objects, view_kwargs) - query = delete(self.model).filter(self.model.id.in_((obj.id for obj in objects))) - try: - await self.session.execute(query) - await self.save() - except DBAPIError as e: - await self.session.rollback() - raise InternalServerError( - detail=f"Got an error {e.__class__.__name__} during delete data from DB: {e!s}", - ) + await self._base_sql.delete( + session=self.session, + model=self.model, + filters=[self.model.id.in_((obj.id for obj in objects))], + resource_type=self.resource_type, + commit=not self.is_atomic, + id_=view_kwargs.get(self.url_id_field), + **view_kwargs, + ) await self.after_delete_objects(objects, view_kwargs) @@ -507,7 +524,6 @@ async def create_relationship( :param view_kwargs: kwargs from the resource view. :return: True if relationship have changed else False. 
""" - pass async def get_relationship( self, @@ -515,7 +531,7 @@ async def get_relationship( related_type_: str, related_id_field: str, view_kwargs: dict, - ) -> Tuple[Any, Any]: + ) -> tuple[Any, Any]: """ Get a relationship. @@ -530,8 +546,7 @@ async def get_relationship( obj = await self.get_object(view_kwargs) if obj is None: - filter_value = view_kwargs[self.url_id_field] - msg = f"{self.model.__name__}: {filter_value} not found" + msg = f"{self.model.__name__}: {view_kwargs[self.url_id_field]} not found" raise ObjectNotFound( msg, parameter=self.url_id_field, @@ -541,9 +556,7 @@ async def get_relationship( msg = f"{obj.__class__.__name__} has no attribute {relationship_field}" raise RelationNotFound(msg) - related_objects = getattr(obj, relationship_field) - - if related_objects is None: + if (related_objects := getattr(obj, relationship_field)) is None: return obj, related_objects await self.after_get_relationship( @@ -557,8 +570,7 @@ async def get_relationship( if isinstance(related_objects, InstrumentedList): return obj, [{"type": related_type_, "id": getattr(obj_, related_id_field)} for obj_ in related_objects] - else: - return obj, {"type": related_type_, "id": getattr(related_objects, related_id_field)} + return obj, {"type": related_type_, "id": getattr(related_objects, related_id_field)} async def update_relationship( self, @@ -568,7 +580,6 @@ async def update_relationship( view_kwargs: dict, ) -> bool: """ - Update a relationship :param json_data: the request params. @@ -594,71 +605,9 @@ async def delete_relationship( :param view_kwargs: kwargs from the resource view. 
""" - def get_related_model_query_base( - self, - related_model: Type[TypeModel], - ) -> "Select": - """ - Prepare sql query (statement) to fetch related model - - :param related_model: - :return: - """ - return select(related_model) - - def get_related_object_query( - self, - related_model: Type[TypeModel], - related_id_field: str, - id_value: str, - ): - id_field = getattr(related_model, related_id_field) - id_value = self.prepare_id_value(id_field, id_value) - stmt: "Select" = self.get_related_model_query_base(related_model) - return stmt.where(id_field == id_value) - - def get_related_objects_list_query( + async def get_related_objects( self, - related_model: Type[TypeModel], - related_id_field: str, - ids: list[str], - ) -> Tuple["Select", list[str]]: - id_field = getattr(related_model, related_id_field) - prepared_ids = [self.prepare_id_value(id_field, _id) for _id in ids] - stmt: "Select" = self.get_related_model_query_base(related_model) - return stmt.where(id_field.in_(prepared_ids)), prepared_ids - - async def get_related_object( - self, - related_model: Type[TypeModel], - related_id_field: str, - id_value: str, - ) -> TypeModel: - """ - Get related object. 
- - :param related_model: SQLA ORM model class - :param related_id_field: id field of the related model (usually it's `id`) - :param id_value: related object id value - :return: a related SQLA ORM object - """ - stmt = self.get_related_object_query( - related_model=related_model, - related_id_field=related_id_field, - id_value=id_value, - ) - - try: - related_object = (await self.session.execute(stmt)).scalar_one() - except NoResultFound: - msg = f"{related_model.__name__}.{related_id_field}: {id_value} not found" - raise RelatedObjectNotFound(msg) - - return related_object - - async def get_related_objects_list( - self, - related_model: Type[TypeModel], + related_model: TypeModel, related_id_field: str, ids: list[str], ) -> list[TypeModel]: @@ -670,100 +619,85 @@ async def get_related_objects_list( :param ids: :return: """ - stmt, ids = self.get_related_objects_list_query( - related_model=related_model, - related_id_field=related_id_field, - ids=ids, - ) - - related_objects = (await self.session.execute(stmt)).scalars().all() - object_ids = [getattr(obj, related_id_field) for obj in related_objects] + id_field = getattr(related_model, related_id_field) + id_values = [self.prepare_id_value(id_field, id_) for id_ in ids] - not_found_ids = ids - if object_ids: - not_found_ids = set(ids).difference(object_ids) + query = self._base_sql.query( + model=related_model, + filters=[id_field.in_(id_values)], + ) + related_objects = await self._base_sql.all( + session=self.session, + stmt=query, + ) - if not_found_ids: - msg = f"Objects for {related_model.__name__} with ids: {not_found_ids} not found" - raise RelatedObjectNotFound(detail=msg, pointer="/data") + objects = {f"{getattr(obj, related_id_field)}" for obj in related_objects} + if not_found_ids := set(ids).difference(objects): + msg = f"Objects for {related_model.__name__} with ids: {list(not_found_ids)} not found" + raise RelatedObjectNotFound( + detail=msg, + pointer="/data", + ) return list(related_objects) - def 
filter_query(self, query: "Select", filter_info: Optional[list]) -> "Select": - """ - Filter query according to jsonapi 1.0. + def get_filter_expressions( + self, + qs: QueryStringManager, + ) -> Optional[list[BinaryExpression]]: + if qs.filters: + return [ + build_filter_expressions( + filter_item={"and": qs.filters}, + target_model=self.model, + target_schema=self.schema, + entrypoint_resource_type=self.resource_type, + ), + ] - :param query: sqlalchemy query to sort. - :param filter_info: filter information. - :return: the sorted query. - """ - if filter_info: - filters, joins = create_filters_and_joins( - model=self.model, - filter_info=filter_info, - schema=self.schema, + def get_sort_expressions( + self, + qs: QueryStringManager, + ) -> Optional[list]: + if qs.sorts: + return build_sort_expressions( + sort_items=qs.sorts, + target_model=self.model, + target_schema=self.schema, + entrypoint_resource_type=self.resource_type, ) - for i_join in joins: - query = query.join(*i_join) - - query = query.where(filters) - - return query - - def sort_query(self, query: "Select", sort_info: list) -> "Select": - """ - Sort query according to jsonapi 1.0. - - :param query: sqlalchemy query to sort. - :param sort_info: sort information. - :return: the sorted query. - """ - if sort_info: - sorts, joins = create_sorts(self.model, sort_info, self.schema) - for i_join in joins: - query = query.join(*i_join) - for i_sort in sorts: - query = query.order_by(i_sort) - return query - - def paginate_query(self, query: "Select", paginate_info: PaginationQueryStringManager) -> "Select": - """ - Paginate query according to jsonapi 1.0. - - :param query: sqlalchemy queryset. - :param paginate_info: pagination information. 
- :return: the paginated query - """ - if paginate_info.size == 0 or paginate_info.size is None: - return query - - query = query.limit(paginate_info.size) - if paginate_info.number: - query = query.offset((paginate_info.number - 1) * paginate_info.size) - - return query - - def eagerload_includes(self, query: "Select", qs: QueryStringManager) -> "Select": + def eagerload_includes( + self, + qs: QueryStringManager, + ): """ Use eagerload feature of sqlalchemy to optimize data retrieval for include querystring parameter. - :param query: sqlalchemy queryset. :param qs: a querystring manager to retrieve information from url. :return: the query with includes eagerloaded. """ + relation_join_objects = [] for include in qs.include: relation_join_object = None - current_schema = self.schema current_model = self.model - for related_field_name in include.split(SPLIT_REL): - try: - field_name_to_load = get_model_field(current_schema, related_field_name) - except Exception as e: - raise InvalidInclude(str(e)) + current_resource_type = self.resource_type - field_to_load: InstrumentedAttribute = getattr(current_model, field_name_to_load) + for related_field_name in include.split("."): + relationship_info = schemas_storage.get_relationship_info( + resource_type=current_resource_type, + operation_type="get", + field_name=related_field_name, + ) + if relationship_info is None: + msg = ( + f"Not found relationship {related_field_name!r} from include {include!r} " + f"for resource_type {current_resource_type!r}." 
+ ) + raise InvalidInclude(msg) + + field_to_load: InstrumentedAttribute = getattr(current_model, related_field_name) is_many = field_to_load.property.uselist if relation_join_object is None: relation_join_object = selectinload(field_to_load) if is_many else joinedload(field_to_load) @@ -772,45 +706,25 @@ def eagerload_includes(self, query: "Select", qs: QueryStringManager) -> "Select else: relation_join_object = relation_join_object.joinedload(field_to_load) - current_schema = get_related_schema(current_schema, related_field_name) + current_resource_type = relationship_info.resource_type + current_model = models_storage.get_model(current_resource_type) - # the first entity is Mapper, - # the second entity is DeclarativeMeta - current_model = field_to_load.property.entity.entity + relation_join_object = relation_join_object.options( + *self.get_fields_options( + resource_type=current_resource_type, + qs=qs, + ), + ) - query = query.options(relation_join_object) + relation_join_objects.append(relation_join_object) - return query + return relation_join_objects - def retrieve_object_query( + async def before_create_object( self, + model_kwargs: dict, view_kwargs: dict, - filter_field: InstrumentedAttribute, - filter_value: Any, - ) -> "Select": - """ - Build query to retrieve object. - - :param view_kwargs: kwargs from the resource view - :param filter_field: the field to filter on - :param filter_value: the value to filter with - :return sqlalchemy query: a query from sqlalchemy - """ - value = self.prepare_id_value(filter_field, filter_value) - query: "Select" = self.query(view_kwargs).where(filter_field == value) - return query - - def query(self, view_kwargs: dict) -> "Select": - """ - Construct the base query to retrieve wanted data. 
- - :param view_kwargs: kwargs from the resource view - """ - if self._query is not None: - return self._query - return select(self.model) - - async def before_create_object(self, model_kwargs: dict, view_kwargs: dict): + ): """ Provide additional data before object creation. @@ -818,10 +732,15 @@ async def before_create_object(self, model_kwargs: dict, view_kwargs: dict): :param view_kwargs: kwargs from the resource view. """ if (id_value := model_kwargs.get("id")) and self.auto_convert_id_to_column_type: - model_field = self.get_object_id_field() + model_field = models_storage.get_object_id_field(resource_type=self.resource_type) model_kwargs.update(id=self.prepare_id_value(model_field, id_value)) - async def after_create_object(self, obj: TypeModel, model_kwargs: dict, view_kwargs: dict): + async def after_create_object( + self, + obj: TypeModel, + model_kwargs: dict, + view_kwargs: dict, + ): """ Provide additional data after object creation. @@ -829,35 +748,47 @@ async def after_create_object(self, obj: TypeModel, model_kwargs: dict, view_kwa :param model_kwargs: the data validated by pydantic. :param view_kwargs: kwargs from the resource view. """ - pass - async def before_get_object(self, view_kwargs: dict): + async def before_get_object( + self, + view_kwargs: dict, + ): """ Make work before to retrieve an object. :param view_kwargs: kwargs from the resource view. """ - pass - async def after_get_object(self, obj: Any, view_kwargs: dict): + async def after_get_object( + self, + obj: Any, + view_kwargs: dict, + ): """ Make work after to retrieve an object. :param obj: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass - async def before_get_collection(self, qs: QueryStringManager, view_kwargs: dict): + async def before_get_collection( + self, + qs: QueryStringManager, + view_kwargs: dict, + ): """ Make work before to retrieve a collection of objects. :param qs: a querystring manager to retrieve information from url. 
:param view_kwargs: kwargs from the resource view. """ - pass - async def after_get_collection(self, collection: Iterable, qs: QueryStringManager, view_kwargs: dict): + async def after_get_collection( + self, + collection: Iterable, + qs: QueryStringManager, + view_kwargs: dict, + ): """ Make work after to retrieve a collection of objects. @@ -867,7 +798,12 @@ async def after_get_collection(self, collection: Iterable, qs: QueryStringManage """ return collection - async def before_update_object(self, obj: Any, model_kwargs: dict, view_kwargs: dict): + async def before_update_object( + self, + obj: Any, + model_kwargs: dict, + view_kwargs: dict, + ): """ Make checks or provide additional data before update object. @@ -875,9 +811,13 @@ async def before_update_object(self, obj: Any, model_kwargs: dict, view_kwargs: :param model_kwargs: the data validated by schemas. :param view_kwargs: kwargs from the resource view. """ - pass - async def after_update_object(self, obj: Any, model_kwargs: dict, view_kwargs: dict): + async def after_update_object( + self, + obj: Any, + model_kwargs: dict, + view_kwargs: dict, + ): """ Make work after update object. @@ -885,43 +825,54 @@ async def after_update_object(self, obj: Any, model_kwargs: dict, view_kwargs: d :param model_kwargs: the data validated by schemas. :param view_kwargs: kwargs from the resource view. """ - pass - async def before_delete_object(self, obj: TypeModel, view_kwargs: dict): + async def before_delete_object( + self, + obj: TypeModel, + view_kwargs: dict, + ): """ Make checks before delete object. :param obj: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass - async def after_delete_object(self, obj: TypeModel, view_kwargs: dict): + async def after_delete_object( + self, + obj: TypeModel, + view_kwargs: dict, + ): """ Make work after delete object. :param obj: an object from data layer. :param view_kwargs: kwargs from the resource view. 
""" - pass - async def before_delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def before_delete_objects( + self, + objects: list[TypeModel], + view_kwargs: dict, + ): """ Make checks before deleting objects. :param objects: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass - async def after_delete_objects(self, objects: List[TypeModel], view_kwargs: dict): + async def after_delete_objects( + self, + objects: list[TypeModel], + view_kwargs: dict, + ): """ Any actions after deleting objects. :param objects: an object from data layer. :param view_kwargs: kwargs from the resource view. """ - pass async def before_create_relationship( self, @@ -939,7 +890,6 @@ async def before_create_relationship( :param view_kwargs: kwargs from the resource view. :return boolean: True if relationship have changed else False. """ - pass async def after_create_relationship( self, @@ -961,7 +911,6 @@ async def after_create_relationship( :param view_kwargs: kwargs from the resource view. :return boolean: True if relationship have changed else False. """ - pass async def before_get_relationship( self, @@ -979,7 +928,6 @@ async def before_get_relationship( :param dict view_kwargs: kwargs from the resource view. :return tuple: the object and related object(s). """ - pass async def after_get_relationship( self, @@ -1001,7 +949,6 @@ async def after_get_relationship( :param view_kwargs: kwargs from the resource view. :return tuple: the object and related object(s). """ - pass async def before_update_relationship( self, @@ -1019,7 +966,6 @@ async def before_update_relationship( :param view_kwargs: kwargs from the resource view. :return boolean: True if relationship have changed else False. """ - pass async def after_update_relationship( self, @@ -1041,7 +987,6 @@ async def after_update_relationship( :param view_kwargs: kwargs from the resource view. :return boolean: True if relationship have changed else False. 
""" - pass async def before_delete_relationship( self, @@ -1058,7 +1003,6 @@ async def before_delete_relationship( :param related_id_field: the identifier field of the related model. :param view_kwargs: kwargs from the resource view. """ - pass async def after_delete_relationship( self, @@ -1079,4 +1023,3 @@ async def after_delete_relationship( :param related_id_field: the identifier field of the related model. :param view_kwargs: kwargs from the resource view. """ - pass diff --git a/fastapi_jsonapi/data_layers/sqla/query_building.py b/fastapi_jsonapi/data_layers/sqla/query_building.py new file mode 100644 index 00000000..83bd6b53 --- /dev/null +++ b/fastapi_jsonapi/data_layers/sqla/query_building.py @@ -0,0 +1,493 @@ +"""Helper to create sqlalchemy filters according to filter querystring parameter""" + +import logging +from collections import defaultdict +from typing import Any, Optional, Type, Union, get_args + +from pydantic import BaseModel, ConfigDict, PydanticSchemaGenerationError, TypeAdapter + +# noinspection PyProtectedMember +from pydantic._internal._typing_extra import is_none_type + +# noinspection PyProtectedMember +from pydantic.fields import FieldInfo +from sqlalchemy import and_, false, not_, or_ +from sqlalchemy.orm import aliased +from sqlalchemy.orm.attributes import InstrumentedAttribute +from sqlalchemy.orm.util import AliasedClass +from sqlalchemy.sql.elements import BinaryExpression + +from fastapi_jsonapi.common import search_custom_filter_sql, search_custom_sort_sql +from fastapi_jsonapi.data_typing import TypeModel, TypeSchema +from fastapi_jsonapi.exceptions import InvalidField, InvalidFilters, InvalidType +from fastapi_jsonapi.exceptions.json_api import HTTPException, InternalServerError +from fastapi_jsonapi.schema import ( + JSONAPISchemaIntrospectionError, + get_model_field, + get_relationship_fields_names, + get_schema_from_field_annotation, +) +from fastapi_jsonapi.types_metadata import CustomFilterSQL, CustomSortSQL + +log = 
logging.getLogger(__name__) + +cast_failed = object() + + +class RelationshipInfo(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) + + target_schema: Type[TypeSchema] + model: Type[TypeModel] + aliased_model: AliasedClass + join_column: InstrumentedAttribute + + +class RelationshipInfoStorage: + def __init__(self): + self._data = defaultdict(dict) + + def has_info(self, resource_type: str, path: str) -> bool: + return path in self._data[resource_type] + + def get_info(self, resource_type: str, path: str) -> RelationshipInfo: + try: + return self._data[resource_type][path] + except KeyError: + raise InternalServerError( + detail=( + f"Error of loading relationship info from storage for resource_type {resource_type!r}. " + f"Target relationship has path {path!r}." + ), + parameter="filter", + ) + + def set_info(self, resource_type: str, path: str, info: RelationshipInfo): + self._data[resource_type][path] = info + + +relationships_info_storage = RelationshipInfoStorage() + + +def cast_value_with_schema(field_types: list[Type], value: Any) -> tuple[Any, list[str]]: + errors: list[str] = [] + casted_value = cast_failed + + for field_type in field_types: + try: + # don't allow arbitrary types, we don't know their behaviour + cast_type = TypeAdapter(field_type).validate_python + except PydanticSchemaGenerationError: + cast_type = field_type + + try: + if isinstance(value, list): # noqa: SIM108 + casted_value = [cast_type(item) for item in value] + else: + casted_value = cast_type(value) + except (TypeError, ValueError) as ex: + errors.append(f"{ex}") + else: + return casted_value, errors + + return casted_value, errors + + +def build_filter_expression( + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + operator: str, + value: Any, +) -> BinaryExpression: + """ + Builds sqlalchemy filter expression, like YourModel.some_field == value + + Custom sqlalchemy filtering logic can be created in a schemas field for any operator + To 
implement a new filtering logic (override existing or create a new one) + create a method inside a field following this pattern: `__sql_filter_` + + :param schema_field: schemas field instance + :param model_column: sqlalchemy column instance + :param operator: your operator, for example: "eq", "in", "ilike_str_array", ... + :param value: filtering value + + """ + fields = [schema_field] + + can_be_none = False + for field in fields: + if args := get_args(field.annotation): + for arg in args: + # None is probably only on the top level + if is_none_type(arg): + can_be_none = True + break + + if value is None: + if can_be_none: + return getattr(model_column, operator)(value) + + raise InvalidFilters(detail=f"The field `{model_column.key}` can't be null.") + + casted_value, errors = cast_value_with_schema( + field_types=[i.annotation for i in fields], + value=value, + ) + if casted_value is cast_failed: + raise InvalidType( + detail=f"Can't cast filter value `{value}` to arbitrary type.", + errors=[HTTPException(status_code=InvalidType.status_code, detail=f"{err}") for err in errors], + ) + + if casted_value is None and not can_be_none: + raise InvalidType( + detail=", ".join(errors), + pointer=schema_field.title, + ) + + return getattr(model_column, operator)(casted_value) + + +def is_filtering_terminal_node(filter_item: dict) -> bool: + """ + If node shape is: + + { + "name: ..., + "op: ..., + "val: ..., + } + """ + terminal_node_keys = {"name", "op", "val"} + return set(filter_item.keys()) == terminal_node_keys + + +def is_relationship_filter(name: str) -> bool: + return "." in name + + +def gather_relationship_paths(item: Union[dict, list[dict]]) -> set[str]: + """ + Extracts relationship paths from query filter + """ + names = set() + + if isinstance(item, list): + for sub_item in item: + names.update(gather_relationship_paths(sub_item)) + + elif field_name := (item.get("name") or item.get("field")): + if "." 
not in field_name: + return set() + + return {".".join(field_name.split(".")[:-1])} + + else: + for sub_item in item.values(): + names.update(gather_relationship_paths(sub_item)) + + return names + + +def get_model_column( + model: Type[TypeModel], + schema: Type[TypeSchema], + field_name: str, +) -> InstrumentedAttribute: + try: + model_field = get_model_field(schema, field_name) + except JSONAPISchemaIntrospectionError as e: + msg = f"{e}" + raise InvalidFilters(msg) + + try: + return getattr(model, model_field) + except AttributeError: + msg = f"{model.__name__} has no attribute {model_field}" + raise InvalidFilters(msg) + + +def get_operator(model_column: InstrumentedAttribute, operator_name: str) -> str: + """ + Get the function operator from his name + + :return callable: a callable to make operation on a column + """ + operators = ( + f"__{operator_name}__", + f"{operator_name}_", + operator_name, + ) + + for op in operators: + if hasattr(model_column, op): + return op + + msg = f"Field {model_column.key!r} has no operator {operator_name!r}" + raise InvalidFilters(msg) + + +def gather_relationships_info( + model: Type[TypeModel], + schema: Type[TypeSchema], + entrypoint_resource_type: str, + relationship_path: list[str], + target_relationship_idx: int = 0, + prev_aliased_model: Optional[Any] = None, +) -> dict[str, RelationshipInfo]: + is_last_relationship = target_relationship_idx == len(relationship_path) - 1 + target_relationship_path = ".".join( + relationship_path[: target_relationship_idx + 1], + ) + target_relationship_name = relationship_path[target_relationship_idx] + + relationships_names = get_relationship_fields_names(schema) + if target_relationship_name not in relationships_names: + msg = f"There is no relationship {target_relationship_name!r} defined in schema {schema.__name__!r}" + raise InvalidField(msg) + + target_schema = get_schema_from_field_annotation(schema.model_fields[target_relationship_name]) + target_model = getattr(model, 
target_relationship_name).property.mapper.class_ + + if prev_aliased_model: + join_column = get_model_column( + model=prev_aliased_model, + schema=schema, + field_name=target_relationship_name, + ) + else: + join_column = get_model_column( + model, + schema, + target_relationship_name, + ) + + aliased_model = aliased(target_model) + relationships_info_storage.set_info( + resource_type=entrypoint_resource_type, + path=target_relationship_path, + info=RelationshipInfo( + target_schema=target_schema, + model=target_model, + aliased_model=aliased_model, + join_column=join_column, + ), + ) + + if not is_last_relationship: + return gather_relationships_info( + model=target_model, + schema=target_schema, + entrypoint_resource_type=entrypoint_resource_type, + relationship_path=relationship_path, + target_relationship_idx=target_relationship_idx + 1, + prev_aliased_model=aliased_model, + ) + + +def gather_relationships( + entrypoint_resource_type: str, + entrypoint_model: Type[TypeModel], + schema: Type[TypeSchema], + relationship_paths: set[str], +) -> set[str]: + for relationship_path in relationship_paths: + if relationships_info_storage.has_info(entrypoint_resource_type, relationship_path): + continue + + gather_relationships_info( + model=entrypoint_model, + schema=schema, + entrypoint_resource_type=entrypoint_resource_type, + relationship_path=relationship_path.split("."), + ) + + return relationship_paths + + +def prepare_relationships_info( + model: Type[TypeModel], + schema: Type[TypeSchema], + resource_type: str, + filter_info: list, + sorting_info: list, +) -> set[str]: + """ + Return set with request relationship paths in dot separated format. + + Gathers information about all relationships involved to request and save them + data for skip extra computations for the next time. 
+ + For the filter like this: + filter_info = [ + {"field": "foo.bar.field_name", "op": "eq", "val": ""}, + {"field": "baz.field_name", "op": "eq", "val": ""}, + ] + + It returns: + ("foo.bar", "baz") + """ + relationship_paths = gather_relationship_paths(filter_info) + relationship_paths.update(gather_relationship_paths(sorting_info)) + return gather_relationships( + entrypoint_resource_type=resource_type, + entrypoint_model=model, + schema=schema, + relationship_paths=relationship_paths, + ) + + +def build_terminal_node_filter_expressions( + filter_item: dict, + target_schema: Type[TypeSchema], + target_model: Type[TypeModel], + entrypoint_resource_type: str, +): + name: str = filter_item["name"] + if is_relationship_filter(name): + *relationship_path, field_name = name.split(".") + relationship_info: RelationshipInfo = relationships_info_storage.get_info( + resource_type=entrypoint_resource_type, + path=".".join(relationship_path), + ) + model_column = get_model_column( + model=relationship_info.aliased_model, + schema=relationship_info.target_schema, + field_name=field_name, + ) + target_schema = relationship_info.target_schema + else: + field_name = name + model_column = get_model_column( + model=target_model, + schema=target_schema, + field_name=field_name, + ) + + schema_field = target_schema.model_fields[field_name] + + filter_operator = filter_item["op"] + custom_filter_sql: Optional[CustomFilterSQL] = None + for filter_sql in search_custom_filter_sql.iterate(field=schema_field): + if filter_sql.op == filter_operator: + custom_filter_sql = filter_sql + break + + if custom_filter_sql is None: + return build_filter_expression( + schema_field=schema_field, + model_column=model_column, + operator=get_operator( + model_column=model_column, + operator_name=filter_operator, + ), + value=filter_item["val"], + ) + + return custom_filter_sql.get_expression( + schema_field=schema_field, + model_column=model_column, + value=filter_item["val"], + 
operator=filter_operator, + ) + + +def build_filter_expressions( + filter_item: dict, + target_schema: Type[TypeSchema], + target_model: Type[TypeModel], + entrypoint_resource_type: str, +) -> BinaryExpression: + """ + Return sqla expressions. + + Builds sqlalchemy expression which can be use + in where condition: query(Model).where(build_filter_expressions(...)) + """ + if is_filtering_terminal_node(filter_item): + return build_terminal_node_filter_expressions( + filter_item=filter_item, + target_schema=target_schema, + target_model=target_model, + entrypoint_resource_type=entrypoint_resource_type, + ) + + if not isinstance(filter_item, dict): + log.warning("Could not build filtering expressions %s", locals()) + # dirty. refactor. + return not_(false()) + + sqla_logic_operators = { + "or": or_, + "and": and_, + "not": not_, + } + + if len(logic_operators := set(filter_item.keys())) > 1: + msg = ( + f"In each logic node expected one of operators: {set(sqla_logic_operators.keys())} " + f"but got {len(logic_operators)}: {logic_operators}" + ) + raise InvalidFilters(msg) + + if (logic_operator := logic_operators.pop()) not in set(sqla_logic_operators.keys()): + msg = f"Not found logic operator {logic_operator} expected one of {set(sqla_logic_operators.keys())}" + raise InvalidFilters(msg) + + op = sqla_logic_operators[logic_operator] + + if logic_operator == "not": + return op( + build_filter_expressions( + filter_item=filter_item[logic_operator], + target_schema=target_schema, + target_model=target_model, + entrypoint_resource_type=entrypoint_resource_type, + ), + ) + + return op( + *( + build_filter_expressions( + filter_item=filter_sub_item, + target_schema=target_schema, + target_model=target_model, + entrypoint_resource_type=entrypoint_resource_type, + ) + for filter_sub_item in filter_item[logic_operator] + ), + ) + + +def build_sort_expressions( + sort_items: list[dict], + target_schema: Type[TypeSchema], + target_model: Type[TypeModel], + 
entrypoint_resource_type: str, +): + expressions = [] + for item in sort_items: + schema = target_schema + model, field_name = target_model, item["field"] + + if relationship_path := item.get("rel_path"): + field_name = item["field"].split(".")[-1] + info = relationships_info_storage.get_info( + resource_type=entrypoint_resource_type, + path=relationship_path, + ) + model = info.aliased_model + schema = info.target_schema + + schema_field = schema.model_fields[field_name] + custom_sort_sql: Optional[CustomSortSQL] = search_custom_sort_sql.first(field=schema_field) + + join_column = getattr(model, field_name) + if custom_sort_sql is not None: + join_column = custom_sort_sql.get_expression(schema_field, join_column) + + expressions.append(getattr(join_column, item["order"])()) + + return expressions diff --git a/fastapi_jsonapi/data_layers/tortoise_orm.py b/fastapi_jsonapi/data_layers/tortoise_orm.py deleted file mode 100644 index def07871..00000000 --- a/fastapi_jsonapi/data_layers/tortoise_orm.py +++ /dev/null @@ -1,500 +0,0 @@ -"""This module is a CRUD interface between resource managers and the Tortoise ORM""" - -from typing import Any, Iterable, Optional, Tuple, Type - -from tortoise.queryset import QuerySet - -from fastapi_jsonapi.data_layers.base import BaseDataLayer -from fastapi_jsonapi.data_layers.filtering.tortoise_orm import FilterTortoiseORM -from fastapi_jsonapi.data_layers.sorting.tortoise_orm import SortTortoiseORM -from fastapi_jsonapi.data_typing import TypeModel, TypeSchema -from fastapi_jsonapi.querystring import PaginationQueryStringManager, QueryStringManager -from fastapi_jsonapi.schema import BaseJSONAPIItemInSchema - - -class TortoiseDataLayer(BaseDataLayer): - """Tortoise data layer""" - - def __init__( - self, - schema: Type[TypeSchema], - model: Type[TypeModel], - disable_collection_count: bool = False, - default_collection_count: int = -1, - id_name_field: Optional[str] = None, - url_id_field: str = "id", - query: Optional[QuerySet] = 
None, - **kwargs: Any, - ): - """ - Initialize an instance of TortoiseDataLayer. - - :param schema: - :param model: Tortoise - :param disable_collection_count: - :param default_collection_count: - :param id_name_field: Первичный ключ модели - :param url_id_field: название переменной из FastAPI, в которой придёт значение первичного ключа.. - :param kwargs: initialization parameters of an TortoiseDataLayer instance - """ - super().__init__( - schema=schema, - model=model, - url_id_field=url_id_field, - id_name_field=id_name_field, - disable_collection_count=disable_collection_count, - default_collection_count=default_collection_count, - **kwargs, - ) - self.query_: QuerySet = query or self.model.filter() - - async def create_object(self, data_create: BaseJSONAPIItemInSchema, view_kwargs: dict) -> TypeModel: - """ - Create an object - - :param data_create: validated data - :param view_kwargs: kwargs from the resource view - :return DeclarativeMeta: an object - """ - - async def get_object(self, view_kwargs: dict, qs: Optional[QueryStringManager] = None) -> TypeModel: - """ - Retrieve an object - - :param view_kwargs: kwargs from the resource view - :param qs: - :return DeclarativeMeta: an object - """ - - async def get_collection_count(self, query: QuerySet) -> int: - """ - Prepare query to fetch collection - - :param query: Tortoise query - :param qs: QueryString - :param view_kwargs: view kwargs - :return: - """ - if self.disable_collection_count is True: - return self.default_collection_count - - return await query.count() - - async def get_collection(self, qs: QueryStringManager, view_kwargs: Optional[dict] = None) -> Tuple[int, list]: - """ - Retrieve a collection of objects through Tortoise. - - :param qs: a querystring manager to retrieve information from url. - :param view_kwargs: kwargs from the resource view. - :return: the number of object and the list of objects. 
- """ - view_kwargs = view_kwargs or {} - await self.before_get_collection(qs, view_kwargs) - - query = self.query(view_kwargs) - - if filters_qs := qs.filters: - filters = FilterTortoiseORM(model=self.model).filter_converter(schema=self.schema, filters=filters_qs) - for i_filter in filters: - query = query.filter(**{i_filter[0]: i_filter[1]}) - - if sorts := qs.get_sorts(schema=self.schema): - query = SortTortoiseORM.sort(query=query, query_params_sorting=sorts) - - objects_count = await self.get_collection_count(query) - - query = self.paginate_query(query, qs.pagination) - - collection: Iterable = await query.all() - - collection = await self.after_get_collection(collection, qs, view_kwargs) - - return objects_count, list(collection) - - async def update_object( - self, - obj: TypeModel, - data_update: BaseJSONAPIItemInSchema, - view_kwargs: dict, - ) -> bool: - """ - Update an object through Tortoise. - - :param obj: an object from Tortoise. - :param data: the data validated by schemas. - :param view_kwargs: kwargs from the resource view. - :return: True if object have changed else False. - """ - - async def delete_object(self, obj: TypeModel, view_kwargs: dict): - """ - Delete an object through Tortoise. - - :param obj: an item from Tortoise. - :param view_kwargs: kwargs from the resource view. - """ - - async def create_relationship( - self, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ) -> bool: - """ - Create a relationship. - - :param json_data: the request params. - :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - :return: True if relationship have changed else False. - """ - - async def get_relationship( - self, - relationship_field: str, - related_type_: str, - related_id_field: str, - view_kwargs: dict, - ) -> Tuple[Any, Any]: - """ - Get a relationship. 
- - :param relationship_field: the model attribute used for relationship. - :param related_type_: the related resource type. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - :return: the object and related object(s). - """ - - async def update_relationship( - self, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ) -> bool: - """ - Update a relationship - - :param json_data: the request params. - :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - :return: True if relationship have changed else False. - """ - - async def delete_relationship( - self, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Delete a relationship. - - :param json_data: the request params. - :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - """ - - async def get_related_object( - self, - related_model: Type[TypeModel], - related_id_field: str, - id_value: str, - ) -> TypeModel: - """ - Get related object. - - :param related_model: Tortoise model - :param related_id_field: the identifier field of the related model - :param id_value: related object id value - :return: a related object - """ - - def paginate_query(self, query: QuerySet, paginate_info: PaginationQueryStringManager) -> QuerySet: - """ - Paginate query according to jsonapi 1.0. - - :param query: Tortoise queryset. - :param paginate_info: pagination information. 
- :return: the paginated query - """ - if paginate_info.size == 0: - return query - - query = query.limit(paginate_info.size) - if paginate_info.number: - query = query.offset((paginate_info.number - 1) * paginate_info.size) - - return query - - def eagerload_includes(self, query: QuerySet, qs: QueryStringManager) -> QuerySet: - """ - Use eagerload feature of Tortoise to optimize data retrieval for include querystring parameter. - - :param query: Tortoise queryset. - :param qs: a querystring manager to retrieve information from url. - :return: the query with includes eagerloaded. - """ - - def retrieve_object_query( - self, - view_kwargs: dict, - filter_field: Any, - filter_value: Any, - ) -> QuerySet: - """ - Build query to retrieve object. - - :param view_kwargs: kwargs from the resource view - :param filter_field: the field to filter on - :param filter_value: the value to filter with - :return Tortoise query: a query from Tortoise - """ - - def query(self, view_kwargs: dict) -> QuerySet: - """ - Construct the base query to retrieve wanted data. - - :param view_kwargs: kwargs from the resource view - """ - return self.query_ - - async def before_create_object(self, data: dict, view_kwargs: dict): - """ - Provide additional data before object creation. - - :param data: the data validated by pydantic. - :param view_kwargs: kwargs from the resource view. - """ - - async def after_create_object(self, obj: Any, data: dict, view_kwargs: dict): - """ - Provide additional data after object creation. - - :param obj: an object from data layer. - :param data: the data validated by pydantic. - :param view_kwargs: kwargs from the resource view. - """ - - async def before_get_object(self, view_kwargs: dict): - """ - Make work before to retrieve an object. - - :param view_kwargs: kwargs from the resource view. - """ - - async def after_get_object(self, obj: Any, view_kwargs: dict): - """ - Make work after to retrieve an object. - - :param obj: an object from data layer. 
- :param view_kwargs: kwargs from the resource view. - """ - - async def before_get_collection(self, qs: QueryStringManager, view_kwargs: dict): - """ - Make work before to retrieve a collection of objects. - - :param qs: a querystring manager to retrieve information from url. - :param view_kwargs: kwargs from the resource view. - """ - - async def after_get_collection(self, collection: Iterable, qs: QueryStringManager, view_kwargs: dict) -> Iterable: - """ - Make work after to retrieve a collection of objects. - - :param collection: the collection of objects. - :param qs: a querystring manager to retrieve information from url. - :param view_kwargs: kwargs from the resource view. - """ - return collection - - async def before_update_object(self, obj: Any, data: dict, view_kwargs: dict): - """ - Make checks or provide additional data before update object. - - :param obj: an object from data layer. - :param data: the data validated by schemas. - :param view_kwargs: kwargs from the resource view. - """ - - async def after_update_object(self, obj: Any, data: dict, view_kwargs: dict): - """ - Make work after update object. - - :param obj: an object from data layer. - :param data: the data validated by schemas. - :param view_kwargs: kwargs from the resource view. - """ - - async def before_delete_object(self, obj: Any, view_kwargs: dict): - """ - Make checks before delete object. - - :param obj: an object from data layer. - :param view_kwargs: kwargs from the resource view. - """ - - async def after_delete_object(self, obj: Any, view_kwargs: dict): - """ - Make work after delete object. - - :param obj: an object from data layer. - :param view_kwargs: kwargs from the resource view. - """ - - async def before_create_relationship( - self, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Make work before to create a relationship. - - :param json_data: the request params. 
- :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - :return boolean: True if relationship have changed else False. - """ - - async def after_create_relationship( - self, - obj: Any, - updated: bool, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Make work after to create a relationship. - - :param obj: an object from data layer. - :param updated: True if object was updated else False. - :param json_data: the request params. - :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - :return boolean: True if relationship have changed else False. - """ - - async def before_get_relationship( - self, - relationship_field: str, - related_type_: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Make work before to get information about a relationship. - - :param str relationship_field: the model attribute used for relationship. - :param str related_type_: the related resource type. - :param str related_id_field: the identifier field of the related model. - :param dict view_kwargs: kwargs from the resource view. - :return tuple: the object and related object(s). - """ - - async def after_get_relationship( - self, - obj: Any, - related_objects: Iterable, - relationship_field: str, - related_type_: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Make work after to get information about a relationship. - - :param obj: an object from data layer. - :param related_objects: related objects of the object. - :param relationship_field: the model attribute used for relationship. - :param related_type_: the related resource type. - :param related_id_field: the identifier field of the related model. 
- :param view_kwargs: kwargs from the resource view. - :return tuple: the object and related object(s). - """ - - async def before_update_relationship( - self, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Make work before to update a relationship. - - :param json_data: the request params. - :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - :return boolean: True if relationship have changed else False. - """ - - async def after_update_relationship( - self, - obj: Any, - updated: bool, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Make work after to update a relationship. - - :param obj: an object from data layer. - :param updated: True if object was updated else False. - :param json_data: the request params. - :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - :return boolean: True if relationship have changed else False. - """ - - async def before_delete_relationship( - self, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Make work before to delete a relationship. - - :param json_data: the request params. - :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - """ - - async def after_delete_relationship( - self, - obj: Any, - updated: bool, - json_data: dict, - relationship_field: str, - related_id_field: str, - view_kwargs: dict, - ): - """ - Make work after to delete a relationship. - - :param obj: an object from data layer. 
- :param updated: True if object was updated else False. - :param json_data: the request params. - :param relationship_field: the model attribute used for relationship. - :param related_id_field: the identifier field of the related model. - :param view_kwargs: kwargs from the resource view. - """ diff --git a/fastapi_jsonapi/exceptions/__init__.py b/fastapi_jsonapi/exceptions/__init__.py index 7f4ade40..0598fd60 100644 --- a/fastapi_jsonapi/exceptions/__init__.py +++ b/fastapi_jsonapi/exceptions/__init__.py @@ -4,7 +4,6 @@ ExceptionResponseSchema, ExceptionSchema, ExceptionSourceSchema, - QueryError, ) from .json_api import ( BadRequest, @@ -22,20 +21,19 @@ ) __all__ = [ + "BadRequest", "ExceptionResponseSchema", "ExceptionSchema", "ExceptionSourceSchema", - "BadRequest", + "Forbidden", + "HTTPException", + "InternalServerError", "InvalidField", "InvalidFilters", "InvalidInclude", - "InvalidType", - "InternalServerError", - "RelationNotFound", "InvalidSort", - "QueryError", - "HTTPException", - "RelatedObjectNotFound", + "InvalidType", "ObjectNotFound", - "Forbidden", + "RelatedObjectNotFound", + "RelationNotFound", ] diff --git a/fastapi_jsonapi/exceptions/base.py b/fastapi_jsonapi/exceptions/base.py index 12ebb197..43eb1ac1 100644 --- a/fastapi_jsonapi/exceptions/base.py +++ b/fastapi_jsonapi/exceptions/base.py @@ -1,11 +1,6 @@ """Collection of useful http error for the Api.""" -from typing import ( - Any, - Dict, - List, - Optional, -) +from typing import Any, Optional from pydantic import Field from pydantic.main import BaseModel @@ -30,9 +25,5 @@ class ExceptionSchema(BaseModel): class ExceptionResponseSchema(BaseModel): """Exception response schema.""" - errors: List[ExceptionSchema] - jsonapi: Dict[str, str] = Field(default={"version": "1.0"}) - - -class QueryError(Exception): - """Query build error.""" + errors: list[ExceptionSchema] + jsonapi: dict[str, str] = Field(default={"version": "1.0"}) diff --git a/fastapi_jsonapi/exceptions/handlers.py 
b/fastapi_jsonapi/exceptions/handlers.py index 043af0e5..239cb48c 100644 --- a/fastapi_jsonapi/exceptions/handlers.py +++ b/fastapi_jsonapi/exceptions/handlers.py @@ -1,5 +1,5 @@ from fastapi import Request -from fastapi.responses import JSONResponse +from fastapi.responses import ORJSONResponse as JSONResponse from fastapi_jsonapi.exceptions import HTTPException diff --git a/fastapi_jsonapi/exceptions/json_api.py b/fastapi_jsonapi/exceptions/json_api.py index 0fb548ba..7200a473 100644 --- a/fastapi_jsonapi/exceptions/json_api.py +++ b/fastapi_jsonapi/exceptions/json_api.py @@ -1,12 +1,7 @@ """JSON API exceptions schemas.""" from http import HTTPStatus -from typing import ( - Any, - List, - Optional, - Union, -) +from typing import Any, Optional, Union from fastapi import HTTPException as FastApiHttpException from fastapi import status @@ -26,7 +21,7 @@ def __init__( parameter: str = "", title: Optional[str] = None, status_code: Optional[int] = None, - errors: Optional[List["HTTPException"]] = None, + errors: Optional[list["HTTPException"]] = None, meta: Optional[dict[str, Any]] = None, ): """ @@ -95,16 +90,6 @@ class InternalServerError(HTTPException): status_code = status.HTTP_500_INTERNAL_SERVER_ERROR -class UnsupportedFeatureORM(InternalServerError): - """ - Init for invalid ORM exception. - - Unsupported feature ORM exception class customized for json_api exceptions. - """ - - title = "Unsupported ORM" - - class BadRequest(HTTPException): """ Bad request HTTP exception class customized for json_api exceptions. 
diff --git a/fastapi_jsonapi/jsonapi_typing.py b/fastapi_jsonapi/jsonapi_typing.py deleted file mode 100644 index f29dc1f1..00000000 --- a/fastapi_jsonapi/jsonapi_typing.py +++ /dev/null @@ -1,12 +0,0 @@ -"""JSON API types.""" - -from typing import ( - Dict, - List, - Optional, - Union, -) - -DictValueType = Union[str, int, float, dict, list] -Filters = List[Dict[str, Optional[DictValueType]]] -JsonParamsType = Dict[str, DictValueType] diff --git a/fastapi_jsonapi/misc/sqla/generics/base.py b/fastapi_jsonapi/misc/sqla/generics/base.py index ce3273ac..9ba2fbbf 100644 --- a/fastapi_jsonapi/misc/sqla/generics/base.py +++ b/fastapi_jsonapi/misc/sqla/generics/base.py @@ -1,11 +1,6 @@ -from fastapi_jsonapi.data_layers.sqla_orm import SqlalchemyDataLayer -from fastapi_jsonapi.views.detail_view import DetailViewBase -from fastapi_jsonapi.views.list_view import ListViewBase +from fastapi_jsonapi.data_layers.sqla.orm import SqlalchemyDataLayer +from fastapi_jsonapi.views.view_base import ViewBase -class DetailViewBaseGeneric(DetailViewBase): - data_layer_cls = SqlalchemyDataLayer - - -class ListViewBaseGeneric(ListViewBase): +class ViewBaseGeneric(ViewBase): data_layer_cls = SqlalchemyDataLayer diff --git a/fastapi_jsonapi/querystring.py b/fastapi_jsonapi/querystring.py index c68bbba4..116dbcbe 100644 --- a/fastapi_jsonapi/querystring.py +++ b/fastapi_jsonapi/querystring.py @@ -1,44 +1,23 @@ """Helper to deal with querystring parameters according to jsonapi specification.""" + from collections import defaultdict from functools import cached_property -from typing import ( - TYPE_CHECKING, - Any, - Dict, - List, - Optional, - Type, -) +from typing import Any, Optional, Type from urllib.parse import unquote -import simplejson as json -from fastapi import ( - FastAPI, - Request, -) -from pydantic import ( - BaseModel, - Field, -) -from starlette.datastructures import QueryParams +import orjson as json +from fastapi import FastAPI, Request +from fastapi.datastructures import 
QueryParams +from pydantic import BaseModel, Field -from fastapi_jsonapi.api import RoutersJSONAPI from fastapi_jsonapi.exceptions import ( BadRequest, InvalidField, InvalidFilters, InvalidInclude, - InvalidSort, InvalidType, ) -from fastapi_jsonapi.schema import ( - get_model_field, - get_relationships, -) -from fastapi_jsonapi.splitter import SPLIT_REL - -if TYPE_CHECKING: - from fastapi_jsonapi.data_typing import TypeSchema +from fastapi_jsonapi.storages import schemas_storage class PaginationQueryStringManager(BaseModel): @@ -64,10 +43,10 @@ class HeadersQueryStringManager(BaseModel): host: Optional[str] = None connection: Optional[str] = None accept: Optional[str] = None - user_agent: Optional[str] = Field(None, alias="user-agent") + user_agent: Optional[str] = Field(default=None, alias="user-agent") referer: Optional[str] = None - accept_encoding: Optional[str] = Field(None, alias="accept-encoding") - accept_language: Optional[str] = Field(None, alias="accept-language") + accept_encoding: Optional[str] = Field(default=None, alias="accept-encoding") + accept_language: Optional[str] = Field(default=None, alias="accept-language") class QueryStringManager: @@ -84,13 +63,14 @@ def __init__(self, request: Request) -> None: self.request: Request = request self.app: FastAPI = request.app self.qs: QueryParams = request.query_params - self.config: Dict[str, Any] = getattr(self.app, "config", {}) + self.config: dict[str, Any] = getattr(self.app, "config", {}) self.ALLOW_DISABLE_PAGINATION: bool = self.config.get("ALLOW_DISABLE_PAGINATION", True) self.MAX_PAGE_SIZE: int = self.config.get("MAX_PAGE_SIZE", 10000) self.MAX_INCLUDE_DEPTH: int = self.config.get("MAX_INCLUDE_DEPTH", 3) self.headers: HeadersQueryStringManager = HeadersQueryStringManager(**dict(self.request.headers)) - def _extract_item_key(self, key: str) -> str: + @classmethod + def extract_item_key(cls, key: str) -> str: try: key_start = key.index("[") + 1 key_end = key.index("]") @@ -99,7 +79,7 @@ def 
_extract_item_key(self, key: str) -> str: msg = "Parse error" raise BadRequest(msg, parameter=key) - def _get_unique_key_values(self, name: str) -> Dict[str, str]: + def _get_unique_key_values(self, name: str) -> dict[str, str]: """ Return a dict containing key / values items for a given key, used for items like filters, page, etc. @@ -114,12 +94,12 @@ def _get_unique_key_values(self, name: str) -> Dict[str, str]: if not key.startswith(name): continue - item_key = self._extract_item_key(key) + item_key = self.extract_item_key(key) results[item_key] = value return results - def _get_multiple_key_values(self, name: str) -> Dict[str, List]: + def _get_multiple_key_values(self, name: str) -> dict[str, list]: results = defaultdict(list) for raw_key, value in self.qs.multi_items(): @@ -127,18 +107,18 @@ def _get_multiple_key_values(self, name: str) -> Dict[str, List]: if not key.startswith(name): continue - item_key = self._extract_item_key(key) + item_key = self.extract_item_key(key) results[item_key].extend(value.split(",")) return results @classmethod - def _simple_filters(cls, dict_: Dict[str, Any]) -> List[Dict[str, Any]]: + def _simple_filters(cls, dict_: dict[str, Any]) -> list[dict[str, Any]]: """Filter creation.""" return [{"name": key, "op": "eq", "val": value} for (key, value) in dict_.items()] @property - def querystring(self) -> Dict[str, str]: + def querystring(self) -> dict[str, str]: """ Return original querystring but containing only managed keys. @@ -151,7 +131,7 @@ def querystring(self) -> Dict[str, str]: } @property - def filters(self) -> List[dict]: + def filters(self) -> list[dict]: """ Return filters from query string. 
@@ -172,10 +152,33 @@ def filters(self) -> List[dict]: raise InvalidFilters(msg) results.extend(loaded_filters) + if filter_key_values := self._get_unique_key_values("filter["): results.extend(self._simple_filters(filter_key_values)) + return results + @property + def sorts(self) -> list[dict]: + if (sort_q := self.qs.get("sort")) is None: + return [] + + sorting_results = [] + for sort_field in sort_q.split(","): + field, order = sort_field, "asc" + + if sort_field.startswith("-"): + field = sort_field.removeprefix("-") + order = "desc" + + relationship_path = None + if "." in field: + relationship_path = ".".join(field.split(".")[:-1]) + + sorting_results.append({"field": field, "order": order, "rel_path": relationship_path}) + + return sorting_results + @cached_property def pagination(self) -> PaginationQueryStringManager: """ @@ -199,12 +202,12 @@ def pagination(self) -> PaginationQueryStringManager: :raises BadRequest: if the client is not allowed to disable pagination. """ # check values type - pagination_data: Dict[str, str] = self._get_unique_key_values("page") + pagination_data: dict[str, str] = self._get_unique_key_values("page") pagination = PaginationQueryStringManager(**pagination_data) if pagination_data.get("size") is None: pagination.size = None if pagination.size: - if self.ALLOW_DISABLE_PAGINATION is False and pagination.size == 0: + if not self.ALLOW_DISABLE_PAGINATION and pagination.size == 0: msg = "You are not allowed to disable pagination" raise BadRequest(msg, parameter="page[size]") if self.MAX_PAGE_SIZE and pagination.size > self.MAX_PAGE_SIZE: @@ -213,7 +216,7 @@ def pagination(self) -> PaginationQueryStringManager: return pagination @property - def fields(self) -> Dict[str, List[str]]: + def fields(self) -> dict[str, set]: """ Return fields wanted by client. 
@@ -229,68 +232,24 @@ def fields(self) -> Dict[str, List[str]]: """ fields = self._get_multiple_key_values("fields") for resource_type, field_names in fields.items(): - # TODO: we have registry for models (BaseModel) - # TODO: create `type to schemas` registry - - if resource_type not in RoutersJSONAPI.all_jsonapi_routers: + if not schemas_storage.has_resource(resource_type): msg = f"Application has no resource with type {resource_type!r}" raise InvalidType(msg) - schema: Type[BaseModel] = self._get_schema(resource_type) + schema: Type[BaseModel] = schemas_storage.get_attrs_schema(resource_type, "get") for field_name in field_names: if field_name == "": continue - if field_name not in schema.__fields__: - msg = "{schema} has no attribute {field}".format( - schema=schema.__name__, - field=field_name, - ) + if field_name not in schema.model_fields: + msg = f"{schema.__name__} has no attribute {field_name}" raise InvalidField(msg) return {resource_type: set(field_names) for resource_type, field_names in fields.items()} - def _get_schema(self, resource_type: str) -> Type[BaseModel]: - return RoutersJSONAPI.all_jsonapi_routers[resource_type]._schema - - def get_sorts(self, schema: Type["TypeSchema"]) -> List[Dict[str, str]]: - """ - Return fields to sort by including sort name for SQLAlchemy and row sort parameter for other ORMs. - - :return: a list of sorting information - - Example of return value:: - - [ - {'field': 'created_at', 'order': 'desc'}, - ] - - :raises InvalidSort: if sort field wrong. 
- """ - if sort_q := self.qs.get("sort"): - sorting_results = [] - for sort_field in sort_q.split(","): - field = sort_field.replace("-", "") - if SPLIT_REL not in field: - if field not in schema.__fields__: - msg = "{schema} has no attribute {field}".format( - schema=schema.__name__, - field=field, - ) - raise InvalidSort(msg) - if field in get_relationships(schema): - msg = "You can't sort on {field} because it is a relationship field".format(field=field) - raise InvalidSort(msg) - field = get_model_field(schema, field) - order = "desc" if sort_field.startswith("-") else "asc" - sorting_results.append({"field": field, "order": order}) - return sorting_results - - return [] - @property - def include(self) -> List[str]: + def include(self) -> list[str]: """ Return fields to include. @@ -302,9 +261,7 @@ def include(self) -> List[str]: if self.MAX_INCLUDE_DEPTH is not None: for include_path in includes: - if len(include_path.split(SPLIT_REL)) > self.MAX_INCLUDE_DEPTH: - msg = "You can't use include through more than {max_include_depth} relationships".format( - max_include_depth=self.MAX_INCLUDE_DEPTH, - ) + if len(include_path.split(".")) > self.MAX_INCLUDE_DEPTH: + msg = f"You can't use include through more than {self.MAX_INCLUDE_DEPTH} relationships" raise InvalidInclude(msg) return includes diff --git a/fastapi_jsonapi/schema.py b/fastapi_jsonapi/schema.py index d9bf31d6..fd07a9a0 100644 --- a/fastapi_jsonapi/schema.py +++ b/fastapi_jsonapi/schema.py @@ -3,34 +3,37 @@ Pydantic (for FastAPI). 
""" -from typing import ( - TYPE_CHECKING, - Dict, - List, - Optional, - Sequence, - Type, - Union, -) + +from __future__ import annotations + +from dataclasses import dataclass +from inspect import isclass +from types import GenericAlias +from typing import TYPE_CHECKING, Any, Callable, Optional, Sequence, Type, Union, get_args from fastapi import FastAPI -from pydantic import ( - BaseConfig, - BaseModel, - Extra, - Field, -) +from pydantic import BaseModel, ConfigDict, Field + +# noinspection PyProtectedMember +from pydantic._internal._typing_extra import is_none_type + +# noinspection PyProtectedMember +from pydantic.fields import FieldInfo + +from fastapi_jsonapi.common import search_relationship_info +from fastapi_jsonapi.types_metadata import RelationshipInfo if TYPE_CHECKING: from fastapi_jsonapi.data_typing import TypeSchema class BaseJSONAPIRelationshipSchema(BaseModel): - id: str = Field(..., description="Related object ID") - type: str = Field(..., description="Type of the related resource object") + model_config = ConfigDict( + extra="forbid", + ) - class Config(BaseConfig): - extra = Extra.forbid + id: str = Field(default=..., description="Related object ID") + type: str = Field(default=..., description="Type of the related resource object") class BaseJSONAPIRelationshipDataToOneSchema(BaseModel): @@ -38,7 +41,7 @@ class BaseJSONAPIRelationshipDataToOneSchema(BaseModel): class BaseJSONAPIRelationshipDataToManySchema(BaseModel): - data: List[BaseJSONAPIRelationshipSchema] + data: list[BaseJSONAPIRelationshipSchema] class BaseJSONAPIItemSchema(BaseModel): @@ -56,8 +59,8 @@ class BaseJSONAPIItemInSchema(BaseJSONAPIItemSchema): TODO PATCH: accept object id (maybe create a new separate schema) """ - attributes: "TypeSchema" = Field(description="Resource object attributes") - relationships: Optional["TypeSchema"] = Field(None, description="Resource object relationships") + attributes: TypeSchema = Field(description="Resource object attributes") + 
relationships: Optional[TypeSchema] = Field(default=None, description="Resource object relationships") id: Optional[str] = Field(description="Resource object ID") @@ -74,12 +77,14 @@ class BaseJSONAPIObjectSchema(BaseJSONAPIItemSchema): class JSONAPIResultListMetaSchema(BaseModel): """JSON:API list meta schema.""" + model_config = ConfigDict( + extra="forbid", + populate_by_name=True, + ) + count: Optional[int] total_pages: Optional[int] = Field(alias="totalPages") - class Config: - allow_population_by_field_name = True - class JSONAPIDocumentObjectSchema(BaseModel): """ @@ -94,13 +99,21 @@ class JSONAPIDocumentObjectSchema(BaseModel): class JSONAPIObjectSchema(BaseJSONAPIObjectSchema): """JSON:API base object schema.""" + model_config = ConfigDict( + from_attributes=True, + ) + class BaseJSONAPIResultSchema(BaseModel): """ JSON:API Required fields schema """ - meta: Optional[JSONAPIResultListMetaSchema] = Field(description="JSON:API metadata") + model_config = ConfigDict( + from_attributes=True, + ) + + meta: Optional[JSONAPIResultListMetaSchema] = Field(default=None, description="JSON:API metadata") jsonapi: JSONAPIDocumentObjectSchema = JSONAPIDocumentObjectSchema() @@ -126,6 +139,54 @@ class JSONAPISchemaIntrospectionError(Exception): pass +# todo: when 3.9 support is dropped, return back `slots=True to JSONAPIObjectSchemas dataclass` + + +@dataclass(frozen=True) +class JSONAPIObjectSchemas: + attributes_schema: Type[BaseModel] + relationships_schema: Type[BaseModel] + object_jsonapi_schema: Type[JSONAPIObjectSchema] + can_be_included_schemas: dict[str, Type[JSONAPIObjectSchema]] + + @property + def included_schemas_list(self) -> list[Type[JSONAPIObjectSchema]]: + return list(self.can_be_included_schemas.values()) + + +@dataclass(frozen=True) +class BuiltSchemasDTO: + schema_in_post: Type[BaseJSONAPIDataInSchema] + schema_in_post_data: Type[BaseJSONAPIItemInSchema] + schema_in_patch: Type[BaseJSONAPIDataInSchema] + schema_in_patch_data: 
Type[BaseJSONAPIItemInSchema] + detail_response_schema: Type[JSONAPIResultDetailSchema] + list_response_schema: Type[JSONAPIResultListSchema] + + +FieldValidators = dict[str, Callable] + + +@dataclass(frozen=True) +class SchemasInfoDTO: + # id field + resource_id_field: tuple[Type, FieldInfo, Callable, FieldValidators] + # pre-built attributes + attributes_schema: Type[BaseModel] + # relationships + relationships_schema: Type[BaseModel] + # has any required relationship + has_required_relationship: bool + # anything that can be included + included_schemas: list[tuple[str, BaseModel, str]] + + relationships_info: dict[str, tuple[RelationshipInfo, Any]] + + field_schemas: dict[str, Type[BaseModel]] + + model_validators: dict + + def get_model_field(schema: Type["TypeSchema"], field: str) -> str: """ Get the model field of a schema field. @@ -144,34 +205,23 @@ class ComputerSchema(pydantic_base): :return: the name of the field in the model :raises Exception: if the schema from parameter has no attribute for parameter. """ - if schema.__fields__.get(field) is None: - msg = "{schema} has no attribute {field}".format( - schema=schema.__name__, - field=field, - ) + if schema.model_fields.get(field) is None: + msg = f"{schema.__name__} has no attribute {field}" raise JSONAPISchemaIntrospectionError(msg) return field -def get_relationships(schema: Type["TypeSchema"], model_field: bool = False) -> List[str]: +def get_relationship_fields_names(schema: Type["TypeSchema"]) -> set[str]: """ Return relationship fields of a schema. 
:param schema: a schemas schema - :param model_field: list of relationship fields of a schema """ - relationships: List[str] = [] - for i_name, i_type in schema.__fields__.items(): - try: - if issubclass(i_type.type_, BaseModel): - relationships.append(i_name) - except TypeError: - pass - - if model_field is True: - relationships = [get_model_field(schema, key) for key in relationships] - - return relationships + names: set[str] = set() + for i_name, i_type in schema.model_fields.items(): + if search_relationship_info.first(i_type): + names.add(i_name) + return names def get_schema_from_type(resource_type: str, app: FastAPI) -> Type[BaseModel]: @@ -183,15 +233,37 @@ def get_schema_from_type(resource_type: str, app: FastAPI) -> Type[BaseModel]: :return Schema: the schema class. :raises Exception: if the schema not found for this resource type. """ - schemas: Dict[str, Type[BaseModel]] = getattr(app, "schemas", {}) + schemas: dict[str, Type[BaseModel]] = getattr(app, "schemas", {}) try: return schemas[resource_type] except KeyError: - msg = "Couldn't find schema for type: {type}".format(type=resource_type) + msg = f"Couldn't find schema for type: {resource_type}" raise Exception(msg) -def get_related_schema(schema: Type["TypeSchema"], field: str) -> Type["TypeSchema"]: +def get_schema_from_field_annotation(field: FieldInfo) -> Optional[Type[TypeSchema]]: + annotation_ = field.annotation + + if isclass(annotation_) and issubclass(annotation_, BaseModel): + return annotation_ + + choices = list(get_args(field.annotation)) + while choices: + elem = choices.pop(0) + if isinstance(elem, GenericAlias): + choices.extend(get_args(elem)) + continue + + if is_none_type(elem): + continue + + if isclass(elem) and issubclass(elem, BaseModel): + return elem + + return None + + +def get_related_schema(schema: Type[TypeSchema], field: str) -> Type[TypeSchema]: """ Retrieve the related schema of a relationship field. 
@@ -199,4 +271,4 @@ def get_related_schema(schema: Type["TypeSchema"], field: str) -> Type["TypeSche :params field: the relationship field :return: the related schema """ - return schema.__fields__[field].type_ + return get_schema_from_field_annotation(schema.model_fields[field]) diff --git a/fastapi_jsonapi/schema_base.py b/fastapi_jsonapi/schema_base.py index b89bad8c..ddaada25 100644 --- a/fastapi_jsonapi/schema_base.py +++ b/fastapi_jsonapi/schema_base.py @@ -1,15 +1,11 @@ __all__ = ( - "Field", "BaseModel", + "Field", "registry", - "RelationshipInfo", ) -from typing import Dict - from pydantic import BaseModel as BaseModelGeneric from pydantic import Field -from pydantic.main import ModelMetaclass class Registry: @@ -30,26 +26,11 @@ def schemas(self): registry = Registry() -class RegistryMeta(ModelMetaclass): - def __new__(mcs, *args, **kwargs): - # any other way to get all known schemas? - schema = super().__new__(mcs, *args, **kwargs) - registry.add(schema) - return schema +class RegistryMeta(BaseModelGeneric): + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + registry.add(cls) -class BaseModel(BaseModelGeneric, metaclass=RegistryMeta): +class BaseModel(RegistryMeta): pass - - -class RelationshipInfo(BaseModel): - resource_type: str - many: bool = False - related_view: str = None - related_view_kwargs: Dict[str, str] = Field(default_factory=dict) - resource_id_example: str = "1" - id_field_name: str = "id" - - # TODO: Pydantic V2 use model_config - class Config: - frozen = True diff --git a/fastapi_jsonapi/schema_builder.py b/fastapi_jsonapi/schema_builder.py index 7601e2d1..9205fcdd 100644 --- a/fastapi_jsonapi/schema_builder.py +++ b/fastapi_jsonapi/schema_builder.py @@ -1,26 +1,15 @@ """JSON API schemas builder class.""" -from dataclasses import dataclass -from functools import lru_cache -from typing import ( - Any, - Callable, - ClassVar, - Dict, - Iterable, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) -import 
pydantic -from pydantic import BaseConfig +import logging +from typing import Annotated, Any, Callable, Literal, Optional, Type, TypeVar, Union + +from pydantic import AfterValidator, BeforeValidator, ConfigDict, PlainValidator, WrapValidator, create_model from pydantic import BaseModel as PydanticBaseModel -from pydantic.fields import FieldInfo, ModelField -from fastapi_jsonapi.data_typing import TypeSchema +# noinspection PyProtectedMember +from pydantic.fields import FieldInfo + +from fastapi_jsonapi.common import get_relationship_info_from_field_metadata, search_client_can_set_id from fastapi_jsonapi.schema import ( BaseJSONAPIDataInSchema, BaseJSONAPIItemInSchema, @@ -28,111 +17,30 @@ BaseJSONAPIRelationshipDataToOneSchema, BaseJSONAPIRelationshipSchema, BaseJSONAPIResultSchema, + BuiltSchemasDTO, JSONAPIObjectSchema, + JSONAPIObjectSchemas, JSONAPIResultDetailSchema, JSONAPIResultListSchema, RelationshipInfoSchema, + SchemasInfoDTO, + get_schema_from_field_annotation, ) -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo, registry -from fastapi_jsonapi.splitter import SPLIT_REL -from fastapi_jsonapi.validation_utils import ( - extract_field_validators, - extract_validators, -) - -JSON_API_RESPONSE_TYPE = Dict[Union[int, str], Dict[str, Any]] +from fastapi_jsonapi.schema_base import BaseModel, Field, registry +from fastapi_jsonapi.storages.schemas_storage import schemas_storage +from fastapi_jsonapi.types_metadata import RelationshipInfo +from fastapi_jsonapi.validation_utils import extract_validators +log = logging.getLogger(__name__) JSONAPIObjectSchemaType = TypeVar("JSONAPIObjectSchemaType", bound=PydanticBaseModel) -not_passed = object() - - -# todo: when 3.9 support is dropped, return back `slots=True to JSONAPIObjectSchemas dataclass` - - -class FieldConfig: - cast_type: Callable - - def __init__(self, cast_type: Optional[Callable] = None): - self.cast_type = cast_type - - -class TransferSaveWrapper: - """ - This class helps to 
transfer type from one pydantic Field to another - - Types doesn't allowed to be passed as keywords to pydantic Field, - so this exists to help save them - - In other case OpenAPI generation will fail - """ - - def __init__(self, field_config: FieldConfig): - def get_field_config() -> FieldConfig: - return field_config - - self.get_field_config = get_field_config - - -@dataclass(frozen=True) -class JSONAPIObjectSchemas: - attributes_schema: Type[BaseModel] - relationships_schema: Type[BaseModel] - object_jsonapi_schema: Type[JSONAPIObjectSchema] - can_be_included_schemas: Dict[str, Type[JSONAPIObjectSchema]] - - @property - def included_schemas_list(self) -> List[Type[JSONAPIObjectSchema]]: - return list(self.can_be_included_schemas.values()) - - -@dataclass(frozen=True) -class BuiltSchemasDTO: - schema_in_post: Type[BaseJSONAPIDataInSchema] - schema_in_post_data: Type[BaseJSONAPIItemInSchema] - schema_in_patch: Type[BaseJSONAPIDataInSchema] - schema_in_patch_data: Type[BaseJSONAPIItemInSchema] - detail_response_schema: Type[JSONAPIResultDetailSchema] - list_response_schema: Type[JSONAPIResultListSchema] - - -FieldValidators = Dict[str, Callable] - - -@dataclass(frozen=True) -class SchemasInfoDTO: - # id field - resource_id_field: Tuple[Type, FieldInfo, Callable, FieldValidators] - # pre-built attributes - attributes_schema: Type[BaseModel] - # relationships - relationships_schema: Type[BaseModel] - # has any required relationship - has_required_relationship: bool - # anything that can be included - included_schemas: List[Tuple[str, BaseModel, str]] - class SchemaBuilder: - # IDK if there's a better way than global caches - # shared between ALL RoutersJSONAPI instances - object_schemas_cache: ClassVar = {} - relationship_schema_cache: ClassVar = {} - base_jsonapi_object_schemas_cache: ClassVar = {} - def __init__( self, resource_type: str, - max_cache_size: int = 0, ): self._resource_type = resource_type - self._init_cache(max_cache_size) - - def _init_cache(self, 
max_cache_size: int): - # TODO: remove crutch - self._get_info_from_schema_for_building_cached = lru_cache(maxsize=max_cache_size)( - self._get_info_from_schema_for_building_cached, - ) def _create_schemas_objects_list(self, schema: Type[BaseModel]) -> Type[JSONAPIResultListSchema]: object_jsonapi_list_schema, list_jsonapi_schema = self.build_list_schemas(schema) @@ -153,7 +61,6 @@ def create_schemas( schema_in_post: Optional[Type[BaseModel]] = None, schema_in_patch: Optional[Type[BaseModel]] = None, ) -> BuiltSchemasDTO: - # TODO: generic? schema_in_post = schema_in_post or schema schema_name_in_post_suffix = "" @@ -168,12 +75,16 @@ def create_schemas( schema_in_post, schema_in_post_data = self.build_schema_in( schema_in=schema_in_post, + schema=schema, + operation_type="create", schema_name_suffix=schema_name_in_post_suffix, non_optional_relationships=True, ) schema_in_patch, schema_in_patch_data = self.build_schema_in( schema_in=schema_in_patch, + schema=schema, + operation_type="update", schema_name_suffix=schema_name_in_patch_suffix, id_field_required=True, ) @@ -190,36 +101,48 @@ def create_schemas( def build_schema_in( self, schema_in: Type[BaseModel], + schema, + operation_type: Literal["create", "update", "get"], schema_name_suffix: str = "", non_optional_relationships: bool = False, id_field_required: bool = False, - ) -> Tuple[Type[BaseJSONAPIDataInSchema], Type[BaseJSONAPIItemInSchema]]: + ) -> tuple[Type[BaseJSONAPIDataInSchema], Type[BaseJSONAPIItemInSchema]]: base_schema_name = schema_in.__name__.removesuffix("Schema") + schema_name_suffix - dto = self._get_info_from_schema_for_building_wrapper( + dto = self.get_info_from_schema_for_building( base_name=base_schema_name, schema=schema_in, + operation_type=operation_type, non_optional_relationships=non_optional_relationships, ) - object_jsonapi_schema = self._build_jsonapi_object( + object_jsonapi_schema = self.build_jsonapi_object( base_name=base_schema_name, resource_type=self._resource_type, - 
attributes_schema=dto.attributes_schema, - relationships_schema=dto.relationships_schema, - resource_id_field=dto.resource_id_field, - includes=not_passed, + dto=dto, model_base=BaseJSONAPIItemInSchema, - relationships_required=dto.has_required_relationship, id_field_required=id_field_required, ) - wrapped_object_jsonapi_schema = pydantic.create_model( + wrapped_object_jsonapi_schema = create_model( f"{base_schema_name}ObjectDataJSONAPI", data=(object_jsonapi_schema, ...), __base__=BaseJSONAPIDataInSchema, ) + schemas_storage.add_resource( + builder=self, + resource_type=self._resource_type, + operation_type=operation_type, + source_schema=schema, + data_schema=object_jsonapi_schema, + attributes_schema=dto.attributes_schema, + field_schemas=dto.field_schemas, + relationships_info=dto.relationships_info, + model_validators=dto.model_validators, + schema_in=wrapped_object_jsonapi_schema, + ) + return wrapped_object_jsonapi_schema, object_jsonapi_schema def _build_schema( @@ -227,13 +150,11 @@ def _build_schema( base_name: str, schema: Type[BaseModel], builder: Callable, - includes: Iterable[str] = not_passed, ): object_schemas = self.create_jsonapi_object_schemas( schema=schema, base_name=base_name, compute_included_schemas=True, - includes=includes, ) object_jsonapi_schema = object_schemas.object_jsonapi_schema response_jsonapi_schema = builder( @@ -246,140 +167,127 @@ def _build_schema( def build_detail_schemas( self, schema: Type[BaseModel], - includes: Iterable[str] = not_passed, - ) -> Tuple[Type[JSONAPIObjectSchema], Type[JSONAPIResultDetailSchema]]: + ) -> tuple[Type[JSONAPIObjectSchema], Type[JSONAPIResultDetailSchema]]: return self._build_schema( base_name=f"{schema.__name__}Detail", schema=schema, builder=self.build_schema_for_detail_result, - includes=includes, ) def build_list_schemas( self, schema: Type[BaseModel], - includes: Iterable[str] = not_passed, - ) -> Tuple[Type[JSONAPIObjectSchema], Type[JSONAPIResultListSchema]]: + ) -> 
tuple[Type[JSONAPIObjectSchema], Type[JSONAPIResultListSchema]]: return self._build_schema( base_name=f"{schema.__name__}List", schema=schema, builder=self.build_schema_for_list_result, - includes=includes, ) - def _get_info_from_schema_for_building_cached( - self, - base_name: str, - schema: Type[BaseModel], - includes: Iterable[str], - non_optional_relationships: bool, - ): - return self._get_info_from_schema_for_building( - base_name=base_name, - schema=schema, - includes=includes, - non_optional_relationships=non_optional_relationships, - ) + @classmethod + def _annotation_with_validators(cls, field: FieldInfo) -> type: + annotation = field.annotation + validators = [] + for val in field.metadata: + if isinstance(val, (AfterValidator, BeforeValidator, WrapValidator, PlainValidator)): + validators.append(val) - def _get_info_from_schema_for_building_wrapper( - self, - base_name: str, - schema: Type[BaseModel], - includes: Iterable[str] = not_passed, - non_optional_relationships: bool = False, - ): - """ - Wrapper function for return cached schema result - """ - if includes is not not_passed: - includes = tuple(includes) + if validators: + annotation = Annotated.__class_getitem__((annotation, *validators)) - return self._get_info_from_schema_for_building_cached( - base_name=base_name, - schema=schema, - includes=includes, - non_optional_relationships=non_optional_relationships, - ) + return annotation - def _get_info_from_schema_for_building( + def get_info_from_schema_for_building( self, base_name: str, schema: Type[BaseModel], - includes: Iterable[str] = not_passed, + operation_type: Literal["create", "update", "get"], non_optional_relationships: bool = False, ) -> SchemasInfoDTO: attributes_schema_fields = {} relationships_schema_fields = {} - included_schemas: List[Tuple[str, BaseModel, str]] = [] + relationships_info: dict[str, tuple[RelationshipInfo, Any]] = {} + included_schemas: list[tuple[str, BaseModel, str]] = [] has_required_relationship = False - 
resource_id_field = (str, Field(None), None, {}) - - for name, field in (schema.__fields__ or {}).items(): - if isinstance(field.field_info.extra.get("relationship"), RelationshipInfo): - if includes is not_passed: - pass - elif name not in includes: - # if includes are passed, skip this if name not present! - continue - relationship: RelationshipInfo = field.field_info.extra["relationship"] + resource_id_field = (str, Field(default=None), None, {}) + + # required! otherwise we get ForwardRef + schema.model_rebuild(_types_namespace=registry.schemas) + for name, field in (schema.model_fields or {}).items(): + if relationship_info := get_relationship_info_from_field_metadata(field): + relationships_info[name] = (relationship_info, field) relationship_schema = self.create_relationship_data_schema( field_name=name, base_name=base_name, field=field, - relationship_info=relationship, + operation_type=operation_type, + relationship_info=relationship_info, ) - # TODO: xxx - # is there a way to read that the field type is Optional? (r.n. it's ForwardRef) - # consider field is not required until is marked required explicitly (`default=...` means required) - field_marked_required = field.required is True + field_marked_required = field.is_required() relationship_field = ... 
if (non_optional_relationships and field_marked_required) else None if relationship_field is not None: has_required_relationship = True relationships_schema_fields[name] = (relationship_schema, relationship_field) # works both for to-one and to-many - included_schemas.append((name, field.type_, relationship.resource_type)) + if related_schema := get_schema_from_field_annotation(field): + included_schemas.append((name, related_schema, relationship_info.resource_type)) elif name == "id": - id_validators = extract_field_validators( - schema, + id_validators, _ = extract_validators( + model=schema, include_for_field_names={"id"}, ) - resource_id_field = (*(resource_id_field[:-1]), id_validators) - if not field.field_info.extra.get("client_can_set_id"): + if not (can_set_id := search_client_can_set_id.first(field)): continue - - # todo: support for union types? - # support custom cast func - resource_id_field = (str, Field(**field.field_info.extra), field.outer_type_, id_validators) + resource_id_field = (str, can_set_id, self._annotation_with_validators(field=field), id_validators) else: - attributes_schema_fields[name] = (field.outer_type_, field.field_info) + attributes_schema_fields[name] = (self._annotation_with_validators(field=field), field.default) - class ConfigOrmMode(BaseConfig): - orm_mode = True + model_config = ConfigDict( + from_attributes=True, + ) - attributes_schema = pydantic.create_model( + field_validators, model_validators = extract_validators(schema, exclude_for_field_names={"id"}) + attributes_schema = create_model( f"{base_name}AttributesJSONAPI", **attributes_schema_fields, - __config__=ConfigOrmMode, - __validators__=extract_validators(schema, exclude_for_field_names={"id"}), + __config__=model_config, + __validators__={**field_validators, **model_validators}, ) - relationships_schema = pydantic.create_model( + field_schemas = {} + for field_name, field in attributes_schema_fields.items(): + field_validators, _ = extract_validators( + schema, 
+ include_for_field_names={field_name}, + ) + field_schemas[field_name] = create_model( + f"{base_name}{field_name.title()}AttributeJSONAPI", + **{field_name: field}, + __config__=model_config, + __validators__=field_validators, + ) + + relationships_schema = create_model( f"{base_name}RelationshipsJSONAPI", **relationships_schema_fields, - __config__=ConfigOrmMode, + __config__=model_config, ) return SchemasInfoDTO( resource_id_field=resource_id_field, attributes_schema=attributes_schema, relationships_schema=relationships_schema, + relationships_info=relationships_info, has_required_relationship=has_required_relationship, included_schemas=included_schemas, + field_schemas=field_schemas, + model_validators=model_validators, ) + @classmethod def create_relationship_schema( - self, + cls, name: str, relationship_info: RelationshipInfo, ) -> Type[BaseJSONAPIRelationshipSchema]: @@ -388,26 +296,41 @@ def create_relationship_schema( # plural to single name = name[:-1] - schema_name = f"{name}RelationshipJSONAPI".format(name=name) - relationship_schema = pydantic.create_model( - schema_name, - id=(str, Field(..., description="Resource object id", example=relationship_info.resource_id_example)), - type=(str, Field(default=relationship_info.resource_type, description="Resource type")), + return create_model( + f"{name}RelationshipJSONAPI", + id=( + str, + Field( + ..., + description="Resource object id", + json_schema_extra={"example": relationship_info.resource_id_example}, + ), + ), + type=( + str, + Field( + default=relationship_info.resource_type, + description="Resource type", + ), + ), __base__=BaseJSONAPIRelationshipSchema, ) - return relationship_schema - def create_relationship_data_schema( self, field_name: str, base_name: str, - field: ModelField, + operation_type: Literal["create", "update", "get"], + field: FieldInfo, relationship_info: RelationshipInfo, ) -> RelationshipInfoSchema: - cache_key = (base_name, field_name, relationship_info.resource_type, 
relationship_info.many) - if cache_key in self.relationship_schema_cache: - return self.relationship_schema_cache[cache_key] + if relationship_schema := schemas_storage.get_relationship_schema( + from_resource_type=self._resource_type, + to_resource_type=relationship_info.resource_type, + operation_type=operation_type, + field_name=field_name, + ): + return relationship_schema base_name = base_name.removesuffix("Schema") schema_name = f"{base_name}{field_name.title()}" @@ -417,151 +340,132 @@ def create_relationship_data_schema( ) base = BaseJSONAPIRelationshipDataToOneSchema if relationship_info.many: - relationship_schema = List[relationship_schema] + relationship_schema = list[relationship_schema] base = BaseJSONAPIRelationshipDataToManySchema + elif not field.is_required(): + relationship_schema = Optional[relationship_schema] - relationship_data_schema = pydantic.create_model( + relationship_data_schema = create_model( f"{schema_name}RelationshipDataJSONAPI", # TODO: on create (post request) sometimes it's required and at the same time on fetch it's not required - data=(relationship_schema, Field(... if field.required else None)), + data=(relationship_schema, Field(... 
if field.is_required() else None)), __base__=base, ) - self.relationship_schema_cache[cache_key] = relationship_data_schema + + schemas_storage.add_relationship( + from_resource_type=self._resource_type, + to_resource_type=relationship_info.resource_type, + operation_type=operation_type, + field_name=field_name, + relationship_schema=relationship_data_schema, + relationship_info=relationship_info, + ) return relationship_data_schema - def _build_jsonapi_object( + def build_jsonapi_object( self, base_name: str, resource_type: str, - attributes_schema: Type[TypeSchema], - relationships_schema: Type[TypeSchema], - includes, - resource_id_field: Tuple[Type, FieldInfo, Callable, FieldValidators], + dto: SchemasInfoDTO, model_base: Type[JSONAPIObjectSchemaType] = JSONAPIObjectSchema, - use_schema_cache: bool = True, - relationships_required: bool = False, + with_relationships: bool = True, id_field_required: bool = False, ) -> Type[JSONAPIObjectSchemaType]: - if use_schema_cache and base_name in self.base_jsonapi_object_schemas_cache: - return self.base_jsonapi_object_schemas_cache[base_name] + field_type, can_set_id, id_cast_func, id_validators = dto.resource_id_field - field_type, field_info, id_cast_func, id_validators = resource_id_field + if can_set_id: + field_type = Annotated[field_type, can_set_id] - id_field_kw = { - **field_info.extra, - } - if id_cast_func: - id_field_kw.update( - field_config=TransferSaveWrapper(field_config=FieldConfig(cast_type=id_cast_func)), - ) + relationship_less_fields = {} + relationship_less_fields.update( + id=(field_type, Field(... if id_field_required else None)), + attributes=(dto.attributes_schema, ...), + type=(str, Field(default=resource_type or self._resource_type, description="Resource type")), + ) - object_jsonapi_schema_fields = { - "attributes": (attributes_schema, ...), - "id": (str, Field(... 
if id_field_required else None, **id_field_kw)), - } - if includes: + object_jsonapi_schema_fields = {} + object_jsonapi_schema_fields.update( + id=(field_type, Field(... if id_field_required else None)), + attributes=(dto.attributes_schema, ...), + type=(str, Field(default=resource_type or self._resource_type, description="Resource type")), + ) + + if with_relationships: object_jsonapi_schema_fields.update( - relationships=(relationships_schema, (... if relationships_required else None)), + relationships=(Optional[dto.relationships_schema], ... if dto.has_required_relationship else None), ) - object_jsonapi_schema = pydantic.create_model( + object_jsonapi_schema = create_model( f"{base_name}ObjectJSONAPI", **object_jsonapi_schema_fields, - type=(str, Field(default=resource_type or self._resource_type, description="Resource type")), __validators__=id_validators, __base__=model_base, ) - if use_schema_cache: - self.base_jsonapi_object_schemas_cache[base_name] = object_jsonapi_schema - return object_jsonapi_schema def find_all_included_schemas( self, - schema: Type[BaseModel], - resource_type: str, - includes: Iterable[str], - included_schemas: List[Tuple[str, BaseModel, str]], - ) -> Dict[str, Type[JSONAPIObjectSchema]]: - if includes is not_passed: - return { - # prepare same object schema - # TODO: caches?! 
- name: self.create_jsonapi_object_schemas( - included_schema, - resource_type=resource_type, - ).object_jsonapi_schema - for (name, included_schema, resource_type) in included_schemas - } - - can_be_included_schemas = {} - for i_include in includes: - current_schema = schema - relations_list: List[str] = i_include.split(SPLIT_REL) - for part_index, include_part in enumerate(relations_list, start=1): - # find nested from the Schema - nested_schema: Type[BaseModel] = current_schema.__fields__[include_part].type_ - # find all relations for this one - nested_schema_includes = set(relations_list[: part_index - 1] + relations_list[part_index:]) - related_jsonapi_object_schema = self.create_jsonapi_object_schemas( - nested_schema, - resource_type=resource_type, - # higher and lower - includes=nested_schema_includes, - # rebuild schemas for each response - use_schema_cache=False, - ).object_jsonapi_schema - # cache it - can_be_included_schemas[include_part] = related_jsonapi_object_schema - # prepare for the next step - current_schema = nested_schema - - return can_be_included_schemas + included_schemas: list[tuple[str, BaseModel, str]], + ) -> dict[str, Type[JSONAPIObjectSchema]]: + return { + name: self.create_jsonapi_object_schemas( + included_schema, + resource_type=resource_type, + ).object_jsonapi_schema + for (name, included_schema, resource_type) in included_schemas + } def create_jsonapi_object_schemas( self, schema: Type[BaseModel], - includes: Iterable[str] = not_passed, resource_type: Optional[str] = None, base_name: str = "", compute_included_schemas: bool = False, - use_schema_cache: bool = True, ) -> JSONAPIObjectSchemas: - if use_schema_cache and schema in self.object_schemas_cache and includes is not_passed: - return self.object_schemas_cache[schema] + resource_type = resource_type or self._resource_type - schema.update_forward_refs(**registry.schemas) - base_name = base_name or schema.__name__ + if object_schema := 
schemas_storage.get_jsonapi_object_schema( + source_schema=schema, + resource_type=resource_type, + ): + return object_schema - if includes is not not_passed: - includes = set(includes) + base_name = base_name or schema.__name__ - dto = self._get_info_from_schema_for_building_wrapper( + dto = self.get_info_from_schema_for_building( base_name=base_name, + operation_type="get", schema=schema, - includes=includes, ) - object_jsonapi_schema = self._build_jsonapi_object( + object_jsonapi_schema = self.build_jsonapi_object( base_name=base_name, - resource_type=resource_type, + resource_type=resource_type or self._resource_type, + dto=dto, + ) + relationship_less_object_jsonapi_schema = self.build_jsonapi_object( + base_name=base_name, + resource_type=resource_type or self._resource_type, + dto=dto, + with_relationships=False, + ) + + schemas_storage.add_resource( + builder=self, + resource_type=self._resource_type, + operation_type="get", + source_schema=schema, + data_schema=relationship_less_object_jsonapi_schema, attributes_schema=dto.attributes_schema, - relationships_schema=dto.relationships_schema, - resource_id_field=dto.resource_id_field, - includes=includes, - use_schema_cache=use_schema_cache, - # pass has_required_relationship ? 
+ field_schemas=dto.field_schemas, + relationships_info=dto.relationships_info, + model_validators=dto.model_validators, ) can_be_included_schemas = {} if compute_included_schemas: - can_be_included_schemas = self.find_all_included_schemas( - schema=schema, - resource_type=resource_type, - includes=includes, - included_schemas=dto.included_schemas, - ) + can_be_included_schemas = self.find_all_included_schemas(included_schemas=dto.included_schemas) result = JSONAPIObjectSchemas( attributes_schema=dto.attributes_schema, @@ -569,20 +473,23 @@ def create_jsonapi_object_schemas( object_jsonapi_schema=object_jsonapi_schema, can_be_included_schemas=can_be_included_schemas, ) - if use_schema_cache and includes is not_passed: - self.object_schemas_cache[schema] = result + schemas_storage.add_jsonapi_object_schema( + source_schema=schema, + resource_type=resource_type, + jsonapi_object_schema=result, + ) return result def build_schema_for_list_result( self, name: str, object_jsonapi_schema: Type[JSONAPIObjectSchema], - includes_schemas: List[Type[JSONAPIObjectSchema]], + includes_schemas: list[Type[JSONAPIObjectSchema]], ) -> Type[JSONAPIResultListSchema]: return self.build_schema_for_result( name=f"{name}JSONAPI", base=JSONAPIResultListSchema, - data_type=List[object_jsonapi_schema], + data_type=list[object_jsonapi_schema], includes_schemas=includes_schemas, ) @@ -590,9 +497,8 @@ def build_schema_for_detail_result( self, name: str, object_jsonapi_schema: Type[JSONAPIObjectSchema], - includes_schemas: List[Type[JSONAPIObjectSchema]], + includes_schemas: list[Type[JSONAPIObjectSchema]], ) -> Type[JSONAPIResultDetailSchema]: - # return detail_jsonapi_schema return self.build_schema_for_result( name=f"{name}JSONAPI", base=JSONAPIResultDetailSchema, @@ -600,12 +506,13 @@ def build_schema_for_detail_result( includes_schemas=includes_schemas, ) + @classmethod def build_schema_for_result( - self, + cls, name: str, base: Type[BaseJSONAPIResultSchema], - data_type: 
Union[Type[JSONAPIObjectSchema], Type[List[JSONAPIObjectSchema]]], - includes_schemas: List[Type[JSONAPIObjectSchema]], + data_type: Union[Type[JSONAPIObjectSchema], Type[list[JSONAPIObjectSchema]]], + includes_schemas: list[Type[JSONAPIObjectSchema]], ) -> Union[Type[JSONAPIResultListSchema], Type[JSONAPIResultDetailSchema]]: included_schema_annotation = Union[JSONAPIObjectSchema] for includes_schema in includes_schemas: @@ -617,14 +524,13 @@ def build_schema_for_result( if includes_schemas: schema_fields.update( included=( - List[included_schema_annotation], - Field(None), + list[included_schema_annotation], + Field(default=None), ), ) - result_jsonapi_schema = pydantic.create_model( + return create_model( name, **schema_fields, __base__=base, ) - return result_jsonapi_schema diff --git a/fastapi_jsonapi/signature.py b/fastapi_jsonapi/signature.py index 2d4fd88f..70d32628 100644 --- a/fastapi_jsonapi/signature.py +++ b/fastapi_jsonapi/signature.py @@ -1,84 +1,88 @@ """Functions for extracting and updating signatures.""" + import inspect import logging from enum import Enum -from inspect import Parameter -from types import GenericAlias -from typing import ( - Dict, - List, - Optional, - Set, - Tuple, - Type, -) +from inspect import Parameter, Signature +from typing import Any, Optional, Type, Union, get_args, get_origin from fastapi import Query -from pydantic import BaseModel as BaseModelOriginal -from pydantic.fields import ModelField -from fastapi_jsonapi.schema_base import BaseModel, registry +# noinspection PyProtectedMember +from fastapi._compat import field_annotation_is_scalar, field_annotation_is_sequence +from fastapi.types import UnionType + +# noinspection PyProtectedMember +from pydantic.fields import FieldInfo + +from fastapi_jsonapi.common import get_relationship_info_from_field_metadata +from fastapi_jsonapi.data_typing import TypeSchema +from fastapi_jsonapi.schema_base import BaseModel log = logging.getLogger(__name__) -def 
create_filter_parameter(name: str, field: ModelField) -> Parameter: - if field.sub_fields: - default = Query(None, alias="filter[{alias}]".format(alias=field.alias)) - type_field = field.type_ - elif inspect.isclass(field.type_) and issubclass(field.type_, Enum) and hasattr(field.type_, "values"): - default = Query(None, alias="filter[{alias}]".format(alias=field.alias), enum=field.type_.values()) +def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + at_least_one_scalar_sequence = False + for arg in get_args(annotation): + if field_annotation_is_scalar_sequence(arg): + at_least_one_scalar_sequence = True + continue + elif not field_annotation_is_scalar(arg): + return False + return at_least_one_scalar_sequence + return ( + field_annotation_is_sequence(annotation) + and all(field_annotation_is_scalar(sub_annotation) for sub_annotation in get_args(annotation)) + ) or field_annotation_is_scalar(annotation) + + +def create_filter_parameter( + name: str, + field: FieldInfo, +) -> Parameter: + filter_alias = field.alias or name + query_filter_name = f"filter[{filter_alias}]" + if ( + inspect.isclass(field.annotation) + and issubclass(field.annotation, Enum) + and hasattr(field.annotation, "values") + ): + default = Query(None, alias=query_filter_name, enum=list(field.annotation)) + type_field = str + elif not field_annotation_is_scalar_sequence(field.annotation): + default = Query(None, alias=query_filter_name) type_field = str else: - default = Query(None, alias="filter[{alias}]".format(alias=field.alias)) - type_field = field.type_ + default = Query(None, alias=query_filter_name) + type_field = field.annotation return Parameter( - name, + name=name, kind=Parameter.POSITIONAL_OR_KEYWORD, annotation=Optional[type_field], default=default, ) -def create_additional_query_params(schema: Optional[Type[BaseModel]]) -> tuple[list[Parameter], list[Parameter]]: - 
filter_params = [] - include_params = [] +def create_additional_query_params(schema: type[BaseModel]) -> tuple[list[Parameter], list[Parameter]]: + filter_params: list[Parameter] = [] + include_params: list[Parameter] = [] if not schema: return filter_params, include_params available_includes_names = [] - - # TODO! ? - schema.update_forward_refs(**registry.schemas) - for name, field in (schema.__fields__ or {}).items(): - try: - # skip collections - if inspect.isclass(field.type_): - if type(field.type_) is GenericAlias: - continue - if issubclass(field.type_, (dict, list, tuple, set, Dict, List, Tuple, Set)): - continue - # process inner models, find relationships - if inspect.isclass(field.type_) and issubclass(field.type_, (BaseModel, BaseModelOriginal)): - if field.field_info.extra.get("relationship"): - available_includes_names.append(name) - else: - log.warning( - "found nested schema %s for field %r. Consider marking it as relationship", - field, - name, - ) - continue - - # create filter params + for name, field in schema.model_fields.items(): + if get_relationship_info_from_field_metadata(field): + available_includes_names.append(name) + else: parameter = create_filter_parameter( name=name, field=field, ) filter_params.append(parameter) - except Exception as ex: - log.warning("could not create filter for field %s %s", name, field, exc_info=ex) if available_includes_names: doc_available_includes = "\n".join([f"* `{name}`" for name in available_includes_names]) @@ -94,3 +98,40 @@ def create_additional_query_params(schema: Optional[Type[BaseModel]]) -> tuple[l ) include_params.append(include_param) return filter_params, include_params + + +def create_dependency_params_from_pydantic_model( + model_class: Type[TypeSchema], +) -> list[Parameter]: + return [ + Parameter( + name=field_name, + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=field_info.annotation, + default=field_info.default, + ) + for field_name, field_info in model_class.model_fields.items() + 
] + + +def get_separated_params(sig: Signature): + """ + Separate params, tail params, skip **kwargs + + :param sig: + :return: + """ + params = [] + tail_params = [] + + for param in sig.parameters.values(): + if param.kind is Parameter.VAR_KEYWORD: + # skip **kwargs for spec + continue + + if param.kind is Parameter.KEYWORD_ONLY: + tail_params.append(param) + else: + params.append(param) + + return params, tail_params diff --git a/fastapi_jsonapi/splitter.py b/fastapi_jsonapi/splitter.py deleted file mode 100644 index 6b5c1d86..00000000 --- a/fastapi_jsonapi/splitter.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -Splitter for filters, sorts and includes. -""" - -SPLIT_REL = "." diff --git a/fastapi_jsonapi/storages/__init__.py b/fastapi_jsonapi/storages/__init__.py new file mode 100644 index 00000000..7da6043e --- /dev/null +++ b/fastapi_jsonapi/storages/__init__.py @@ -0,0 +1,9 @@ +from fastapi_jsonapi.storages.models_storage import models_storage +from fastapi_jsonapi.storages.schemas_storage import schemas_storage +from fastapi_jsonapi.storages.views_storage import views_storage + +__all__ = [ + "models_storage", + "schemas_storage", + "views_storage", +] diff --git a/fastapi_jsonapi/storages/models_storage.py b/fastapi_jsonapi/storages/models_storage.py new file mode 100644 index 00000000..43b883cf --- /dev/null +++ b/fastapi_jsonapi/storages/models_storage.py @@ -0,0 +1,103 @@ +import logging +from typing import Any, Callable, Type + +from fastapi_jsonapi.data_typing import TypeModel +from fastapi_jsonapi.exceptions import BadRequest, InternalServerError + +log = logging.getLogger(__name__) + + +class ModelsStorage: + relationship_search_handlers: dict[str, Callable[[str, Type[TypeModel], str], Type[TypeModel]]] + + def __init__(self): + self._models: dict[str, Type[TypeModel]] = {} + self._id_field_names: dict[str, str] = {} + self.relationship_search_handlers = {} + + def add_model(self, resource_type: str, model: Type[TypeModel], id_field_name: str): + 
self._models[resource_type] = model + self._id_field_names[resource_type] = id_field_name + + def get_model(self, resource_type: str) -> Type[TypeModel]: + try: + return self._models[resource_type] + except KeyError: + raise InternalServerError( + detail=f"Not found model for resource_type {resource_type!r}.", + ) + + def get_model_id_field_name(self, resource_type: str) -> str: + try: + return self._id_field_names[resource_type] + except KeyError: + raise InternalServerError( + detail=f"Not found model id field name for resource_type {resource_type!r}.", + ) + + def get_object_id_field(self, resource_type: str) -> Any: + model = self.get_model(resource_type) + id_field_name = self.get_model_id_field_name(resource_type) + + try: + return getattr(model, id_field_name) + except AttributeError: + raise InternalServerError( + detail=f"Can't get object id field. The model {model.__name__!r} has no attribute {id_field_name!r}", + ) + + def get_object_id(self, db_object: TypeModel, resource_type: str) -> Any: + id_field_name = self.get_model_id_field_name(resource_type) + + try: + return getattr(db_object, id_field_name) + except AttributeError: + model = self.get_model(resource_type) + raise InternalServerError( + detail=f"Can't get object id. The model {model.__name__!r} has no attribute {id_field_name!r}.", + ) + + def register_search_handler(self, orm_mode: str, handler: Callable[[str, Type[TypeModel], str], Type[TypeModel]]): + self.relationship_search_handlers[orm_mode] = handler + + def set_orm_mode(self, orm_mode: str): + self._orm_mode = orm_mode + + def search_relationship_model( + self, + resource_type: str, + model: Type[TypeModel], + field_name: str, + ) -> Type[TypeModel]: + try: + orm_handler = self.relationship_search_handlers[self._orm_mode] + except KeyError: + raise InternalServerError( + detail=f"Not found orm handler for {self._orm_mode!r}. 
" + f"Please register this with ModelsStorage.register_search_handler.", + ) + + return orm_handler(resource_type, model, field_name) + + @staticmethod + def sqla_search_relationship_model( + resource_type: str, + model: Type[TypeModel], + field_name: str, + ): + try: + return getattr(model, field_name).property.entity.entity + except AttributeError: + raise BadRequest( + detail=f"There is no related model for resource_type {resource_type!r} by relation {field_name!r}.", + ) + except Exception as ex: + log.error("Relationship search error", exc_info=ex) + raise InternalServerError( + detail=f"Relationship search error for resource_type {resource_type!r} by relation {field_name!r}.", + ) + + +models_storage = ModelsStorage() +models_storage.register_search_handler("sqla", ModelsStorage.sqla_search_relationship_model) +models_storage.set_orm_mode("sqla") diff --git a/fastapi_jsonapi/storages/schemas_storage.py b/fastapi_jsonapi/storages/schemas_storage.py new file mode 100644 index 00000000..fc05822b --- /dev/null +++ b/fastapi_jsonapi/storages/schemas_storage.py @@ -0,0 +1,191 @@ +from collections import defaultdict +from typing import Any, Literal, Optional, Type + +from fastapi_jsonapi.data_typing import TypeSchema +from fastapi_jsonapi.exceptions import InternalServerError +from fastapi_jsonapi.schema import JSONAPIObjectSchemas +from fastapi_jsonapi.types_metadata.relationship_info import RelationshipInfo + + +class SchemasStorage: + def __init__(self): + self._data: dict = {} + self._source_schemas: dict[str, Type[TypeSchema]] = {} + self._jsonapi_object_schemas: dict[tuple[Type[TypeSchema], str], JSONAPIObjectSchemas] = {} + self._schema_in_keys: dict[str, str] = { + "create": "schema_in_create", + "update": "schema_in_update", + } + + def _init_resource_if_needed(self, resource_type: str): + if resource_type not in self._data: + self._data[resource_type] = { + "relationships": defaultdict(lambda: defaultdict(dict)), + } + + def add_relationship( + self, + 
from_resource_type: str, + to_resource_type: str, + operation_type: Literal["create", "update", "get"], + field_name: str, + relationship_schema: Type[TypeSchema], + relationship_info: RelationshipInfo, + ): + self._init_resource_if_needed(from_resource_type) + relationships = self._data[from_resource_type]["relationships"][to_resource_type] + relationships[(operation_type, field_name)] = { + "schema": relationship_schema, + "info": relationship_info, + } + + def get_relationship_schema( + self, + from_resource_type: str, + to_resource_type: str, + operation_type: Literal["create", "update", "get"], + field_name: str, + ) -> Optional[TypeSchema]: + self._init_resource_if_needed(from_resource_type) + + relationships = self._data[from_resource_type]["relationships"][to_resource_type] + return relationships.get((operation_type, field_name), {}).get("schema") + + def add_resource( + self, + builder, + resource_type: str, + operation_type: Literal["create", "update", "get"], + source_schema: Type[TypeSchema], + data_schema: Type[TypeSchema], + attributes_schema: Type[TypeSchema], + field_schemas: dict[str, Type[TypeSchema]], + relationships_info: dict[str, tuple[RelationshipInfo, Any]], + model_validators: dict, + schema_in: Optional[Type[TypeSchema]] = None, + ): + self._init_resource_if_needed(resource_type) + if operation_type in self._data[resource_type]: + return + + before_validators, after_validators = {}, {} + for validator_name, validator in model_validators.items(): + if validator.decorator_info.mode == "before": + before_validators[validator_name] = validator + else: + after_validators[validator_name] = validator + + self._source_schemas[resource_type] = source_schema + self._data[resource_type][operation_type] = { + "attrs_schema": attributes_schema, + "field_schemas": field_schemas, + "data_schema": data_schema, + "relationships_info": { + relationship_name: info for relationship_name, (info, _) in relationships_info.items() + }, + 
"relationships_pydantic_fields": { + relationship_name: field for relationship_name, (_, field) in relationships_info.items() + }, + "model_validators": (before_validators, after_validators), + } + + if schema_in: + self._data[resource_type][operation_type][self._schema_in_keys[operation_type]] = schema_in + + def get_source_schema(self, resource_type: str): + try: + return self._source_schemas[resource_type] + except KeyError: + raise InternalServerError(detail=f"Not found source schema for resource type {resource_type!r}") + + def get_source_relationship_pydantic_field( + self, + resource_type: str, + operation_type: Literal["create", "update", "get"], + field_name: str, + ): + return self._data[resource_type][operation_type]["relationships_pydantic_fields"][field_name] + + def get_data_schema( + self, + resource_type: str, + operation_type: Literal["create", "update", "get"], + ) -> Optional[TypeSchema]: + return self._data[resource_type][operation_type]["data_schema"] + + def get_attrs_schema( + self, + resource_type: str, + operation_type: Literal["create", "update", "get"], + ) -> Optional[TypeSchema]: + return self._data[resource_type][operation_type]["attrs_schema"] + + def get_field_schema( + self, + resource_type: str, + operation_type: Literal["create", "update", "get"], + field_name: str, + ) -> Optional[TypeSchema]: + return self._data[resource_type][operation_type]["field_schemas"].get(field_name) + + def get_schema_in( + self, + resource_type: str, + operation_type: Literal["create", "update"], + ) -> Type[TypeSchema]: + try: + return self._data[resource_type][operation_type][self._schema_in_keys[operation_type]] + except KeyError: + raise InternalServerError( + detail=f"Not found schema for operation {operation_type!r} with resource type {resource_type!r}", + ) + + def get_model_validators( + self, + resource_type: str, + operation_type: Literal["create", "update", "get"], + ) -> tuple[dict, dict]: + return 
self._data[resource_type][operation_type]["model_validators"] + + def get_relationship_info( + self, + resource_type: str, + operation_type: Literal["create", "update", "get"], + field_name: str, + ) -> Optional[RelationshipInfo]: + return self._data[resource_type][operation_type]["relationships_info"].get(field_name) + + def get_relationships_info( + self, + resource_type: str, + operation_type: Literal["create", "update", "get"], + ) -> dict[str, RelationshipInfo]: + return self._data[resource_type][operation_type]["relationships_info"] + + def get_jsonapi_object_schema( + self, + source_schema: Type[TypeSchema], + resource_type: str, + ) -> Optional[JSONAPIObjectSchemas]: + return self._jsonapi_object_schemas.get((source_schema, resource_type)) + + def add_jsonapi_object_schema( + self, + source_schema: Type[TypeSchema], + resource_type: str, + jsonapi_object_schema: Type[TypeSchema], + ): + self._jsonapi_object_schemas[(source_schema, resource_type)] = jsonapi_object_schema + + def has_resource(self, resource_type: str) -> bool: + return resource_type in self._source_schemas + + def has_operation( + self, + resource_type: str, + operation_type: Literal["create", "update", "get"], + ) -> bool: + return self.has_resource(resource_type) and operation_type in self._data[resource_type] + + +schemas_storage = SchemasStorage() diff --git a/fastapi_jsonapi/storages/views_storage.py b/fastapi_jsonapi/storages/views_storage.py new file mode 100644 index 00000000..0cc575e5 --- /dev/null +++ b/fastapi_jsonapi/storages/views_storage.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Type + +from fastapi_jsonapi.exceptions import InternalServerError + +if TYPE_CHECKING: + from fastapi_jsonapi.views import ViewBase + + +class ViewStorage: + def __init__(self): + self._views: dict[str, Type[ViewBase]] = {} + + def add_view(self, resource_type: str, view: Type[ViewBase]): + self._views[resource_type] = view + + def get_view(self, 
resource_type: str) -> Type[ViewBase]: + try: + return self._views[resource_type] + except KeyError: + raise InternalServerError( + detail=f"Not found view for resource type {resource_type!r}", + ) + + def has_view(self, resource_type: str) -> bool: + return resource_type in self._views + + +views_storage = ViewStorage() diff --git a/fastapi_jsonapi/types_metadata/__init__.py b/fastapi_jsonapi/types_metadata/__init__.py new file mode 100644 index 00000000..975c7031 --- /dev/null +++ b/fastapi_jsonapi/types_metadata/__init__.py @@ -0,0 +1,11 @@ +from .client_can_set_id import ClientCanSetId +from .custom_filter_sql import CustomFilterSQL +from .custom_sort_sql import CustomSortSQL +from .relationship_info import RelationshipInfo + +__all__ = ( + "ClientCanSetId", + "CustomFilterSQL", + "CustomSortSQL", + "RelationshipInfo", +) diff --git a/fastapi_jsonapi/types_metadata/client_can_set_id.py b/fastapi_jsonapi/types_metadata/client_can_set_id.py new file mode 100644 index 00000000..b44406cd --- /dev/null +++ b/fastapi_jsonapi/types_metadata/client_can_set_id.py @@ -0,0 +1,7 @@ +from dataclasses import dataclass +from typing import Any, Callable, Optional + + +@dataclass(frozen=True) +class ClientCanSetId: + cast_type: Optional[Callable[[Any], Any]] = None diff --git a/fastapi_jsonapi/types_metadata/custom_filter_sql.py b/fastapi_jsonapi/types_metadata/custom_filter_sql.py new file mode 100644 index 00000000..22431853 --- /dev/null +++ b/fastapi_jsonapi/types_metadata/custom_filter_sql.py @@ -0,0 +1,189 @@ +import contextlib +import logging +from dataclasses import dataclass +from typing import Any, Generic, TypeVar, Union, cast + +import orjson as json + +# noinspection PyProtectedMember +from pydantic.fields import FieldInfo +from sqlalchemy import func +from sqlalchemy.dialects.postgresql import JSONB as JSONB_SQLA +from sqlalchemy.orm import InstrumentedAttribute +from sqlalchemy.sql.expression import BinaryExpression, BooleanClauseList + +from 
fastapi_jsonapi.exceptions import InvalidFilters + +log = logging.getLogger(__name__) + +ColumnType = TypeVar("ColumnType") +ExpressionType = TypeVar("ExpressionType") + + +@dataclass(frozen=True) +class CustomFilterSQL(Generic[ColumnType, ExpressionType]): + op: str + + def get_expression( + self, + schema_field: FieldInfo, + model_column: ColumnType, + value: str, + operator: str, + ) -> ExpressionType: + raise NotImplementedError + + +class CustomFilterSQLA(CustomFilterSQL[InstrumentedAttribute, Union[BinaryExpression, BooleanClauseList]]): + """Base class for custom SQLAlchemy filters""" + + +def _get_pg_jsonb_contains_expression( + model_column: InstrumentedAttribute, + value: Any, +) -> BinaryExpression: + with contextlib.suppress(ValueError): + value = json.loads(value) + + return model_column.cast(JSONB_SQLA).op("@>")(value) + + +def _get_sqlite_json_contains_expression( + model_column: InstrumentedAttribute, + value: Any, +) -> BinaryExpression: + with contextlib.suppress(ValueError): + value = json.loads(value) + + return model_column.ilike(value) + + +def _get_pg_jsonb_ilike_expression( + model_column: InstrumentedAttribute, + value: list, + operator: str, +) -> BinaryExpression: + try: + target_field, regex = value + except ValueError: + msg = f'The "value" field has to be list of two values for op `{operator}`' + raise InvalidFilters(msg) + + if isinstance(regex, (list, dict)): + return model_column[target_field].cast(JSONB_SQLA).op("@>")(regex) + elif isinstance(regex, bool): + regex = f"{regex}".lower() + else: + regex = f"{regex}" + + return model_column.op("->>")(target_field).ilike(regex) + + +def _get_sqlite_json_ilike_expression( + model_column: InstrumentedAttribute, + value: list, + operator: str, +) -> BinaryExpression: + try: + target_field, regex = value + except ValueError: + msg = f'The "value" field has to be list of two values for op `{operator}`' + raise InvalidFilters(msg) + + if isinstance(regex, (list, dict)): + regex = 
json.dumps(regex).decode() + elif isinstance(regex, bool): + return model_column.op("->>")(target_field).is_(regex) + else: + regex = f"{regex}" + + return model_column.op("->>")(target_field).ilike(regex) + + +class LowerEqualsFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + value: str, + operator: str, + ) -> BinaryExpression: + return cast( + BinaryExpression, + func.lower(model_column) == func.lower(value), + ) + + +class PGJSONContainsFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + value: Any, + operator: str, + ) -> BinaryExpression: + return _get_pg_jsonb_contains_expression(model_column, value) + + +class PGJSONBContainsFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + value: Any, + operator: str, + ) -> BinaryExpression: + return _get_pg_jsonb_contains_expression(model_column, value) + + +class PGJSONIlikeFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + value: list[str], + operator: str, + ) -> BinaryExpression: + return _get_pg_jsonb_ilike_expression(model_column, value, operator) + + +class PGJSONBIlikeFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + value: list[str], + operator: str, + ) -> BinaryExpression: + return _get_pg_jsonb_ilike_expression(model_column, value, operator) + + +class SQLiteJSONContainsFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + value: Any, + operator: str, + ) -> BinaryExpression: + return _get_sqlite_json_contains_expression(model_column, value) + + +class SQLiteJSONIlikeFilterSQL(CustomFilterSQLA): + def get_expression( + self, + schema_field: FieldInfo, + 
model_column: InstrumentedAttribute, + value: list[str], + operator: str, + ) -> BinaryExpression: + return _get_sqlite_json_ilike_expression(model_column, value, operator) + + +sql_filter_lower_equals = LowerEqualsFilterSQL(op="lower_equals") +sql_filter_pg_json_contains = PGJSONContainsFilterSQL(op="pg_json_contains") +sql_filter_pg_jsonb_contains = PGJSONBContainsFilterSQL(op="pg_jsonb_contains") +sql_filter_pg_json_ilike = PGJSONIlikeFilterSQL(op="pg_json_ilike") +sql_filter_pg_jsonb_ilike = PGJSONBIlikeFilterSQL(op="pg_jsonb_ilike") +sql_filter_sqlite_json_contains = SQLiteJSONContainsFilterSQL(op="sqlite_json_contains") +sql_filter_sqlite_json_ilike = SQLiteJSONIlikeFilterSQL(op="sqlite_json_ilike") diff --git a/fastapi_jsonapi/types_metadata/custom_sort_sql.py b/fastapi_jsonapi/types_metadata/custom_sort_sql.py new file mode 100644 index 00000000..45df4585 --- /dev/null +++ b/fastapi_jsonapi/types_metadata/custom_sort_sql.py @@ -0,0 +1,37 @@ +from dataclasses import dataclass +from typing import Generic, TypeVar, Union + +# noinspection PyProtectedMember +from pydantic.fields import FieldInfo +from sqlalchemy import func +from sqlalchemy.orm import InstrumentedAttribute +from sqlalchemy.sql.expression import BinaryExpression, BooleanClauseList + +ColumnType = TypeVar("ColumnType") +ExpressionType = TypeVar("ExpressionType") + + +@dataclass(frozen=True) +class CustomSortSQL(Generic[ColumnType, ExpressionType]): + def get_expression( + self, + schema_field: FieldInfo, + model_column: ColumnType, + ) -> ExpressionType: + raise NotImplementedError + + +class CustomSortSQLA(CustomSortSQL[InstrumentedAttribute, Union[BinaryExpression, BooleanClauseList]]): + """Base class for custom SQLAlchemy sorts""" + + +class RegisterFreeStringSortSQL(CustomSortSQLA): + def get_expression( + self, + schema_field: FieldInfo, + model_column: InstrumentedAttribute, + ) -> BinaryExpression: + return func.lower(model_column) + + +sql_register_free_sort = RegisterFreeStringSortSQL() 
diff --git a/fastapi_jsonapi/types_metadata/relationship_info.py b/fastapi_jsonapi/types_metadata/relationship_info.py new file mode 100644 index 00000000..4b25aa50 --- /dev/null +++ b/fastapi_jsonapi/types_metadata/relationship_info.py @@ -0,0 +1,9 @@ +from dataclasses import dataclass + + +@dataclass(frozen=True) +class RelationshipInfo: + resource_type: str + many: bool = False + resource_id_example: str = "1" + id_field_name: str = "id" diff --git a/fastapi_jsonapi/utils/dependency_helper.py b/fastapi_jsonapi/utils/dependency_helper.py index ead4e721..b6430d57 100644 --- a/fastapi_jsonapi/utils/dependency_helper.py +++ b/fastapi_jsonapi/utils/dependency_helper.py @@ -1,18 +1,10 @@ import inspect -from typing import ( - Any, - Awaitable, - Callable, - TypeVar, - Union, -) +from contextlib import AsyncExitStack +from typing import Any, Awaitable, Callable, TypeVar, Union from fastapi import Request from fastapi.dependencies.models import Dependant -from fastapi.dependencies.utils import ( - get_dependant, - solve_dependencies, -) +from fastapi.dependencies.utils import get_dependant, solve_dependencies from fastapi.exceptions import RequestValidationError ReturnType = TypeVar("ReturnType") @@ -32,20 +24,23 @@ def __init__(self, request: Request): async def solve_dependencies_and_run(self, dependant: Dependant) -> ReturnType: body_data = await self.request.body() or None body = body_data and (await self.request.json()) - values, errors, *_ = await solve_dependencies( # WPS110 - request=self.request, - dependant=dependant, - body=body, - ) - - if errors: - raise RequestValidationError(errors, body=body) + async with AsyncExitStack() as async_exit_stack: + solved_dependencies = await solve_dependencies( + request=self.request, + dependant=dependant, + body=body, + async_exit_stack=async_exit_stack, + embed_body_fields=True, + ) + + if solved_dependencies.errors: + raise RequestValidationError(solved_dependencies.errors, body=body) orig_func: Callable[..., 
FuncReturnType[Any]] = dependant.call # type: ignore if inspect.iscoroutinefunction(orig_func): - function_call_result = await orig_func(**values) + function_call_result = await orig_func(**solved_dependencies.values) else: - function_call_result = orig_func(**values) + function_call_result = orig_func(**solved_dependencies.values) return function_call_result diff --git a/fastapi_jsonapi/utils/metadata_instance_search.py b/fastapi_jsonapi/utils/metadata_instance_search.py new file mode 100644 index 00000000..efef3bd6 --- /dev/null +++ b/fastapi_jsonapi/utils/metadata_instance_search.py @@ -0,0 +1,23 @@ +# noinspection PyProtectedMember +from collections.abc import Generator +from typing import Generic, Optional, TypeVar + +# noinspection PyProtectedMember +from pydantic.fields import FieldInfo + +SearchType = TypeVar("SearchType") + + +class MetadataInstanceSearch(Generic[SearchType]): + def __init__(self, search_type: type[SearchType]): + self.search_type = search_type + + def iterate(self, field: FieldInfo) -> Generator[SearchType, None, None]: + for elem in field.metadata: + if isinstance(elem, self.search_type): + yield elem + + return None + + def first(self, field: FieldInfo) -> Optional[SearchType]: + return next(self.iterate(field), None) diff --git a/fastapi_jsonapi/utils/sqla.py b/fastapi_jsonapi/utils/sqla.py deleted file mode 100644 index 6e2ea85c..00000000 --- a/fastapi_jsonapi/utils/sqla.py +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Type - -from fastapi_jsonapi.data_typing import TypeModel - - -def get_related_model_cls(cls: Type[TypeModel], relation_name: str) -> Type[TypeModel]: - """ - Get related model from SQLAlchemy model - - SQLA Get related model class - User.computers -> Computer - - # todo: use alias (custom names)? - For example: - - class Computer(sqla_base): - user = relationship(User) - - class ComputerSchema(pydantic_base): - owner = Field(alias="user", relationship=...) 
- - :param cls: - :param relation_name: - :return: - """ - return getattr(cls, relation_name).property.mapper.class_ diff --git a/fastapi_jsonapi/validation_utils.py b/fastapi_jsonapi/validation_utils.py index 5c27af83..c22ecb8c 100644 --- a/fastapi_jsonapi/validation_utils.py +++ b/fastapi_jsonapi/validation_utils.py @@ -1,124 +1,51 @@ -from copy import deepcopy -from typing import ( - Callable, - Dict, - Optional, - Set, - Type, -) +from __future__ import annotations -from pydantic import ( - class_validators, - root_validator, - validator, -) -from pydantic.fields import Validator -from pydantic.utils import unique_list +from typing import TYPE_CHECKING, Callable, Optional, Type -from fastapi_jsonapi.schema_base import BaseModel +from pydantic import BaseModel, field_validator, model_validator +from pydantic._internal._decorators import PydanticDescriptorProxy +if TYPE_CHECKING: + # noinspection PyProtectedMember + from pydantic._internal._decorators import DecoratorInfos -def extract_root_validators(model: Type[BaseModel]) -> Dict[str, Callable]: - pre_rv_new, post_rv_new = class_validators.extract_root_validators(model.__dict__) - pre_root_validators = unique_list( - model.__pre_root_validators__ + pre_rv_new, - name_factory=lambda v: v.__name__, - ) - post_root_validators = unique_list( - model.__post_root_validators__ + post_rv_new, - name_factory=lambda skip_on_failure_and_v: skip_on_failure_and_v[1].__name__, - ) - result_validators = {} - - for validator_func in pre_root_validators: - result_validators[validator_func.__name__] = root_validator( - pre=True, - allow_reuse=True, - )(validator_func) - - for skip_on_failure, validator_func in post_root_validators: - result_validators[validator_func.__name__] = root_validator( - allow_reuse=True, - skip_on_failure=skip_on_failure, - )(validator_func) - - return result_validators - - -def _deduplicate_field_validators(validators: Dict) -> Dict: - result_validators = {} - - for field_name, field_validators in 
validators.items(): - result_validators[field_name] = list( - { - # override in definition order - field_validator.func.__name__: field_validator - for field_validator in field_validators - }.values(), - ) - - return result_validators - - -def extract_field_validators( +def extract_validators( model: Type[BaseModel], - *, - include_for_field_names: Optional[Set[str]] = None, - exclude_for_field_names: Optional[Set[str]] = None, -): - validators = class_validators.inherit_validators( - class_validators.extract_validators(model.__dict__), - deepcopy(model.__validators__), - ) - validators = _deduplicate_field_validators(validators) - validator_origin_param_keys = ( - "pre", - "each_item", - "always", - "check_fields", - ) + include_for_field_names: Optional[set[str]] = None, + exclude_for_field_names: Optional[set[str]] = None, +) -> tuple[dict[str, Callable], dict[str, PydanticDescriptorProxy]]: + validators: DecoratorInfos = model.__pydantic_decorators__ exclude_for_field_names = exclude_for_field_names or set() - if include_for_field_names and exclude_for_field_names: include_for_field_names = include_for_field_names.difference( exclude_for_field_names, ) - result_validators = {} - for field_name, field_validators in validators.items(): - if field_name in exclude_for_field_names: - continue + field_validators, model_validators = {}, {} - if include_for_field_names and field_name not in include_for_field_names: - continue + # field validators + for name, validator in validators.field_validators.items(): + for field_name in validator.info.fields: + # exclude + if field_name in exclude_for_field_names: + continue + # or include + if include_for_field_names and field_name not in include_for_field_names: + continue + validator_config = field_validator(field_name, mode=validator.info.mode) - field_validator: Validator - for field_validator in field_validators: - validator_name = f"{field_name}_{field_validator.func.__name__}_validator" - validator_params = { - # copy 
validator params - param_key: getattr(field_validator, param_key) - for param_key in validator_origin_param_keys - } - result_validators[validator_name] = validator( - field_name, - **validator_params, - allow_reuse=True, - )(field_validator.func) + func = validator.func.__func__ if hasattr(validator.func, "__func__") else validator.func - return result_validators + field_validators[name] = validator_config(func) + # model validators + for name, validator in validators.model_validators.items(): + validator_config = model_validator(mode=validator.info.mode) -def extract_validators( - model: Type[BaseModel], - exclude_for_field_names: Optional[Set[str]] = None, -) -> Dict[str, Callable]: - return { - **extract_field_validators( - model, - exclude_for_field_names=exclude_for_field_names, - ), - **extract_root_validators(model), - } + func = validator.func.__func__ if hasattr(validator.func, "__func__") else validator.func + + model_validators[name] = validator_config(func) + + return field_validators, model_validators diff --git a/fastapi_jsonapi/views/__init__.py b/fastapi_jsonapi/views/__init__.py index e69de29b..9490f40b 100644 --- a/fastapi_jsonapi/views/__init__.py +++ b/fastapi_jsonapi/views/__init__.py @@ -0,0 +1,10 @@ +from fastapi_jsonapi.views.enums import Operation +from fastapi_jsonapi.views.schemas import OperationConfig, RelationshipRequestInfo +from fastapi_jsonapi.views.view_base import ViewBase + +__all__ = [ + "Operation", + "OperationConfig", + "RelationshipRequestInfo", + "ViewBase", +] diff --git a/fastapi_jsonapi/views/detail_view.py b/fastapi_jsonapi/views/detail_view.py deleted file mode 100644 index 712f2c17..00000000 --- a/fastapi_jsonapi/views/detail_view.py +++ /dev/null @@ -1,91 +0,0 @@ -import logging -from typing import ( - TYPE_CHECKING, - Any, - Dict, - TypeVar, - Union, -) - -from fastapi_jsonapi import BadRequest -from fastapi_jsonapi.schema import ( - BaseJSONAPIItemInSchema, - JSONAPIResultDetailSchema, -) -from 
fastapi_jsonapi.views.utils import handle_jsonapi_fields -from fastapi_jsonapi.views.view_base import ViewBase - -if TYPE_CHECKING: - from fastapi_jsonapi.data_layers.base import BaseDataLayer - -logger = logging.getLogger(__name__) - - -TypeModel = TypeVar("TypeModel") - - -class DetailViewBase(ViewBase): - async def get_data_layer( - self, - extra_view_deps: Dict[str, Any], - ) -> "BaseDataLayer": - return await self.get_data_layer_for_detail(extra_view_deps) - - async def handle_get_resource_detail( - self, - object_id: Union[int, str], - **extra_view_deps, - ) -> Union[JSONAPIResultDetailSchema, Dict]: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) - - view_kwargs = {dl.url_id_field: object_id} - db_object = await dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) - - response = self._build_detail_response(db_object) - return handle_jsonapi_fields(response, self.query_params, self.jsonapi) - - async def handle_update_resource( - self, - obj_id: str, - data_update: BaseJSONAPIItemInSchema, - **extra_view_deps, - ) -> Union[JSONAPIResultDetailSchema, Dict]: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) - response = await self.process_update_object(dl=dl, obj_id=obj_id, data_update=data_update) - return handle_jsonapi_fields(response, self.query_params, self.jsonapi) - - async def process_update_object( - self, - dl: "BaseDataLayer", - obj_id: str, - data_update: BaseJSONAPIItemInSchema, - ): - if obj_id != data_update.id: - raise BadRequest( - detail="obj_id and data.id should be same", - pointer="/data/id", - ) - view_kwargs = {dl.url_id_field: obj_id} - db_object = await dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) - - await dl.update_object(db_object, data_update, view_kwargs) - - return self._build_detail_response(db_object) - - async def handle_delete_resource( - self, - obj_id: str, - **extra_view_deps, - ) -> None: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) - await 
self.process_delete_object(dl=dl, obj_id=obj_id) - - async def process_delete_object( - self, - dl: "BaseDataLayer", - obj_id: str, - ) -> None: - view_kwargs = {dl.url_id_field: obj_id} - db_object = await dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) - - await dl.delete_object(db_object, view_kwargs) diff --git a/fastapi_jsonapi/views/enums.py b/fastapi_jsonapi/views/enums.py new file mode 100644 index 00000000..379e523e --- /dev/null +++ b/fastapi_jsonapi/views/enums.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from enum import Enum, auto + + +class Operation(str, Enum): + ALL = auto() + CREATE = auto() + DELETE = auto() + DELETE_LIST = auto() + GET = auto() + GET_LIST = auto() + UPDATE = auto() + + @staticmethod + def real_operations() -> list[Operation]: + return list(filter(lambda op: op != Operation.ALL, Operation)) + + def http_method(self) -> str: + if self == Operation.ALL: + msg = "HTTP method is not defined for 'ALL' operation." + raise Exception(msg) + + operation_to_http_method = { + Operation.GET: "GET", + Operation.GET_LIST: "GET", + Operation.UPDATE: "PATCH", + Operation.CREATE: "POST", + Operation.DELETE: "DELETE", + Operation.DELETE_LIST: "DELETE", + } + return operation_to_http_method[self] diff --git a/fastapi_jsonapi/views/list_view.py b/fastapi_jsonapi/views/list_view.py deleted file mode 100644 index e6fc59a1..00000000 --- a/fastapi_jsonapi/views/list_view.py +++ /dev/null @@ -1,75 +0,0 @@ -import logging -from typing import TYPE_CHECKING, Any, Dict, Union - -from fastapi_jsonapi.schema import ( - BaseJSONAPIItemInSchema, - JSONAPIResultDetailSchema, - JSONAPIResultListSchema, -) -from fastapi_jsonapi.views.utils import handle_jsonapi_fields -from fastapi_jsonapi.views.view_base import ViewBase - -if TYPE_CHECKING: - from fastapi_jsonapi.data_layers.base import BaseDataLayer - -logger = logging.getLogger(__name__) - - -class ListViewBase(ViewBase): - def _calculate_total_pages(self, db_items_count: int) -> int: - 
total_pages = 1 - if not (pagination_size := self.query_params.pagination.size): - return total_pages - - total_pages = db_items_count // pagination_size + ( - # one more page if not a multiple of size - (db_items_count % pagination_size) - and 1 - ) - - return total_pages - - async def get_data_layer( - self, - extra_view_deps: Dict[str, Any], - ) -> "BaseDataLayer": - return await self.get_data_layer_for_list(extra_view_deps) - - async def handle_get_resource_list(self, **extra_view_deps) -> Union[JSONAPIResultListSchema, Dict]: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) - query_params = self.query_params - count, items_from_db = await dl.get_collection(qs=query_params) - total_pages = self._calculate_total_pages(count) - - response = self._build_list_response(items_from_db, count, total_pages) - return handle_jsonapi_fields(response, query_params, self.jsonapi) - - async def handle_post_resource_list( - self, - data_create: BaseJSONAPIItemInSchema, - **extra_view_deps, - ) -> Union[JSONAPIResultDetailSchema, Dict]: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) - response = await self.process_create_object(dl=dl, data_create=data_create) - return handle_jsonapi_fields(response, self.query_params, self.jsonapi) - - async def process_create_object(self, dl: "BaseDataLayer", data_create: BaseJSONAPIItemInSchema): - created_object = await dl.create_object(data_create=data_create, view_kwargs={}) - - created_object_id = dl.get_object_id(created_object) - - view_kwargs = {dl.url_id_field: created_object_id} - db_object = await dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) - - return self._build_detail_response(db_object) - - async def handle_delete_resource_list(self, **extra_view_deps) -> JSONAPIResultListSchema: - dl: "BaseDataLayer" = await self.get_data_layer(extra_view_deps) - query_params = self.query_params - count, items_from_db = await dl.get_collection(qs=query_params) - total_pages = 
self._calculate_total_pages(count) - - await dl.delete_objects(items_from_db, {}) - - response = self._build_list_response(items_from_db, count, total_pages) - return handle_jsonapi_fields(response, self.query_params, self.jsonapi) diff --git a/fastapi_jsonapi/views/schemas.py b/fastapi_jsonapi/views/schemas.py new file mode 100644 index 00000000..1467f7dc --- /dev/null +++ b/fastapi_jsonapi/views/schemas.py @@ -0,0 +1,22 @@ +from typing import Callable, Coroutine, Optional, Type, Union + +from pydantic import BaseModel, ConfigDict + + +class OperationConfig(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) + + dependencies: Optional[Type[BaseModel]] = None + prepare_data_layer_kwargs: Optional[Union[Callable, Coroutine]] = None + + @property + def handler(self) -> Optional[Union[Callable, Coroutine]]: + return self.prepare_data_layer_kwargs + + +class RelationshipRequestInfo(BaseModel): + parent_obj_id: str + parent_resource_type: str + relationship_name: str diff --git a/fastapi_jsonapi/views/utils.py b/fastapi_jsonapi/views/utils.py deleted file mode 100644 index e521d773..00000000 --- a/fastapi_jsonapi/views/utils.py +++ /dev/null @@ -1,156 +0,0 @@ -from __future__ import annotations - -from collections import defaultdict -from enum import Enum -from functools import cache -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Coroutine, - Dict, - Iterable, - List, - Optional, - Set, - Type, - Union, -) - -from pydantic import BaseModel -from pydantic.fields import ModelField - -from fastapi_jsonapi.data_typing import TypeSchema -from fastapi_jsonapi.schema import JSONAPIObjectSchema -from fastapi_jsonapi.schema_builder import ( - JSONAPIResultDetailSchema, - JSONAPIResultListSchema, -) - -if TYPE_CHECKING: - from fastapi_jsonapi.api import RoutersJSONAPI - from fastapi_jsonapi.querystring import QueryStringManager - - -JSONAPIResponse = Union[JSONAPIResultDetailSchema, JSONAPIResultListSchema] -IGNORE_ALL_FIELDS_LITERAL = "" - - 
-class HTTPMethod(Enum): - ALL = "all" - GET = "get" - POST = "post" - PATCH = "patch" - DELETE = "delete" - - @cache - def names() -> Set[str]: - return {item.name for item in HTTPMethod} - - -class HTTPMethodConfig(BaseModel): - dependencies: Optional[Type[BaseModel]] = None - prepare_data_layer_kwargs: Optional[Union[Callable, Coroutine]] = None - - class Config: - arbitrary_types_allowed = True - - @property - def handler(self) -> Optional[Union[Callable, Coroutine]]: - return self.prepare_data_layer_kwargs - - -def _get_includes_indexes_by_type(included: List[JSONAPIObjectSchema]) -> Dict[str, List[int]]: - result = defaultdict(list) - - for idx, item in enumerate(included): - result[item.type].append(idx) - - return result - - -# TODO: move to schema builder? -def _is_relationship_field(field: ModelField) -> bool: - return "relationship" in field.field_info.extra - - -def _get_schema_field_names(schema: Type[TypeSchema]) -> Set[str]: - """ - Returns all attribute names except relationships - """ - result = set() - - for field_name, field in schema.__fields__.items(): - if _is_relationship_field(field): - continue - - result.add(field_name) - - return result - - -def _get_exclude_fields( - schema: Type[TypeSchema], - include_fields: Iterable[str], -) -> Set[str]: - schema_fields = _get_schema_field_names(schema) - - if IGNORE_ALL_FIELDS_LITERAL in include_fields: - return schema_fields - - return set(_get_schema_field_names(schema)).difference(include_fields) - - -def _calculate_exclude_fields( - response: JSONAPIResponse, - query_params: QueryStringManager, - jsonapi: RoutersJSONAPI, -) -> Dict: - included = "included" in response.__fields__ and response.included or [] - is_list_response = isinstance(response, JSONAPIResultListSchema) - - exclude_params: Dict[str, Any] = {} - - includes_indexes_by_type = _get_includes_indexes_by_type(included) - - for resource_type, field_names in query_params.fields.items(): - schema = 
jsonapi.all_jsonapi_routers[resource_type]._schema - exclude_fields = _get_exclude_fields(schema, include_fields=field_names) - attributes_exclude = {"attributes": exclude_fields} - - if resource_type == jsonapi.type_: - if is_list_response: - exclude_params["data"] = {"__all__": attributes_exclude} - else: - exclude_params["data"] = attributes_exclude - - continue - - if not included: - continue - - target_type_indexes = includes_indexes_by_type.get(resource_type) - - if target_type_indexes: - if "included" not in exclude_params: - exclude_params["included"] = {} - - exclude_params["included"].update((idx, attributes_exclude) for idx in target_type_indexes) - - return exclude_params - - -def handle_jsonapi_fields( - response: JSONAPIResponse, - query_params: QueryStringManager, - jsonapi: RoutersJSONAPI, -) -> Union[JSONAPIResponse, Dict]: - if not query_params.fields: - return response - - exclude_params = _calculate_exclude_fields(response, query_params, jsonapi) - - if exclude_params: - return response.dict(exclude=exclude_params, by_alias=True) - - return response diff --git a/fastapi_jsonapi/views/view_base.py b/fastapi_jsonapi/views/view_base.py index 5566d7ab..ddf499da 100644 --- a/fastapi_jsonapi/views/view_base.py +++ b/fastapi_jsonapi/views/view_base.py @@ -1,58 +1,26 @@ import inspect import logging -from collections import defaultdict -from contextvars import ContextVar from functools import partial -from typing import ( - Any, - Callable, - ClassVar, - Dict, - Iterable, - List, - Optional, - Tuple, - Type, - Union, -) +from typing import Any, Callable, ClassVar, Iterable, Optional, Type from fastapi import Request +from fastapi.concurrency import run_in_threadpool from pydantic import BaseModel as PydanticBaseModel -from pydantic.fields import ModelField -from starlette.concurrency import run_in_threadpool -from fastapi_jsonapi import QueryStringManager, RoutersJSONAPI +from fastapi_jsonapi.common import get_relationship_info_from_field_metadata from 
fastapi_jsonapi.data_layers.base import BaseDataLayer -from fastapi_jsonapi.data_typing import ( - TypeModel, - TypeSchema, -) -from fastapi_jsonapi.schema import ( - JSONAPIObjectSchema, - JSONAPIResultListMetaSchema, - JSONAPIResultListSchema, - get_related_schema, -) -from fastapi_jsonapi.schema_base import BaseModel, RelationshipInfo -from fastapi_jsonapi.schema_builder import JSONAPIObjectSchemas -from fastapi_jsonapi.splitter import SPLIT_REL -from fastapi_jsonapi.views.utils import ( - HTTPMethod, - HTTPMethodConfig, -) +from fastapi_jsonapi.data_typing import TypeModel, TypeSchema +from fastapi_jsonapi.exceptions import BadRequest +from fastapi_jsonapi.querystring import QueryStringManager +from fastapi_jsonapi.schema import BaseJSONAPIItemInSchema +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.storages.models_storage import models_storage +from fastapi_jsonapi.storages.schemas_storage import schemas_storage +from fastapi_jsonapi.types_metadata import RelationshipInfo +from fastapi_jsonapi.views import Operation, OperationConfig, RelationshipRequestInfo logger = logging.getLogger(__name__) -previous_resource_type_ctx_var: ContextVar[str] = ContextVar("previous_resource_type_ctx_var") -related_field_name_ctx_var: ContextVar[str] = ContextVar("related_field_name_ctx_var") -relationships_schema_ctx_var: ContextVar[Type[BaseModel]] = ContextVar("relationships_schema_ctx_var") -object_schema_ctx_var: ContextVar[Type[JSONAPIObjectSchema]] = ContextVar("object_schema_ctx_var") -included_object_schema_ctx_var: ContextVar[Type[TypeSchema]] = ContextVar("included_object_schema_ctx_var") -relationship_info_ctx_var: ContextVar[RelationshipInfo] = ContextVar("relationship_info_ctx_var") - -# TODO: just change state on `self`!! 
(refactor) -included_objects_ctx_var: ContextVar[Dict[Tuple[str, str], TypeSchema]] = ContextVar("included_objects_ctx_var") - class ViewBase: """ @@ -60,32 +28,30 @@ class ViewBase: """ data_layer_cls = BaseDataLayer - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = {} + operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = {} - def __init__(self, *, request: Request, jsonapi: RoutersJSONAPI, **options): + def __init__( + self, + *, + request: Request, + resource_type: str, + operation: Operation, + model: Type[TypeModel], + schema: Type[TypeSchema], + **options, + ): self.request: Request = request - self.jsonapi: RoutersJSONAPI = jsonapi + self.query_params: QueryStringManager + self.resource_type: str = resource_type + self.operation: Operation = operation + self.model: Type[TypeModel] = model + self.schema: Type[TypeSchema] = schema self.options: dict = options self.query_params: QueryStringManager = QueryStringManager(request=request) - def _get_data_layer(self, schema: Type[BaseModel], **dl_kwargs): - return self.data_layer_cls( - request=self.request, - schema=schema, - model=self.jsonapi.model, - type_=self.jsonapi.type_, - **dl_kwargs, - ) - async def get_data_layer( self, - extra_view_deps: Dict[str, Any], - ) -> BaseDataLayer: - raise NotImplementedError - - async def get_data_layer_for_detail( - self, - extra_view_deps: Dict[str, Any], + extra_view_deps: dict[str, Any], ) -> BaseDataLayer: """ Prepares data layer for detail view @@ -94,26 +60,145 @@ async def get_data_layer_for_detail( :return: """ dl_kwargs = await self.handle_endpoint_dependencies(extra_view_deps) - return self._get_data_layer( - schema=self.jsonapi.schema_detail, + return self.data_layer_cls( + request=self.request, + model=self.model, + schema=self.schema, + resource_type=self.resource_type, **dl_kwargs, ) - async def get_data_layer_for_list( + async def handle_get_resource_detail( self, - extra_view_deps: Dict[str, Any], - ) -> BaseDataLayer: - 
""" - Prepares data layer for list view + obj_id: str, + **extra_view_deps, + ) -> dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) - :param extra_view_deps: - :return: - """ - dl_kwargs = await self.handle_endpoint_dependencies(extra_view_deps) - return self._get_data_layer( - schema=self.jsonapi.schema_list, - **dl_kwargs, + view_kwargs = {dl.url_id_field: obj_id} + db_object = await dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) + + return self._build_detail_response(db_object) + + async def handle_get_resource_relationship( + self, + obj_id: str, + relationship_name: str, + parent_resource_type: str, + **extra_view_deps, + ) -> dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) + view_kwargs = {dl.url_id_field: obj_id} + db_object = await dl.get_object( + view_kwargs=view_kwargs, + qs=self.query_params, + relationship_request_info=RelationshipRequestInfo( + parent_resource_type=parent_resource_type, + parent_obj_id=obj_id, + relationship_name=relationship_name, + ), ) + return self._build_detail_response(db_object) + + async def handle_get_resource_relationship_list( + self, + obj_id: str, + relationship_name: str, + parent_resource_type: str, + **extra_view_deps, + ) -> dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) + count, items_from_db = await dl.get_collection( + qs=self.query_params, + relationship_request_info=RelationshipRequestInfo( + parent_resource_type=parent_resource_type, + parent_obj_id=obj_id, + relationship_name=relationship_name, + ), + ) + total_pages = self._calculate_total_pages(count) + return self._build_list_response(items_from_db, count, total_pages) + + async def handle_update_resource( + self, + obj_id: str, + data_update: BaseJSONAPIItemInSchema, + **extra_view_deps, + ) -> dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) + return await self.process_update_object(dl=dl, obj_id=obj_id, data_update=data_update) + + async def 
process_update_object( + self, + dl: BaseDataLayer, + obj_id: str, + data_update: BaseJSONAPIItemInSchema, + ) -> dict: + if obj_id != data_update.id: + raise BadRequest( + detail="obj_id and data.id should be same.", + pointer="/data/id", + ) + view_kwargs = { + dl.url_id_field: obj_id, + "required_to_load": data_update.attributes.model_fields.keys(), + } + db_object = await dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) + + await dl.update_object(db_object, data_update, view_kwargs) + + return self._build_detail_response(db_object) + + async def handle_delete_resource( + self, + obj_id: str, + **extra_view_deps, + ) -> None: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) + await self.process_delete_object(dl=dl, obj_id=obj_id) + + async def process_delete_object( + self, + dl: BaseDataLayer, + obj_id: str, + ) -> None: + view_kwargs = {dl.url_id_field: obj_id} + db_object = await dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) + + await dl.delete_object(db_object, view_kwargs) + + async def handle_get_resource_list(self, **extra_view_deps) -> dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) + count, items_from_db = await dl.get_collection(qs=self.query_params) + total_pages = self._calculate_total_pages(count) + + return self._build_list_response(items_from_db, count, total_pages) + + async def handle_post_resource_list( + self, + data_create: BaseJSONAPIItemInSchema, + **extra_view_deps, + ) -> dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) + return await self.process_create_object(dl=dl, data_create=data_create) + + async def process_create_object(self, dl: BaseDataLayer, data_create: BaseJSONAPIItemInSchema) -> dict: + db_object = await dl.create_object(data_create=data_create, view_kwargs={}) + + view_kwargs = {dl.url_id_field: models_storage.get_object_id(db_object, self.resource_type)} + if self.query_params.include: + db_object = await 
dl.get_object(view_kwargs=view_kwargs, qs=self.query_params) + + return self._build_detail_response(db_object) + + async def handle_delete_resource_list(self, **extra_view_deps) -> dict: + dl: BaseDataLayer = await self.get_data_layer(extra_view_deps) + count, items_from_db = await dl.get_collection(qs=self.query_params) + total_pages = self._calculate_total_pages(count) + + await dl.delete_objects(items_from_db, {}) + + return self._build_list_response(items_from_db, count, total_pages) async def _run_handler( self, @@ -129,367 +214,270 @@ async def _run_handler( async def _handle_config( self, - method_config: HTTPMethodConfig, - extra_view_deps: Dict[str, Any], - ) -> Dict[str, Any]: - if method_config.handler is None: + config: OperationConfig, + extra_view_deps: dict[str, Any], + ) -> dict[str, Any]: + if config.handler is None: return {} - if method_config.dependencies: - dto_class: Type[PydanticBaseModel] = method_config.dependencies + if config.dependencies: + dto_class: Type[PydanticBaseModel] = config.dependencies dto = dto_class(**extra_view_deps) - dl_kwargs = await self._run_handler(method_config.handler, dto) + return await self._run_handler(config.handler, dto) - return dl_kwargs - - dl_kwargs = await self._run_handler(method_config.handler) - - return dl_kwargs + return await self._run_handler(config.handler) async def handle_endpoint_dependencies( self, - extra_view_deps: Dict[str, Any], - ) -> Dict: + extra_view_deps: dict[str, Any], + ) -> dict: """ :return dict: this is **kwargs for DataLayer.__init___ """ dl_kwargs = {} - if common_method_config := self.method_dependencies.get(HTTPMethod.ALL): + if common_method_config := self.operation_dependencies.get(Operation.ALL): dl_kwargs.update(await self._handle_config(common_method_config, extra_view_deps)) - if self.request.method not in HTTPMethod.names(): - return dl_kwargs - - if method_config := self.method_dependencies.get(HTTPMethod[self.request.method]): + if method_config := 
self.operation_dependencies.get(self.operation): dl_kwargs.update(await self._handle_config(method_config, extra_view_deps)) return dl_kwargs - def _build_response(self, items_from_db: List[TypeModel], item_schema: Type[BaseModel]): - return self.process_includes_for_db_items( - includes=self.query_params.include, - # as list to reuse helper - items_from_db=items_from_db, - item_schema=item_schema, - ) - - def _build_detail_response(self, db_item: TypeModel): - result_objects, object_schemas, extras = self._build_response([db_item], self.jsonapi.schema_detail) - # is it ok to do through list? - result_object = result_objects[0] - - detail_jsonapi_schema = self.jsonapi.schema_builder.build_schema_for_detail_result( - name=f"Result{self.__class__.__name__}", - object_jsonapi_schema=object_schemas.object_jsonapi_schema, - includes_schemas=object_schemas.included_schemas_list, - ) - - return detail_jsonapi_schema(data=result_object, **extras) - - def _build_list_response( - self, - items_from_db: List[TypeModel], - count: int, - total_pages: int, - ) -> JSONAPIResultListSchema: - result_objects, object_schemas, extras = self._build_response(items_from_db, self.jsonapi.schema_list) - - # we need to build a new schema here - # because we'd like to exclude some fields (relationships, includes, etc) - list_jsonapi_schema = self.jsonapi.schema_builder.build_schema_for_list_result( - name=f"Result{self.__class__.__name__}", - object_jsonapi_schema=object_schemas.object_jsonapi_schema, - includes_schemas=object_schemas.included_schemas_list, - ) - return list_jsonapi_schema( - meta=JSONAPIResultListMetaSchema(count=count, total_pages=total_pages), - data=result_objects, - **extras, - ) + def _calculate_total_pages(self, db_items_count: int) -> int: + total_pages = 1 + if not (pagination_size := self.query_params.pagination.size): + return total_pages - # data preparing below: - - @classmethod - def get_db_item_id(cls, item_from_db: TypeModel): - """ - just converts to str. 
maybe needs another approach - - TODO: check if id is None? raise? - TODO: any another conversion for id to string? - :param item_from_db: - :return: - """ - return str(item_from_db.id) - - @classmethod - def prepare_related_object_data( - cls, - item_from_db: TypeModel, - ) -> Tuple[Dict[str, Union[str, int]], Optional[TypeSchema]]: - included_object_schema: Type[TypeSchema] = included_object_schema_ctx_var.get() - relationship_info: RelationshipInfo = relationship_info_ctx_var.get() - item_id = cls.get_db_item_id(item_from_db) - data_for_relationship = {"id": item_id} - processed_object = included_object_schema( - id=item_id, - attributes=item_from_db, - type=relationship_info.resource_type, + return db_items_count // pagination_size + ( + # one more page if not a multiple of size + (db_items_count % pagination_size) + and 1 ) - return data_for_relationship, processed_object - - @classmethod - def prepare_data_for_relationship( - cls, - related_db_item: Union[List[TypeModel], TypeModel], - ) -> Tuple[Optional[Dict[str, Union[str, int]]], List[TypeSchema]]: - included_objects = [] - if related_db_item is None: - return None, included_objects - - data_for_relationship, processed_object = cls.prepare_related_object_data( - item_from_db=related_db_item, - ) - if processed_object: - included_objects.append(processed_object) - return data_for_relationship, included_objects - - @classmethod - def update_related_object( - cls, - relationship_data: Union[Dict[str, str], List[Dict[str, str]]], - cache_key: Tuple[str, str], - related_field_name: str, - ): - relationships_schema: Type[BaseModel] = relationships_schema_ctx_var.get() - object_schema: Type[JSONAPIObjectSchema] = object_schema_ctx_var.get() - included_objects: Dict[Tuple[str, str], TypeSchema] = included_objects_ctx_var.get() - - relationship_data_schema = get_related_schema(relationships_schema, related_field_name) - parent_included_object = included_objects.get(cache_key) - new_relationships = {} - if 
hasattr(parent_included_object, "relationships") and parent_included_object.relationships: - existing = parent_included_object.relationships or {} - if isinstance(existing, BaseModel): - existing = existing.dict() - new_relationships.update(existing) - new_relationships.update( - **{ - related_field_name: relationship_data_schema( - data=relationship_data, - ), - }, - ) - included_objects[cache_key] = object_schema.parse_obj( - parent_included_object, - ).copy( - update={"relationships": new_relationships}, - ) - - @classmethod - def update_known_included( - cls, - new_included: List[TypeSchema], - ): - included_objects: Dict[Tuple[str, str], TypeSchema] = included_objects_ctx_var.get() - - for included in new_included: - key = (included.id, included.type) - if key not in included_objects: - included_objects[key] = included - @classmethod - def process_single_db_item_and_prepare_includes( + def _prepare_item_data( cls, - parent_db_item: TypeModel, - ): - previous_resource_type: str = previous_resource_type_ctx_var.get() - related_field_name: str = related_field_name_ctx_var.get() - - next_current_db_item = [] - cache_key = (cls.get_db_item_id(parent_db_item), previous_resource_type) - current_db_item = getattr(parent_db_item, related_field_name) - current_is_single = False - if not isinstance(current_db_item, Iterable): - # hack to do less if/else - current_db_item = [current_db_item] - current_is_single = True - relationship_data_items = [] - - for db_item in current_db_item: - next_current_db_item.append(db_item) - data_for_relationship, new_included = cls.prepare_data_for_relationship( - related_db_item=db_item, + db_item, + resource_type: str, + include_fields: Optional[dict[str, dict[str, Type[TypeSchema]]]] = None, + ) -> dict: + attrs_schema = schemas_storage.get_attrs_schema(resource_type, operation_type="get") + + if include_fields is None or not (field_schemas := include_fields.get(resource_type)): + + data_schema = 
schemas_storage.get_data_schema(resource_type, operation_type="get") + return data_schema( + id=f"{db_item.id}", + attributes=attrs_schema.model_validate(db_item), + ).model_dump() + + result_attributes = {} + # empty str means skip all attributes + if "" not in field_schemas: + pre_values = {} + for field_name, field_schema in field_schemas.items(): + pre_values[field_name] = getattr(db_item, field_name) + + before_validators, after_validators = schemas_storage.get_model_validators( + resource_type, + operation_type="get", ) + if before_validators: + for validator_name, validator in before_validators.items(): + if hasattr(validator.wrapped, "__func__"): + pre_values = validator.wrapped.__func__(attrs_schema, pre_values) + continue - cls.update_known_included( - new_included=new_included, - ) - relationship_data_items.append(data_for_relationship) + pre_values = validator.wrapped(pre_values) - if current_is_single: - # if initially was single, get back one dict - # hack to do less if/else - relationship_data_items = relationship_data_items[0] + for field_name, field_schema in field_schemas.items(): + validated_model = field_schema(**{field_name: pre_values[field_name]}) - cls.update_related_object( - relationship_data=relationship_data_items, - cache_key=cache_key, - related_field_name=related_field_name, - ) + if after_validators: + for validator_name, validator in after_validators.items(): + if hasattr(validator.wrapped, "__func__"): + validated_model = validator.wrapped.__func__(attrs_schema, validated_model) + continue - return next_current_db_item + validated_model = validator.wrapped(validated_model) - @classmethod - def process_db_items_and_prepare_includes( - cls, - parent_db_items: List[TypeModel], - ): - next_current_db_item = [] + result_attributes[field_name] = getattr(validated_model, field_name) - for parent_db_item in parent_db_items: - new_next_items = cls.process_single_db_item_and_prepare_includes( - parent_db_item=parent_db_item, - ) - 
next_current_db_item.extend(new_next_items) - return next_current_db_item + return { + "id": f"{models_storage.get_object_id(db_item, resource_type)}", + "type": resource_type, + "attributes": result_attributes, + } - def process_include_with_nested( - self, - include: str, - current_db_item: Union[List[TypeModel], TypeModel], - item_as_schema: TypeSchema, - current_relation_schema: Type[TypeSchema], - included_objects: Dict[Tuple[str, str], TypeSchema], - requested_includes: Dict[str, Iterable[str]], - ) -> Tuple[Dict[str, TypeSchema], List[JSONAPIObjectSchema]]: - root_item_key = (item_as_schema.id, item_as_schema.type) - - if root_item_key not in included_objects: - included_objects[root_item_key] = item_as_schema - previous_resource_type = item_as_schema.type - - previous_related_field_name = previous_resource_type - for related_field_name in include.split(SPLIT_REL): - object_schemas = self.jsonapi.schema_builder.create_jsonapi_object_schemas( - schema=current_relation_schema, - includes=requested_includes[previous_related_field_name], - compute_included_schemas=True, - ) - relationships_schema = object_schemas.relationships_schema - schemas_include = object_schemas.can_be_included_schemas - - current_relation_field: ModelField = current_relation_schema.__fields__[related_field_name] - current_relation_schema: Type[TypeSchema] = current_relation_field.type_ - - relationship_info: RelationshipInfo = current_relation_field.field_info.extra["relationship"] - included_object_schema: Type[JSONAPIObjectSchema] = schemas_include[related_field_name] - - if not isinstance(current_db_item, Iterable): - # xxx: less if/else - current_db_item = [current_db_item] - - # ctx vars to skip multi-level args passing - relationships_schema_ctx_var.set(relationships_schema) - object_schema_ctx_var.set(object_schemas.object_jsonapi_schema) - previous_resource_type_ctx_var.set(previous_resource_type) - related_field_name_ctx_var.set(related_field_name) - 
relationship_info_ctx_var.set(relationship_info) - included_object_schema_ctx_var.set(included_object_schema) - included_objects_ctx_var.set(included_objects) - - current_db_item = self.process_db_items_and_prepare_includes( - parent_db_items=current_db_item, - ) + def _prepare_include_params(self) -> list[list[str]]: + result = [] + includes = sorted(self.query_params.include) + prev, *_ = includes - previous_resource_type = relationship_info.resource_type - previous_related_field_name = related_field_name - - return included_objects.pop(root_item_key), list(included_objects.values()) - - def prep_requested_includes(self, includes: Iterable[str]): - requested_includes: Dict[str, set[str]] = defaultdict(set) - default: str = self.jsonapi.type_ for include in includes: - prev = default - for related_field_name in include.split(SPLIT_REL): - requested_includes[prev].add(related_field_name) - prev = related_field_name + if not include.startswith(prev): + result.append(prev.split(".")) - return requested_includes + prev = include - def process_db_object( - self, - includes: List[str], - item: TypeModel, - item_schema: Type[TypeSchema], - object_schemas: JSONAPIObjectSchemas, - ): - included_objects = [] + result.append(prev.split(".")) + return result - item_as_schema = object_schemas.object_jsonapi_schema( - id=self.get_db_item_id(item), - attributes=object_schemas.attributes_schema.from_orm(item), - ) + @classmethod + def _get_include_key(cls, db_item: TypeModel, info: RelationshipInfo) -> tuple[str, str]: + return info.resource_type, str(getattr(db_item, info.id_field_name)) - cache_included_objects: Dict[Tuple[str, str], TypeSchema] = {} - requested_includes = self.prep_requested_includes(includes) + def _process_includes( + self, + db_items: list[TypeModel], + items_data: list[dict], + resource_type: str, + include_paths: list[Iterable[str]], + include_fields: dict[str, dict[str, Type[TypeSchema]]], + result_included: Optional[dict] = None, + ) -> dict[tuple[str, 
str], dict]: + result_included = result_included or {} + + for db_item, item_data in zip(db_items, items_data): + item_data["relationships"] = item_data.get("relationships", {}) + + for path in include_paths: + target_relationship, *include_path = path + info: RelationshipInfo = schemas_storage.get_relationship_info( + resource_type=resource_type, + operation_type="get", + field_name=target_relationship, + ) + db_items_to_process: list[TypeModel] = [] + items_data_to_process: list[dict] = [] + + if info.many: + relationship_data = [] + + for relationship_db_item in getattr(db_item, target_relationship): + include_key = self._get_include_key(relationship_db_item, info) + + if not (relationship_item_data := result_included.get(include_key)): + relationship_item_data = self._prepare_item_data( + db_item=relationship_db_item, + resource_type=info.resource_type, + include_fields=include_fields, + ) + result_included[include_key] = relationship_item_data + + db_items_to_process.append(relationship_db_item) + relationship_data.append( + { + "id": str(getattr(relationship_db_item, info.id_field_name)), + "type": info.resource_type, + }, + ) + items_data_to_process.append(relationship_item_data) + else: + if (relationship_db_item := getattr(db_item, target_relationship)) is None: + item_data["relationships"][target_relationship] = {"data": None} + continue + + db_items_to_process.append(relationship_db_item) + relationship_data = { + "id": str(getattr(relationship_db_item, info.id_field_name)), + "type": info.resource_type, + } + + include_key = self._get_include_key(relationship_db_item, info) + + if not (relationship_item_data := result_included.get(include_key)): + relationship_item_data = self._prepare_item_data(relationship_db_item, info.resource_type) + result_included[include_key] = relationship_item_data + + items_data_to_process.append(relationship_item_data) + + if include_path: + self._process_includes( + db_items=db_items_to_process, + 
items_data=items_data_to_process, + resource_type=info.resource_type, + include_paths=[include_path], + result_included=result_included, + include_fields=include_fields, + ) + + item_data["relationships"][target_relationship] = {"data": relationship_data} + + return result_included - for include in includes: - item_as_schema, new_included_objects = self.process_include_with_nested( - include=include, - current_db_item=item, - item_as_schema=item_as_schema, - current_relation_schema=item_schema, - included_objects=cache_included_objects, - requested_includes=requested_includes, + @classmethod + def _get_schema_field_names(cls, schema: type[TypeSchema]) -> set[str]: + """Returns all attribute names except relationships""" + result = set() + + for field_name, field in schema.model_fields.items(): + if get_relationship_info_from_field_metadata(field): + continue + + result.add(field_name) + + return result + + def _get_include_fields(self) -> dict[str, dict[str, Type[TypeSchema]]]: + include_fields = {} + for resource_type, field_names in self.query_params.fields.items(): + include_fields[resource_type] = {} + + for field_name in field_names: + include_fields[resource_type][field_name] = schemas_storage.get_field_schema( + resource_type=resource_type, + operation_type="get", + field_name=field_name, + ) + + return include_fields + + def _build_detail_response(self, db_item: TypeModel) -> dict: + include_fields = self._get_include_fields() + item_data = self._prepare_item_data(db_item, self.resource_type, include_fields) + response = { + "data": item_data, + "jsonapi": {"version": "1.0"}, + "meta": None, + } + + if self.query_params.include: + included = self._process_includes( + db_items=[db_item], + items_data=[item_data], + include_paths=self._prepare_include_params(), + resource_type=self.resource_type, + include_fields=include_fields, ) + response["included"] = [value for _, value in sorted(included.items(), key=lambda item: item[0])] - 
included_objects.extend(new_included_objects) - - return item_as_schema, included_objects + return response - def process_includes_for_db_items( + def _build_list_response( self, - includes: List[str], - items_from_db: List[TypeModel], - item_schema: Type[TypeSchema], - ): - object_schemas = self.jsonapi.schema_builder.create_jsonapi_object_schemas( - schema=item_schema, - includes=includes, - compute_included_schemas=bool(includes), - use_schema_cache=False, - ) - - result_objects = [] - # form: - # `(type, id): serialized_object` - # helps to exclude duplicates - included_objects: Dict[Tuple[str, str], TypeSchema] = {} - for item in items_from_db: - jsonapi_object, new_included = self.process_db_object( - includes=includes, - item=item, - item_schema=item_schema, - object_schemas=object_schemas, - ) - result_objects.append(jsonapi_object) - for included in new_included: - # update too? - included_objects[(included.type, included.id)] = included - - extras = {} - if includes: - # if query has includes, add includes to response - # even if no related objects were found - extras.update( - included=[ - # ignore key - value - # sort for prettiness - for key, value in sorted(included_objects.items()) - ], + items_from_db: list[TypeModel], + count: int, + total_pages: int, + ) -> dict: + include_fields = self._get_include_fields() + items_data = [ + self._prepare_item_data(db_item, self.resource_type, include_fields) for db_item in items_from_db + ] + response = { + "data": items_data, + "jsonapi": {"version": "1.0"}, + "meta": {"count": count, "totalPages": total_pages}, + } + + if self.query_params.include: + included = self._process_includes( + db_items=items_from_db, + items_data=items_data, + resource_type=self.resource_type, + include_paths=self._prepare_include_params(), + include_fields=include_fields, ) + response["included"] = [value for _, value in sorted(included.items(), key=lambda item: item[0])] - return result_objects, object_schemas, extras + return 
response diff --git a/poetry.lock b/poetry.lock index 632f64e7..92dc41c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "aiosqlite" @@ -16,34 +16,47 @@ typing_extensions = ">=3.7.2" [[package]] name = "alabaster" -version = "0.7.13" -description = "A configurable sidebar-enabled Sphinx theme" +version = "0.7.16" +description = "A light, configurable Sphinx theme" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [[package]] name = "anyio" -version = "3.6.2" +version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.9" files = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = 
"anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, ] [package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16,<0.22)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "asyncpg" @@ -100,47 +113,62 @@ test = ["flake8 (>=5.0,<6.0)", "uvloop (>=0.15.3)"] [[package]] name = "babel" -version = "2.12.1" +version = "2.17.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] +[package.extras] +dev = 
["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] + +[[package]] +name = "backports-tarfile" +version = "1.2.0" +description = "Backport of CPython tarfile module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, + {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] + [[package]] name = "black" -version = "23.3.0" +version = "25.1.0" description = "The uncompromising code formatter." optional = false -python-versions = ">=3.7" -files = [ - {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, - {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, - {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = 
"sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, - {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, - {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, - {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, - {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, - {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, - {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, - {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, - {file = 
"black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, - {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, - {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, - {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, - {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, +python-versions = ">=3.9" +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = 
"sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, ] [package.dependencies] @@ -150,96 +178,99 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2023.5.7" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = 
"cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = 
"cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = 
"cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -247,108 +278,125 @@ pycparser = "*" [[package]] name = "cfgv" -version = "3.3.1" +version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.8" files = [ - {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, - {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = 
"charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = 
"charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, 
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = 
"charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = 
"sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.3" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -367,62 +415,74 @@ files = [ [[package]] name = "coverage" -version = "7.2.6" +version = "7.6.12" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" -files = [ - {file = "coverage-7.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:496b86f1fc9c81a1cd53d8842ef712e950a4611bba0c42d33366a7b91ba969ec"}, - {file = "coverage-7.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbe6e8c0a9a7193ba10ee52977d4d5e7652957c1f56ccefed0701db8801a2a3b"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d06b721c2550c01a60e5d3093f417168658fb454e5dfd9a23570e9bffe39a1"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77a04b84d01f0e12c66f16e69e92616442dc675bbe51b90bfb074b1e5d1c7fbd"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35db06450272473eab4449e9c2ad9bc6a0a68dab8e81a0eae6b50d9c2838767e"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6727a0d929ff0028b1ed8b3e7f8701670b1d7032f219110b55476bb60c390bfb"}, - {file = 
"coverage-7.2.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aac1d5fdc5378f6bac2c0c7ebe7635a6809f5b4376f6cf5d43243c1917a67087"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9e4a5eb1bbc3675ee57bc31f8eea4cd7fb0cbcbe4912cf1cb2bf3b754f4a80"}, - {file = "coverage-7.2.6-cp310-cp310-win32.whl", hash = "sha256:71f739f97f5f80627f1fee2331e63261355fd1e9a9cce0016394b6707ac3f4ec"}, - {file = "coverage-7.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:fde5c7a9d9864d3e07992f66767a9817f24324f354caa3d8129735a3dc74f126"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc7b667f8654376e9353dd93e55e12ce2a59fb6d8e29fce40de682273425e044"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:697f4742aa3f26c107ddcb2b1784a74fe40180014edbd9adaa574eac0529914c"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541280dde49ce74a4262c5e395b48ea1207e78454788887118c421cb4ffbfcac"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7f1a8328eeec34c54f1d5968a708b50fc38d31e62ca8b0560e84a968fbf9a9"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd58eb5a2371bf160590f4262109f66b6043b0b991930693134cb617bc0169"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae82c5f168d2a39a5d69a12a69d4dc23837a43cf2ca99be60dfe59996ea6b113"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f5440cdaf3099e7ab17a5a7065aed59aff8c8b079597b61c1f8be6f32fe60636"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6f03f87fea579d55e0b690d28f5042ec1368650466520fbc400e7aeaf09e995"}, - {file = "coverage-7.2.6-cp311-cp311-win32.whl", hash = 
"sha256:dc4d5187ef4d53e0d4c8eaf530233685667844c5fb0b855fea71ae659017854b"}, - {file = "coverage-7.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:c93d52c3dc7b9c65e39473704988602300e3cc1bad08b5ab5b03ca98bbbc68c1"}, - {file = "coverage-7.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42c692b55a647a832025a4c048007034fe77b162b566ad537ce65ad824b12a84"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7786b2fa7809bf835f830779ad285215a04da76293164bb6745796873f0942d"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25bad4196104761bc26b1dae9b57383826542ec689ff0042f7f4f4dd7a815cba"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2692306d3d4cb32d2cceed1e47cebd6b1d2565c993d6d2eda8e6e6adf53301e6"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:392154d09bd4473b9d11351ab5d63391f3d5d24d752f27b3be7498b0ee2b5226"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa079995432037b5e2ef5ddbb270bcd2ded9f52b8e191a5de11fe59a00ea30d8"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d712cefff15c712329113b01088ba71bbcef0f7ea58478ca0bbec63a824844cb"}, - {file = "coverage-7.2.6-cp37-cp37m-win32.whl", hash = "sha256:004948e296149644d208964300cb3d98affc5211e9e490e9979af4030b0d6473"}, - {file = "coverage-7.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:c1d7a31603c3483ac49c1726723b0934f88f2c011c660e6471e7bd735c2fa110"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3436927d1794fa6763b89b60c896f9e3bd53212001026ebc9080d23f0c2733c1"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44c9b9f1a245f3d0d202b1a8fa666a80b5ecbe4ad5d0859c0fb16a52d9763224"}, - {file = 
"coverage-7.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3783a286d5a93a2921396d50ce45a909aa8f13eee964465012f110f0cbb611"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cff6980fe7100242170092bb40d2b1cdad79502cd532fd26b12a2b8a5f9aee0"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c534431153caffc7c495c3eddf7e6a6033e7f81d78385b4e41611b51e8870446"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3062fd5c62df988cea9f2972c593f77fed1182bfddc5a3b12b1e606cb7aba99e"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6284a2005e4f8061c58c814b1600ad0074ccb0289fe61ea709655c5969877b70"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:97729e6828643f168a2a3f07848e1b1b94a366b13a9f5aba5484c2215724edc8"}, - {file = "coverage-7.2.6-cp38-cp38-win32.whl", hash = "sha256:dc11b42fa61ff1e788dd095726a0aed6aad9c03d5c5984b54cb9e1e67b276aa5"}, - {file = "coverage-7.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:cbcc874f454ee51f158afd604a315f30c0e31dff1d5d5bf499fc529229d964dd"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3cacc6a665221108ecdf90517a8028d07a2783df3417d12dcfef1c517e67478"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:272ab31228a9df857ab5df5d67936d8861464dc89c5d3fab35132626e9369379"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a8723ccec4e564d4b9a79923246f7b9a8de4ec55fa03ec4ec804459dade3c4f"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5906f6a84b47f995cd1bf0aca1c72d591c55ee955f98074e93660d64dfc66eb9"}, - {file = 
"coverage-7.2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c139b7ab3f0b15f9aad0a3fedef5a1f8c0b2bdc291d88639ca2c97d3682416"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a5ffd45c6b93c23a8507e2f436983015c6457aa832496b6a095505ca2f63e8f1"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4f3c7c19581d471af0e9cb49d928172cd8492cd78a2b7a4e82345d33662929bb"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8c0e79820cdd67978e1120983786422d279e07a381dbf89d03bbb23ec670a6"}, - {file = "coverage-7.2.6-cp39-cp39-win32.whl", hash = "sha256:13cde6bb0e58fb67d09e2f373de3899d1d1e866c5a9ff05d93615f2f54fbd2bb"}, - {file = "coverage-7.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:6b9f64526286255735847aed0221b189486e0b9ed943446936e41b7e44b08783"}, - {file = "coverage-7.2.6-pp37.pp38.pp39-none-any.whl", hash = "sha256:6babcbf1e66e46052442f10833cfc4a0d3554d8276aa37af8531a83ed3c1a01d"}, - {file = "coverage-7.2.6.tar.gz", hash = "sha256:2025f913f2edb0272ef15d00b1f335ff8908c921c8eb2013536fcaf61f5a683d"}, +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = 
"coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = 
"sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = 
"coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = 
"coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] [package.dependencies] @@ -433,87 +493,84 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.1" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, - {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, - {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, - {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, - {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, - {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, - {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash 
= "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "distlib" -version = "0.3.6" 
+version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, - {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] name = "docutils" -version = "0.20.1" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, -] - -[[package]] -name = "editables" -version = "0.3" -description = "Editable installations" -optional = false -python-versions = ">=3.7" -files = [ - {file = "editables-0.3-py3-none-any.whl", hash = "sha256:ee686a8db9f5d91da39849f175ffeef094dd0e9c36d6a59a2e8c7f92a3b80020"}, - {file = "editables-0.3.tar.gz", hash = "sha256:167524e377358ed1f1374e61c268f0d7a4bf7dbd046c656f7b410cde16161b1a"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = 
"sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -521,13 +578,13 @@ test = ["pytest (>=6)"] [[package]] name = "faker" -version = "18.9.0" +version = "18.13.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.7" files = [ - {file = "Faker-18.9.0-py3-none-any.whl", hash = "sha256:defe9ed618a67ebf0f3eb1895e198c2355a7128a09087a6dce342ef2253263ea"}, - {file = "Faker-18.9.0.tar.gz", hash = "sha256:80a5ea1464556c06b98bf47ea3adc7f33811a1182518d847860b1874080bd3c9"}, + {file = "Faker-18.13.0-py3-none-any.whl", hash = "sha256:801d1a2d71f1fc54d332de2ab19de7452454309937233ea2f7485402882d67b3"}, + {file = "Faker-18.13.0.tar.gz", hash = "sha256:84bcf92bb725dd7341336eea4685df9a364f16f2470c4d29c1d7e6c5fd5a457d"}, ] [package.dependencies] @@ -535,115 +592,124 @@ python-dateutil = ">=2.4" [[package]] name = "fastapi" -version = "0.95.2" +version = "0.115.8" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "fastapi-0.95.2-py3-none-any.whl", hash = "sha256:d374dbc4ef2ad9b803899bd3360d34c534adc574546e25314ab72c0c4411749f"}, - {file = "fastapi-0.95.2.tar.gz", hash = "sha256:4d9d3e8c71c73f11874bcf5e33626258d143252e329a01002f767306c64fb982"}, + {file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"}, + {file = "fastapi-0.115.8.tar.gz", hash = 
"sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"}, ] [package.dependencies] -pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" -starlette = ">=0.27.0,<0.28.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.40.0,<0.46.0" +typing-extensions = ">=4.8.0" [package.extras] -all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] -dev = ["pre-commit (>=2.17.0,<3.0.0)", "ruff (==0.0.138)", "uvicorn[standard] (>=0.12.0,<0.21.0)"] -doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer-cli (>=0.0.13,<0.0.14)", "typer[all] (>=0.6.1,<0.8.0)"] -test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==23.1.0)", "coverage[toml] (>=6.5.0,<8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.7)", "pyyaml (>=5.3.1,<7.0.0)", "ruff (==0.0.138)", "sqlalchemy (>=1.3.18,<1.4.43)", "types-orjson (==3.6.2)", "types-ujson (==5.7.0.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml 
(>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" -version = "3.12.2" +version = "3.17.0" description = "A platform independent file lock." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, + {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "greenlet" -version = "2.0.2" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = true -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ - {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = 
"sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = 
"greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = 
"greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, 
+ {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] @@ -659,46 +725,46 @@ files = [ [[package]] name = "hatch" -version = "1.7.0" +version = "1.14.0" description = "Modern, extensible Python project management" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "hatch-1.7.0-py3-none-any.whl", hash = "sha256:efc84112fd02ca85b7bab54f5e2ef71393a98dc849eac9aca390504031f8a1a8"}, - {file = "hatch-1.7.0.tar.gz", hash = 
"sha256:7afc701fd5b33684a6650e1ecab8957e19685f824240ba7458dcacd66f90fb46"}, + {file = "hatch-1.14.0-py3-none-any.whl", hash = "sha256:b12c7a2f4aaf6db7180e35c476e1a2ad4ec7197c20c4332964599424d4918ded"}, + {file = "hatch-1.14.0.tar.gz", hash = "sha256:351e41bc6c72bc93cb98651212226e495b43549eee27c487832e459e5d0f0eda"}, ] [package.dependencies] -click = ">=8.0.3" -hatchling = ">=1.14.0" +click = ">=8.0.6" +hatchling = ">=1.26.3" httpx = ">=0.22.0" hyperlink = ">=21.0.0" keyring = ">=23.5.0" -packaging = ">=21.3" +packaging = ">=23.2" pexpect = ">=4.8,<5.0" platformdirs = ">=2.5.0" -pyperclip = ">=1.8.2" rich = ">=11.2.0" shellingham = ">=1.4.0" tomli-w = ">=1.0" tomlkit = ">=0.11.1" userpath = ">=1.7,<2.0" -virtualenv = ">=20.16.2" +uv = ">=0.1.35" +virtualenv = ">=20.26.6" +zstandard = "<1" [[package]] name = "hatchling" -version = "1.18.0" +version = "1.27.0" description = "Modern, extensible Python build backend" optional = false python-versions = ">=3.8" files = [ - {file = "hatchling-1.18.0-py3-none-any.whl", hash = "sha256:b66dc254931ec42aa68b5febd1d342c58142cc5267b7ff3b12ba3fa5b4900c93"}, - {file = "hatchling-1.18.0.tar.gz", hash = "sha256:50e99c3110ce0afc3f7bdbadff1c71c17758e476731c27607940cfa6686489ca"}, + {file = "hatchling-1.27.0-py3-none-any.whl", hash = "sha256:d3a2f3567c4f926ea39849cdf924c7e99e6686c9c8e288ae1037c8fa2a5d937b"}, + {file = "hatchling-1.27.0.tar.gz", hash = "sha256:971c296d9819abb3811112fc52c7a9751c8d381898f36533bb16f9791e941fd6"}, ] [package.dependencies] -editables = ">=0.3" -packaging = ">=21.3" +packaging = ">=24.2" pathspec = ">=0.10.1" pluggy = ">=1.0.0" tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} @@ -706,13 +772,13 @@ trove-classifiers = "*" [[package]] name = "httpcore" -version = "0.17.2" +version = "0.17.3" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.7" files = [ - {file = "httpcore-0.17.2-py3-none-any.whl", hash = "sha256:5581b9c12379c4288fe70f43c710d16060c10080617001e6b22a3b6dbcbefd36"}, - {file = "httpcore-0.17.2.tar.gz", hash = "sha256:125f8375ab60036db632f34f4b627a9ad085048eef7cb7d2616fea0f739f98af"}, + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, ] [package.dependencies] @@ -764,13 +830,13 @@ idna = ">=2.5" [[package]] name = "identify" -version = "2.5.26" +version = "2.6.7" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "identify-2.5.26-py2.py3-none-any.whl", hash = "sha256:c22a8ead0d4ca11f1edd6c9418c3220669b3b7533ada0a0ffa6cc0ef85cf9b54"}, - {file = "identify-2.5.26.tar.gz", hash = "sha256:7243800bce2f58404ed41b7c002e53d4d22bcf3ae1b7900c2d7aefd95394bf7f"}, + {file = "identify-2.6.7-py2.py3-none-any.whl", hash = "sha256:155931cb617a401807b09ecec6635d6c692d180090a1cedca8ef7d58ba5b6aa0"}, + {file = "identify-2.6.7.tar.gz", hash = "sha256:3fa266b42eba321ee0b2bb0936a6a6b9e36a1351cbb69055b3082f4193035684"}, ] [package.extras] @@ -778,15 +844,18 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = 
"sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "imagesize" version = "1.4.1" @@ -800,22 +869,26 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "8.6.1" description = "Read metadata from Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, + {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -829,33 +902,62 @@ files = [ ] 
[[package]] -name = "iso8601" -version = "1.1.0" -description = "Simple module to parse ISO 8601 dates" -optional = true -python-versions = ">=3.6.2,<4.0" +name = "jaraco-classes" +version = "3.4.0" +description = "Utility functions for Python class constructs" +optional = false +python-versions = ">=3.8" files = [ - {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"}, - {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"}, + {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, + {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, ] +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [[package]] -name = "jaraco-classes" -version = "3.2.3" -description = "Utility functions for Python class constructs" +name = "jaraco-context" +version = "6.0.1" +description = "Useful decorators and context managers" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jaraco.classes-3.2.3-py3-none-any.whl", hash = "sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158"}, - {file = "jaraco.classes-3.2.3.tar.gz", hash = "sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a"}, + {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"}, + {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"}, +] + +[package.dependencies] 
+"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-functools" +version = "4.1.0" +description = "Functools like those found in stdlib" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"}, + {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"}, ] [package.dependencies] more-itertools = "*" [package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] +type = ["pytest-mypy"] [[package]] name = "jeepney" @@ -874,13 +976,13 @@ trio = ["async_generator", "trio"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -891,26 +993,32 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "keyring" -version = "24.0.0" +version = "25.6.0" description = "Store and access your passwords safely." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "keyring-24.0.0-py3-none-any.whl", hash = "sha256:770f609eed2a16c65a6349f3ba1545d00c73f9fed4254c13766c674fe6d0d22b"}, - {file = "keyring-24.0.0.tar.gz", hash = "sha256:4e87665a19c514c7edada8b15015cf89bd99b8d7edabc5c43cca77166fa8dfad"}, + {file = "keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd"}, + {file = "keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66"}, ] [package.dependencies] -importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} +importlib_metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} "jaraco.classes" = "*" +"jaraco.context" = "*" +"jaraco.functools" = "*" jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] -completion = ["shtab"] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", 
"pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +completion = ["shtab (>=1.1.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["pyfakefs", "pytest (>=6,!=8.1.*)"] +type = ["pygobject-stubs", "pytest-mypy", "shtab", "types-pywin32"] [[package]] name = "markdown-it-py" @@ -938,61 +1046,72 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.2" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, 
- {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - 
{file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = 
"MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +python-versions = ">=3.9" +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, 
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -1008,59 +1127,66 @@ files = [ [[package]] name = "more-itertools" -version = "9.1.0" +version = "10.6.0" description = "More routines for operating on iterables, beyond itertools" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "more-itertools-9.1.0.tar.gz", hash = "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d"}, - {file = "more_itertools-9.1.0-py3-none-any.whl", hash = "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"}, + {file = "more-itertools-10.6.0.tar.gz", hash = "sha256:2cd7fad1009c31cc9fb6a035108509e6547547a7a738374f10bd49a09eb3ee3b"}, + {file = "more_itertools-10.6.0-py3-none-any.whl", hash = "sha256:6eb054cb4b6db1473f6e15fcc676a08e4732548acd47c708f0e179c2c7c01e89"}, ] [[package]] name = "mypy" -version = "1.4.1" +version = "1.15.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = 
"mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, +python-versions = ">=3.9" +files = [ + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = 
"mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" +mypy_extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -1076,49 +1202,134 @@ files = [ [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" +[[package]] +name = "orjson" +version = "3.10.15" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e"}, + {file = "orjson-3.10.15-cp310-cp310-win32.whl", hash = "sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab"}, + {file = "orjson-3.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806"}, + {file = 
"orjson-3.10.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c"}, + {file = "orjson-3.10.15-cp311-cp311-win32.whl", hash = "sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e"}, + {file = "orjson-3.10.15-cp311-cp311-win_amd64.whl", hash = 
"sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e"}, + {file = "orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a"}, + {file = "orjson-3.10.15-cp312-cp312-win32.whl", hash = 
"sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665"}, + {file = "orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = "sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa"}, + {file = "orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825"}, + {file = "orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890"}, + {file = "orjson-3.10.15-cp313-cp313-win_amd64.whl", hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf"}, + {file = "orjson-3.10.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e8afd6200e12771467a1a44e5ad780614b86abb4b11862ec54861a82d677746"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9a18c500f19273e9e104cca8c1f0b40a6470bcccfc33afcc088045d0bf5ea6"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb00b7bfbdf5d34a13180e4805d76b4567025da19a197645ca746fc2fb536586"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33aedc3d903378e257047fee506f11e0833146ca3e57a1a1fb0ddb789876c1e1"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd0099ae6aed5eb1fc84c9eb72b95505a3df4267e6962eb93cdd5af03be71c98"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c864a80a2d467d7786274fce0e4f93ef2a7ca4ff31f7fc5634225aaa4e9e98c"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c25774c9e88a3e0013d7d1a6c8056926b607a61edd423b50eb5c88fd7f2823ae"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e78c211d0074e783d824ce7bb85bf459f93a233eb67a5b5003498232ddfb0e8a"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:43e17289ffdbbac8f39243916c893d2ae41a2ea1a9cbb060a56a4d75286351ae"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:781d54657063f361e89714293c095f506c533582ee40a426cb6489c48a637b81"}, + {file = 
"orjson-3.10.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6875210307d36c94873f553786a808af2788e362bd0cf4c8e66d976791e7b528"}, + {file = "orjson-3.10.15-cp38-cp38-win32.whl", hash = "sha256:305b38b2b8f8083cc3d618927d7f424349afce5975b316d33075ef0f73576b60"}, + {file = "orjson-3.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:5dd9ef1639878cc3efffed349543cbf9372bdbd79f478615a1c633fe4e4180d1"}, + {file = "orjson-3.10.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ffe19f3e8d68111e8644d4f4e267a069ca427926855582ff01fc012496d19969"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d433bf32a363823863a96561a555227c18a522a8217a6f9400f00ddc70139ae2"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da03392674f59a95d03fa5fb9fe3a160b0511ad84b7a3914699ea5a1b3a38da2"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a63bb41559b05360ded9132032239e47983a39b151af1201f07ec9370715c82"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3766ac4702f8f795ff3fa067968e806b4344af257011858cc3d6d8721588b53f"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1c73dcc8fadbd7c55802d9aa093b36878d34a3b3222c41052ce6b0fc65f8e8"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b299383825eafe642cbab34be762ccff9fd3408d72726a6b2a4506d410a71ab3"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:abc7abecdbf67a173ef1316036ebbf54ce400ef2300b4e26a7b843bd446c2480"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:3614ea508d522a621384c1d6639016a5a2e4f027f3e4a1c93a51867615d28829"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:295c70f9dc154307777ba30fe29ff15c1bcc9dfc5c48632f37d20a607e9ba85a"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:63309e3ff924c62404923c80b9e2048c1f74ba4b615e7584584389ada50ed428"}, + {file = "orjson-3.10.15-cp39-cp39-win32.whl", hash = "sha256:a2f708c62d026fb5340788ba94a55c23df4e1869fec74be455e0b2f5363b8507"}, + {file = "orjson-3.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:efcf6c735c3d22ef60c4aa27a5238f1a477df85e9b15f2142f9d669beb2d13fd"}, + {file = "orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e"}, +] [[package]] name = "packaging" -version = "23.1" +version = "24.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pathspec" -version = "0.11.1" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "pexpect" -version = "4.8.0" +version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." optional = false python-versions = "*" files = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, ] [package.dependencies] @@ -1126,28 +1337,29 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "3.5.1" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1156,13 +1368,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.3.3" +version = "4.1.0" description = "A framework for managing and 
maintaining multi-language pre-commit hooks." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, + {file = "pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b"}, + {file = "pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4"}, ] [package.dependencies] @@ -1185,111 +1397,170 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pydantic" -version = "1.10.8" -description = "Data validation and settings management using python type hints" +version = "2.10.6" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - 
{file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = 
"pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] -name = "pygments" -version = "2.15.1" -description = "Pygments is a syntax highlighting package written in 
Python." +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = 
"pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = 
"pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = 
"pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = 
"pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] -[package.extras] -plugins = ["importlib-metadata"] +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] -name = "pyperclip" -version = "1.8.2" -description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)" +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "pyperclip-1.8.2.tar.gz", hash = "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] -[[package]] -name = "pypika-tortoise" -version = "0.1.6" -description = "Forked from pypika and streamline just for tortoise-orm" -optional = true -python-versions = ">=3.7,<4.0" -files = [ - {file = "pypika-tortoise-0.1.6.tar.gz", hash = "sha256:d802868f479a708e3263724c7b5719a26ad79399b2a70cea065f4a4cadbebf36"}, - {file = "pypika_tortoise-0.1.6-py3-none-any.whl", hash = "sha256:2d68bbb7e377673743cff42aa1059f3a80228d411fbcae591e4465e173109fd8"}, -] +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "7.3.1" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -1301,17 +1572,17 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", 
"nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.21.0" +version = "0.21.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, - {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, + {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, + {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, ] [package.dependencies] @@ -1341,109 +1612,100 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" -[[package]] -name = "pytz" -version = "2023.3" -description = "World timezone definitions, modern and historical" -optional = true -python-versions = "*" -files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file 
= "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, -] - [[package]] name = "pywin32-ctypes" -version = "0.2.1" +version = "0.2.3" description = "A (partial) reimplementation of pywin32 using ctypes/cffi" optional = false python-versions = ">=3.6" files = [ - {file = "pywin32-ctypes-0.2.1.tar.gz", hash = "sha256:934a2def1e5cbc472b2b6bf80680c0f03cd87df65dfd58bfd1846969de095b03"}, - {file = "pywin32_ctypes-0.2.1-py3-none-any.whl", hash = "sha256:b9a53ef754c894a525469933ab2a447c74ec1ea6b9d2ef446f40ec50d3dcec9f"}, + {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"}, + {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"}, ] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = 
"PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = 
"PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1458,46 +1720,48 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.4.2" +version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"}, - {file = "rich-13.4.2.tar.gz", hash = 
"sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"}, + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.1.8" +version = "0.9.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7de792582f6e490ae6aef36a58d85df9f7a0cfd1b0d4fe6b4fb51803a3ac96fa"}, - {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8e3255afd186c142eef4ec400d7826134f028a85da2146102a1172ecc7c3696"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff78a7583020da124dd0deb835ece1d87bb91762d40c514ee9b67a087940528b"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd8ee69b02e7bdefe1e5da2d5b6eaaddcf4f90859f00281b2333c0e3a0cc9cd6"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a05b0ddd7ea25495e4115a43125e8a7ebed0aa043c3d432de7e7d6e8e8cd6448"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e6f08ca730f4dc1b76b473bdf30b1b37d42da379202a059eae54ec7fc1fbcfed"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f35960b02df6b827c1b903091bb14f4b003f6cf102705efc4ce78132a0aa5af3"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7d076717c67b34c162da7c1a5bda16ffc205e0e0072c03745275e7eab888719f"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a21ab023124eafb7cef6d038f835cb1155cd5ea798edd8d9eb2f8b84be07d9"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ce697c463458555027dfb194cb96d26608abab920fa85213deb5edf26e026664"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:db6cedd9ffed55548ab313ad718bc34582d394e27a7875b4b952c2d29c001b26"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:05ffe9dbd278965271252704eddb97b4384bf58b971054d517decfbf8c523f05"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5daaeaf00ae3c1efec9742ff294b06c3a2a9db8d3db51ee4851c12ad385cda30"}, - {file = "ruff-0.1.8-py3-none-win32.whl", hash = "sha256:e49fbdfe257fa41e5c9e13c79b9e79a23a79bd0e40b9314bc53840f520c2c0b3"}, - {file = "ruff-0.1.8-py3-none-win_amd64.whl", hash = "sha256:f41f692f1691ad87f51708b823af4bb2c5c87c9248ddd3191c8f088e66ce590a"}, - {file = "ruff-0.1.8-py3-none-win_arm64.whl", hash = "sha256:aa8ee4f8440023b0a6c3707f76cadce8657553655dcbb5fc9b2f9bb9bee389f6"}, - {file = "ruff-0.1.8.tar.gz", hash = "sha256:f7ee467677467526cfe135eab86a40a0e8db43117936ac4f9b469ce9cdb3fb62"}, + {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"}, + {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"}, + {file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"}, + {file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"}, + {file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"}, + {file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"}, + {file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"}, ] [[package]] @@ -1515,147 +1779,37 @@ files = [ 
cryptography = ">=2.0" jeepney = ">=0.6" -[[package]] -name = "setuptools" -version = "68.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "shellingham" -version = "1.5.0.post1" +version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" files = [ - {file = "shellingham-1.5.0.post1-py2.py3-none-any.whl", hash = "sha256:368bf8c00754fd4f55afb7bbb86e272df77e4dc76ac29dbcbb81a59e9fc15744"}, - {file = "shellingham-1.5.0.post1.tar.gz", hash = "sha256:823bc5fb5c34d60f285b624e7264f4dda254bc803a3774a147bf99c0e3004a28"}, -] - -[[package]] -name = "simplejson" -version = "3.19.1" -description = "Simple, fast, 
extensible JSON encoder/decoder for Python" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "simplejson-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:412e58997a30c5deb8cab5858b8e2e5b40ca007079f7010ee74565cc13d19665"}, - {file = "simplejson-3.19.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e765b1f47293dedf77946f0427e03ee45def2862edacd8868c6cf9ab97c8afbd"}, - {file = "simplejson-3.19.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3231100edee292da78948fa0a77dee4e5a94a0a60bcba9ed7a9dc77f4d4bb11e"}, - {file = "simplejson-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:081ea6305b3b5e84ae7417e7f45956db5ea3872ec497a584ec86c3260cda049e"}, - {file = "simplejson-3.19.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f253edf694ce836631b350d758d00a8c4011243d58318fbfbe0dd54a6a839ab4"}, - {file = "simplejson-3.19.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:5db86bb82034e055257c8e45228ca3dbce85e38d7bfa84fa7b2838e032a3219c"}, - {file = "simplejson-3.19.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:69a8b10a4f81548bc1e06ded0c4a6c9042c0be0d947c53c1ed89703f7e613950"}, - {file = "simplejson-3.19.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:58ee5e24d6863b22194020eb62673cf8cc69945fcad6b283919490f6e359f7c5"}, - {file = "simplejson-3.19.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:73d0904c2471f317386d4ae5c665b16b5c50ab4f3ee7fd3d3b7651e564ad74b1"}, - {file = "simplejson-3.19.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:66d780047c31ff316ee305c3f7550f352d87257c756413632303fc59fef19eac"}, - {file = "simplejson-3.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd4d50a27b065447c9c399f0bf0a993bd0e6308db8bbbfbc3ea03b41c145775a"}, - {file = "simplejson-3.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c16ec6a67a5f66ab004190829eeede01c633936375edcad7cbf06d3241e5865"}, - {file = 
"simplejson-3.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17a963e8dd4d81061cc05b627677c1f6a12e81345111fbdc5708c9f088d752c9"}, - {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e78d79b10aa92f40f54178ada2b635c960d24fc6141856b926d82f67e56d169"}, - {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad071cd84a636195f35fa71de2186d717db775f94f985232775794d09f8d9061"}, - {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e7c70f19405e5f99168077b785fe15fcb5f9b3c0b70b0b5c2757ce294922c8c"}, - {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54fca2b26bcd1c403146fd9461d1da76199442297160721b1d63def2a1b17799"}, - {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:48600a6e0032bed17c20319d91775f1797d39953ccfd68c27f83c8d7fc3b32cb"}, - {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:93f5ac30607157a0b2579af59a065bcfaa7fadeb4875bf927a8f8b6739c8d910"}, - {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b79642a599740603ca86cf9df54f57a2013c47e1dd4dd2ae4769af0a6816900"}, - {file = "simplejson-3.19.1-cp310-cp310-win32.whl", hash = "sha256:d9f2c27f18a0b94107d57294aab3d06d6046ea843ed4a45cae8bd45756749f3a"}, - {file = "simplejson-3.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:5673d27806085d2a413b3be5f85fad6fca4b7ffd31cfe510bbe65eea52fff571"}, - {file = "simplejson-3.19.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:79c748aa61fd8098d0472e776743de20fae2686edb80a24f0f6593a77f74fe86"}, - {file = "simplejson-3.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:390f4a8ca61d90bcf806c3ad644e05fa5890f5b9a72abdd4ca8430cdc1e386fa"}, - {file = 
"simplejson-3.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d61482b5d18181e6bb4810b4a6a24c63a490c3a20e9fbd7876639653e2b30a1a"}, - {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2541fdb7467ef9bfad1f55b6c52e8ea52b3ce4a0027d37aff094190a955daa9d"}, - {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46133bc7dd45c9953e6ee4852e3de3d5a9a4a03b068bd238935a5c72f0a1ce34"}, - {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f96def94576f857abf58e031ce881b5a3fc25cbec64b2bc4824824a8a4367af9"}, - {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f14ecca970d825df0d29d5c6736ff27999ee7bdf5510e807f7ad8845f7760ce"}, - {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:66389b6b6ee46a94a493a933a26008a1bae0cfadeca176933e7ff6556c0ce998"}, - {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:22b867205cd258050c2625325fdd9a65f917a5aff22a23387e245ecae4098e78"}, - {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c39fa911e4302eb79c804b221ddec775c3da08833c0a9120041dd322789824de"}, - {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:65dafe413b15e8895ad42e49210b74a955c9ae65564952b0243a18fb35b986cc"}, - {file = "simplejson-3.19.1-cp311-cp311-win32.whl", hash = "sha256:f05d05d99fce5537d8f7a0af6417a9afa9af3a6c4bb1ba7359c53b6257625fcb"}, - {file = "simplejson-3.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:b46aaf0332a8a9c965310058cf3487d705bf672641d2c43a835625b326689cf4"}, - {file = "simplejson-3.19.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b438e5eaa474365f4faaeeef1ec3e8d5b4e7030706e3e3d6b5bee6049732e0e6"}, - {file = 
"simplejson-3.19.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9d614a612ad02492f704fbac636f666fa89295a5d22b4facf2d665fc3b5ea9"}, - {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46e89f58e4bed107626edce1cf098da3664a336d01fc78fddcfb1f397f553d44"}, - {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96ade243fb6f3b57e7bd3b71e90c190cd0f93ec5dce6bf38734a73a2e5fa274f"}, - {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed18728b90758d171f0c66c475c24a443ede815cf3f1a91e907b0db0ebc6e508"}, - {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6a561320485017ddfc21bd2ed5de2d70184f754f1c9b1947c55f8e2b0163a268"}, - {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:2098811cd241429c08b7fc5c9e41fcc3f59f27c2e8d1da2ccdcf6c8e340ab507"}, - {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8f8d179393e6f0cf6c7c950576892ea6acbcea0a320838c61968ac7046f59228"}, - {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:eff87c68058374e45225089e4538c26329a13499bc0104b52b77f8428eed36b2"}, - {file = "simplejson-3.19.1-cp36-cp36m-win32.whl", hash = "sha256:d300773b93eed82f6da138fd1d081dc96fbe53d96000a85e41460fe07c8d8b33"}, - {file = "simplejson-3.19.1-cp36-cp36m-win_amd64.whl", hash = "sha256:37724c634f93e5caaca04458f267836eb9505d897ab3947b52f33b191bf344f3"}, - {file = "simplejson-3.19.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:74bf802debe68627227ddb665c067eb8c73aa68b2476369237adf55c1161b728"}, - {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70128fb92932524c89f373e17221cf9535d7d0c63794955cc3cd5868e19f5d38"}, - {file = 
"simplejson-3.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8090e75653ea7db75bc21fa5f7bcf5f7bdf64ea258cbbac45c7065f6324f1b50"}, - {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a755f7bfc8adcb94887710dc70cc12a69a454120c6adcc6f251c3f7b46ee6aac"}, - {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ccb2c1877bc9b25bc4f4687169caa925ffda605d7569c40e8e95186e9a5e58b"}, - {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:919bc5aa4d8094cf8f1371ea9119e5d952f741dc4162810ab714aec948a23fe5"}, - {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e333c5b62e93949f5ac27e6758ba53ef6ee4f93e36cc977fe2e3df85c02f6dc4"}, - {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3a4480e348000d89cf501b5606415f4d328484bbb431146c2971123d49fd8430"}, - {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cb502cde018e93e75dc8fc7bb2d93477ce4f3ac10369f48866c61b5e031db1fd"}, - {file = "simplejson-3.19.1-cp37-cp37m-win32.whl", hash = "sha256:f41915a4e1f059dfad614b187bc06021fefb5fc5255bfe63abf8247d2f7a646a"}, - {file = "simplejson-3.19.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3844305bc33d52c4975da07f75b480e17af3558c0d13085eaa6cc2f32882ccf7"}, - {file = "simplejson-3.19.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1cb19eacb77adc5a9720244d8d0b5507421d117c7ed4f2f9461424a1829e0ceb"}, - {file = "simplejson-3.19.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:926957b278de22797bfc2f004b15297013843b595b3cd7ecd9e37ccb5fad0b72"}, - {file = "simplejson-3.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b0e9a5e66969f7a47dc500e3dba8edc3b45d4eb31efb855c8647700a3493dd8a"}, - {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:79d46e7e33c3a4ef853a1307b2032cfb7220e1a079d0c65488fbd7118f44935a"}, - {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344a5093b71c1b370968d0fbd14d55c9413cb6f0355fdefeb4a322d602d21776"}, - {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23fbb7b46d44ed7cbcda689295862851105c7594ae5875dce2a70eeaa498ff86"}, - {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3025e7e9ddb48813aec2974e1a7e68e63eac911dd5e0a9568775de107ac79a"}, - {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:87b190e6ceec286219bd6b6f13547ca433f977d4600b4e81739e9ac23b5b9ba9"}, - {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc935d8322ba9bc7b84f99f40f111809b0473df167bf5b93b89fb719d2c4892b"}, - {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3b652579c21af73879d99c8072c31476788c8c26b5565687fd9db154070d852a"}, - {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6aa7ca03f25b23b01629b1c7f78e1cd826a66bfb8809f8977a3635be2ec48f1a"}, - {file = "simplejson-3.19.1-cp38-cp38-win32.whl", hash = "sha256:08be5a241fdf67a8e05ac7edbd49b07b638ebe4846b560673e196b2a25c94b92"}, - {file = "simplejson-3.19.1-cp38-cp38-win_amd64.whl", hash = "sha256:ca56a6c8c8236d6fe19abb67ef08d76f3c3f46712c49a3b6a5352b6e43e8855f"}, - {file = "simplejson-3.19.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6424d8229ba62e5dbbc377908cfee9b2edf25abd63b855c21f12ac596cd18e41"}, - {file = "simplejson-3.19.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:547ea86ca408a6735335c881a2e6208851027f5bfd678d8f2c92a0f02c7e7330"}, - {file = "simplejson-3.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:889328873c35cb0b2b4c83cbb83ec52efee5a05e75002e2c0c46c4e42790e83c"}, - {file = 
"simplejson-3.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cdb4e544134f305b033ad79ae5c6b9a32e7c58b46d9f55a64e2a883fbbba01"}, - {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2b3f06430cbd4fac0dae5b2974d2bf14f71b415fb6de017f498950da8159b1"}, - {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d125e754d26c0298715bdc3f8a03a0658ecbe72330be247f4b328d229d8cf67f"}, - {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:476c8033abed7b1fd8db62a7600bf18501ce701c1a71179e4ce04ac92c1c5c3c"}, - {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:199a0bcd792811c252d71e3eabb3d4a132b3e85e43ebd93bfd053d5b59a7e78b"}, - {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a79b439a6a77649bb8e2f2644e6c9cc0adb720fc55bed63546edea86e1d5c6c8"}, - {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:203412745fed916fc04566ecef3f2b6c872b52f1e7fb3a6a84451b800fb508c1"}, - {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca922c61d87b4c38f37aa706520328ffe22d7ac1553ef1cadc73f053a673553"}, - {file = "simplejson-3.19.1-cp39-cp39-win32.whl", hash = "sha256:3e0902c278243d6f7223ba3e6c5738614c971fd9a887fff8feaa8dcf7249c8d4"}, - {file = "simplejson-3.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:d396b610e77b0c438846607cd56418bfc194973b9886550a98fd6724e8c6cfec"}, - {file = "simplejson-3.19.1-py3-none-any.whl", hash = "sha256:4710806eb75e87919b858af0cba4ffedc01b463edc3982ded7b55143f39e41e1"}, - {file = "simplejson-3.19.1.tar.gz", hash = "sha256:6277f60848a7d8319d27d2be767a7546bc965535b28070e310b3a9af90604a4c"}, + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = 
"sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, ] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -1671,82 +1825,86 @@ files = [ [[package]] name = "sphinx" -version = "7.0.1" +version = "7.4.7" description = "Python documentation generator" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "Sphinx-7.0.1.tar.gz", hash = "sha256:61e025f788c5977d9412587e733733a289e2b9fdc2fef8868ddfbfc4ccfe881d"}, - {file = 
"sphinx-7.0.1-py3-none-any.whl", hash = "sha256:60c5e04756c1709a98845ed27a2eed7a556af3993afb66e77fec48189f742616"}, + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.18.1,<0.21" +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" imagesize = ">=1.3" -importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.13" -requests = ">=2.25.0" -snowballstemmer = ">=2.0" +importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] -test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = 
"sphinxcontrib-applehelp" -version = "1.0.4" +version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, - {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.1" +version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, - {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] @@ -1765,107 +1923,130 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = 
"sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sqlalchemy" -version = "1.4.48" +version = "2.0.38" description = "Database Abstraction Library" optional = true -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "SQLAlchemy-1.4.48-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4bac3aa3c3d8bc7408097e6fe8bf983caa6e9491c5d2e2488cfcfd8106f13b6a"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dbcae0e528d755f4522cad5842f0942e54b578d79f21a692c44d91352ea6d64e"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-win32.whl", hash = "sha256:cbbe8b8bffb199b225d2fe3804421b7b43a0d49983f81dc654d0431d2f855543"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-win_amd64.whl", hash = "sha256:627e04a5d54bd50628fc8734d5fc6df2a1aa5962f219c44aad50b00a6cdcf965"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9af1db7a287ef86e0f5cd990b38da6bd9328de739d17e8864f1817710da2d217"}, - {file = 
"SQLAlchemy-1.4.48-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ce7915eecc9c14a93b73f4e1c9d779ca43e955b43ddf1e21df154184f39748e5"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5381ddd09a99638f429f4cbe1b71b025bed318f6a7b23e11d65f3eed5e181c33"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:87609f6d4e81a941a17e61a4c19fee57f795e96f834c4f0a30cee725fc3f81d9"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0808ad34167f394fea21bd4587fc62f3bd81bba232a1e7fbdfa17e6cfa7cd7"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-win32.whl", hash = "sha256:d53cd8bc582da5c1c8c86b6acc4ef42e20985c57d0ebc906445989df566c5603"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-win_amd64.whl", hash = "sha256:4355e5915844afdc5cf22ec29fba1010166e35dd94a21305f49020022167556b"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:066c2b0413e8cb980e6d46bf9d35ca83be81c20af688fedaef01450b06e4aa5e"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c99bf13e07140601d111a7c6f1fc1519914dd4e5228315bbda255e08412f61a4"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee26276f12614d47cc07bc85490a70f559cba965fb178b1c45d46ffa8d73fda"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-win32.whl", hash = "sha256:49c312bcff4728bffc6fb5e5318b8020ed5c8b958a06800f91859fe9633ca20e"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-win_amd64.whl", hash = "sha256:cef2e2abc06eab187a533ec3e1067a71d7bbec69e582401afdf6d8cad4ba3515"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3509159e050bd6d24189ec7af373359f07aed690db91909c131e5068176c5a5d"}, - {file = 
"SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc2ab4d9f6d9218a5caa4121bdcf1125303482a1cdcfcdbd8567be8518969c0"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1ddbbcef9bcedaa370c03771ebec7e39e3944782bef49e69430383c376a250b"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f82d8efea1ca92b24f51d3aea1a82897ed2409868a0af04247c8c1e4fef5890"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-win32.whl", hash = "sha256:e3e98d4907805b07743b583a99ecc58bf8807ecb6985576d82d5e8ae103b5272"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-win_amd64.whl", hash = "sha256:25887b4f716e085a1c5162f130b852f84e18d2633942c8ca40dfb8519367c14f"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0817c181271b0ce5df1aa20949f0a9e2426830fed5ecdcc8db449618f12c2730"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1dd2562313dd9fe1778ed56739ad5d9aae10f9f43d9f4cf81d65b0c85168bb"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:68413aead943883b341b2b77acd7a7fe2377c34d82e64d1840860247cec7ff7c"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbde5642104ac6e95f96e8ad6d18d9382aa20672008cf26068fe36f3004491df"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-win32.whl", hash = "sha256:11c6b1de720f816c22d6ad3bbfa2f026f89c7b78a5c4ffafb220e0183956a92a"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-win_amd64.whl", hash = "sha256:eb5464ee8d4bb6549d368b578e9529d3c43265007193597ddca71c1bae6174e6"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-macosx_10_15_x86_64.whl", hash = 
"sha256:92e6133cf337c42bfee03ca08c62ba0f2d9695618c8abc14a564f47503157be9"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d29a3fc6d9c45962476b470a81983dd8add6ad26fdbfae6d463b509d5adcda"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:005e942b451cad5285015481ae4e557ff4154dde327840ba91b9ac379be3b6ce"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8cfe951ed074ba5e708ed29c45397a95c4143255b0d022c7c8331a75ae61f3"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-win32.whl", hash = "sha256:2b9af65cc58726129d8414fc1a1a650dcdd594ba12e9c97909f1f57d48e393d3"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-win_amd64.whl", hash = "sha256:2b562e9d1e59be7833edf28b0968f156683d57cabd2137d8121806f38a9d58f4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a1fc046756cf2a37d7277c93278566ddf8be135c6a58397b4c940abf837011f4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d9b55252d2ca42a09bcd10a697fa041e696def9dfab0b78c0aaea1485551a08"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6dab89874e72a9ab5462997846d4c760cdb957958be27b03b49cf0de5e5c327c"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fd8b5ee5a3acc4371f820934b36f8109ce604ee73cc668c724abb054cebcb6e"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-win32.whl", hash = "sha256:eee09350fd538e29cfe3a496ec6f148504d2da40dbf52adefb0d2f8e4d38ccc4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-win_amd64.whl", hash = "sha256:7ad2b0f6520ed5038e795cc2852eb5c1f20fa6831d73301ced4aafbe3a10e1f6"}, - {file = "SQLAlchemy-1.4.48.tar.gz", hash = 
"sha256:b47bc287096d989a0838ce96f7d8e966914a24da877ed41a7531d44b55cdb8df"}, +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:402c2316d95ed90d3d3c25ad0390afa52f4d2c56b348f212aa9c8d072a40eee5"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6493bc0eacdbb2c0f0d260d8988e943fee06089cd239bd7f3d0c45d1657a70e2"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0561832b04c6071bac3aad45b0d3bb6d2c4f46a8409f0a7a9c9fa6673b41bc03"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:49aa2cdd1e88adb1617c672a09bf4ebf2f05c9448c6dbeba096a3aeeb9d4d443"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-win32.whl", hash = "sha256:64aa8934200e222f72fcfd82ee71c0130a9c07d5725af6fe6e919017d095b297"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-win_amd64.whl", hash = "sha256:c57b8e0841f3fce7b703530ed70c7c36269c6d180ea2e02e36b34cb7288c50c7"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf89e0e4a30714b357f5d46b6f20e0099d38b30d45fa68ea48589faf5f12f62d"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8455aa60da49cb112df62b4721bd8ad3654a3a02b9452c783e651637a1f21fa2"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f53c0d6a859b2db58332e0e6a921582a02c1677cc93d4cbb36fdf49709b327b2"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c4817dff8cef5697f5afe5fec6bc1783994d55a68391be24cb7d80d2dbc3a6"}, + {file = 
"SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9cea5b756173bb86e2235f2f871b406a9b9d722417ae31e5391ccaef5348f2c"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40e9cdbd18c1f84631312b64993f7d755d85a3930252f6276a77432a2b25a2f3"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-win32.whl", hash = "sha256:cb39ed598aaf102251483f3e4675c5dd6b289c8142210ef76ba24aae0a8f8aba"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-win_amd64.whl", hash = "sha256:f9d57f1b3061b3e21476b0ad5f0397b112b94ace21d1f439f2db472e568178ae"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12d5b06a1f3aeccf295a5843c86835033797fea292c60e72b07bcb5d820e6dd3"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e036549ad14f2b414c725349cce0772ea34a7ab008e9cd67f9084e4f371d1f32"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3bee874cb1fadee2ff2b79fc9fc808aa638670f28b2145074538d4a6a5028e"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e185ea07a99ce8b8edfc788c586c538c4b1351007e614ceb708fd01b095ef33e"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b79ee64d01d05a5476d5cceb3c27b5535e6bb84ee0f872ba60d9a8cd4d0e6579"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afd776cf1ebfc7f9aa42a09cf19feadb40a26366802d86c1fba080d8e5e74bdd"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-win32.whl", hash = "sha256:a5645cd45f56895cfe3ca3459aed9ff2d3f9aaa29ff7edf557fa7a23515a3725"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-win_amd64.whl", hash = "sha256:1052723e6cd95312f6a6eff9a279fd41bbae67633415373fdac3c430eca3425d"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ecef029b69843b82048c5b347d8e6049356aa24ed644006c9a9d7098c3bd3bfd"}, + {file = 
"SQLAlchemy-2.0.38-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c8bcad7fc12f0cc5896d8e10fdf703c45bd487294a986903fe032c72201596b"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0ef3f98175d77180ffdc623d38e9f1736e8d86b6ba70bff182a7e68bed7727"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ac78898c50e2574e9f938d2e5caa8fe187d7a5b69b65faa1ea4648925b096"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9eb4fa13c8c7a2404b6a8e3772c17a55b1ba18bc711e25e4d6c0c9f5f541b02a"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40310db77a55512a18827488e592965d3dec6a3f1e3d8af3f8243134029daca3"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3043375dd5bbcb2282894cbb12e6c559654c67b5fffb462fda815a55bf93f7"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70065dfabf023b155a9c2a18f573e47e6ca709b9e8619b2e04c54d5bcf193178"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c058b84c3b24812c859300f3b5abf300daa34df20d4d4f42e9652a4d1c48c8a4"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0398361acebb42975deb747a824b5188817d32b5c8f8aba767d51ad0cc7bb08d"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-win32.whl", hash = "sha256:a2bc4e49e8329f3283d99840c136ff2cd1a29e49b5624a46a290f04dff48e079"}, + 
{file = "SQLAlchemy-2.0.38-cp37-cp37m-win_amd64.whl", hash = "sha256:9cd136184dd5f58892f24001cdce986f5d7e96059d004118d5410671579834a4"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:665255e7aae5f38237b3a6eae49d2358d83a59f39ac21036413fab5d1e810578"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:92f99f2623ff16bd4aaf786ccde759c1f676d39c7bf2855eb0b540e1ac4530c8"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa498d1392216fae47eaf10c593e06c34476ced9549657fca713d0d1ba5f7248"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9afbc3909d0274d6ac8ec891e30210563b2c8bdd52ebbda14146354e7a69373"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:57dd41ba32430cbcc812041d4de8d2ca4651aeefad2626921ae2a23deb8cd6ff"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3e35d5565b35b66905b79ca4ae85840a8d40d31e0b3e2990f2e7692071b179ca"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-win32.whl", hash = "sha256:f0d3de936b192980209d7b5149e3c98977c3810d401482d05fb6d668d53c1c63"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-win_amd64.whl", hash = "sha256:3868acb639c136d98107c9096303d2d8e5da2880f7706f9f8c06a7f961961149"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07258341402a718f166618470cde0c34e4cec85a39767dce4e24f61ba5e667ea"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a826f21848632add58bef4f755a33d45105d25656a0c849f2dc2df1c71f6f50"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:386b7d136919bb66ced64d2228b92d66140de5fefb3c7df6bd79069a269a7b06"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f2951dc4b4f990a4b394d6b382accb33141d4d3bd3ef4e2b27287135d6bdd68"}, + {file = 
"SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8bf312ed8ac096d674c6aa9131b249093c1b37c35db6a967daa4c84746bc1bc9"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6db316d6e340f862ec059dc12e395d71f39746a20503b124edc255973977b728"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-win32.whl", hash = "sha256:c09a6ea87658695e527104cf857c70f79f14e9484605e205217aae0ec27b45fc"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-win_amd64.whl", hash = "sha256:12f5c9ed53334c3ce719155424dc5407aaa4f6cadeb09c5b627e06abb93933a1"}, + {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, + {file = "sqlalchemy-2.0.38.tar.gz", hash = "sha256:e5a4d82bdb4bf1ac1285a68eab02d253ab73355d9f0fe725a97e1e0fa689decb"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\")"} +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet 
(!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3-binary"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy-stubs" @@ -1884,192 +2065,341 @@ typing-extensions = ">=3.7.4" [[package]] name = "starlette" -version = "0.27.0" +version = "0.45.3" description = "The little ASGI library that shines." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, - {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, + {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"}, + {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"}, ] [package.dependencies] -anyio = ">=3.4.0,<5" +anyio = ">=3.6.2,<5" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = 
"tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + 
{file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = 
"sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tomli-w" -version = "1.0.0" +version = "1.2.0" description = "A lil' TOML writer" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"}, - {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"}, + {file = "tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90"}, + {file = "tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021"}, ] [[package]] name = "tomlkit" -version = "0.11.8" +version = "0.13.2" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, -] - -[[package]] -name = "tortoise-orm" -version = "0.19.3" -description = "Easy async ORM for python, built with relations in mind" -optional = true -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "tortoise_orm-0.19.3-py3-none-any.whl", hash = "sha256:9e368820c70a0866ef9c521d43aa5503485bd7a20a561edc0933b7b0f7036fbc"}, - {file = "tortoise_orm-0.19.3.tar.gz", hash = "sha256:ca574bca5191f55608f9013314b1f5d1c6ffd4165a1fcc2f60f6c902f529b3b6"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = 
"sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] -[package.dependencies] -aiosqlite = ">=0.16.0,<0.18.0" -iso8601 = ">=1.0.2,<2.0.0" -pypika-tortoise = ">=0.1.6,<0.2.0" -pytz = "*" - -[package.extras] -accel = ["ciso8601", "orjson", "uvloop"] -aiomysql = ["aiomysql"] -asyncmy = ["asyncmy (>=0.2.5,<0.3.0)"] -asyncodbc = ["asyncodbc (>=0.1.1,<0.2.0)"] -asyncpg = ["asyncpg"] -psycopg = ["psycopg[binary,pool] (==3.0.12)"] - [[package]] name = "trove-classifiers" -version = "2023.5.24" +version = "2025.1.15.22" description = "Canonical source for classifiers on PyPI (pypi.org)." optional = false python-versions = "*" files = [ - {file = "trove-classifiers-2023.5.24.tar.gz", hash = "sha256:fd5a1546283be941f47540a135bdeae8fb261380a6a204d9c18012f2a1b0ceae"}, - {file = "trove_classifiers-2023.5.24-py3-none-any.whl", hash = "sha256:d9d7ae14fb90bf3d50bef99c3941b176b5326509e6e9037e622562d6352629d0"}, + {file = "trove_classifiers-2025.1.15.22-py3-none-any.whl", hash = "sha256:5f19c789d4f17f501d36c94dbbf969fb3e8c2784d008e6f5164dd2c3d6a2b07c"}, + {file = "trove_classifiers-2025.1.15.22.tar.gz", hash = "sha256:90af74358d3a01b3532bc7b3c88d8c6a094c2fd50a563d13d9576179326d7ed9"}, ] [[package]] name = "typing-extensions" -version = "4.6.2" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = "sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, - {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = 
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "2.0.2" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, - {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "userpath" -version = "1.8.0" +version = "1.9.2" description = "Cross-platform tool for adding locations to the user PATH" optional = false python-versions = ">=3.7" files = [ - {file = "userpath-1.8.0-py3-none-any.whl", hash = "sha256:f133b534a8c0b73511fc6fa40be68f070d9474de1b5aada9cded58cdf23fb557"}, - {file = "userpath-1.8.0.tar.gz", hash = "sha256:04233d2fcfe5cff911c1e4fb7189755640e1524ff87a4b82ab9d6b875fee5787"}, + {file = "userpath-1.9.2-py3-none-any.whl", hash = "sha256:2cbf01a23d655a1ff8fc166dfb78da1b641d1ceabf0fe5f970767d380b14e89d"}, + {file = "userpath-1.9.2.tar.gz", hash = "sha256:6c52288dab069257cc831846d15d48133522455d4677ee69a9781f11dbefd815"}, ] [package.dependencies] click = "*" +[[package]] +name = "uv" +version = "0.5.31" +description = "An extremely fast Python package and project manager, written in Rust." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "uv-0.5.31-py3-none-linux_armv6l.whl", hash = "sha256:ba5707a6e363284ba1acd29ae9e70e2377ed31e272b953069798c444bae847ef"}, + {file = "uv-0.5.31-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3169a373d0d41571a7b9d4a442f875f6e26250693ced7779f62461f52ba1da64"}, + {file = "uv-0.5.31-py3-none-macosx_11_0_arm64.whl", hash = "sha256:335c16f91b46b4f4a3b31c18cf112a0643d59d4c1708a177103621da0addbaef"}, + {file = "uv-0.5.31-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:cedceefebf2123b514464671d0544a8db126071c2d56dbc10d408b8222939e6a"}, + {file = "uv-0.5.31-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7233182a2b8226011562341f05aaee19925b48730fccdb2e7ee20e31a84f12db"}, + {file = "uv-0.5.31-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ce4dc079fd5ddf1946e6085b6ece126ce7c4be23ba27e4010aa68fdec004191"}, + {file = "uv-0.5.31-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:007576e1b62268d4a21d4a375d43ff5ae3698313a11f7702c8e7cb5bd29d7f1b"}, + {file = "uv-0.5.31-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51d8287cdb760ea8c44b374cb96a59fae2292f1b3e18e228f7ed817d2bd96243"}, + {file = "uv-0.5.31-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27ce8f3eecd281a6ec255644a328b60eb10044e506a46be931db7bbfe8db89ab"}, + {file = "uv-0.5.31-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d07e9db12a55005a28bb49ecfa444a0221702158fc021f79e26d8e174f1ebdf9"}, + {file = "uv-0.5.31-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:8acf6bcb0c0c27e1a157926f35dc70b1c7620c1a2e1124ffacdbf21c78265761"}, + {file = "uv-0.5.31-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:a8f27ea8441ce9de43a6af4825d2b936030a0a6864c608f1015db30e9f5f9cdb"}, + {file = "uv-0.5.31-py3-none-musllinux_1_1_i686.whl", hash = 
"sha256:e6b5a29c29e774525baf982f570c53e8862f19e3f7e74bd819c7b3749f4cdfa0"}, + {file = "uv-0.5.31-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:15109a938c56ee1e1c997b291743812af3ea1d7547b0929569494c359082a993"}, + {file = "uv-0.5.31-py3-none-win32.whl", hash = "sha256:f2161ef8b9a0308f05dd4a3eb2c1d104301e23c699fab5898e9fc38387690e4b"}, + {file = "uv-0.5.31-py3-none-win_amd64.whl", hash = "sha256:bcc57b75883516233658ff1daee0d17347a8b872f717a1644d36e8ea2b021f45"}, + {file = "uv-0.5.31-py3-none-win_arm64.whl", hash = "sha256:51ceab5a128dd22bcd62489107563e10084e13ed9c15107193c2d7d1139979f4"}, + {file = "uv-0.5.31.tar.gz", hash = "sha256:59c4c6e3704208a8dd5e8d51b79ec995db18a64bd3ff88fd239ca433fbaf1694"}, +] + [[package]] name = "uvicorn" -version = "0.22.0" +version = "0.34.0" description = "The lightning-fast ASGI server." -optional = false -python-versions = ">=3.7" +optional = true +python-versions = ">=3.9" files = [ - {file = "uvicorn-0.22.0-py3-none-any.whl", hash = "sha256:e9434d3bbf05f310e762147f769c9f21235ee118ba2d2bf1155a7196448bd996"}, - {file = "uvicorn-0.22.0.tar.gz", hash = "sha256:79277ae03db57ce7d9aa0567830bbb51d7a612f54d6e1e3e92da3ef24c2c8ed8"}, + {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, + {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, ] [package.dependencies] click = ">=7.0" h11 = ">=0.8" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "virtualenv" -version = "20.23.1" 
+version = "20.29.2" description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "virtualenv-20.23.1-py3-none-any.whl", hash = "sha256:34da10f14fea9be20e0fd7f04aba9732f84e593dac291b757ce42e3368a39419"}, - {file = "virtualenv-20.23.1.tar.gz", hash = "sha256:8ff19a38c1021c742148edc4f81cb43d7f8c6816d2ede2ab72af5b84c749ade1"}, + {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, + {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, ] [package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.12,<4" -platformdirs = ">=3.5.1,<4" +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "zipp" -version = "3.15.0" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false 
-python-versions = ">=3.7" +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[[package]] +name = "zstandard" +version = "0.23.0" +description = "Zstandard bindings for Python" +optional = false +python-versions = ">=3.8" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, + {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"}, + {file = 
"zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"}, + {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"}, + {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"}, + {file = 
"zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"}, + {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = 
"sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"}, + {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"}, + {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"}, + {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"}, + {file = 
"zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"}, + {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"}, + {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"}, + {file = 
"zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"}, + {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"}, + {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"}, + {file = 
"zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"}, + {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"}, + {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"}, + {file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"}, ] +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +cffi = ["cffi (>=1.11)"] [extras] -databases = ["sqlalchemy", "tortoise-orm"] +databases = ["sqlalchemy"] sqla = ["sqlalchemy"] -tortoise = ["tortoise-orm"] +uvicorn = ["uvicorn"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "7b85a97137b5a7a8983babc8127252ede873a1993e986f43228c1ae210eb0ab8" +content-hash = "0182b5ef4cb4ae7df2efe8c076866c3861ffb5eaee6080fd1afc2b9845185f4f" diff --git a/pyproject.toml b/pyproject.toml index 5ef57294..784e0b3b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "hatchling.build" [project] name = "FastAPI-JSONAPI" dynamic = ["version"] -description = "FastAPI extension to create REST web api according to JSON:API 1.0 specification with FastAPI, Pydantic and data provider of your choice (SQLAlchemy, Tortoise ORM)" +description = "FastAPI extension to create REST web api according to JSON:API 1.0 specification with FastAPI, Pydantic and data provider (SQLAlchemy)" readme = "README.md" license = "MIT" authors = [ @@ -24,33 +24,33 @@ 
classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Utilities", ] dependencies = [ - "fastapi>=0.79.0", - "pydantic>=1.9.1", - "simplejson>=3.17.6", - "uvicorn>=0.18.2", + "fastapi>=0.112.3", + "orjson>=3.10.0", + "pydantic>=2.6.0", ] [project.optional-dependencies] all = [ "pytest", "sphinx", - "SQLAlchemy[asyncio]>=1.4.39", - "tortoise-orm>=0.19.2", + "SQLAlchemy[asyncio]>=2.0.26", + "uvicorn>=0.18.2", ] docs = [ "sphinx", ] sqlalchemy = [ - "SQLAlchemy[asyncio]>=1.4.39", + "SQLAlchemy[asyncio]>=2.0.26", ] tests = [ "pytest", ] -tortoise-orm = [ - "tortoise-orm>=0.19.2", +uvicorn = [ + "uvicorn>=0.18.2", ] [project.urls] @@ -77,13 +77,12 @@ package-mode = false [tool.poetry.dependencies] python = "^3.9" -fastapi = ">=0.79.0" -pydantic = ">=1.9.1" -simplejson = ">=3.17.6" -uvicorn = ">=0.18.2" +fastapi = ">=0.112.3" +orjson = ">=3.10.0" +pydantic = ">=2.6.0" -sqlalchemy = { version = ">=1.4.39", optional = true, extras = ["asyncio"] } -tortoise-orm = { version = ">=0.19.2", optional = true } +uvicorn = { version = ">=0.18.2", optional = true } +sqlalchemy = { version = ">=2.0.26", optional = true, extras = ["asyncio"] } [tool.poetry.group.tests.dependencies] pytest = "^7.3.1" @@ -97,11 +96,11 @@ asyncpg = "0.28.0" [tool.poetry.group.lint.dependencies] -black = "^23.3.0" -ruff = "^0.1.8" -mypy = "^1.4.1" +black = "^25.1.0" +mypy = "^1.14.1" +pre-commit = "^4.1.0" +ruff = "^0.9.4" sqlalchemy-stubs = "^0.4" -pre-commit = "^3.3.3" [tool.poetry.group.docs.dependencies] sphinx = "^7.0.1" @@ -112,8 +111,15 @@ hatch = "^1.7.0" [tool.poetry.extras] sqla = ["sqlalchemy"] -tortoise = ["tortoise-orm"] -databases = ["sqlalchemy", "tortoise-orm"] +databases = ["sqlalchemy"] +uvicorn = ["uvicorn"] + +[tool.pytest.ini_options] +filterwarnings = [ + "ignore::DeprecationWarning", + "ignore::PendingDeprecationWarning" +] +asyncio_mode = 
"auto" [tool.black] line-length = 119 @@ -122,7 +128,8 @@ target-version = ["py38"] [tool.ruff] line-length = 119 target-version = "py39" -select = [ + +lint.select = [ "E", "W", "F", @@ -143,15 +150,12 @@ select = [ "TID", "ARG", "PTH", -# "ERA", "PL", "PLE", "PLR", "PLW", "RUF", "ASYNC", -# "UP", -# "ANN", ] # Exclude a variety of commonly ignored directories. @@ -178,13 +182,12 @@ exclude = [ "node_modules", "venv", "docs/*", - "examples/api_for_tortoise_orm/*", ] # Avoid trying to fix flake8-bugbear (`B`) violations. -unfixable = ["B"] +lint.unfixable = ["B"] -extend-ignore = [ +lint.extend-ignore = [ "D401", "D403", "D400", @@ -214,12 +217,12 @@ extend-ignore = [ "PT006", # pytest parametrize tuple args ] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "examples/api_for_sqlalchemy/*" = [ "E402", "D105", ] -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] # Unlike Flake8, default to a complexity level of 10. max-complexity = 10 diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 2f4c80e3..00000000 --- a/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -asyncio_mode = auto diff --git a/tests/common_user_api_test.py b/tests/common_user_api_test.py index aeef9808..7bde15ab 100644 --- a/tests/common_user_api_test.py +++ b/tests/common_user_api_test.py @@ -1,15 +1,14 @@ from typing import Literal -from fastapi import FastAPI +from fastapi import FastAPI, status from httpx import AsyncClient from pydantic import BaseModel from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession -from starlette import status +from examples.api_for_sqlalchemy.models import User +from examples.api_for_sqlalchemy.schemas import UserAttributesBaseSchema from tests.misc.utils import fake -from tests.models import User -from tests.schemas import UserAttributesBaseSchema FIELD_CUSTOM_NAME = "custom_name" @@ -42,13 +41,12 @@ def prepare_user_create_data( user_attributes: UserAttributesBaseSchema, resource_type: str, ): - data_user_attributes 
= user_attributes.dict() + data_user_attributes = user_attributes.model_dump() data_user_attributes[self.FIELD_CUSTOM_NAME] = self.validator_create.expected_value - data_user_create = { + return { "type": resource_type, "attributes": data_user_attributes, } - return data_user_create def prepare_user_update_data( self, @@ -59,29 +57,22 @@ def prepare_user_update_data( for field_name, value in user_attributes: assert getattr(user, field_name) != value - data_user_attributes = user_attributes.dict() + data_user_attributes = user_attributes.model_dump() data_user_attributes[self.FIELD_CUSTOM_NAME] = self.validator_update.expected_value - data_user_update = { - "id": user.id, + return { + "id": f"{user.id}", "type": resource_type, "attributes": data_user_attributes, } - return data_user_update def validate_field_not_passed_response(self, response, expected_status=status.HTTP_422_UNPROCESSABLE_ENTITY): assert response.status_code == expected_status, response.text response_data = response.json() - assert response_data == { - "detail": [ - { - "loc": ["body", "data", "attributes", self.FIELD_CUSTOM_NAME], - "msg": "field required", - "type": "value_error.missing", - }, - ], - } + assert response_data["detail"][0]["loc"] == ["body", "data", "attributes", self.FIELD_CUSTOM_NAME] + assert response_data["detail"][0]["msg"] == "Field required" - def validate_field_value_invalid_response(self, response, validator: ValidateCustomNameEqualsBase): + @classmethod + def validate_field_value_invalid_response(cls, response, validator: ValidateCustomNameEqualsBase): assert response.status_code == validator.STATUS_ON_ERROR, response.text response_data = response.json() assert response_data["detail"].pop("error") @@ -98,7 +89,7 @@ async def validate_user_creation_on_error_key_not_passed( resource_type: str, user_attributes: UserAttributesBaseSchema, ): - attributes_data = user_attributes.dict() + attributes_data = user_attributes.model_dump() assert self.FIELD_CUSTOM_NAME not in 
attributes_data data_user_create = { "data": { @@ -117,7 +108,7 @@ async def validate_user_creation_test_error_value_passed_but_invalid( resource_type: str, user_attributes: UserAttributesBaseSchema, ): - attributes_data = user_attributes.dict() + attributes_data = user_attributes.model_dump() attributes_data[self.FIELD_CUSTOM_NAME] = fake.word() assert attributes_data[self.FIELD_CUSTOM_NAME] != self.validator_create.expected_value data_user_create = { @@ -138,11 +129,11 @@ async def validate_user_update_error_key_not_passed( resource_type: str, user_attributes: UserAttributesBaseSchema, ): - attributes_data = user_attributes.dict() + attributes_data = user_attributes.model_dump() assert self.FIELD_CUSTOM_NAME not in attributes_data data_user_update = { "data": { - "id": user.id, + "id": f"{user.id}", "type": resource_type, "attributes": attributes_data, }, @@ -159,12 +150,12 @@ async def validate_user_update_error_value_passed_but_invalid( resource_type: str, user_attributes: UserAttributesBaseSchema, ): - attributes_data = user_attributes.dict() + attributes_data = user_attributes.model_dump() attributes_data[self.FIELD_CUSTOM_NAME] = fake.word() assert attributes_data[self.FIELD_CUSTOM_NAME] != self.validator_update.expected_value data_user_update = { "data": { - "id": user.id, + "id": f"{user.id}", "type": resource_type, "attributes": attributes_data, }, @@ -191,10 +182,10 @@ async def validate_created_user( ), ) assert isinstance(user, User) - assert user_created_data["id"] == str(user.id) - assert user_created_data["attributes"] == user_attributes.dict() + assert user_created_data["id"] == f"{user.id}" + assert user_created_data["attributes"] == user_attributes.model_dump() assert user_created_data["type"] == resource_type - assert user_attributes == UserAttributesBaseSchema.from_orm(user) + assert user_attributes == UserAttributesBaseSchema.model_validate(user) async def validate_generic_user_create_works( self, @@ -220,8 +211,9 @@ async def 
validate_generic_user_create_works( resource_type=resource_type, ) + @classmethod async def validate_updated_user( - self, + cls, user: User, async_session: AsyncSession, user_updated_data: dict, @@ -229,10 +221,10 @@ async def validate_updated_user( resource_type: str, ): await async_session.refresh(user) - assert user_updated_data["id"] == str(user.id) - assert user_updated_data["attributes"] == user_attributes.dict() + assert user_updated_data["id"] == f"{user.id}" + assert user_updated_data["attributes"] == user_attributes.model_dump() assert user_updated_data["type"] == resource_type - assert user_attributes == UserAttributesBaseSchema.from_orm(user) + assert user_attributes == UserAttributesBaseSchema.model_validate(user) async def validate_generic_user_update_works( self, diff --git a/tests/conftest.py b/tests/conftest.py index 45ecccfa..6ec7ed4a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,7 @@ import asyncio import logging +from collections import defaultdict +from copy import copy import pytest from fastapi import FastAPI @@ -7,6 +9,8 @@ from pytest import fixture # noqa PT013 from pytest_asyncio import fixture as async_fixture +from fastapi_jsonapi.atomic.prepared_atomic_operation import atomic_dependency_handlers +from fastapi_jsonapi.data_layers.sqla.query_building import relationships_info_storage from tests.fixtures.app import ( # noqa app, app_plain, @@ -14,7 +18,7 @@ from tests.fixtures.db_connection import ( # noqa async_engine, async_session, - async_session_plain, + refresh_db, ) from tests.fixtures.entities import ( # noqa child_1, @@ -32,6 +36,8 @@ parent_1, parent_2, parent_3, + task_1, + task_2, user_1, user_1_bio, user_1_comments_for_u2_posts, @@ -50,11 +56,7 @@ user_attributes, user_attributes_factory, ) -from tests.fixtures.views import ( # noqa - DetailViewBaseGeneric, - ListViewBaseGeneric, -) -from tests.models import Base +from tests.fixtures.views import ViewBaseGeneric # noqa def configure_logging(): @@ -86,8 
+88,14 @@ async def client(app: FastAPI) -> AsyncClient: # noqa yield ac -@async_fixture(autouse=True) -async def refresh_db(async_engine): # noqa F811 - async with async_engine.begin() as connector: - for table in reversed(Base.metadata.sorted_tables): - await connector.execute(table.delete()) +@pytest.fixture +def clear_relationships_info_storage(): + data = copy(relationships_info_storage._data) + relationships_info_storage._data = defaultdict(dict) + yield + relationships_info_storage._data = data + + +@pytest.fixture(autouse=True) +def clear_atomic_dependency_handlers(): + atomic_dependency_handlers.clear() diff --git a/tests/fixtures/app.py b/tests/fixtures/app.py index 70998e58..8973bee5 100644 --- a/tests/fixtures/app.py +++ b/tests/fixtures/app.py @@ -3,45 +3,25 @@ import pytest from fastapi import APIRouter, FastAPI -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict -from fastapi_jsonapi import RoutersJSONAPI, init -from fastapi_jsonapi.atomic import AtomicOperations -from fastapi_jsonapi.data_typing import TypeModel -from fastapi_jsonapi.views.detail_view import DetailViewBase -from fastapi_jsonapi.views.list_view import ListViewBase -from tests.fixtures.views import ( - DetailViewBaseGeneric, - ListViewBaseGeneric, -) -from tests.models import ( - Alpha, - Beta, +from examples.api_for_sqlalchemy.models import ( Child, Computer, - CustomUUIDItem, - Delta, - Gamma, Parent, ParentToChildAssociation, Post, PostComment, - Task, User, UserBio, ) -from tests.schemas import ( - AlphaSchema, - BetaSchema, +from examples.api_for_sqlalchemy.schemas import ( ChildInSchema, ChildPatchSchema, ChildSchema, ComputerInSchema, ComputerPatchSchema, ComputerSchema, - CustomUUIDItemSchema, - DeltaSchema, - GammaSchema, ParentPatchSchema, ParentSchema, ParentToChildAssociationSchema, @@ -49,18 +29,30 @@ PostInSchema, PostPatchSchema, PostSchema, - TaskInSchema, - TaskPatchSchema, - TaskSchema, - UserBioSchema, + UserBioBaseSchema, UserInSchema, 
UserPatchSchema, UserSchema, ) +from fastapi_jsonapi import ApplicationBuilder +from fastapi_jsonapi.atomic import AtomicOperations +from fastapi_jsonapi.data_typing import TypeModel +from fastapi_jsonapi.views.view_base import ViewBase -CURRENT_FILE = Path(__file__).resolve() -CURRENT_DIR = CURRENT_FILE.parent +from .models import Alpha, Beta, CustomUUIDItem, Delta, Gamma, Task +from .schemas import ( + AlphaSchema, + BetaSchema, + CustomUUIDItemSchema, + DeltaSchema, + GammaSchema, + TaskInSchema, + TaskPatchSchema, + TaskSchema, +) +from .views import ViewBaseGeneric +CURRENT_DIR = Path(__file__).resolve().parent MAX_INCLUDE_DEPTH = 5 @@ -72,141 +64,105 @@ def build_app_plain() -> FastAPI: docs_url="/docs", ) app.config = {"MAX_INCLUDE_DEPTH": MAX_INCLUDE_DEPTH} - return app def add_routers(app_plain: FastAPI): router: APIRouter = APIRouter() - - RoutersJSONAPI( - router=router, - path="/users", - tags=["User"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, - schema=UserSchema, - resource_type="user", - schema_in_patch=UserPatchSchema, - schema_in_post=UserInSchema, - model=User, - ) - - RoutersJSONAPI( - router=router, - path="/posts", - tags=["Post"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, - schema=PostSchema, - resource_type="post", - schema_in_patch=PostPatchSchema, - schema_in_post=PostInSchema, - model=Post, - ) - - RoutersJSONAPI( - router=router, - path="/comments", - tags=["Comment"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, - schema=PostCommentSchema, - resource_type="post_comment", - model=PostComment, - ) - - RoutersJSONAPI( - router=router, - path="/user-bio", - tags=["Bio"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, - schema=UserBioSchema, - resource_type="user_bio", - model=UserBio, - ) - - RoutersJSONAPI( - router=router, - path="/parents", - tags=["Parent"], - class_detail=DetailViewBaseGeneric, - 
class_list=ListViewBaseGeneric, - schema=ParentSchema, - resource_type="parent", - schema_in_patch=ParentPatchSchema, - schema_in_post=ParentPatchSchema, - model=Parent, - ) - - RoutersJSONAPI( - router=router, + builder = ApplicationBuilder(app=app_plain, base_router=router) + builder.add_resource( path="/children", tags=["Child"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, - schema=ChildSchema, resource_type="child", + view=ViewBaseGeneric, + schema=ChildSchema, schema_in_patch=ChildPatchSchema, schema_in_post=ChildInSchema, model=Child, ) - - RoutersJSONAPI( - router=router, - path="/parent-to-child-association", - tags=["Parent To Child Association"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, - schema=ParentToChildAssociationSchema, - resource_type="parent-to-child-association", - model=ParentToChildAssociation, + builder.add_resource( + path="/comments", + tags=["Comment"], + resource_type="post_comment", + view=ViewBaseGeneric, + schema=PostCommentSchema, + model=PostComment, ) - - RoutersJSONAPI( - router=router, + builder.add_resource( path="/computers", tags=["Computer"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, + resource_type="computer", + view=ViewBaseGeneric, model=Computer, schema=ComputerSchema, - resource_type="computer", schema_in_patch=ComputerPatchSchema, schema_in_post=ComputerInSchema, ) - - RoutersJSONAPI( - router=router, + builder.add_resource( + path="/custom-uuid-item", + tags=["Custom UUID Item"], + resource_type="custom_uuid_item", + view=ViewBaseGeneric, + model=CustomUUIDItem, + schema=CustomUUIDItemSchema, + ) + builder.add_resource( + path="/parent-to-child-association", + tags=["Parent To Child Association"], + resource_type="parent-to-child-association", + view=ViewBaseGeneric, + model=ParentToChildAssociation, + schema=ParentToChildAssociationSchema, + ) + builder.add_resource( + path="/parents", + tags=["Parent"], + resource_type="parent", + 
view=ViewBaseGeneric, + model=Parent, + schema=ParentSchema, + schema_in_patch=ParentPatchSchema, + schema_in_post=ParentPatchSchema, + ) + builder.add_resource( + path="/posts", + tags=["Post"], + resource_type="post", + view=ViewBaseGeneric, + schema=PostSchema, + schema_in_patch=PostPatchSchema, + schema_in_post=PostInSchema, + model=Post, + ) + builder.add_resource( path="/tasks", tags=["Task"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, + resource_type="task", + view=ViewBaseGeneric, model=Task, schema=TaskSchema, - resource_type="task", schema_in_patch=TaskPatchSchema, schema_in_post=TaskInSchema, ) - - RoutersJSONAPI( - router=router, - path="/custom-uuid-item", - tags=["Custom UUID Item"], - class_detail=DetailViewBaseGeneric, - class_list=ListViewBaseGeneric, - model=CustomUUIDItem, - schema=CustomUUIDItemSchema, - resource_type="custom_uuid_item", + builder.add_resource( + path="/user-bio", + tags=["Bio"], + resource_type="user_bio", + model=UserBio, + view=ViewBaseGeneric, + schema=UserBioBaseSchema, ) - - atomic = AtomicOperations() - - app_plain.include_router(router, prefix="") - app_plain.include_router(atomic.router, prefix="") - - init(app_plain) + builder.add_resource( + path="/users", + tags=["User"], + resource_type="user", + view=ViewBaseGeneric, + model=User, + schema=UserSchema, + schema_in_patch=UserPatchSchema, + schema_in_post=UserInSchema, + ) + builder.initialize() return app_plain @@ -219,7 +175,6 @@ def app_plain() -> FastAPI: @pytest.fixture(scope="session") def app(app_plain: FastAPI): add_routers(app_plain) - return app_plain @@ -230,34 +185,28 @@ def build_app_custom( schema_in_post=None, path: str = "/misc", resource_type: str = "misc", - class_list: Type[ListViewBase] = ListViewBaseGeneric, - class_detail: Type[DetailViewBase] = DetailViewBaseGeneric, - max_cache_size: int = 0, + view: Type[ViewBase] = ViewBaseGeneric, ) -> FastAPI: router: APIRouter = APIRouter() - - jsonapi_routers = RoutersJSONAPI( 
+ app = build_app_plain() + builder = ApplicationBuilder(app=app) + builder.add_resource( router=router, path=path, tags=["Misc"], - class_list=class_list, - class_detail=class_detail, + view=view, schema=schema, resource_type=resource_type, schema_in_patch=schema_in_patch, schema_in_post=schema_in_post, model=model, - max_cache_size=max_cache_size, ) + builder.initialize() - app = build_app_plain() app.include_router(router, prefix="") atomic = AtomicOperations() app.include_router(atomic.router, prefix="") - init(app) - - app.jsonapi_routers = jsonapi_routers return app @@ -294,29 +243,30 @@ def build_alphabet_app() -> FastAPI: class ResourceInfoDTO(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) + path: str resource_type: str model: Type[TypeModel] schema_: Type[BaseModel] schema_in_patch: Optional[BaseModel] = None schema_in_post: Optional[BaseModel] = None - class_list: Type[ListViewBase] = ListViewBaseGeneric - class_detail: Type[DetailViewBase] = DetailViewBaseGeneric - - class Config: - arbitrary_types_allowed = True + view: Type[ViewBase] = ViewBaseGeneric def build_custom_app_by_schemas(resources_info: list[ResourceInfoDTO]): router: APIRouter = APIRouter() + app = build_app_plain() + builder = ApplicationBuilder(app) for info in resources_info: - RoutersJSONAPI( + builder.add_resource( router=router, path=info.path, tags=["Misc"], - class_list=info.class_list, - class_detail=info.class_detail, + view=ViewBaseGeneric, schema=info.schema_, resource_type=info.resource_type, schema_in_patch=info.schema_in_patch, @@ -324,10 +274,10 @@ def build_custom_app_by_schemas(resources_info: list[ResourceInfoDTO]): model=info.model, ) - app = build_app_plain() + builder.initialize() app.include_router(router, prefix="") atomic = AtomicOperations() app.include_router(atomic.router, prefix="") - init(app) + return app diff --git a/tests/fixtures/db_connection.py b/tests/fixtures/db_connection.py index 70eb353e..017f9e80 100644 --- 
a/tests/fixtures/db_connection.py +++ b/tests/fixtures/db_connection.py @@ -1,56 +1,35 @@ -from pytest import fixture # noqa PT013 from pytest_asyncio import fixture as async_fixture from sqlalchemy.engine import make_url -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker +from examples.api_for_sqlalchemy.models.base import Base +from examples.api_for_sqlalchemy.models.db import DB from tests.common import sqla_uri -from tests.models import Base - -def get_async_sessionmaker() -> sessionmaker: - engine = create_async_engine(url=make_url(sqla_uri())) - _async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) - return _async_session +db = DB( + url=make_url(sqla_uri()), +) async def async_session_dependency(): - """ - Get session as dependency - - :return: - """ - session_maker = get_async_sessionmaker() - async with session_maker() as db_session: # type: AsyncSession - yield db_session - await db_session.rollback() + async with db.session_maker() as session: + yield session @async_fixture(scope="class") async def async_engine(): - engine = create_async_engine( - url=make_url(sqla_uri()), - echo=False, - # echo=True, - ) - async with engine.begin() as conn: + async with db.engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) await conn.run_sync(Base.metadata.create_all) - return engine @async_fixture(scope="class") -async def async_session_plain(async_engine): - session = sessionmaker( - bind=async_engine, - class_=AsyncSession, - expire_on_commit=False, - ) - return session +async def async_session(async_engine): + async with db.session_maker() as session: + yield session -@async_fixture(scope="class") -async def async_session(async_session_plain): - async with async_session_plain() as session: # type: AsyncSession - yield session - await session.rollback() +@async_fixture(autouse=True) +async def refresh_db(async_engine): # F811 + async with db.engine.begin() 
as connector: + for table in reversed(Base.metadata.sorted_tables): + await connector.execute(table.delete()) diff --git a/tests/fixtures/debug_app.py b/tests/fixtures/debug_app.py index 2571f739..a634c73e 100644 --- a/tests/fixtures/debug_app.py +++ b/tests/fixtures/debug_app.py @@ -18,5 +18,5 @@ host="0.0.0.0", port=8082, reload=True, - app_dir=str(CURRENT_DIR), + app_dir=f"{CURRENT_DIR}", ) diff --git a/tests/fixtures/entities.py b/tests/fixtures/entities.py index f5f45970..9d26ed76 100644 --- a/tests/fixtures/entities.py +++ b/tests/fixtures/entities.py @@ -1,13 +1,10 @@ -from __future__ import annotations +from typing import Awaitable, Callable, Optional -from typing import Awaitable, Callable, List - -from pytest import fixture # noqa +import pytest from pytest_asyncio import fixture as async_fixture from sqlalchemy.ext.asyncio import AsyncSession -from tests.misc.utils import fake -from tests.models import ( +from examples.api_for_sqlalchemy.models import ( Child, Computer, Parent, @@ -18,6 +15,9 @@ UserBio, Workplace, ) +from tests.common import is_postgres_tests +from tests.fixtures.models import Task +from tests.misc.utils import fake def build_user(**fields) -> User: @@ -29,21 +29,47 @@ def build_user(**fields) -> User: return User(**(fake_fields | fields)) -async def create_user(async_session: AsyncSession, **fields): +def build_computer(**fields) -> Computer: + fields = { + "name": fake.name(), + **fields, + } + return Computer(**fields) + + +def build_user_bio(user: User, **fields) -> UserBio: + return UserBio(user=user, **fields) + + +async def create_user(async_session: AsyncSession, **fields) -> User: user = build_user(**fields) async_session.add(user) await async_session.commit() - return user +async def create_user_bio(async_session: AsyncSession, user: User, **fields) -> UserBio: + user_bio = build_user_bio(user=user, **fields) + async_session.add(user_bio) + await async_session.commit() + return user_bio + + +async def 
create_computer(async_session: AsyncSession, **fields) -> Computer: + computer = build_computer(**fields) + async_session.add(computer) + await async_session.commit() + return computer + + @async_fixture() async def user_1(async_session: AsyncSession): user = build_user() async_session.add(user) await async_session.commit() - await async_session.refresh(user) + yield user + await async_session.delete(user) await async_session.commit() @@ -53,8 +79,9 @@ async def user_2(async_session: AsyncSession): user = build_user() async_session.add(user) await async_session.commit() - await async_session.refresh(user) + yield user + await async_session.delete(user) await async_session.commit() @@ -64,51 +91,51 @@ async def user_3(async_session: AsyncSession): user = build_user() async_session.add(user) await async_session.commit() - await async_session.refresh(user) - yield user - await async_session.delete(user) - await async_session.commit() + yield user -async def build_user_bio(async_session: AsyncSession, user: User, **fields): - bio = UserBio(user=user, **fields) - async_session.add(bio) + await async_session.delete(user) await async_session.commit() - return bio @async_fixture() async def user_1_bio(async_session: AsyncSession, user_1: User) -> UserBio: - return await build_user_bio( - async_session, - user_1, + return await create_user_bio( + async_session=async_session, + user=user_1, birth_city="Moscow", favourite_movies="Django, Alien", - keys_to_ids_list={"key": [1, 2, 3]}, ) @async_fixture() async def user_2_bio(async_session: AsyncSession, user_2: User) -> UserBio: - return await build_user_bio( - async_session, - user_2, + return await create_user_bio( + async_session=async_session, + user=user_2, birth_city="Snezhnogorsk", favourite_movies="A Beautiful Mind, Rocky", - keys_to_ids_list={"key": [0, 1, 2]}, ) -async def build_post(async_session: AsyncSession, user: User, **fields) -> Post: - fields = {"title": fake.name(), "body": fake.sentence(), **fields} - post 
= Post(user=user, **fields) +def build_post(user: User, **fields) -> Post: + fields = { + "title": fake.name(), + "body": fake.sentence(), + **fields, + } + return Post(user=user, **fields) + + +async def create_post(async_session: AsyncSession, user: User, **fields) -> Post: + post = build_post(user, **fields) async_session.add(post) await async_session.commit() return post @async_fixture() -async def user_1_posts(async_session: AsyncSession, user_1: User) -> List[Post]: +async def user_1_posts(async_session: AsyncSession, user_1: User) -> list[Post]: posts = [ Post( title=f"post_u1_{i}", @@ -119,21 +146,18 @@ async def user_1_posts(async_session: AsyncSession, user_1: User) -> List[Post]: ] async_session.add_all(posts) await async_session.commit() - - for post in posts: - await async_session.refresh(post) - return posts @async_fixture() async def user_1_post(async_session: AsyncSession, user_1: User): - post = Post(title="post_for_u1", user=user_1) + post = Post( + title="post_for_u1", + user=user_1, + ) async_session.add(post) await async_session.commit() - await async_session.refresh(post) - yield post await async_session.delete(post) @@ -141,7 +165,7 @@ async def user_1_post(async_session: AsyncSession, user_1: User): @async_fixture() -async def user_2_posts(async_session: AsyncSession, user_2: User) -> List[Post]: +async def user_2_posts(async_session: AsyncSession, user_2: User) -> list[Post]: posts = [ Post( title=f"post_u2_{i}", @@ -152,10 +176,6 @@ async def user_2_posts(async_session: AsyncSession, user_2: User) -> List[Post]: ] async_session.add_all(posts) await async_session.commit() - - for post in posts: - await async_session.refresh(post) - return posts @@ -165,16 +185,13 @@ async def user_1_comments_for_u2_posts(async_session: AsyncSession, user_1, user PostComment( text=f"comment_{i}_for_post_{post.id}", post=post, - author=user_1, + user=user_1, ) for i, post in enumerate(user_2_posts, start=1) ] async_session.add_all(post_comments) await 
async_session.commit() - for comment in post_comments: - await async_session.refresh(comment) - yield post_comments for comment in post_comments: @@ -182,18 +199,18 @@ async def user_1_comments_for_u2_posts(async_session: AsyncSession, user_1, user await async_session.commit() -@fixture() -def user_1_post_for_comments(user_1_posts: List[Post]) -> Post: +@pytest.fixture +def user_1_post_for_comments(user_1_posts: list[Post]) -> Post: return user_1_posts[0] @async_fixture() async def computer_1(async_session: AsyncSession): - computer = Computer(name="Halo") - + computer = Computer( + name="Halo", + ) async_session.add(computer) await async_session.commit() - await async_session.refresh(computer) yield computer @@ -203,11 +220,11 @@ async def computer_1(async_session: AsyncSession): @async_fixture() async def computer_2(async_session: AsyncSession): - computer = Computer(name="Nestor") - + computer = Computer( + name="Nestor", + ) async_session.add(computer) await async_session.commit() - await async_session.refresh(computer) yield computer @@ -216,29 +233,35 @@ async def computer_2(async_session: AsyncSession): @async_fixture() -async def computer_factory(async_session: AsyncSession) -> Callable[[str | None], Awaitable[Computer]]: - async def factory(name: str | None = None) -> Computer: +async def computer_factory(async_session: AsyncSession) -> Callable[[Optional[str]], Awaitable[Computer]]: + async def factory(name: Optional[str] = None) -> Computer: computer = Computer(name=name or fake.word()) async_session.add(computer) await async_session.commit() - await async_session.refresh(computer) return computer return factory -async def build_post_comment( +def build_post_comment(user: User, post: Post, **fields) -> PostComment: + fields = { + "text": fake.sentence(), + **fields, + } + return PostComment( + user=user, + post=post, + **fields, + ) + + +async def create_post_comment( async_session: AsyncSession, user: User, post: Post, **fields, ) -> PostComment: - 
fields = {"text": fake.sentence(), **fields} - post_comment = PostComment( - author=user, - post=post, - **fields, - ) + post_comment = build_post_comment(user=user, post=post, **fields) async_session.add(post_comment) await async_session.commit() return post_comment @@ -246,17 +269,14 @@ async def build_post_comment( @async_fixture() async def user_2_comment_for_one_u1_post(async_session: AsyncSession, user_2, user_1_post_for_comments): - post = user_1_post_for_comments post_comment = PostComment( - text=f"one_comment_from_u2_for_post_{post.id}", - post=post, - author=user_2, + text=f"one_comment_from_u2_for_post_{user_1_post_for_comments.id}", + post=user_1_post_for_comments, + user=user_2, ) async_session.add(post_comment) await async_session.commit() - await async_session.refresh(post_comment) - yield post_comment await async_session.delete(post_comment) @@ -271,8 +291,6 @@ async def parent_1(async_session: AsyncSession): async_session.add(parent) await async_session.commit() - await async_session.refresh(parent) - yield parent await async_session.delete(parent) @@ -287,8 +305,6 @@ async def parent_2(async_session: AsyncSession): async_session.add(parent) await async_session.commit() - await async_session.refresh(parent) - yield parent await async_session.delete(parent) @@ -303,8 +319,6 @@ async def parent_3(async_session: AsyncSession): async_session.add(parent) await async_session.commit() - await async_session.refresh(parent) - yield parent await async_session.delete(parent) @@ -319,8 +333,6 @@ async def child_1(async_session: AsyncSession): async_session.add(child) await async_session.commit() - await async_session.refresh(child) - yield child await async_session.delete(child) @@ -335,8 +347,6 @@ async def child_2(async_session: AsyncSession): async_session.add(child) await async_session.commit() - await async_session.refresh(child) - yield child await async_session.delete(child) @@ -351,8 +361,6 @@ async def child_3(async_session: AsyncSession): 
async_session.add(child) await async_session.commit() - await async_session.refresh(child) - yield child await async_session.delete(child) @@ -367,8 +375,6 @@ async def child_4(async_session: AsyncSession): async_session.add(child) await async_session.commit() - await async_session.refresh(child) - yield child await async_session.delete(child) @@ -389,8 +395,6 @@ async def p1_c1_association( async_session.add(assoc) await async_session.commit() - await async_session.refresh(assoc) - yield assoc await async_session.delete(assoc) @@ -411,8 +415,6 @@ async def p2_c1_association( async_session.add(assoc) await async_session.commit() - await async_session.refresh(assoc) - yield assoc await async_session.delete(assoc) @@ -433,8 +435,6 @@ async def p1_c2_association( async_session.add(assoc) await async_session.commit() - await async_session.refresh(assoc) - yield assoc await async_session.delete(assoc) @@ -455,8 +455,6 @@ async def p2_c2_association( async_session.add(assoc) await async_session.commit() - await async_session.refresh(assoc) - yield assoc await async_session.delete(assoc) @@ -477,20 +475,67 @@ async def p2_c3_association( async_session.add(assoc) await async_session.commit() - await async_session.refresh(assoc) - yield assoc await async_session.delete(assoc) await async_session.commit() -async def build_workplace(async_session: AsyncSession, **fields): - workplace = Workplace(**fields) - async_session.add(workplace) +def build_task(**fields): + return Task(**fields) + +async def create_task(async_session: AsyncSession, **fields): + task = build_task(**fields) + async_session.add(task) await async_session.commit() + return task + + +@async_fixture() +async def task_1( + async_session: AsyncSession, +): + fields = { + "task_ids_list_json": [1, 2, 3], + "task_ids_dict_json": {"completed": [1, 2, 3], "count": 1, "is_complete": True}, + } + if is_postgres_tests(): + fields.update( + { + "task_ids_list_jsonb": ["a", "b", "c"], + "task_ids_dict_jsonb": 
{"completed": ["a", "b", "c"], "count": 2, "is_complete": True}, + }, + ) + yield await create_task(async_session, **fields) + +@async_fixture() +async def task_2( + async_session: AsyncSession, +): + fields = { + "task_ids_list_json": [4, 5, 6], + "task_ids_dict_json": {"completed": [4, 5, 6], "count": 3, "is_complete": False}, + } + if is_postgres_tests(): + fields.update( + { + "task_ids_list_jsonb": ["d", "e", "f"], + "task_ids_dict_jsonb": {"completed": ["d", "e", "f"], "count": 4, "is_complete": False}, + }, + ) + yield await create_task(async_session, **fields) + + +def build_workplace(**fields): + return Workplace(**fields) + + +async def create_workplace(async_session: AsyncSession, **fields): + workplace = build_workplace(**fields) + async_session.add(workplace) + await async_session.commit() return workplace @@ -498,11 +543,11 @@ async def build_workplace(async_session: AsyncSession, **fields): async def workplace_1( async_session: AsyncSession, ): - yield await build_workplace(async_session, name="workplace_1") + yield await create_workplace(async_session, name="workplace_1") @async_fixture() async def workplace_2( async_session: AsyncSession, ): - yield await build_workplace(async_session, name="workplace_2") + yield await create_workplace(async_session, name="workplace_2") diff --git a/tests/fixtures/models/__init__.py b/tests/fixtures/models/__init__.py new file mode 100644 index 00000000..340ac6bd --- /dev/null +++ b/tests/fixtures/models/__init__.py @@ -0,0 +1,25 @@ +from tests.fixtures.models.alpha import Alpha +from tests.fixtures.models.beta import Beta +from tests.fixtures.models.beta_delta_binding import BetaDeltaBinding +from tests.fixtures.models.beta_gamma_binding import BetaGammaBinding +from tests.fixtures.models.cascade_case import CascadeCase +from tests.fixtures.models.contains_timestamp import ContainsTimestamp +from tests.fixtures.models.custom_uuid_item import CustomUUIDItem +from tests.fixtures.models.delta import Delta +from 
tests.fixtures.models.gamma import Gamma +from tests.fixtures.models.self_relationship import SelfRelationship +from tests.fixtures.models.task import Task + +__all__ = ( + "Alpha", + "Beta", + "BetaDeltaBinding", + "BetaGammaBinding", + "CascadeCase", + "ContainsTimestamp", + "CustomUUIDItem", + "Delta", + "Gamma", + "SelfRelationship", + "Task", +) diff --git a/tests/fixtures/models/alpha.py b/tests/fixtures/models/alpha.py new file mode 100644 index 00000000..dc6997bc --- /dev/null +++ b/tests/fixtures/models/alpha.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from examples.api_for_sqlalchemy.models.base import Base + +if TYPE_CHECKING: + from .beta import Beta + from .gamma import Gamma + + +class Alpha(Base): + __tablename__ = "alpha" + + beta_id: Mapped[int] = mapped_column(ForeignKey("beta.id"), index=True) + beta: Mapped[Beta] = relationship(back_populates="alphas") + gamma_id: Mapped[int] = mapped_column(ForeignKey("gamma.id")) + gamma: Mapped[Gamma] = relationship("Gamma") diff --git a/tests/fixtures/models/beta.py b/tests/fixtures/models/beta.py new file mode 100644 index 00000000..263f9274 --- /dev/null +++ b/tests/fixtures/models/beta.py @@ -0,0 +1,24 @@ +from sqlalchemy.orm import Mapped, relationship + +from examples.api_for_sqlalchemy.models.base import Base + +from .alpha import Alpha +from .delta import Delta +from .gamma import Gamma + + +class Beta(Base): + __tablename__ = "beta" + + alphas: Mapped[Alpha] = relationship("Alpha") + deltas: Mapped[list[Delta]] = relationship( + "Delta", + secondary="beta_delta_binding", + lazy="noload", + ) + gammas: Mapped[list[Gamma]] = relationship( + "Gamma", + secondary="beta_gamma_binding", + back_populates="betas", + lazy="noload", + ) diff --git a/tests/fixtures/models/beta_delta_binding.py b/tests/fixtures/models/beta_delta_binding.py new file mode 100644 
index 00000000..4a61e07d --- /dev/null +++ b/tests/fixtures/models/beta_delta_binding.py @@ -0,0 +1,11 @@ +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column + +from examples.api_for_sqlalchemy.models.base import Base + + +class BetaDeltaBinding(Base): + __tablename__ = "beta_delta_binding" + + beta_id: Mapped[int] = mapped_column(ForeignKey("beta.id", ondelete="CASCADE")) + delta_id: Mapped[int] = mapped_column(ForeignKey("delta.id", ondelete="CASCADE")) diff --git a/tests/fixtures/models/beta_gamma_binding.py b/tests/fixtures/models/beta_gamma_binding.py new file mode 100644 index 00000000..9569cb6b --- /dev/null +++ b/tests/fixtures/models/beta_gamma_binding.py @@ -0,0 +1,11 @@ +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column + +from examples.api_for_sqlalchemy.models.base import Base + + +class BetaGammaBinding(Base): + __tablename__ = "beta_gamma_binding" + + beta_id: Mapped[int] = mapped_column(ForeignKey("beta.id", ondelete="CASCADE")) + gamma_id: Mapped[int] = mapped_column(ForeignKey("gamma.id", ondelete="CASCADE")) diff --git a/tests/fixtures/models/cascade_case.py b/tests/fixtures/models/cascade_case.py new file mode 100644 index 00000000..712a2d61 --- /dev/null +++ b/tests/fixtures/models/cascade_case.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, backref, mapped_column, relationship + +from examples.api_for_sqlalchemy.models.base import Base + + +class CascadeCase(Base): + __tablename__ = "cascade_case" + + parent_item_id: Mapped[Optional[int]] = mapped_column( + ForeignKey( + "cascade_case.id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + ) + sub_items: Mapped[list[CascadeCase]] = relationship( + backref=backref("parent_item", remote_side="CascadeCase.id"), + ) + + if TYPE_CHECKING: + parent_item: Mapped[CascadeCase] diff --git 
a/tests/fixtures/models/contains_timestamp.py b/tests/fixtures/models/contains_timestamp.py new file mode 100644 index 00000000..22c50615 --- /dev/null +++ b/tests/fixtures/models/contains_timestamp.py @@ -0,0 +1,12 @@ +from datetime import datetime + +from sqlalchemy import DateTime +from sqlalchemy.orm import Mapped, mapped_column + +from examples.api_for_sqlalchemy.models.base import Base + + +class ContainsTimestamp(Base): + __tablename__ = "contains_timestamp" + + timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True)) diff --git a/tests/fixtures/models/custom_uuid_item.py b/tests/fixtures/models/custom_uuid_item.py new file mode 100644 index 00000000..c70fcae9 --- /dev/null +++ b/tests/fixtures/models/custom_uuid_item.py @@ -0,0 +1,14 @@ +from typing import Optional +from uuid import UUID + +from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.types import UUID as UUIDType + +from examples.api_for_sqlalchemy.models.base import Base + + +class CustomUUIDItem(Base): + __tablename__ = "custom_uuid_item" + + id: Mapped[UUID] = mapped_column(UUIDType(as_uuid=True), primary_key=True) + extra_id: Mapped[Optional[UUID]] = mapped_column(UUIDType(as_uuid=True), unique=True) diff --git a/tests/fixtures/models/delta.py b/tests/fixtures/models/delta.py new file mode 100644 index 00000000..9356a5ab --- /dev/null +++ b/tests/fixtures/models/delta.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from sqlalchemy.orm import Mapped, relationship + +from examples.api_for_sqlalchemy.models.base import Base + +if TYPE_CHECKING: + from .beta import Beta + from .gamma import Gamma + + +class Delta(Base): + __tablename__ = "delta" + + name: Mapped[str] + + gammas: Mapped[list[Gamma]] = relationship( + "Gamma", + back_populates="delta", + lazy="noload", + ) + betas: Mapped[list[Beta]] = relationship( + "Beta", + secondary="beta_delta_binding", + back_populates="deltas", + lazy="noload", + ) diff --git 
a/tests/fixtures/models/gamma.py b/tests/fixtures/models/gamma.py new file mode 100644 index 00000000..c3a16ee3 --- /dev/null +++ b/tests/fixtures/models/gamma.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from examples.api_for_sqlalchemy.models.base import Base + +if TYPE_CHECKING: + from .alpha import Alpha + from .beta import Beta + from .delta import Delta + + +class Gamma(Base): + __tablename__ = "gamma" + + alpha: Mapped[Alpha] = relationship("Alpha") + betas: Mapped[list[Beta]] = relationship( + "Beta", + secondary="beta_gamma_binding", + back_populates="gammas", + lazy="raise", + ) + delta_id: Mapped[int] = mapped_column( + ForeignKey( + "delta.id", + ondelete="CASCADE", + ), + index=True, + ) + delta: Mapped[Delta] = relationship("Delta") diff --git a/tests/fixtures/models/self_relationship.py b/tests/fixtures/models/self_relationship.py new file mode 100644 index 00000000..6bed23f8 --- /dev/null +++ b/tests/fixtures/models/self_relationship.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + +from sqlalchemy import ForeignKey, String +from sqlalchemy.orm import Mapped, backref, mapped_column, relationship + +from examples.api_for_sqlalchemy.models.base import Base + + +class SelfRelationship(Base): + __tablename__ = "selfrelationships" + + name: Mapped[str] = mapped_column(String) + + self_relationship_id: Mapped[Optional[int]] = mapped_column( + ForeignKey( + "selfrelationships.id", + name="fk_self_relationship_id", + ondelete="CASCADE", + onupdate="CASCADE", + ), + ) + children_objects: Mapped[list[SelfRelationship]] = relationship( + backref=backref("parent_object", remote_side="SelfRelationship.id"), + ) + + if TYPE_CHECKING: + parent_object: Mapped[SelfRelationship] diff --git a/tests/fixtures/models/task.py b/tests/fixtures/models/task.py new file 
mode 100644 index 00000000..e36bc205 --- /dev/null +++ b/tests/fixtures/models/task.py @@ -0,0 +1,19 @@ +from typing import Optional + +from sqlalchemy import JSON +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import Mapped, mapped_column + +from examples.api_for_sqlalchemy.models.base import Base +from tests.common import is_postgres_tests + + +class Task(Base): + __tablename__ = "tasks" + + task_ids_dict_json: Mapped[Optional[dict]] = mapped_column(JSON, unique=False) + task_ids_list_json: Mapped[Optional[list]] = mapped_column(JSON, unique=False) + + if is_postgres_tests(): + task_ids_dict_jsonb: Mapped[Optional[dict]] = mapped_column(JSONB, unique=False) + task_ids_list_jsonb: Mapped[Optional[list]] = mapped_column(JSONB, unique=False) diff --git a/tests/fixtures/schemas/__init__.py b/tests/fixtures/schemas/__init__.py new file mode 100644 index 00000000..a15b4692 --- /dev/null +++ b/tests/fixtures/schemas/__init__.py @@ -0,0 +1,31 @@ +from .alpha import AlphaSchema +from .beta import BetaSchema +from .cascade_case import CascadeCaseSchema +from .custom_uuid import ( + CustomUUIDItemAttributesSchema, + CustomUUIDItemSchema, +) +from .delta import DeltaSchema +from .gamma import GammaSchema +from .self_relationship import SelfRelationshipAttributesSchema +from .task import ( + TaskBaseSchema, + TaskInSchema, + TaskPatchSchema, + TaskSchema, +) + +__all__ = ( + "AlphaSchema", + "BetaSchema", + "CascadeCaseSchema", + "CustomUUIDItemAttributesSchema", + "CustomUUIDItemSchema", + "DeltaSchema", + "GammaSchema", + "SelfRelationshipAttributesSchema", + "TaskBaseSchema", + "TaskInSchema", + "TaskPatchSchema", + "TaskSchema", +) diff --git a/tests/fixtures/schemas/alpha.py b/tests/fixtures/schemas/alpha.py new file mode 100755 index 00000000..1fe1572f --- /dev/null +++ b/tests/fixtures/schemas/alpha.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from fastapi_jsonapi.schema_base 
import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .beta import BetaSchema + from .gamma import GammaSchema + + +class AlphaSchema(BaseModel): + beta: Annotated[ + Optional[BetaSchema], + RelationshipInfo( + resource_type="beta", + ), + ] = None + gamma: Annotated[ + Optional[GammaSchema], + RelationshipInfo( + resource_type="gamma", + ), + ] = None diff --git a/tests/fixtures/schemas/beta.py b/tests/fixtures/schemas/beta.py new file mode 100755 index 00000000..4aa1dced --- /dev/null +++ b/tests/fixtures/schemas/beta.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .alpha import AlphaSchema + from .delta import DeltaSchema + from .gamma import GammaSchema + + +class BetaSchema(BaseModel): + alphas: Annotated[ + Optional[AlphaSchema], + RelationshipInfo( + resource_type="alpha", + ), + ] = None + gammas: Annotated[ + Optional[GammaSchema], + RelationshipInfo( + resource_type="gamma", + many=True, + ), + ] = None + deltas: Annotated[ + Optional[DeltaSchema], + RelationshipInfo( + resource_type="delta", + many=True, + ), + ] = None diff --git a/tests/fixtures/schemas/cascade_case.py b/tests/fixtures/schemas/cascade_case.py new file mode 100755 index 00000000..467dff0d --- /dev/null +++ b/tests/fixtures/schemas/cascade_case.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from typing import Annotated, Optional + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + + +class CascadeCaseSchema(BaseModel): + parent_item: Annotated[ + Optional[CascadeCaseSchema], + RelationshipInfo( + resource_type="cascade_case", + ), + ] = None + sub_items: Annotated[ + Optional[list[CascadeCaseSchema]], + RelationshipInfo( + resource_type="cascade_case", + 
many=True, + ), + ] = None diff --git a/tests/fixtures/schemas/custom_uuid.py b/tests/fixtures/schemas/custom_uuid.py new file mode 100755 index 00000000..98bad7a3 --- /dev/null +++ b/tests/fixtures/schemas/custom_uuid.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +from typing import Annotated, Optional +from uuid import UUID + +from pydantic import ConfigDict + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import ClientCanSetId + + +class CustomUUIDItemAttributesSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + extra_id: Optional[UUID] = None + + +class CustomUUIDItemSchema(CustomUUIDItemAttributesSchema): + id: Annotated[UUID, ClientCanSetId()] diff --git a/tests/fixtures/schemas/delta.py b/tests/fixtures/schemas/delta.py new file mode 100755 index 00000000..ee6cba7d --- /dev/null +++ b/tests/fixtures/schemas/delta.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .beta import BetaSchema + from .gamma import GammaSchema + + +class DeltaSchema(BaseModel): + name: str + + gammas: Annotated[ + Optional[GammaSchema], + RelationshipInfo( + resource_type="gamma", + many=True, + ), + ] = None + betas: Annotated[ + Optional[BetaSchema], + RelationshipInfo( + resource_type="beta", + many=True, + ), + ] = None diff --git a/tests/fixtures/schemas/gamma.py b/tests/fixtures/schemas/gamma.py new file mode 100755 index 00000000..e7397d6e --- /dev/null +++ b/tests/fixtures/schemas/gamma.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated, Optional + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + +if TYPE_CHECKING: + from .beta import BetaSchema + from .delta import DeltaSchema + 
+ +class GammaSchema(BaseModel): + betas: Annotated[ + Optional[BetaSchema], + RelationshipInfo( + resource_type="beta", + many=True, + ), + ] = None + delta: Annotated[ + Optional[DeltaSchema], + RelationshipInfo( + resource_type="delta", + ), + ] = None diff --git a/tests/fixtures/schemas/self_relationship.py b/tests/fixtures/schemas/self_relationship.py new file mode 100755 index 00000000..e65ad029 --- /dev/null +++ b/tests/fixtures/schemas/self_relationship.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import Annotated, Optional + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata import RelationshipInfo + + +class SelfRelationshipAttributesSchema(BaseModel): + name: str + + parent_object: Annotated[ + Optional[SelfRelationshipAttributesSchema], + RelationshipInfo( + resource_type="self_relationship", + ), + ] = None + children_objects: Annotated[ + Optional[list[SelfRelationshipAttributesSchema]], + RelationshipInfo( + resource_type="self_relationship", + many=True, + ), + ] = None diff --git a/tests/fixtures/schemas/task.py b/tests/fixtures/schemas/task.py new file mode 100755 index 00000000..9871290e --- /dev/null +++ b/tests/fixtures/schemas/task.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from typing import Annotated, Optional + +from pydantic import ConfigDict, field_validator + +from fastapi_jsonapi.schema_base import BaseModel +from fastapi_jsonapi.types_metadata.custom_filter_sql import ( + sql_filter_pg_json_contains, + sql_filter_pg_json_ilike, + sql_filter_pg_jsonb_contains, + sql_filter_pg_jsonb_ilike, + sql_filter_sqlite_json_contains, + sql_filter_sqlite_json_ilike, +) +from tests.common import is_postgres_tests + + +class TaskBaseSchema(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + if is_postgres_tests(): + task_ids_dict_json: Annotated[Optional[dict], sql_filter_pg_json_ilike] + task_ids_list_json: Annotated[Optional[list], 
sql_filter_pg_json_contains] + else: + task_ids_dict_json: Annotated[Optional[dict], sql_filter_sqlite_json_ilike] + task_ids_list_json: Annotated[Optional[list], sql_filter_sqlite_json_contains] + + # noinspection PyMethodParameters + @field_validator("task_ids_dict_json", mode="before", check_fields=False) + @classmethod + def task_ids_dict_json_validator(cls, value: Optional[dict]): + """ + return `{}`, if value is None both on get and on create + """ + return value or {} + + # noinspection PyMethodParameters + @field_validator("task_ids_list_json", mode="before", check_fields=False) + @classmethod + def task_ids_list_json_validator(cls, value: Optional[list]): + """ + return `[]`, if value is None both on get and on create + """ + return value or [] + + if is_postgres_tests(): + task_ids_dict_jsonb: Annotated[Optional[dict], sql_filter_pg_jsonb_ilike] + task_ids_list_jsonb: Annotated[Optional[list], sql_filter_pg_jsonb_contains] + + # noinspection PyMethodParameters + @field_validator("task_ids_dict_jsonb", mode="before", check_fields=False) + @classmethod + def task_ids_dict_jsonb_validator(cls, value: Optional[dict]): + """ + return `{}`, if value is None both on get and on create + """ + return value or {} + + # noinspection PyMethodParameters + @field_validator("task_ids_list_jsonb", mode="before", check_fields=False) + @classmethod + def task_ids_list_jsonb_validator(cls, value: Optional[list]): + """ + return `[]`, if value is None both on get and on create + """ + return value or [] + + +class TaskPatchSchema(TaskBaseSchema): + """Task PATCH schema.""" + + +class TaskInSchema(TaskBaseSchema): + """Task create schema.""" + + +class TaskSchema(TaskBaseSchema): + """Task item schema.""" + + id: int diff --git a/tests/fixtures/user.py b/tests/fixtures/user.py index 070d6c1d..4477c44d 100644 --- a/tests/fixtures/user.py +++ b/tests/fixtures/user.py @@ -1,10 +1,10 @@ import pytest +from examples.api_for_sqlalchemy.schemas import UserAttributesBaseSchema from 
tests.misc.utils import fake -from tests.schemas import UserAttributesBaseSchema -@pytest.fixture() +@pytest.fixture def user_attributes_factory(): def factory(): user_attributes = UserAttributesBaseSchema( @@ -17,6 +17,6 @@ def factory(): return factory -@pytest.fixture() +@pytest.fixture def user_attributes(user_attributes_factory): return user_attributes_factory() diff --git a/tests/fixtures/views.py b/tests/fixtures/views.py index 697b16ce..da14d41c 100644 --- a/tests/fixtures/views.py +++ b/tests/fixtures/views.py @@ -1,46 +1,33 @@ -from typing import ClassVar, Dict +from typing import ClassVar from fastapi import Depends -from pydantic import BaseModel -from pytest import fixture # noqa +from pydantic import BaseModel, ConfigDict from sqlalchemy.ext.asyncio import AsyncSession -from fastapi_jsonapi.misc.sqla.generics.base import ( - DetailViewBaseGeneric as DetailViewBaseGenericHelper, -) -from fastapi_jsonapi.misc.sqla.generics.base import ( - ListViewBaseGeneric as ListViewBaseGenericHelper, -) -from fastapi_jsonapi.views.utils import HTTPMethod, HTTPMethodConfig -from fastapi_jsonapi.views.view_base import ViewBase +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric as ViewBaseGenericHelper +from fastapi_jsonapi.views import Operation, OperationConfig, ViewBase from tests.fixtures.db_connection import async_session_dependency class ArbitraryModelBase(BaseModel): - class Config: - arbitrary_types_allowed = True + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) class SessionDependency(ArbitraryModelBase): session: AsyncSession = Depends(async_session_dependency) -def common_handler(view: ViewBase, dto: SessionDependency) -> Dict: - return {"session": dto.session} - - -class DetailViewBaseGeneric(DetailViewBaseGenericHelper): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=common_handler, - ), +def common_handler(view: ViewBase, dto: 
SessionDependency) -> dict: + return { + "session": dto.session, } -class ListViewBaseGeneric(ListViewBaseGenericHelper): - method_dependencies: ClassVar = { - HTTPMethod.ALL: HTTPMethodConfig( +class ViewBaseGeneric(ViewBaseGenericHelper): + operation_dependencies: ClassVar = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, ), diff --git a/tests/models.py b/tests/models.py deleted file mode 100644 index 8039504b..00000000 --- a/tests/models.py +++ /dev/null @@ -1,419 +0,0 @@ -from typing import TYPE_CHECKING, Dict, List, Optional -from uuid import UUID - -from sqlalchemy import JSON, Column, DateTime, ForeignKey, Index, Integer, String, Text -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import backref, declared_attr, relationship -from sqlalchemy.types import CHAR, TypeDecorator - -from tests.common import is_postgres_tests, sqla_uri - - -class Base: - @declared_attr - def __tablename__(cls): - """ - Generate table name - - :return: - """ - return f"{cls.__name__.lower()}s" - - -class AutoIdMixin: - @declared_attr - def id(cls): - return Column(Integer, primary_key=True, autoincrement=True) - - -Base = declarative_base(cls=Base) - - -class User(AutoIdMixin, Base): - name: str = Column(String, nullable=False, unique=True) - age: int = Column(Integer, nullable=True) - email: Optional[str] = Column(String, nullable=True) - - posts = relationship( - "Post", - back_populates="user", - uselist=True, - cascade="all,delete", - ) - bio = relationship( - "UserBio", - back_populates="user", - uselist=False, - cascade="save-update, merge, delete, delete-orphan", - ) - comments = relationship( - "PostComment", - back_populates="author", - uselist=True, - cascade="save-update, merge, delete, delete-orphan", - ) - computers = relationship( - "Computer", - # TODO: rename - # back_populates="owner", - back_populates="user", - uselist=True, - ) - workplace = relationship( - "Workplace", - 
back_populates="user", - uselist=False, - ) - if TYPE_CHECKING: - computers: list["Computer"] - - def __repr__(self): - return f"{self.__class__.__name__}(id={self.id}, name={self.name!r})" - - -class UserBio(AutoIdMixin, Base): - birth_city: str = Column(String, nullable=False, default="", server_default="") - favourite_movies: str = Column(String, nullable=False, default="", server_default="") - keys_to_ids_list: Dict[str, List[int]] = Column(JSON) - - user_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=True) - user = relationship( - "User", - back_populates="bio", - uselist=False, - ) - - def __repr__(self): - return ( - f"{self.__class__.__name__}(" - f"id={self.id}," - f" birth_city={self.birth_city!r}," - f" favourite_movies={self.favourite_movies!r}," - f" user_id={self.user_id}" - ")" - ) - - -class Post(AutoIdMixin, Base): - title = Column(String, nullable=False) - body = Column(Text, nullable=False, default="", server_default="") - - user_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=False) - user = relationship( - "User", - back_populates="posts", - uselist=False, - ) - - comments = relationship( - "PostComment", - back_populates="post", - uselist=True, - cascade="save-update, merge, delete, delete-orphan", - ) - - def __repr__(self): - return f"{self.__class__.__name__}(id={self.id} title={self.title!r} user_id={self.user_id})" - - -class PostComment(AutoIdMixin, Base): - text: str = Column(String, nullable=False, default="", server_default="") - - post_id = Column(Integer, ForeignKey("posts.id"), nullable=False, unique=False) - post = relationship( - "Post", - back_populates="comments", - uselist=False, - ) - - author_id = Column(Integer, ForeignKey("users.id"), nullable=False, unique=False) - author = relationship( - "User", - back_populates="comments", - uselist=False, - ) - - def __repr__(self): - return ( - f"{self.__class__.__name__}(" - f"id={self.id}," - f" text={self.text!r}," - f" 
author_id={self.author_id}," - f" post_id={self.post_id}" - ")" - ) - - -class Parent(AutoIdMixin, Base): - __tablename__ = "left_table_parents" - name = Column(String, nullable=False) - children = relationship( - "ParentToChildAssociation", - back_populates="parent", - ) - - -class Child(AutoIdMixin, Base): - __tablename__ = "right_table_children" - name = Column(String, nullable=False) - parents = relationship( - "ParentToChildAssociation", - back_populates="child", - ) - - -class ParentToChildAssociation(AutoIdMixin, Base): - __table_args__ = ( - # JSON:API requires `id` field on any model, - # so we can't create a composite PK here - # that's why we need to create this index - Index( - "ix_parent_child_association_unique", - "parent_left_id", - "child_right_id", - unique=True, - ), - ) - - __tablename__ = "parent_to_child_association_table" - parent_left_id = Column( - ForeignKey(Parent.id), - nullable=False, - ) - child_right_id = Column( - ForeignKey(Child.id), - nullable=False, - ) - extra_data = Column(String(50)) - parent = relationship("Parent", back_populates="children") - child = relationship("Child", back_populates="parents") - - -class Computer(AutoIdMixin, Base): - """ - Model for check many-to-one relationships update - """ - - __tablename__ = "computers" - - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(String, nullable=False) - user_id = Column(Integer, ForeignKey("users.id"), nullable=True) - # TODO: rename - # owner = relationship("User", back_populates="computers") - user = relationship("User", back_populates="computers") - - def __repr__(self): - return f"{self.__class__.__name__}(id={self.id}, name={self.name!r}, user_id={self.user_id})" - - -class Workplace(AutoIdMixin, Base): - """ - Model for check one-to-one relationships update - """ - - __tablename__ = "workplaces" - - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(String, nullable=False) - user_id = Column(Integer, 
ForeignKey("users.id"), nullable=True) - user = relationship("User", back_populates="workplace") - - def __repr__(self): - return f"{self.__class__.__name__}(id={self.id}, name={self.name!r}, user_id={self.user_id})" - - -class Task(Base): - __tablename__ = "tasks" - id = Column(Integer, primary_key=True) - task_ids = Column(JSON, nullable=True, unique=False) - - -# uuid below - - -class CustomUUIDType(TypeDecorator): - cache_ok = True - - impl = CHAR - - def __init__(self, *args, as_uuid=True, **kwargs): - """ - Construct a UUID type. - - # TODO: support as_uuid=False (and set by default!) - :param as_uuid=True: if True, values will be interpreted - as Python uuid objects, converting to/from string via theDBAPI. - - """ - super().__init__(*args, **kwargs) - self.as_uuid = as_uuid - - def load_dialect_impl(self, dialect): - return CHAR(32) - - def process_bind_param(self, value, dialect): - if value is None: - return value - - if not isinstance(value, UUID): - msg = f"Incorrect type got {type(value).__name__}, expected {UUID.__name__}" - raise Exception(msg) - - return str(value) - - def process_result_value(self, value, dialect): - return value and UUID(value) - - @property - def python_type(self): - return UUID if self.as_uuid else str - - -db_uri = sqla_uri() -if is_postgres_tests(): - # noinspection PyPep8Naming - from sqlalchemy.dialects.postgresql.asyncpg import AsyncpgUUID as UUIDType -elif "sqlite" in db_uri: - UUIDType = CustomUUIDType -else: - msg = "unsupported dialect (custom uuid?)" - raise ValueError(msg) - - -class CustomUUIDItem(Base): - __tablename__ = "custom_uuid_item" - id = Column(UUIDType(as_uuid=True), primary_key=True) - - extra_id = Column( - UUIDType(as_uuid=True), - nullable=True, - unique=True, - ) - - -class SelfRelationship(Base): - id = Column(Integer, primary_key=True) - name = Column(String) - self_relationship_id = Column( - Integer, - ForeignKey( - "selfrelationships.id", - name="fk_self_relationship_id", - ondelete="CASCADE", - 
onupdate="CASCADE", - ), - nullable=True, - ) - children_objects = relationship( - "SelfRelationship", - backref=backref("parent_object", remote_side=[id]), - ) - - if TYPE_CHECKING: - parent_object: Optional["SelfRelationship"] - - -class ContainsTimestamp(Base): - id = Column(Integer, primary_key=True) - timestamp = Column(DateTime(True), nullable=False) - - -class Alpha(Base): - __tablename__ = "alpha" - - id = Column(Integer, primary_key=True, autoincrement=True) - beta_id = Column( - Integer, - ForeignKey("beta.id"), - nullable=False, - index=True, - ) - beta = relationship("Beta", back_populates="alphas") - gamma_id = Column(Integer, ForeignKey("gamma.id"), nullable=False) - gamma: "Gamma" = relationship("Gamma") - - -class BetaGammaBinding(Base): - __tablename__ = "beta_gamma_binding" - - id: int = Column(Integer, primary_key=True) - beta_id: int = Column(ForeignKey("beta.id", ondelete="CASCADE"), nullable=False) - gamma_id: int = Column(ForeignKey("gamma.id", ondelete="CASCADE"), nullable=False) - - -class Beta(Base): - __tablename__ = "beta" - - id = Column(Integer, primary_key=True, autoincrement=True) - gammas: List["Gamma"] = relationship( - "Gamma", - secondary="beta_gamma_binding", - back_populates="betas", - lazy="noload", - ) - alphas = relationship("Alpha") - deltas: List["Delta"] = relationship( - "Delta", - secondary="beta_delta_binding", - lazy="noload", - ) - - -class Gamma(Base): - __tablename__ = "gamma" - - id = Column(Integer, primary_key=True, autoincrement=True) - betas: List["Beta"] = relationship( - "Beta", - secondary="beta_gamma_binding", - back_populates="gammas", - lazy="raise", - ) - delta_id: int = Column( - Integer, - ForeignKey("delta.id", ondelete="CASCADE"), - nullable=False, - index=True, - ) - alpha = relationship("Alpha") - delta: "Delta" = relationship("Delta") - - -class BetaDeltaBinding(Base): - __tablename__ = "beta_delta_binding" - - id: int = Column(Integer, primary_key=True) - beta_id: int = 
Column(ForeignKey("beta.id", ondelete="CASCADE"), nullable=False) - delta_id: int = Column(ForeignKey("delta.id", ondelete="CASCADE"), nullable=False) - - -class Delta(Base): - __tablename__ = "delta" - - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(String) - gammas: List["Gamma"] = relationship("Gamma", back_populates="delta", lazy="noload") - betas: List["Beta"] = relationship("Beta", secondary="beta_delta_binding", back_populates="deltas", lazy="noload") - - -class CascadeCase(Base): - __tablename__ = "cascade_case" - - id = Column(Integer, primary_key=True, autoincrement=True) - parent_item_id = Column( - Integer, - ForeignKey( - "cascade_case.id", - onupdate="CASCADE", - ondelete="CASCADE", - ), - nullable=True, - ) - sub_items = relationship( - "CascadeCase", - backref=backref("parent_item", remote_side=[id]), - ) - - if TYPE_CHECKING: - parent_item: Optional["CascadeCase"] diff --git a/tests/pytest.ini b/tests/pytest.ini deleted file mode 100644 index df291533..00000000 --- a/tests/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -filterwarnings = - ignore::DeprecationWarning - ignore::PendingDeprecationWarning diff --git a/tests/schemas.py b/tests/schemas.py deleted file mode 100644 index 77e262f8..00000000 --- a/tests/schemas.py +++ /dev/null @@ -1,509 +0,0 @@ -from typing import Dict, List, Optional -from uuid import UUID - -from pydantic import validator - -from fastapi_jsonapi.schema_base import BaseModel, Field, RelationshipInfo - - -class UserAttributesBaseSchema(BaseModel): - name: str - age: Optional[int] = None - email: Optional[str] = None - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - -class UserBaseSchema(UserAttributesBaseSchema): - """User base schema.""" - - posts: Optional[List["PostSchema"]] = Field( - relationship=RelationshipInfo( - resource_type="post", - many=True, - ), - ) - - bio: Optional["UserBioSchema"] = Field( - relationship=RelationshipInfo( - resource_type="user_bio", - 
), - ) - - computers: Optional[List["ComputerSchema"]] = Field( - relationship=RelationshipInfo( - resource_type="computer", - many=True, - ), - ) - workplace: Optional["WorkplaceSchema"] = Field( - relationship=RelationshipInfo( - resource_type="workplace", - ), - ) - - -class UserPatchSchema(UserBaseSchema): - """User PATCH schema.""" - - -class UserInSchema(UserBaseSchema): - """User input schema.""" - - -class UserInSchemaAllowIdOnPost(UserBaseSchema): - id: str = Field(client_can_set_id=True) - - -class UserSchema(UserInSchema): - """User item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - - id: int - - -# User Bio Schemas ⬇️ - - -class UserBioAttributesBaseSchema(BaseModel): - """UserBio base schema.""" - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - birth_city: str - favourite_movies: str - keys_to_ids_list: Dict[str, List[int]] = None - - -class UserBioSchema(UserBioAttributesBaseSchema): - """UserBio item schema.""" - - id: int - user: "UserSchema" = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - - -# Post Schemas ⬇️ - - -class PostAttributesBaseSchema(BaseModel): - title: str - body: str - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - -class PostBaseSchema(PostAttributesBaseSchema): - """Post base schema.""" - - user: "UserSchema" = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - comments: Optional[List["PostCommentSchema"]] = Field( - relationship=RelationshipInfo( - resource_type="post_comment", - many=True, - ), - ) - - -class PostPatchSchema(PostBaseSchema): - """Post PATCH schema.""" - - -class PostInSchema(PostBaseSchema): - """Post input schema.""" - - -class PostSchema(PostInSchema): - """Post item schema.""" - - id: int - - -# Post Comment Schemas ⬇️ - - -class PostCommentAttributesBaseSchema(BaseModel): - text: str - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - -class 
PostCommentBaseSchema(PostCommentAttributesBaseSchema): - """PostComment base schema.""" - - post: "PostSchema" = Field( - relationship=RelationshipInfo( - resource_type="post", - ), - ) - author: "UserSchema" = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - - -class PostCommentSchema(PostCommentBaseSchema): - """PostComment item schema.""" - - id: int - - -# Parents and Children associations ⬇️⬇️ - - -# Association Schemas ⬇️ - - -class ParentToChildAssociationAttributesSchema(BaseModel): - extra_data: str - - class Config: - orm_mode = True - - -class ParentToChildAssociationSchema(ParentToChildAssociationAttributesSchema): - parent: "ParentSchema" = Field( - relationship=RelationshipInfo( - resource_type="parent", - ), - ) - - child: "ChildSchema" = Field( - relationship=RelationshipInfo( - resource_type="child", - ), - ) - - -# Parent Schemas ⬇️ - - -class ParentAttributesSchema(BaseModel): - name: str - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - -class ParentBaseSchema(ParentAttributesSchema): - """Parent base schema.""" - - children: List["ParentToChildAssociationSchema"] = Field( - default=None, - relationship=RelationshipInfo( - resource_type="parent_child_association", - many=True, - ), - ) - - -class ParentPatchSchema(ParentBaseSchema): - """Parent PATCH schema.""" - - -class ParentInSchema(ParentBaseSchema): - """Parent input schema.""" - - -class ParentSchema(ParentInSchema): - """Parent item schema.""" - - id: int - - -# Child Schemas ⬇️ - - -class ChildAttributesSchema(BaseModel): - name: str - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - -class ChildBaseSchema(ChildAttributesSchema): - """Child base schema.""" - - parents: List["ParentToChildAssociationSchema"] = Field( - default=None, - relationship=RelationshipInfo( - resource_type="parent_child_association", - many=True, - ), - ) - - -class ChildPatchSchema(ChildBaseSchema): - """Child PATCH schema.""" - - -class 
ChildInSchema(ChildBaseSchema): - """Child input schema.""" - - -class ChildSchema(ChildInSchema): - """Child item schema.""" - - id: int - - -class ComputerAttributesBaseSchema(BaseModel): - class Config: - """Pydantic schema config.""" - - orm_mode = True - - name: str - - -class ComputerBaseSchema(ComputerAttributesBaseSchema): - """Computer base schema.""" - - user: Optional["UserSchema"] = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - - -class ComputerPatchSchema(ComputerBaseSchema): - """Computer PATCH schema.""" - - -class ComputerInSchema(ComputerBaseSchema): - """Computer input schema.""" - - -class ComputerSchema(ComputerInSchema): - """Computer item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - - id: int - - # TODO: rename - # owner: Optional["UserSchema"] = Field( - user: Optional["UserSchema"] = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - - -class WorkplaceBaseSchema(BaseModel): - """Workplace base schema.""" - - class Config: - """Pydantic schema config.""" - - orm_mode = True - - name: str - user: Optional["UserSchema"] = Field( - relationship=RelationshipInfo( - resource_type="user", - ), - ) - - -class WorkplacePatchSchema(ComputerBaseSchema): - """Workplace PATCH schema.""" - - -class WorkplaceInSchema(ComputerBaseSchema): - """Workplace input schema.""" - - -class WorkplaceSchema(ComputerInSchema): - """Workplace item schema.""" - - class Config: - """Pydantic model config.""" - - orm_mode = True - - id: int - - -# task -class TaskBaseSchema(BaseModel): - class Config: - orm_mode = True - - task_ids: Optional[list[str]] = None - - # noinspection PyMethodParameters - @validator("task_ids", pre=True) - def task_ids_validator(cls, value: Optional[list[str]]): - """ - return `[]`, if value is None both on get and on create - """ - return value or [] - - -class TaskPatchSchema(TaskBaseSchema): - """Task PATCH schema.""" - - -class 
TaskInSchema(TaskBaseSchema): - """Task create schema.""" - - -class TaskSchema(TaskBaseSchema): - """Task item schema.""" - - id: int - - -# uuid below - - -class CustomUUIDItemAttributesSchema(BaseModel): - extra_id: Optional[UUID] = None - - class Config: - orm_mode = True - - -class CustomUUIDItemSchema(CustomUUIDItemAttributesSchema): - id: UUID = Field(client_can_set_id=True) - - -class SelfRelationshipAttributesSchema(BaseModel): - name: str - - class Config: - orm_mode = True - - -class SelfRelationshipSchema(SelfRelationshipAttributesSchema): - parent_object: Optional["SelfRelationshipSchema"] = Field( - relationship=RelationshipInfo( - resource_type="self_relationship", - ), - ) - children_objects: Optional[list["SelfRelationshipSchema"]] = Field( - relationship=RelationshipInfo( - resource_type="self_relatiosnhip", - many=True, - ), - ) - - -class CascadeCaseSchema(BaseModel): - parent_item: Optional["CascadeCaseSchema"] = Field( - relationship=RelationshipInfo( - resource_type="cascade_case", - ), - ) - sub_items: Optional[list["CascadeCaseSchema"]] = Field( - relationship=RelationshipInfo( - resource_type="cascade_case", - many=True, - ), - ) - - -class CustomUserAttributesSchema(UserBaseSchema): - spam: str - eggs: str - - -class AlphaSchema(BaseModel): - beta: Optional["BetaSchema"] = Field( - relationship=RelationshipInfo( - resource_type="beta", - ), - ) - gamma: Optional["GammaSchema"] = Field( - relationship=RelationshipInfo( - resource_type="gamma", - ), - ) - - -class BetaSchema(BaseModel): - alphas: Optional["AlphaSchema"] = Field( - relationship=RelationshipInfo( - resource_type="alpha", - ), - ) - gammas: Optional["GammaSchema"] = Field( - None, - relationship=RelationshipInfo( - resource_type="gamma", - many=True, - ), - ) - deltas: Optional["DeltaSchema"] = Field( - None, - relationship=RelationshipInfo( - resource_type="delta", - many=True, - ), - ) - - -class GammaSchema(BaseModel): - betas: Optional["BetaSchema"] = Field( - None, - 
relationship=RelationshipInfo( - resource_type="beta", - many=True, - ), - ) - delta: Optional["DeltaSchema"] = Field( - None, - relationship=RelationshipInfo( - resource_type="Delta", - ), - ) - - -class DeltaSchema(BaseModel): - name: str - gammas: Optional["GammaSchema"] = Field( - None, - relationship=RelationshipInfo( - resource_type="gamma", - many=True, - ), - ) - betas: Optional["BetaSchema"] = Field( - None, - relationship=RelationshipInfo( - resource_type="beta", - many=True, - ), - ) diff --git a/tests/test_api/test_api_sqla_with_includes.py b/tests/test_api/test_api_sqla_with_includes.py index 0ea5ab9d..a05fa34c 100644 --- a/tests/test_api/test_api_sqla_with_includes.py +++ b/tests/test_api/test_api_sqla_with_includes.py @@ -1,71 +1,58 @@ -import json import logging from collections import defaultdict -from contextlib import suppress from datetime import datetime, timezone -from itertools import chain, zip_longest -from json import dumps, loads -from typing import Dict, List, Literal, Set, Tuple -from unittest.mock import call, patch +from itertools import chain +from typing import Annotated, Literal +from unittest import mock +from unittest.mock import call from uuid import UUID, uuid4 +import orjson as json import pytest from fastapi import FastAPI, status +from fastapi.datastructures import QueryParams from httpx import AsyncClient -from pydantic import BaseModel, Field -from pydantic.fields import ModelField -from pytest import fixture, mark, param, raises # noqa PT013 -from sqlalchemy import func, select +from pydantic import BaseModel +from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.orm import InstrumentedAttribute -from starlette.datastructures import QueryParams -from fastapi_jsonapi.api import RoutersJSONAPI -from fastapi_jsonapi.schema_builder import SchemaBuilder -from fastapi_jsonapi.views.view_base import ViewBase +from examples.api_for_sqlalchemy.models import Computer, Post, PostComment, 
User, UserBio, Workplace +from examples.api_for_sqlalchemy.schemas import ( + CustomUserAttributesSchema, + PostAttributesBaseSchema, + PostCommentAttributesBaseSchema, + UserAttributesBaseSchema, + UserBioAttributesBaseSchema, + UserInSchemaAllowIdOnPost, + UserPatchSchema, + UserSchema, +) +from fastapi_jsonapi.data_layers.sqla import query_building as query_building_module +from fastapi_jsonapi.types_metadata import ClientCanSetId +from fastapi_jsonapi.types_metadata.custom_filter_sql import sql_filter_lower_equals +from fastapi_jsonapi.types_metadata.custom_sort_sql import sql_register_free_sort from tests.common import is_postgres_tests from tests.fixtures.app import build_alphabet_app, build_app_custom from tests.fixtures.entities import ( - build_post, - build_post_comment, - build_workplace, + create_computer, + create_post, + create_post_comment, create_user, + create_user_bio, + create_workplace, ) -from tests.misc.utils import fake -from tests.models import ( +from tests.fixtures.models import ( Alpha, Beta, CascadeCase, - Computer, ContainsTimestamp, CustomUUIDItem, Delta, Gamma, - Post, - PostComment, SelfRelationship, - User, - UserBio, - Workplace, ) -from tests.schemas import ( - CascadeCaseSchema, - CustomUserAttributesSchema, - CustomUUIDItemAttributesSchema, - PostAttributesBaseSchema, - PostCommentAttributesBaseSchema, - PostCommentSchema, - PostSchema, - SelfRelationshipAttributesSchema, - SelfRelationshipSchema, - UserAttributesBaseSchema, - UserBioAttributesBaseSchema, - UserInSchemaAllowIdOnPost, - UserPatchSchema, - UserSchema, -) - -pytestmark = mark.asyncio +from tests.fixtures.schemas import CascadeCaseSchema, CustomUUIDItemAttributesSchema, SelfRelationshipAttributesSchema +from tests.misc.utils import fake logging.basicConfig(level=logging.DEBUG) @@ -82,14 +69,14 @@ async def test_root(client: AsyncClient): async def test_get_users(app: FastAPI, client: AsyncClient, user_1: User, user_2: User): url = app.url_path_for("get_user_list") 
response = await client.get(url) - assert response.status_code == status.HTTP_200_OK + assert response.status_code == status.HTTP_200_OK, response.text response_data = response.json() assert "data" in response_data, response_data users_data = response_data["data"] users = [user_1, user_2] assert len(users_data) == len(users) for user_data, user in zip(users_data, users): - assert user_data["id"] == ViewBase.get_db_item_id(user) + assert user_data["id"] == f"{user.id}" assert user_data["type"] == "user" @@ -105,11 +92,11 @@ async def test_get_user_with_bio_relation( assert response.status_code == status.HTTP_200_OK response_data = response.json() assert "data" in response_data, response_data - assert response_data["data"]["id"] == ViewBase.get_db_item_id(user_1) + assert response_data["data"]["id"] == f"{user_1.id}" assert response_data["data"]["type"] == "user" assert "included" in response_data, response_data included_bio = response_data["included"][0] - assert included_bio["id"] == ViewBase.get_db_item_id(user_1_bio) + assert included_bio["id"] == f"{user_1_bio.id}" assert included_bio["type"] == "user_bio" @@ -130,12 +117,12 @@ async def test_get_users_with_bio_relation( users = [user_1, user_2] assert len(users_data) == len(users) for user_data, user in zip(users_data, users): - assert user_data["id"] == ViewBase.get_db_item_id(user) + assert user_data["id"] == f"{user.id}" assert user_data["type"] == "user" assert "included" in response_data, response_data included_bio = response_data["included"][0] - assert included_bio["id"] == ViewBase.get_db_item_id(user_1_bio) + assert included_bio["id"] == f"{user_1_bio.id}" assert included_bio["type"] == "user_bio" @@ -154,28 +141,26 @@ async def test_get_users_paginated( assert response.status_code == status.HTTP_200_OK, response.text response_data = response.json() - assert response_data == { - "data": [ - { - "attributes": UserAttributesBaseSchema.from_orm(user), - "id": str(user.id), - "type": "user", - }, - ], - 
"jsonapi": {"version": "1.0"}, - "meta": {"count": 2, "totalPages": 2}, - } + expected_data = [ + { + "attributes": UserAttributesBaseSchema.model_validate(user).model_dump(), + "id": f"{user.id}", + "type": "user", + }, + ] + assert "data" in response_data + assert response_data["data"] == expected_data - @mark.parametrize( - "fields, expected_include", + @pytest.mark.parametrize( + ("fields", "expected_include"), [ - param( + pytest.param( [ ("fields[user]", "name,age"), ], {"name", "age"}, ), - param( + pytest.param( [ ("fields[user]", "name,age"), ("fields[user]", "email"), @@ -190,14 +175,14 @@ async def test_select_custom_fields( client: AsyncClient, user_1: User, user_2: User, - fields: List[Tuple[str, str]], - expected_include: Set[str], + fields: list[tuple[str, str]], + expected_include: set[str], ): url = app.url_path_for("get_user_list") user_1, user_2 = sorted((user_1, user_2), key=lambda x: x.id) params = QueryParams(fields) - response = await client.get(url, params=str(params)) + response = await client.get(url, params=f"{params}") assert response.status_code == status.HTTP_200_OK, response.text response_data = response.json() @@ -205,13 +190,13 @@ async def test_select_custom_fields( assert response_data == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(include=expected_include), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(include=expected_include), + "id": f"{user_1.id}", "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_2).dict(include=expected_include), - "id": str(user_2.id), + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(include=expected_include), + "id": f"{user_2.id}", "type": "user", }, ], @@ -230,11 +215,11 @@ async def test_select_custom_fields_with_includes( url = app.url_path_for("get_user_list") user_1, user_2 = sorted((user_1, user_2), key=lambda x: x.id) - user_2_post = await 
build_post(async_session, user_2) - user_1_post = await build_post(async_session, user_1) + user_2_post = await create_post(async_session, user_2) + user_1_post = await create_post(async_session, user_1) - user_1_comment = await build_post_comment(async_session, user_1, user_2_post) - user_2_comment = await build_post_comment(async_session, user_2, user_1_post) + user_1_comment = await create_post_comment(async_session, user_1, user_2_post) + user_2_comment = await create_post_comment(async_session, user_2, user_1_post) queried_user_fields = "name" queried_post_fields = "title" @@ -249,7 +234,7 @@ async def test_select_custom_fields_with_includes( ("sort", "id"), ], ) - response = await client.get(url, params=str(params)) + response = await client.get(url, params=f"{params}") assert response.status_code == status.HTTP_200_OK, response.text response_data = response.json() @@ -258,37 +243,37 @@ async def test_select_custom_fields_with_includes( assert response_data == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump( include=set(queried_user_fields.split(",")), ), "relationships": { "posts": { "data": [ { - "id": str(user_1_post.id), + "id": f"{user_1_post.id}", "type": "post", }, ], }, }, - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_2).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump( include=set(queried_user_fields.split(",")), ), "relationships": { "posts": { "data": [ { - "id": str(user_2_post.id), + "id": f"{user_2_post.id}", "type": "post", }, ], }, }, - "id": str(user_2.id), + "id": f"{user_2.id}", "type": "user", }, ], @@ -297,15 +282,15 @@ async def test_select_custom_fields_with_includes( "included": sorted( [ { - "attributes": PostAttributesBaseSchema.from_orm(user_2_post).dict( + "attributes": 
PostAttributesBaseSchema.model_validate(user_2_post).model_dump( include=set(queried_post_fields.split(",")), ), - "id": str(user_2_post.id), + "id": f"{user_2_post.id}", "relationships": { "comments": { "data": [ { - "id": str(user_1_comment.id), + "id": f"{user_1_comment.id}", "type": "post_comment", }, ], @@ -314,23 +299,30 @@ async def test_select_custom_fields_with_includes( "type": "post", }, { - "attributes": PostAttributesBaseSchema.from_orm(user_1_post).dict( + "attributes": PostAttributesBaseSchema.model_validate(user_1_post).model_dump( include=set(queried_post_fields.split(",")), ), - "id": str(user_1_post.id), + "id": f"{user_1_post.id}", "relationships": { - "comments": {"data": [{"id": str(user_2_comment.id), "type": "post_comment"}]}, + "comments": { + "data": [ + { + "id": f"{user_2_comment.id}", + "type": "post_comment", + }, + ], + }, }, "type": "post", }, { "attributes": {}, - "id": str(user_1_comment.id), + "id": f"{user_1_comment.id}", "type": "post_comment", }, { "attributes": {}, - "id": str(user_2_comment.id), + "id": f"{user_2_comment.id}", "type": "post_comment", }, ], @@ -347,7 +339,7 @@ async def test_select_custom_fields_for_includes_without_requesting_includes( url = app.url_path_for("get_user_list") params = QueryParams([("fields[post]", "title")]) - response = await client.get(url, params=str(params)) + response = await client.get(url, params=f"{params}") assert response.status_code == status.HTTP_200_OK, response.text response_data = response.json() @@ -355,8 +347,8 @@ async def test_select_custom_fields_for_includes_without_requesting_includes( assert response_data == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, ], @@ -364,215 +356,6 @@ async def test_select_custom_fields_for_includes_without_requesting_includes( "meta": {"count": 1, "totalPages": 1}, } - def 
_get_clear_mock_calls(self, mock_obj) -> list[call]: - mock_calls = mock_obj.mock_calls - return [call_ for call_ in mock_calls if call_ not in [call.__len__(), call.__str__()]] - - def _prepare_info_schema_calls_to_assert(self, mock_calls) -> list[call]: - calls_to_check = [] - for wrapper_call in mock_calls: - kwargs = wrapper_call.kwargs - kwargs["includes"] = sorted(kwargs["includes"], key=lambda x: x) - - calls_to_check.append( - call( - *wrapper_call.args, - **kwargs, - ), - ) - - return sorted( - calls_to_check, - key=lambda x: (x.kwargs["base_name"], x.kwargs["includes"]), - ) - - async def test_check_get_info_schema_cache( - self, - user_1: User, - ): - resource_type = "user_with_cache" - with suppress(KeyError): - RoutersJSONAPI.all_jsonapi_routers.pop(resource_type) - - app_with_cache = build_app_custom( - model=User, - schema=UserSchema, - schema_in_post=UserInSchemaAllowIdOnPost, - schema_in_patch=UserPatchSchema, - resource_type=resource_type, - # set cache size to enable caching - max_cache_size=128, - ) - - target_func_name = "_get_info_from_schema_for_building" - url = app_with_cache.url_path_for(f"get_{resource_type}_list") - params = { - "include": "posts,posts.comments", - } - - expected_len_with_cache = 6 - expected_len_without_cache = 10 - - with patch.object( - SchemaBuilder, - target_func_name, - wraps=app_with_cache.jsonapi_routers.schema_builder._get_info_from_schema_for_building, - ) as wrapped_func: - async with AsyncClient(app=app_with_cache, base_url="http://test") as client: - response = await client.get(url, params=params) - assert response.status_code == status.HTTP_200_OK, response.text - - calls_to_check = self._prepare_info_schema_calls_to_assert(self._get_clear_mock_calls(wrapped_func)) - - # there are no duplicated calls - assert calls_to_check == sorted( - [ - call( - base_name="UserSchema", - schema=UserSchema, - includes=["posts"], - non_optional_relationships=False, - ), - call( - base_name="UserSchema", - 
schema=UserSchema, - includes=["posts", "posts.comments"], - non_optional_relationships=False, - ), - call( - base_name="PostSchema", - schema=PostSchema, - includes=[], - non_optional_relationships=False, - ), - call( - base_name="PostSchema", - schema=PostSchema, - includes=["comments"], - non_optional_relationships=False, - ), - call( - base_name="PostCommentSchema", - schema=PostCommentSchema, - includes=[], - non_optional_relationships=False, - ), - call( - base_name="PostCommentSchema", - schema=PostCommentSchema, - includes=["posts"], - non_optional_relationships=False, - ), - ], - key=lambda x: (x.kwargs["base_name"], x.kwargs["includes"]), - ) - assert wrapped_func.call_count == expected_len_with_cache - - response = await client.get(url, params=params) - assert response.status_code == status.HTTP_200_OK, response.text - - # there are no new calls - assert wrapped_func.call_count == expected_len_with_cache - - resource_type = "user_without_cache" - with suppress(KeyError): - RoutersJSONAPI.all_jsonapi_routers.pop(resource_type) - - app_without_cache = build_app_custom( - model=User, - schema=UserSchema, - schema_in_post=UserInSchemaAllowIdOnPost, - schema_in_patch=UserPatchSchema, - resource_type=resource_type, - max_cache_size=0, - ) - - with patch.object( - SchemaBuilder, - target_func_name, - wraps=app_without_cache.jsonapi_routers.schema_builder._get_info_from_schema_for_building, - ) as wrapped_func: - async with AsyncClient(app=app_without_cache, base_url="http://test") as client: - response = await client.get(url, params=params) - assert response.status_code == status.HTTP_200_OK, response.text - - calls_to_check = self._prepare_info_schema_calls_to_assert(self._get_clear_mock_calls(wrapped_func)) - - # there are duplicated calls - assert calls_to_check == sorted( - [ - call( - base_name="UserSchema", - schema=UserSchema, - includes=["posts"], - non_optional_relationships=False, - ), - call( - base_name="UserSchema", - schema=UserSchema, - 
includes=["posts"], - non_optional_relationships=False, - ), # duplicate - call( - base_name="UserSchema", - schema=UserSchema, - includes=["posts", "posts.comments"], - non_optional_relationships=False, - ), - call( - base_name="PostSchema", - schema=PostSchema, - includes=[], - non_optional_relationships=False, - ), - call( - base_name="PostSchema", - schema=PostSchema, - includes=[], - non_optional_relationships=False, - ), # duplicate - call( - base_name="PostSchema", - schema=PostSchema, - includes=[], - non_optional_relationships=False, - ), # duplicate - call( - base_name="PostSchema", - schema=PostSchema, - includes=["comments"], - non_optional_relationships=False, - ), - call( - base_name="PostSchema", - schema=PostSchema, - includes=["comments"], - non_optional_relationships=False, - ), # duplicate - call( - base_name="PostCommentSchema", - schema=PostCommentSchema, - includes=[], - non_optional_relationships=False, - ), - call( - base_name="PostCommentSchema", - schema=PostCommentSchema, - includes=["posts"], - non_optional_relationships=False, - ), # duplicate - ], - key=lambda x: (x.kwargs["base_name"], x.kwargs["includes"]), - ) - - assert wrapped_func.call_count == expected_len_without_cache - - response = await client.get(url, params=params) - assert response.status_code == status.HTTP_200_OK, response.text - - # there are new calls - assert wrapped_func.call_count == expected_len_without_cache * 2 - class TestCreatePostAndComments: async def test_get_posts_with_users( @@ -581,8 +364,8 @@ async def test_get_posts_with_users( client: AsyncClient, user_1: User, user_2: User, - user_1_posts: List[Post], - user_2_posts: List[Post], + user_1_posts: list[Post], + user_2_posts: list[Post], ): call( base_name="UserSchema", @@ -609,11 +392,11 @@ async def test_get_posts_with_users( included_users = response_data["included"] assert len(included_users) == len(users) for user_data, user in zip(included_users, users): - assert user_data["id"] == 
ViewBase.get_db_item_id(user) + assert user_data["id"] == f"{user.id}" assert user_data["type"] == "user" for post_data, post in zip(posts_data, posts): - assert post_data["id"] == ViewBase.get_db_item_id(post) + assert post_data["id"] == f"{post.id}" assert post_data["type"] == "post" all_posts_data = list(posts_data) @@ -628,13 +411,12 @@ async def test_get_posts_with_users( assert len(posts_data) == len(posts) idx_start = next_idx - u1_relation = { - "id": ViewBase.get_db_item_id(user), - "type": "user", - } for post_data in posts_data: user_relation = post_data["relationships"]["user"] - assert user_relation["data"] == u1_relation + assert user_relation["data"] == { + "id": f"{user.id}", + "type": "user", + } async def test_create_post_for_user( self, @@ -647,7 +429,7 @@ async def test_create_post_for_user( post_attributes = PostAttributesBaseSchema( title=fake.name(), body=fake.sentence(), - ).dict() + ).model_dump() post_create = { "data": { "attributes": post_attributes, @@ -655,7 +437,7 @@ async def test_create_post_for_user( "user": { "data": { "type": "user", - "id": user_1.id, + "id": f"{user_1.id}", }, }, }, @@ -673,7 +455,7 @@ async def test_create_post_for_user( "user": { "data": { "type": "user", - "id": str(user_1.id), + "id": f"{user_1.id}", }, }, }, @@ -681,9 +463,9 @@ async def test_create_post_for_user( included = response_data["included"] assert included == [ { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(exclude_unset=True), }, ] @@ -696,10 +478,10 @@ async def test_create_comments_for_post( user_1_post: Post, ): url = app.url_path_for("get_post_comment_list") - url = f"{url}?include=author,post,post.user" + url = f"{url}?include=user,post,post.user" comment_attributes = PostCommentAttributesBaseSchema( text=fake.sentence(), - ).dict() + ).model_dump() comment_create = { "data": { 
"attributes": comment_attributes, @@ -707,13 +489,13 @@ async def test_create_comments_for_post( "post": { "data": { "type": "post", - "id": user_1_post.id, + "id": f"{user_1_post.id}", }, }, - "author": { + "user": { "data": { "type": "user", - "id": user_2.id, + "id": f"{user_2.id}", }, }, }, @@ -732,13 +514,13 @@ async def test_create_comments_for_post( "post": { "data": { "type": "post", - "id": str(user_1_post.id), + "id": f"{user_1_post.id}", }, }, - "author": { + "user": { "data": { "type": "user", - "id": str(user_2.id), + "id": f"{user_2.id}", }, }, }, @@ -747,12 +529,12 @@ async def test_create_comments_for_post( assert included == [ { "type": "post", - "id": str(user_1_post.id), - "attributes": PostAttributesBaseSchema.from_orm(user_1_post).dict(), + "id": f"{user_1_post.id}", + "attributes": PostAttributesBaseSchema.model_validate(user_1_post).model_dump(), "relationships": { "user": { "data": { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", }, }, @@ -760,13 +542,13 @@ async def test_create_comments_for_post( }, { "type": "user", - "id": str(user_1.id), - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(), + "id": f"{user_1.id}", + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), }, { "type": "user", - "id": str(user_2.id), - "attributes": UserAttributesBaseSchema.from_orm(user_2).dict(), + "id": f"{user_2.id}", + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(), }, ] @@ -787,7 +569,7 @@ async def test_create_comment_error_no_relationship( url = app.url_path_for("get_post_comment_list") comment_attributes = PostCommentAttributesBaseSchema( text=fake.sentence(), - ).dict() + ).model_dump() comment_create = { "data": { "attributes": comment_attributes, @@ -795,30 +577,19 @@ async def test_create_comment_error_no_relationship( "post": { "data": { "type": "post", - "id": user_1_post.id, + "id": f"{user_1_post.id}", }, }, - # don't pass "author" + # don"t pass "user" }, }, } 
response = await client.post(url, json=comment_create) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text response_data = response.json() - assert response_data == { - "detail": [ - { - "loc": [ - "body", - "data", - "relationships", - "author", - ], - "msg": "field required", - "type": "value_error.missing", - }, - ], - } + detail, *_ = response_data["detail"] + assert detail["loc"] == ["body", "data", "relationships", "user"] + assert detail["msg"] == "Field required" async def test_create_comment_error_no_relationships_content( self, @@ -828,43 +599,24 @@ url = app.url_path_for("get_post_comment_list") comment_attributes = PostCommentAttributesBaseSchema( text=fake.sentence(), - ).dict() + ).model_dump() comment_create = { "data": { "attributes": comment_attributes, "relationships": { - # don't pass "post" - # don't pass "author" + # don't pass "post" + # don't pass "user" }, }, } response = await client.post(url, json=comment_create) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text response_data = response.json() - assert response_data == { - "detail": [ - { - "loc": [ - "body", - "data", - "relationships", - "post", - ], - "msg": "field required", - "type": "value_error.missing", - }, - { - "loc": [ - "body", - "data", - "relationships", - "author", - ], - "msg": "field required", - "type": "value_error.missing", - }, - ], - } + detail_1, detail_2 = response_data["detail"] + assert detail_1["loc"] == ["body", "data", "relationships", "post"] + assert detail_1["msg"] == "Field required" + assert detail_2["loc"] == ["body", "data", "relationships", "user"] + assert detail_2["msg"] == "Field required" async def test_create_comment_error_no_relationships_field( self, @@ -874,29 +626,19 @@ url = app.url_path_for("get_post_comment_list") comment_attributes = 
PostCommentAttributesBaseSchema( text=fake.sentence(), - ).dict() + ).model_dump() comment_create = { "data": { "attributes": comment_attributes, - # don't pass "relationships" at all + # don't pass "relationships" at all }, } response = await client.post(url, json=comment_create) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text response_data = response.json() - assert response_data == { - "detail": [ - { - "loc": [ - "body", - "data", - "relationships", - ], - "msg": "field required", - "type": "value_error.missing", - }, - ], - } + detail, *_ = response_data["detail"] + assert detail["loc"] == ["body", "data", "relationships"] + assert detail["msg"] == "Field required" async def test_get_users_with_all_inner_relations( @@ -907,8 +649,8 @@ async def test_get_users_with_all_inner_relations( user_1_bio: UserBio, user_1_posts, user_1_post_for_comments: Post, - user_2_posts: List[Post], - user_1_comments_for_u2_posts: List[PostComment], + user_2_posts: list[Post], + user_1_comments_for_u2_posts: list[PostComment], user_2_comment_for_one_u1_post: PostComment, ): """ @@ -918,10 +660,10 @@ async def test_get_users_with_all_inner_relations( - bio - posts - posts.comments - - posts.comments.author + - posts.comments.user """ url = app.url_path_for("get_user_list") - url = f"{url}?include=bio,posts,posts.comments,posts.comments.author" + url = f"{url}?include=bio,posts,posts.comments,posts.comments.user" response = await client.get(url) assert response.status_code == status.HTTP_200_OK response_data = response.json() @@ -932,7 +674,7 @@ async def test_get_users_with_all_inner_relations( assert len(users_data) == len(users) assert "included" in response_data, response_data - included: List[Dict] = response_data["included"] + included: list[dict] = response_data["included"] included_data = {association_key(data): data for data in included} @@ -940,7 +682,7 @@ async def test_get_users_with_all_inner_relations( users_data, [(user_1, 
user_1_posts, user_1_bio), (user_2, user_2_posts, None)], ): - assert user_data["id"] == ViewBase.get_db_item_id(user) + assert user_data["id"] == f"{user.id}" assert user_data["type"] == "user" user_relationships = user_data["relationships"] posts_relation = user_relationships["posts"]["data"] @@ -955,17 +697,17 @@ async def test_get_users_with_all_inner_relations( continue assert bio_relation == { - "id": ViewBase.get_db_item_id(user_1_bio), + "id": f"{user_1_bio.id}", "type": "user_bio", } # ! assert posts have expected post comments - for posts, comments, comment_author in [ + for posts, comments, comment_user in [ ([user_1_post_for_comments], [user_2_comment_for_one_u1_post], user_2), (user_2_posts, user_1_comments_for_u2_posts, user_1), ]: for post, post_comment in zip(posts, comments): - post_data = included_data[("post", ViewBase.get_db_item_id(post))] + post_data = included_data[("post", f"{post.id}")] post_relationships = post_data["relationships"] assert "comments" in post_relationships post_comments_relation = post_relationships["comments"]["data"] @@ -973,16 +715,16 @@ async def test_get_users_with_all_inner_relations( assert len(post_comments_relation) == len(post_comments) for comment_relation_data, comment in zip(post_comments_relation, post_comments): assert comment_relation_data == { - "id": ViewBase.get_db_item_id(comment), + "id": f"{comment.id}", "type": "post_comment", } - comment_data = included_data[("post_comment", ViewBase.get_db_item_id(comment))] - assert comment_data["relationships"]["author"]["data"] == { - "id": ViewBase.get_db_item_id(comment_author), + comment_data = included_data[("post_comment", f"{comment.id}")] + assert comment_data["relationships"]["user"]["data"] == { + "id": f"{comment_user.id}", "type": "user", } - assert ("user", ViewBase.get_db_item_id(comment_author)) in included_data + assert ("user", f"{comment_user.id}") in included_data async def test_many_to_many_load_inner_includes_to_parents( @@ -1021,25 +763,25 @@ 
async def test_many_to_many_load_inner_includes_to_parents( (parent_3, []), ], ): - assert parent_data["id"] == ViewBase.get_db_item_id(parent) + assert parent_data["id"] == f"{parent.id}" assert parent_data["type"] == "parent" parent_relationships = parent_data["relationships"] parent_to_children_assocs = parent_relationships["children"]["data"] assert len(parent_to_children_assocs) == len(expected_assocs) for assoc_data, (assoc, child) in zip(parent_to_children_assocs, expected_assocs): - assert assoc_data["id"] == ViewBase.get_db_item_id(assoc) + assert assoc_data["id"] == f"{assoc.id}" assert assoc_data["type"] == "parent_child_association" assoc_key = association_key(assoc_data) assert assoc_key in included_data p_to_c_assoc_data = included_data[assoc_key] assert p_to_c_assoc_data["relationships"]["child"]["data"] == { - "id": ViewBase.get_db_item_id(child), + "id": f"{child.id}", "type": "child", } assert p_to_c_assoc_data["attributes"]["extra_data"] == assoc.extra_data - assert ("child", ViewBase.get_db_item_id(child_4)) not in included_data + assert ("child", f"{child_4.id}") not in included_data class TestGetUserDetail: @@ -1060,10 +802,10 @@ async def test_select_custom_fields( assert response.status_code == status.HTTP_200_OK assert response.json() == { "data": { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump( include=set(queried_user_fields.split(",")), ), - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", }, "jsonapi": {"version": "1.0"}, @@ -1071,29 +813,157 @@ async def test_select_custom_fields( } +class TestUserFetchRelationships: + @staticmethod + def get_url( + app: FastAPI, + user_id: int, + relationship_name: str, + many: bool = False, + ) -> str: + suffix = "list" if many else "detail" + return app.url_path_for(f"fetch_user_{relationship_name}_{suffix}", obj_id=user_id) + + async def test_fetch_to_one_relationship( + self, + app: FastAPI, + 
client: AsyncClient, + user_1: User, + user_1_bio: UserBio, + ): + url = self.get_url(app, user_1.id, "bio") + + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK, response.text + assert response.json() == { + "data": { + "attributes": UserBioAttributesBaseSchema.from_orm(user_1_bio).model_dump(), + "id": str(user_1_bio.id), + "type": "user_bio", + }, + "jsonapi": {"version": "1.0"}, + "meta": None, + } + + async def test_fetch_to_one_relationship_with_include( + self, + app: FastAPI, + client: AsyncClient, + user_1: User, + user_1_bio: UserBio, + ): + url = self.get_url(app, user_1.id, "bio") + params = QueryParams([("include", "user")]) + + response = await client.get(url, params=params) + assert response.status_code == status.HTTP_200_OK, response.text + assert response.json() == { + "data": { + "attributes": UserBioAttributesBaseSchema.from_orm(user_1_bio).model_dump(), + "id": f"{user_1_bio.id}", + "type": "user_bio", + "relationships": {"user": {"data": {"id": f"{user_1.id}", "type": "user"}}}, + }, + "jsonapi": {"version": "1.0"}, + "meta": None, + "included": [ + { + "attributes": UserAttributesBaseSchema.from_orm(user_1).model_dump(), + "id": f"{user_1.id}", + "type": "user", + }, + ], + } + + async def test_fetch_to_many_relationship( + self, + app: FastAPI, + client: AsyncClient, + user_1: User, + user_1_posts: list[Post], + ): + url = self.get_url(app, user_1.id, "posts", many=True) + + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK, response.text + assert response.json() == { + "data": [ + { + "id": f"{post.id}", + "type": "post", + "attributes": PostAttributesBaseSchema.from_orm(post).model_dump(), + } + for post in sorted(user_1_posts, key=lambda post: post.id) + ], + "jsonapi": {"version": "1.0"}, + "meta": {"count": len(user_1_posts), "totalPages": 1}, + } + + async def test_fetch_to_many_relationship_with_include( + self, + app: FastAPI, + client: AsyncClient, + user_1: User, 
+ user_1_posts: list[Post], + ): + url = self.get_url(app, user_1.id, "posts", many=True) + params = QueryParams([("include", "user")]) + + response = await client.get(url, params=params) + assert response.status_code == status.HTTP_200_OK, response.text + assert response.json() == { + "data": [ + { + "id": f"{post.id}", + "type": "post", + "attributes": PostAttributesBaseSchema.from_orm(post).model_dump(), + "relationships": {"user": {"data": {"id": f"{user_1.id}", "type": "user"}}}, + } + for post in sorted(user_1_posts, key=lambda post: post.id) + ], + "jsonapi": {"version": "1.0"}, + "meta": {"count": len(user_1_posts), "totalPages": 1}, + "included": [ + { + "attributes": UserAttributesBaseSchema.from_orm(user_1).model_dump(), + "id": f"{user_1.id}", + "type": "user", + }, + ], + } + + class TestUserWithPostsWithInnerIncludes: - @mark.parametrize( - "include, expected_relationships_inner_relations, expect_user_include", + @pytest.mark.parametrize( + ( + "include", + "expected_relationships_inner_relations", + "expected_users", + ), [ - ( + pytest.param( ["posts", "posts.user"], {"post": ["user"], "user": []}, - False, + ["user_1"], + id="case_0", ), - ( + pytest.param( ["posts", "posts.comments"], {"post": ["comments"], "post_comment": []}, - False, + [], + id="case_1", ), - ( + pytest.param( ["posts", "posts.user", "posts.comments"], {"post": ["user", "comments"], "user": [], "post_comment": []}, - False, + ["user_1"], + id="case_2", ), - ( - ["posts", "posts.user", "posts.comments", "posts.comments.author"], - {"post": ["user", "comments"], "post_comment": ["author"], "user": []}, - True, + pytest.param( + ["posts", "posts.user", "posts.comments", "posts.comments.user"], + {"post": ["user", "comments"], "post_comment": ["user"], "user": []}, + ["user_1", "user_2"], + id="case_3", ), ], ) @@ -1108,7 +978,7 @@ async def test_get_users_with_posts_and_inner_includes( user_2_comment_for_one_u1_post: PostComment, include: list[str], 
expected_relationships_inner_relations: dict[str, list[str]], - expect_user_include: bool, + expected_users: list[str], ): """ Check returned data @@ -1117,7 +987,7 @@ async def test_get_users_with_posts_and_inner_includes( returns posts with both `user` and `comments` """ assert user_1_posts - assert user_2_comment_for_one_u1_post.author_id == user_2.id + assert user_2_comment_for_one_u1_post.user_id == user_2.id include_param = ",".join(include) resource_type = "user" url = app.url_path_for(f"get_{resource_type}_list") @@ -1130,14 +1000,14 @@ async def test_get_users_with_posts_and_inner_includes( assert result_data == [ { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": resource_type, - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), "relationships": { "posts": { "data": [ # relationship info - {"id": str(p.id), "type": "post"} + {"id": f"{p.id}", "type": "post"} # for every post for p in user_1_posts ], @@ -1147,8 +1017,8 @@ async def test_get_users_with_posts_and_inner_includes( ] included_data = response_json["included"] included_as_map = defaultdict(list) - for item in included_data: - included_as_map[item["type"]].append(item) + for included_ in included_data: + included_as_map[included_["type"]].append(included_) for item_type, items in included_as_map.items(): expected_relationships = expected_relationships_inner_relations[item_type] @@ -1163,6 +1033,7 @@ async def test_get_users_with_posts_and_inner_includes( user_2=user_2, user_1_posts=user_1_posts, user_2_comment_for_one_u1_post=user_2_comment_for_one_u1_post, + expected_users=expected_users, ) for item_type, includes_names in expected_relationships_inner_relations.items(): @@ -1179,10 +1050,7 @@ async def test_get_users_with_posts_and_inner_includes( for key in set(expected_includes).difference(expected_relationships_inner_relations): expected_includes.pop(key) - # XXX - if not expect_user_include: - 
expected_includes.pop("user", None) - assert included_as_map == expected_includes + assert dict(included_as_map) == expected_includes def prepare_expected_includes( self, @@ -1190,61 +1058,73 @@ def prepare_expected_includes( user_2: User, user_1_posts: list[PostComment], user_2_comment_for_one_u1_post: PostComment, + expected_users: list[str], ): - expected_includes = { + data = { "post": [ - # { - "id": str(p.id), + "id": f"{p.id}", "type": "post", - "attributes": PostAttributesBaseSchema.from_orm(p).dict(), + "attributes": PostAttributesBaseSchema.model_validate(p).model_dump(), "relationships": { "user": { "data": { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", }, }, "comments": { - "data": [ - { - "id": str(user_2_comment_for_one_u1_post.id), - "type": "post_comment", - }, - ] - if p.id == user_2_comment_for_one_u1_post.post_id - else [], + "data": ( + [ + { + "id": f"{user_2_comment_for_one_u1_post.id}", + "type": "post_comment", + }, + ] + if p.id == user_2_comment_for_one_u1_post.post_id + else [] + ), }, }, } - # for p in user_1_posts ], "post_comment": [ { - "id": str(user_2_comment_for_one_u1_post.id), + "id": f"{user_2_comment_for_one_u1_post.id}", "type": "post_comment", - "attributes": PostCommentAttributesBaseSchema.from_orm(user_2_comment_for_one_u1_post).dict(), + "attributes": PostCommentAttributesBaseSchema.model_validate( + user_2_comment_for_one_u1_post, + ).model_dump(), "relationships": { - "author": { + "user": { "data": { - "id": str(user_2.id), + "id": f"{user_2.id}", "type": "user", }, }, }, }, ], - "user": [ - { - "id": str(user_2.id), - "type": "user", - "attributes": UserAttributesBaseSchema.from_orm(user_2).dict(), - }, - ], } - return expected_includes + if not expected_users: + return data + + users = { + "user_1": { + "id": f"{user_1.id}", + "type": "user", + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + }, + "user_2": { + "id": f"{user_2.id}", + "type": "user", + "attributes": 
UserAttributesBaseSchema.model_validate(user_2).model_dump(), + }, + } + data["user"] = [users[expected_user] for expected_user in expected_users] + return data async def test_method_not_allowed(app: FastAPI, client: AsyncClient): @@ -1262,8 +1142,8 @@ async def test_get_list_view_generic(app: FastAPI, client: AsyncClient, user_1: users_data = response_json["data"] assert len(users_data) == 1, users_data user_data = users_data[0] - assert user_data["id"] == str(user_1.id) - assert user_data["attributes"] == UserAttributesBaseSchema.from_orm(user_1) + assert user_data["id"] == f"{user_1.id}" + assert user_data["attributes"] == UserAttributesBaseSchema.model_validate(user_1).model_dump(exclude_none=True) async def test_get_user_not_found(app: FastAPI, client: AsyncClient): @@ -1274,10 +1154,10 @@ async def test_get_user_not_found(app: FastAPI, client: AsyncClient): assert res.json() == { "errors": [ { - "detail": f"Resource User `{fake_id}` not found", + "detail": f"Resource User `users.id = {fake_id}` not found", "title": "Resource not found.", "status_code": status.HTTP_404_NOT_FOUND, - "meta": {"parameter": "id"}, + "meta": {"pointer": ""}, }, ], } @@ -1291,7 +1171,7 @@ async def test_create_object(self, app: FastAPI, client: AsyncClient): name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict(), + ).model_dump(), }, } url = app.url_path_for("get_user_list") @@ -1312,9 +1192,15 @@ async def test_create_object_with_relationship_and_fetch_include( "attributes": UserBioAttributesBaseSchema( birth_city=fake.word(), favourite_movies=fake.sentence(), - keys_to_ids_list={"foobar": [1, 2, 3], "spameggs": [2, 3, 4]}, - ).dict(), - "relationships": {"user": {"data": {"type": "user", "id": user_1.id}}}, + ).model_dump(), + "relationships": { + "user": { + "data": { + "type": "user", + "id": f"{user_1.id}", + }, + }, + }, }, } url = app.url_path_for("get_user_bio_list") @@ -1330,8 +1216,8 @@ async def test_create_object_with_relationship_and_fetch_include( 
included_user = included_data[0] assert isinstance(included_user, dict), included_user assert included_user["type"] == "user" - assert included_user["id"] == str(user_1.id) - assert included_user["attributes"] == UserAttributesBaseSchema.from_orm(user_1) + assert included_user["id"] == f"{user_1.id}" + assert included_user["attributes"] == UserAttributesBaseSchema.model_validate(user_1).model_dump() async def test_create_object_with_to_many_relationship_and_fetch_include( self, @@ -1346,16 +1232,16 @@ async def test_create_object_with_to_many_relationship_and_fetch_include( name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict(), + ).model_dump(), "relationships": { "computers": { "data": [ { - "id": computer_1.id, + "id": f"{computer_1.id}", "type": "computer", }, { - "id": computer_2.id, + "id": f"{computer_2.id}", "type": "computer", }, ], @@ -1378,11 +1264,11 @@ async def test_create_object_with_to_many_relationship_and_fetch_include( "computers": { "data": [ { - "id": str(computer_1.id), + "id": f"{computer_1.id}", "type": "computer", }, { - "id": str(computer_2.id), + "id": f"{computer_2.id}", "type": "computer", }, ], @@ -1393,12 +1279,12 @@ async def test_create_object_with_to_many_relationship_and_fetch_include( "included": [ { "attributes": {"name": computer_1.name}, - "id": str(computer_1.id), + "id": f"{computer_1.id}", "type": "computer", }, { "attributes": {"name": computer_2.name}, - "id": str(computer_2.id), + "id": f"{computer_2.id}", "type": "computer", }, ], @@ -1420,23 +1306,23 @@ async def test_create_to_one_and_to_many_relationship_at_the_same_time( name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict(), + ).model_dump(), "relationships": { "computers": { "data": [ { - "id": computer_1.id, + "id": f"{computer_1.id}", "type": "computer", }, { - "id": computer_2.id, + "id": f"{computer_2.id}", "type": "computer", }, ], }, "workplace": { "data": { - "id": str(workplace_1.id), + "id": f"{workplace_1.id}", "type": 
"workplace", }, }, @@ -1458,18 +1344,18 @@ async def test_create_to_one_and_to_many_relationship_at_the_same_time( "computers": { "data": [ { - "id": str(computer_1.id), + "id": f"{computer_1.id}", "type": "computer", }, { - "id": str(computer_2.id), + "id": f"{computer_2.id}", "type": "computer", }, ], }, "workplace": { "data": { - "id": str(workplace_1.id), + "id": f"{workplace_1.id}", "type": "workplace", }, }, @@ -1479,17 +1365,17 @@ async def test_create_to_one_and_to_many_relationship_at_the_same_time( "included": [ { "attributes": {"name": computer_1.name}, - "id": str(computer_1.id), + "id": f"{computer_1.id}", "type": "computer", }, { "attributes": {"name": computer_2.name}, - "id": str(computer_2.id), + "id": f"{computer_2.id}", "type": "computer", }, { "attributes": {"name": workplace_1.name}, - "id": str(workplace_1.id), + "id": f"{workplace_1.id}", "type": "workplace", }, ], @@ -1504,7 +1390,7 @@ async def test_create_user(self, app: FastAPI, client: AsyncClient): name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict(), + ).model_dump(), }, } url = app.url_path_for("get_user_list") @@ -1521,11 +1407,11 @@ async def test_create_user_and_fetch_data(self, app: FastAPI, client: AsyncClien name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict(), + ).model_dump(), }, } app.url_path_for("get_user_list") - res = await client.post("/users", json=create_user_body) + res = await client.post("/users/", json=create_user_body) assert res.status_code == status.HTTP_201_CREATED, res.text response_data = res.json() assert "data" in response_data, response_data @@ -1533,7 +1419,7 @@ async def test_create_user_and_fetch_data(self, app: FastAPI, client: AsyncClien user_id = response_data["data"]["id"] - res = await client.get(f"/users/{user_id}") + res = await client.get(f"/users/{user_id}/") assert res.status_code == status.HTTP_200_OK, res.text response_data = res.json() assert "data" in response_data, response_data @@ -1550,7 +1436,7 @@ async def 
test_create_id_by_client(self): resource_type=resource_type, ) - new_id = str(fake.pyint(100, 999)) + new_id = f"{fake.pyint(100, 999)}" attrs = UserAttributesBaseSchema( name=fake.name(), age=fake.pyint(), @@ -1558,7 +1444,7 @@ async def test_create_id_by_client(self): ) create_user_body = { "data": { - "attributes": attrs.dict(), + "attributes": attrs.model_dump(), "id": new_id, }, } @@ -1569,7 +1455,7 @@ async def test_create_id_by_client(self): assert res.status_code == status.HTTP_201_CREATED, res.text assert res.json() == { "data": { - "attributes": attrs.dict(), + "attributes": attrs.model_dump(), "id": new_id, "type": resource_type, }, @@ -1593,13 +1479,13 @@ async def test_create_id_by_client_uuid_type( """ resource_type = "custom_uuid_item" - new_id = str(uuid4()) + new_id = f"{uuid4()}" create_attributes = CustomUUIDItemAttributesSchema( extra_id=uuid4(), ) create_body = { "data": { - "attributes": loads(create_attributes.json()), + "attributes": json.loads(create_attributes.json()), "id": new_id, }, } @@ -1609,7 +1495,7 @@ async def test_create_id_by_client_uuid_type( assert res.status_code == status.HTTP_201_CREATED, res.text assert res.json() == { "data": { - "attributes": loads(create_attributes.json()), + "attributes": json.loads(create_attributes.json()), "id": new_id, "type": resource_type, }, @@ -1621,7 +1507,7 @@ async def test_create_with_relationship_to_the_same_table(self): resource_type = "self_relationship" app = build_app_custom( model=SelfRelationship, - schema=SelfRelationshipSchema, + schema=SelfRelationshipAttributesSchema, resource_type=resource_type, ) @@ -1639,7 +1525,8 @@ async def test_create_with_relationship_to_the_same_table(self): response_json = res.json() assert response_json["data"] - assert (parent_object_id := response_json["data"].get("id")) + parent_object_id = response_json["data"].get("id") + assert parent_object_id assert response_json == { "data": { "attributes": { @@ -1676,7 +1563,9 @@ async def 
test_create_with_relationship_to_the_same_table(self): assert (child_object_id := response_json["data"].get("id")) assert res.json() == { "data": { - "attributes": {"name": "child"}, + "attributes": { + "name": "child", + }, "id": child_object_id, "relationships": { "parent_object": { @@ -1690,7 +1579,9 @@ async def test_create_with_relationship_to_the_same_table(self): }, "included": [ { - "attributes": {"name": "parent"}, + "attributes": { + "name": "parent", + }, "id": parent_object_id, "type": "self_relationship", }, @@ -1728,16 +1619,15 @@ class ContainsTimestampAttrsSchema(BaseModel): assert res.status_code == status.HTTP_201_CREATED, res.text response_json = res.json() - assert (entity_id := response_json["data"]["id"]) - assert response_json == { - "meta": None, - "jsonapi": {"version": "1.0"}, - "data": { - "type": resource_type, - "attributes": {"timestamp": create_timestamp.isoformat()}, - "id": entity_id, - }, - } + entity_id = response_json["data"]["id"] + assert entity_id + assert ( + # rec + ContainsTimestampAttrsSchema(**response_json["data"]["attributes"]) + == + # ex + ContainsTimestampAttrsSchema(timestamp=create_timestamp) + ) # noinspection PyTypeChecker stms = select(ContainsTimestamp).where(ContainsTimestamp.id == int(entity_id)) @@ -1756,23 +1646,20 @@ class ContainsTimestampAttrsSchema(BaseModel): "val": create_timestamp.isoformat(), }, ], - ), + ).decode(), } # successfully filtered res = await client.get(url, params=params) assert res.status_code == status.HTTP_200_OK, res.text - assert res.json() == { - "meta": {"count": 1, "totalPages": 1}, - "jsonapi": {"version": "1.0"}, - "data": [ - { - "type": resource_type, - "attributes": {"timestamp": expected_response_timestamp}, - "id": entity_id, - }, - ], - } + response_json = res.json() + assert ( + # rec + ContainsTimestampAttrsSchema(**response_json["data"][0]["attributes"]) + == + # ex + ContainsTimestampAttrsSchema(timestamp=expected_response_timestamp) + ) # check filter really work 
params = { @@ -1784,7 +1671,7 @@ class ContainsTimestampAttrsSchema(BaseModel): "val": datetime.now(tz=timezone.utc).isoformat(), }, ], - ), + ).decode(), } res = await client.get(url, params=params) assert res.status_code == status.HTTP_200_OK, res.text @@ -1802,7 +1689,7 @@ async def test_select_custom_fields(self, app: FastAPI, client: AsyncClient): ) create_user_body = { "data": { - "attributes": user_attrs_schema.dict(), + "attributes": user_attrs_schema.model_dump(), }, } queried_user_fields = "name" @@ -1816,7 +1703,7 @@ async def test_select_custom_fields(self, app: FastAPI, client: AsyncClient): assert response_data["data"].pop("id") assert response_data == { "data": { - "attributes": user_attrs_schema.dict(include=set(queried_user_fields.split(","))), + "attributes": user_attrs_schema.model_dump(include=set(queried_user_fields.split(","))), "type": "user", }, "jsonapi": {"version": "1.0"}, @@ -1835,11 +1722,11 @@ async def test_patch_object( name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict() + ).model_dump() patch_user_body = { "data": { - "id": user_1.id, + "id": f"{user_1.id}", "attributes": new_attrs, }, } @@ -1850,46 +1737,13 @@ async def test_patch_object( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", }, "jsonapi": {"version": "1.0"}, "meta": None, } - async def test_do_nothing_with_field_not_presented_in_model( - self, - user_1: User, - ): - class UserPatchSchemaWithExtraAttribute(UserPatchSchema): - attr_which_is_not_presented_in_model: str - - resource_type = "user_custom_a" - app = build_app_custom( - model=User, - schema=UserSchema, - schema_in_post=UserPatchSchemaWithExtraAttribute, - schema_in_patch=UserPatchSchemaWithExtraAttribute, - resource_type=resource_type, - ) - new_attrs = UserPatchSchemaWithExtraAttribute( - name=fake.name(), - age=fake.pyint(), - email=fake.email(), - attr_which_is_not_presented_in_model=fake.name(), - ).dict() - - 
patch_user_body = { - "data": { - "id": user_1.id, - "attributes": new_attrs, - }, - } - async with AsyncClient(app=app, base_url="http://test") as client: - url = app.url_path_for(f"update_{resource_type}_detail", obj_id=user_1.id) - res = await client.patch(url, json=patch_user_body) - assert res.status_code == status.HTTP_200_OK, res.text - async def test_update_schema_has_extra_fields(self, user_1: User, caplog): resource_type = "user_extra_fields" app = build_app_custom( @@ -1907,8 +1761,8 @@ async def test_update_schema_has_extra_fields(self, user_1: User, caplog): ) create_body = { "data": { - "attributes": new_attributes.dict(), - "id": user_1.id, + "attributes": new_attributes.model_dump(), + "id": f"{user_1.id}", }, } @@ -1916,24 +1770,8 @@ async def test_update_schema_has_extra_fields(self, user_1: User, caplog): url = app.url_path_for(f"update_{resource_type}_detail", obj_id=user_1.id) res = await client.patch(url, json=create_body) - assert res.status_code == status.HTTP_200_OK, res.text - assert res.json() == { - "data": { - "attributes": UserAttributesBaseSchema(**new_attributes.dict()).dict(), - "id": str(user_1.id), - "type": resource_type, - }, - "jsonapi": {"version": "1.0"}, - "meta": None, - } - - messages = [x.message for x in caplog.get_records("call") if x.levelno == logging.WARNING] - messages.sort() - for log_message, expected in zip_longest( - messages, - sorted([f"No field {name!r}" for name in ("spam", "eggs")]), - ): - assert expected in log_message + assert res.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR + assert res.json()["errors"][0]["detail"] == "No fields `spam` on `User`. Make sure schema conforms model." 
async def test_select_custom_fields( self, @@ -1949,8 +1787,8 @@ async def test_select_custom_fields( patch_user_body = { "data": { - "id": user_1.id, - "attributes": new_attrs.dict(), + "id": f"{user_1.id}", + "attributes": new_attrs.model_dump(), }, } queried_user_fields = "name" @@ -1961,19 +1799,22 @@ async def test_select_custom_fields( assert res.status_code == status.HTTP_200_OK, res.text assert res.json() == { "data": { - "attributes": new_attrs.dict(include=set(queried_user_fields.split(","))), - "id": str(user_1.id), + "attributes": new_attrs.model_dump(include=set(queried_user_fields.split(","))), + "id": f"{user_1.id}", "type": "user", }, "jsonapi": {"version": "1.0"}, "meta": None, } - @mark.parametrize("check_type", ["ok", "fail"]) - async def test_update_to_many_relationships(self, async_session: AsyncSession, check_type: Literal["ok", "fail"]): + @pytest.mark.parametrize("check_type", ["ok", "fail"]) + async def test_update_to_many_relationships( + self, + async_session: AsyncSession, + check_type: Literal["ok", "fail"], + # clear_schemas_storage, + ): resource_type = "cascade_case" - with suppress(KeyError): - RoutersJSONAPI.all_jsonapi_routers.pop(resource_type) app = build_app_custom( model=CascadeCase, @@ -2006,18 +1847,18 @@ async def test_update_to_many_relationships(self, async_session: AsyncSession, c update_body = { "type": resource_type, "data": { - "id": new_top_item.id, + "id": f"{new_top_item.id}", "attributes": {}, "relationships": { "sub_items": { "data": [ { "type": resource_type, - "id": sub_item_1.id, + "id": f"{sub_item_1.id}", }, { "type": resource_type, - "id": sub_item_2.id, + "id": f"{sub_item_2.id}", }, ], }, @@ -2064,17 +1905,17 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict() + ).model_dump() patch_user_body = { "data": { - "id": user_1.id, + "id": f"{user_1.id}", "attributes": new_attrs, "relationships": { "workplace": { "data": { 
"type": "workplace", - "id": workplace_1.id, + "id": f"{workplace_1.id}", }, }, }, @@ -2090,12 +1931,12 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": f"{user_1.id}", "relationships": { "workplace": { "data": { "type": "workplace", - "id": str(workplace_1.id), + "id": f"{workplace_1.id}", }, }, }, @@ -2103,8 +1944,10 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( }, "included": [ { - "attributes": {"name": workplace_1.name}, - "id": str(workplace_1.id), + "attributes": { + "name": workplace_1.name, + }, + "id": f"{workplace_1.id}", "type": "workplace", }, ], @@ -2112,7 +1955,7 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( "meta": None, } - patch_user_body["data"]["relationships"]["workplace"]["data"]["id"] = workplace_2.id + patch_user_body["data"]["relationships"]["workplace"]["data"]["id"] = f"{workplace_2.id}" # update relationship with patch endpoint res = await client.patch(url, json=patch_user_body) @@ -2121,12 +1964,12 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": f"{user_1.id}", "relationships": { "workplace": { "data": { "type": "workplace", - "id": str(workplace_2.id), + "id": f"{workplace_2.id}", }, }, }, @@ -2134,8 +1977,10 @@ async def test_ok_when_foreign_key_of_related_object_is_nullable( }, "included": [ { - "attributes": {"name": workplace_2.name}, - "id": str(workplace_2.id), + "attributes": { + "name": workplace_2.name, + }, + "id": f"{workplace_2.id}", "type": "workplace", }, ], @@ -2157,13 +2002,13 @@ async def test_fail_to_bind_relationship_with_constraint( patch_user_bio_body = { "data": { - "id": user_1_bio.id, - "attributes": UserBioAttributesBaseSchema.from_orm(user_1_bio).dict(), + "id": f"{user_1_bio.id}", + "attributes": 
UserBioAttributesBaseSchema.model_validate(user_1_bio).model_dump(), "relationships": { "user": { "data": { "type": "user", - "id": user_2.id, + "id": f"{user_2.id}", }, }, }, @@ -2177,12 +2022,12 @@ async def test_fail_to_bind_relationship_with_constraint( assert res.json() == { "errors": [ { - "detail": "Object update error", + "detail": "Could not update object", "source": {"pointer": "/data"}, "status_code": status.HTTP_400_BAD_REQUEST, "title": "Bad Request", "meta": { - "id": str(user_1_bio.id), + "id": f"{user_1_bio.id}", "type": "user_bio", }, }, @@ -2199,12 +2044,12 @@ async def test_relationship_not_found( name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict() + ).model_dump() fake_relationship_id = "1" patch_user_body = { "data": { - "id": user_1.id, + "id": f"{user_1.id}", "attributes": new_attrs, "relationships": { "workplace": { @@ -2226,8 +2071,8 @@ async def test_relationship_not_found( assert res.json() == { "errors": [ { - "detail": f"Workplace.id: {fake_relationship_id} not found", - "source": {"pointer": ""}, + "detail": f"Objects for Workplace with ids: ['{fake_relationship_id}'] not found", + "source": {"pointer": "/data"}, "status_code": status.HTTP_404_NOT_FOUND, "title": "Related object not found.", }, @@ -2240,22 +2085,20 @@ async def test_update_resource_error_same_id( client: AsyncClient, user_1: User, ): - user_id = user_1.id - another_id = 0 patch_user_body = { "data": { - "id": user_id, - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(), + "id": f"{user_1.id}", + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), }, } - url = app.url_path_for("get_user_detail", obj_id=another_id) + url = app.url_path_for("get_user_detail", obj_id=0) res = await client.patch(url, json=patch_user_body) assert res.status_code == status.HTTP_400_BAD_REQUEST, res.text assert res.json() == { "errors": [ { - "detail": "obj_id and data.id should be same", + "detail": "obj_id and data.id should be same.", 
"source": {"pointer": "/data/id"}, "status_code": status.HTTP_400_BAD_REQUEST, "title": "Bad Request", @@ -2265,12 +2108,10 @@ async def test_update_resource_error_same_id( async def test_remove_to_one_relationship_using_by_update(self, async_session: AsyncSession): resource_type = "self_relationship" - with suppress(KeyError): - RoutersJSONAPI.all_jsonapi_routers.pop(resource_type) app = build_app_custom( model=SelfRelationship, - schema=SelfRelationshipSchema, + schema=SelfRelationshipAttributesSchema, resource_type=resource_type, ) @@ -2285,7 +2126,7 @@ async def test_remove_to_one_relationship_using_by_update(self, async_session: A expected_name = fake.name() update_body = { "data": { - "id": str(child_obj.id), + "id": f"{child_obj.id}", "attributes": { "name": expected_name, }, @@ -2304,9 +2145,13 @@ async def test_remove_to_one_relationship_using_by_update(self, async_session: A assert res.status_code == status.HTTP_200_OK, res.text assert res.json() == { "data": { - "attributes": SelfRelationshipAttributesSchema(name=expected_name).dict(), - "id": str(child_obj.id), - "relationships": {"parent_object": {"data": None}}, + "attributes": SelfRelationshipAttributesSchema(name=expected_name).model_dump(exclude_unset=True), + "id": f"{child_obj.id}", + "relationships": { + "parent_object": { + "data": None, + }, + }, "type": "self_relationship", }, "included": [], @@ -2331,11 +2176,11 @@ async def test_ok( name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict() + ).model_dump() patch_user_body = { "data": { - "id": user_1.id, + "id": f"{user_1.id}", "attributes": new_attrs, "relationships": { "computers": { @@ -2343,12 +2188,12 @@ async def test_ok( { "type": "computer", # test id as int - "id": computer_1.id, + "id": f"{computer_1.id}", }, { "type": "computer", # test id as str - "id": str(computer_2.id), + "id": f"{computer_2.id}", }, ], }, @@ -2364,17 +2209,17 @@ async def test_ok( assert res.json() == { "data": { "attributes": new_attrs, - "id": 
str(user_1.id), + "id": f"{user_1.id}", "relationships": { "computers": { "data": [ { "type": "computer", - "id": str(computer_1.id), + "id": f"{computer_1.id}", }, { "type": "computer", - "id": str(computer_2.id), + "id": f"{computer_2.id}", }, ], }, @@ -2383,13 +2228,17 @@ async def test_ok( }, "included": [ { - "attributes": {"name": computer_1.name}, - "id": str(computer_1.id), + "attributes": { + "name": computer_1.name, + }, + "id": f"{computer_1.id}", "type": "computer", }, { - "attributes": {"name": computer_2.name}, - "id": str(computer_2.id), + "attributes": { + "name": computer_2.name, + }, + "id": f"{computer_2.id}", "type": "computer", }, ], @@ -2401,7 +2250,7 @@ async def test_ok( "data": [ { "type": "computer", - "id": str(computer_1.id), + "id": f"{computer_1.id}", }, ], } @@ -2413,13 +2262,13 @@ async def test_ok( assert res.json() == { "data": { "attributes": new_attrs, - "id": str(user_1.id), + "id": f"{user_1.id}", "relationships": { "computers": { "data": [ { "type": "computer", - "id": str(computer_1.id), + "id": f"{computer_1.id}", }, ], }, @@ -2428,8 +2277,10 @@ async def test_ok( }, "included": [ { - "attributes": {"name": computer_1.name}, - "id": str(computer_1.id), + "attributes": { + "name": computer_1.name, + }, + "id": f"{computer_1.id}", "type": "computer", }, ], @@ -2449,25 +2300,25 @@ async def test_relationship_not_found( name=fake.name(), age=fake.pyint(), email=fake.email(), - ).dict() + ).model_dump() fake_computer_id = fake.pyint(min_value=1000, max_value=9999) assert fake_computer_id != computer_2.id patch_user_body = { "data": { - "id": user_1.id, + "id": f"{user_1.id}", "attributes": new_attrs, "relationships": { "computers": { "data": [ { "type": "computer", - "id": str(computer_1.id), + "id": f"{computer_1.id}", }, { "type": "computer", - "id": fake_computer_id, + "id": f"{fake_computer_id}", }, ], }, @@ -2484,7 +2335,7 @@ async def test_relationship_not_found( assert res.json() == { "errors": [ { - "detail": "Objects for 
Computer with ids: {" + str(fake_computer_id) + "} not found", + "detail": f"Objects for Computer with ids: ['{fake_computer_id}'] not found", "source": {"pointer": "/data"}, "status_code": status.HTTP_404_NOT_FOUND, "title": "Related object not found.", @@ -2494,12 +2345,10 @@ async def test_relationship_not_found( async def test_remove_to_many_relationship_using_by_update(self, async_session: AsyncSession): resource_type = "self_relationship" - with suppress(KeyError): - RoutersJSONAPI.all_jsonapi_routers.pop(resource_type) app = build_app_custom( model=SelfRelationship, - schema=SelfRelationshipSchema, + schema=SelfRelationshipAttributesSchema, resource_type=resource_type, ) @@ -2517,13 +2366,13 @@ async def test_remove_to_many_relationship_using_by_update(self, async_session: expected_name = fake.name() update_body = { "data": { - "id": str(parent_obj.id), + "id": f"{parent_obj.id}", "attributes": { "name": expected_name, }, "relationships": { "children_objects": { - "data": None, + "data": [], }, }, }, @@ -2536,8 +2385,8 @@ async def test_remove_to_many_relationship_using_by_update(self, async_session: assert res.status_code == status.HTTP_200_OK, res.text assert res.json() == { "data": { - "attributes": SelfRelationshipAttributesSchema(name=expected_name).dict(), - "id": str(parent_obj.id), + "attributes": SelfRelationshipAttributesSchema(name=expected_name).model_dump(exclude_unset=True), + "id": f"{parent_obj.id}", "relationships": {"children_objects": {"data": []}}, "type": "self_relationship", }, @@ -2585,7 +2434,7 @@ async def test_delete_objects_many( user_3: User, ): params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "id", @@ -2596,7 +2445,7 @@ async def test_delete_objects_many( ], }, ], - ), + ).decode(), } url = app.url_path_for("get_user_list") @@ -2605,13 +2454,13 @@ async def test_delete_objects_many( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1), - "id": str(user_1.id), + 
"attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_3), - "id": str(user_3.id), + "attributes": UserAttributesBaseSchema.model_validate(user_3).model_dump(), + "id": f"{user_3.id}", "type": "user", }, ], @@ -2624,8 +2473,8 @@ async def test_delete_objects_many( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_2), - "id": str(user_2.id), + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(), + "id": f"{user_2.id}", "type": "user", }, ], @@ -2641,24 +2490,29 @@ async def test_select_custom_fields( user_2: User, ): queried_user_fields = "name" - params = QueryParams([("fields[user]", queried_user_fields)]) + params = QueryParams( + [ + ("fields[user]", queried_user_fields), + ("sort", "id"), + ], + ) url = app.url_path_for("get_user_list") res = await client.delete(url, params=params) assert res.status_code == status.HTTP_200_OK, res.text assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump( include=set(queried_user_fields.split(",")), ), - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_2).dict( + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump( include=set(queried_user_fields.split(",")), ), - "id": str(user_2.id), + "id": f"{user_2.id}", "type": "user", }, ], @@ -2668,8 +2522,6 @@ async def test_select_custom_fields( async def test_cascade_delete(self, async_session: AsyncSession): resource_type = "cascade_case" - with suppress(KeyError): - RoutersJSONAPI.all_jsonapi_routers.pop(resource_type) app = build_app_custom( model=CascadeCase, @@ -2720,7 +2572,7 @@ async def test_openapi_endpoint_ok(self, client: AsyncClient, app: FastAPI): async def 
test_openapi_for_client_can_set_id(self): class Schema(BaseModel): - id: UUID = Field(client_can_set_id=True) + id: Annotated[UUID, ClientCanSetId()] app = build_app_custom( model=User, @@ -2746,7 +2598,7 @@ async def test_filters_really_works( params = {"filter[name]": fake_name} assert user_1.name != fake_name assert user_2.name != fake_name - res = await client.get("/users", params=params) + res = await client.get("/users/", params=params) assert res.status_code == status.HTTP_200_OK, res.text assert res.json() == { "data": [], @@ -2754,7 +2606,10 @@ async def test_filters_really_works( "meta": {"count": 0, "totalPages": 1}, } - @mark.parametrize("field_name", [param(name, id=name) for name in ["id", "name", "age", "email"]]) + @pytest.mark.parametrize( + "field_name", + [pytest.param(name, id=name) for name in ["id", "name", "age", "email"]], + ) async def test_field_filters( self, app: FastAPI, @@ -2773,8 +2628,8 @@ async def test_field_filters( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, ], @@ -2805,8 +2660,8 @@ async def test_several_field_filters_at_the_same_time( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, ], @@ -2835,11 +2690,11 @@ async def test_field_filters_with_values_from_different_models( "meta": {"count": 0, "totalPages": 1}, } - @mark.parametrize( + @pytest.mark.parametrize( ("filter_dict", "expected_email_is_null"), [ - param([{"name": "email", "op": "is_", "val": None}], True), - param([{"name": "email", "op": "isnot", "val": None}], False), + pytest.param([{"name": "email", "op": "is_", "val": None}], True), + pytest.param([{"name": "email", "op": 
"isnot", "val": None}], False), ], ) async def test_filter_by_null( @@ -2858,7 +2713,7 @@ async def test_filter_by_null( target_user = user_2 if expected_email_is_null else user_1 url = app.url_path_for("get_user_list") - params = {"filter": dumps(filter_dict)} + params = {"filter": json.dumps(filter_dict).decode()} response = await client.get(url, params=params) assert response.status_code == status.HTTP_200_OK, response.text @@ -2866,7 +2721,7 @@ async def test_filter_by_null( response_json = response.json() assert len(data := response_json["data"]) == 1 - assert data[0]["id"] == str(target_user.id) + assert data[0]["id"] == f"{target_user.id}" assert data[0]["attributes"]["email"] == target_user.email async def test_filter_by_null_error_when_null_is_not_possible_value( @@ -2891,7 +2746,7 @@ class UserWithNotNullableEmailSchema(UserSchema): url = app.url_path_for(f"get_{resource_type}_list") params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "email", @@ -2899,7 +2754,7 @@ class UserWithNotNullableEmailSchema(UserSchema): "val": None, }, ], - ), + ).decode(), } async with AsyncClient(app=app, base_url="http://test") as client: @@ -2908,7 +2763,7 @@ class UserWithNotNullableEmailSchema(UserSchema): assert response.json() == { "errors": [ { - "detail": "The field `email` can't be null", + "detail": "The field `email` can't be null.", "source": {"parameter": "filters"}, "status_code": status.HTTP_400_BAD_REQUEST, "title": "Invalid filters querystring parameter.", @@ -2926,18 +2781,8 @@ async def test_custom_sql_filter_lower_string( assert user_1.id != user_2.id - def lower_equals_sql_filter( - schema_field: ModelField, - model_column: InstrumentedAttribute, - value: str, - operator: str, - ): - return func.lower(model_column) == func.lower(value) - class UserWithEmailFieldSchema(UserAttributesBaseSchema): - email: str = Field( - _lower_equals_sql_filter_=lower_equals_sql_filter, - ) + email: Annotated[str, sql_filter_lower_equals] app = 
build_app_custom( model=User, @@ -2949,7 +2794,7 @@ class UserWithEmailFieldSchema(UserAttributesBaseSchema): user_1.email = f"{name.upper()}@{domain}" await async_session.commit() params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "email", @@ -2957,7 +2802,7 @@ class UserWithEmailFieldSchema(UserAttributesBaseSchema): "val": f"{name}@{domain.upper()}", }, ], - ), + ).decode(), } url = app.url_path_for(f"get_{resource_type}_list") async with AsyncClient(app=app, base_url="http://test") as client: @@ -2967,9 +2812,9 @@ class UserWithEmailFieldSchema(UserAttributesBaseSchema): assert len(response_data) == 1 assert response_data[0] == { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": resource_type, - "attributes": UserWithEmailFieldSchema.from_orm(user_1).dict(), + "attributes": UserWithEmailFieldSchema.model_validate(user_1).model_dump(), } async def test_custom_sql_filter_lower_string_old_style_with_joins( @@ -2983,18 +2828,8 @@ async def test_custom_sql_filter_lower_string_old_style_with_joins( assert user_1.id != user_2.id - def lower_equals_sql_filter( - schema_field: ModelField, - model_column: InstrumentedAttribute, - value: str, - operator: str, - ): - return func.lower(model_column) == func.lower(value), [] - class UserWithEmailFieldFilterSchema(UserAttributesBaseSchema): - email: str = Field( - _lower_equals_sql_filter_=lower_equals_sql_filter, - ) + email: Annotated[str, sql_filter_lower_equals] app = build_app_custom( model=User, @@ -3006,7 +2841,7 @@ class UserWithEmailFieldFilterSchema(UserAttributesBaseSchema): user_1.email = f"{name.upper()}@{domain}" await async_session.commit() params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "email", @@ -3014,7 +2849,7 @@ class UserWithEmailFieldFilterSchema(UserAttributesBaseSchema): "val": f"{name}@{domain.upper()}", }, ], - ), + ).decode(), } url = app.url_path_for(f"get_{resource_type}_list") async with AsyncClient(app=app, base_url="http://test") as client: @@ -3024,16 
+2859,10 @@ class UserWithEmailFieldFilterSchema(UserAttributesBaseSchema): assert len(response_data) == 1 assert response_data[0] == { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": resource_type, - "attributes": UserWithEmailFieldFilterSchema.from_orm(user_1).dict(), + "attributes": UserWithEmailFieldFilterSchema.model_validate(user_1).model_dump(), } - assert any( - # str from logs - "Please return only filter expression from now on" in record.msg - # check all records - for record in caplog.records - ) async def test_custom_sql_filter_invalid_result( self, @@ -3043,18 +2872,8 @@ async def test_custom_sql_filter_invalid_result( ): resource_type = "user_with_custom_invalid_sql_filter" - def returns_invalid_number_of_params_filter( - schema_field: ModelField, - model_column: InstrumentedAttribute, - value: str, - operator: str, - ): - return 1, 2, 3 - class UserWithInvalidEmailFieldFilterSchema(UserAttributesBaseSchema): - email: str = Field( - _custom_broken_filter_sql_filter_=returns_invalid_number_of_params_filter, - ) + email: str app = build_app_custom( model=User, @@ -3062,16 +2881,18 @@ class UserWithInvalidEmailFieldFilterSchema(UserAttributesBaseSchema): resource_type=resource_type, ) + field_name = "email" + field_op = "custom_broken_filter" params = { - "filter": dumps( + "filter": json.dumps( [ { - "name": "email", - "op": "custom_broken_filter", + "name": field_name, + "op": field_op, "val": "qwerty", }, ], - ), + ).decode(), } url = app.url_path_for(f"get_{resource_type}_list") async with AsyncClient(app=app, base_url="http://test") as client: @@ -3080,7 +2901,7 @@ class UserWithInvalidEmailFieldFilterSchema(UserAttributesBaseSchema): assert response.json() == { "errors": [ { - "detail": "Custom sql filter backend error.", + "detail": f"Field {field_name!r} has no operator {field_op!r}", "source": {"parameter": "filters"}, "status_code": status.HTTP_400_BAD_REQUEST, "title": "Invalid filters querystring parameter.", @@ -3097,7 +2918,7 @@ 
async def test_composite_filter_by_one_field( user_3: User, ): params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "id", @@ -3108,7 +2929,7 @@ async def test_composite_filter_by_one_field( ], }, ], - ), + ).decode(), } url = app.url_path_for("get_user_list") @@ -3117,13 +2938,13 @@ async def test_composite_filter_by_one_field( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_3), - "id": str(user_3.id), + "attributes": UserAttributesBaseSchema.model_validate(user_3).model_dump(), + "id": f"{user_3.id}", "type": "user", }, ], @@ -3140,7 +2961,7 @@ async def test_composite_filter_by_several_fields( user_3: User, ): params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "id", @@ -3156,7 +2977,7 @@ async def test_composite_filter_by_several_fields( "val": user_1.name, }, ], - ), + ).decode(), } url = app.url_path_for("get_user_list") @@ -3165,8 +2986,8 @@ async def test_composite_filter_by_several_fields( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, ], @@ -3183,7 +3004,7 @@ async def test_composite_filter_with_mutually_exclusive_conditions( user_3: User, ): params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "id", @@ -3194,12 +3015,12 @@ async def test_composite_filter_with_mutually_exclusive_conditions( ], }, { - "name": "name", + "name": "id", "op": "eq", "val": user_2.id, }, ], - ), + ).decode(), } url = app.url_path_for("get_user_list") @@ -3220,10 +3041,10 @@ async def test_filter_with_nested_conditions( workplace_name = "Common workplace name" workplace_1, workplace_2, workplace_3, workplace_4 
= ( - await build_workplace(async_session, name=workplace_name), - await build_workplace(async_session, name=workplace_name), - await build_workplace(async_session, name=workplace_name), - await build_workplace(async_session, name=workplace_name), + await create_workplace(async_session, name=workplace_name), + await create_workplace(async_session, name=workplace_name), + await create_workplace(async_session, name=workplace_name), + await create_workplace(async_session, name=workplace_name), ) user_1, user_2, _, user_4 = ( @@ -3234,7 +3055,7 @@ async def test_filter_with_nested_conditions( ) params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "workplace.name", @@ -3267,7 +3088,7 @@ async def test_filter_with_nested_conditions( ], }, ], - ), + ).decode(), } url = app.url_path_for("get_user_list") @@ -3276,18 +3097,18 @@ async def test_filter_with_nested_conditions( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_2), - "id": str(user_2.id), + "attributes": UserAttributesBaseSchema.model_validate(user_2).model_dump(), + "id": f"{user_2.id}", "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_4), - "id": str(user_4.id), + "attributes": UserAttributesBaseSchema.model_validate(user_4).model_dump(), + "id": f"{user_4.id}", "type": "user", }, ], @@ -3307,18 +3128,18 @@ async def test_join_by_relationships_does_not_duplicating_response_entities( comment_1 = PostComment( text=text, post_id=user_1_post.id, - author_id=user_1.id, + user_id=user_1.id, ) comment_2 = PostComment( text=text, post_id=user_1_post.id, - author_id=user_1.id, + user_id=user_1.id, ) async_session.add_all([comment_1, comment_2]) await async_session.commit() params = { - "filter": dumps( + "filter": json.dumps( [ { "name": 
"posts.comments.text", @@ -3326,7 +3147,7 @@ async def test_join_by_relationships_does_not_duplicating_response_entities( "val": text, }, ], - ), + ).decode(), } url = app.url_path_for("get_user_list") @@ -3335,8 +3156,8 @@ async def test_join_by_relationships_does_not_duplicating_response_entities( assert res.json() == { "data": [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, ], @@ -3398,26 +3219,25 @@ async def test_filter_by_field_of_uuid_type( async_session.add(another_item) await async_session.commit() - # params = {} if filter_kind == "small": params.update( { - "filter[extra_id]": str(extra_id), + "filter[extra_id]": f"{extra_id}", }, ) else: params.update( { - "filter": dumps( + "filter": json.dumps( [ { "name": "extra_id", "op": "eq", - "val": str(extra_id), + "val": f"{extra_id}", }, ], - ), + ).decode(), }, ) @@ -3427,8 +3247,8 @@ async def test_filter_by_field_of_uuid_type( assert res.json() == { "data": [ { - "attributes": loads(CustomUUIDItemAttributesSchema.from_orm(item).json()), - "id": str(new_id), + "attributes": json.loads(CustomUUIDItemAttributesSchema.model_validate(item).model_dump_json()), + "id": f"{new_id}", "type": resource_type, }, ], @@ -3443,9 +3263,9 @@ async def test_filter_invalid_uuid( ): resource_type = "custom_uuid_item" - extra_id = str(uuid4()) + extra_id = f"{uuid4()}" params = { - "filter[extra_id]": str(extra_id) + "z", + "filter[extra_id]": f"{extra_id}" + "z", } url = app.url_path_for(f"get_{resource_type}_list") @@ -3460,7 +3280,7 @@ async def test_filter_none_instead_of_uuid( resource_type = "custom_uuid_item" params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "id", @@ -3468,7 +3288,7 @@ async def test_filter_none_instead_of_uuid( "val": None, }, ], - ), + ).decode(), } url = app.url_path_for(f"get_{resource_type}_list") res = await client.get(url, 
params=params) @@ -3476,7 +3296,7 @@ async def test_filter_none_instead_of_uuid( assert res.json() == { "errors": [ { - "detail": "The field `id` can't be null", + "detail": "The field `id` can't be null.", "source": {"parameter": "filters"}, "status_code": status.HTTP_400_BAD_REQUEST, "title": "Invalid filters querystring parameter.", @@ -3495,19 +3315,19 @@ async def test_join_by_relationships_works_correctly_with_many_filters_for_one_f comment_1 = PostComment( text=fake.sentence(), post_id=user_1_post.id, - author_id=user_1.id, + user_id=user_1.id, ) comment_2 = PostComment( text=fake.sentence(), post_id=user_1_post.id, - author_id=user_1.id, + user_id=user_1.id, ) assert comment_1.text != comment_2.text async_session.add_all([comment_1, comment_2]) await async_session.commit() params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "posts.comments.text", @@ -3520,7 +3340,7 @@ async def test_join_by_relationships_works_correctly_with_many_filters_for_one_f "val": comment_2.text, }, ], - ), + ).decode(), } url = app.url_path_for("get_user_list") @@ -3576,7 +3396,7 @@ async def test_join_by_relationships_for_one_model_by_different_join_chains( "val": delta_2.name, }, ], - ), + ).decode(), } resource_type = "alpha" @@ -3585,25 +3405,109 @@ async def test_join_by_relationships_for_one_model_by_different_join_chains( assert response.status_code == status.HTTP_200_OK, response.text assert response.json() == { - "data": [{"attributes": {}, "id": str(alpha_1.id), "type": "alpha"}], + "data": [{"attributes": {}, "id": f"{alpha_1.id}", "type": "alpha"}], "jsonapi": {"version": "1.0"}, "meta": {"count": 1, "totalPages": 1}, } + async def test_relationships_storage_using( + self, + clear_relationships_info_storage, + app: FastAPI, + client: AsyncClient, + ): + params = { + "filter": json.dumps( + [ + { + "name": "workplace.name", + "op": "eq", + "val": "", + }, + { + "name": "posts.comments.text", + "op": "eq", + "val": "", + }, + ], + ).decode(), + } + + url = 
app.url_path_for("get_user_list") + + expected_call_count = 3 + expected_relationship_paths = {("workplace",), ("posts", "comments")} + with mock.patch.object( + query_building_module, + "gather_relationships_info", + wraps=query_building_module.gather_relationships_info, + ) as mocked_gather: + response = await client.get(url, params=params) + actual_relationship_paths = { + tuple(call_args.kwargs["relationship_path"]) for call_args in mocked_gather.call_args_list + } + + assert response.status_code == status.HTTP_200_OK, response.text + assert expected_call_count == mocked_gather.call_count, mocked_gather.mock_calls + assert expected_relationship_paths == actual_relationship_paths + + # don't gather relationships if already called with these paths + mocked_gather.reset_mock() + expected_call_count, expected_relationship_paths = 0, set() + response = await client.get(url, params=params) + actual_relationship_paths = { + tuple(call_args.kwargs["relationship_path"]) for call_args in mocked_gather.call_args_list + } + + assert response.status_code == status.HTTP_200_OK, response.text + assert expected_call_count == mocked_gather.call_count, mocked_gather.mock_calls + assert expected_relationship_paths == actual_relationship_paths + + params = { + "filter": json.dumps( + [ + { + "name": "workplace.name", + "op": "eq", + "val": "", + }, + # not called for this relationship yet + { + "name": "bio.birth_city", + "op": "eq", + "val": "", + }, + ], + ).decode(), + } + + # check called for new path only + mocked_gather.reset_mock() + expected_call_count, expected_relationship_paths = 1, {("bio",)} + response = await client.get(url, params=params) + actual_relationship_paths = { + tuple(call_args.kwargs["relationship_path"]) for call_args in mocked_gather.call_args_list + } + + assert response.status_code == status.HTTP_200_OK, response.text + assert expected_call_count == mocked_gather.call_count, mocked_gather.mock_calls + assert expected_relationship_paths == 
actual_relationship_paths + ASCENDING = "" DESCENDING = "-" class TestSorts: - def get_reverse(self, order: str) -> bool: + @classmethod + def get_reverse(cls, order: str) -> bool: return order is DESCENDING - @mark.parametrize( + @pytest.mark.parametrize( "order", [ - param(ASCENDING, id="ascending"), - param(DESCENDING, id="descending"), + pytest.param(ASCENDING, id="ascending"), + pytest.param(DESCENDING, id="descending"), ], ) async def test_sort( @@ -3620,7 +3524,7 @@ async def test_sort( ) params = { - "filter": dumps( + "filter": json.dumps( [ { "name": "id", @@ -3631,7 +3535,7 @@ async def test_sort( ], }, ], - ), + ).decode(), "sort": f"{order}age", } url = app.url_path_for("get_user_list") @@ -3641,13 +3545,13 @@ async def test_sort( "data": sorted( [ { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": "user", }, { - "attributes": UserAttributesBaseSchema.from_orm(user_3).dict(), - "id": str(user_3.id), + "attributes": UserAttributesBaseSchema.model_validate(user_3).model_dump(), + "id": f"{user_3.id}", "type": "user", }, ], @@ -3658,6 +3562,164 @@ async def test_sort( "meta": {"count": 2, "totalPages": 1}, } + @pytest.mark.parametrize( + "age_order, movie_order, comp_order, user_1, user_2, user_3, expected_order", + [ + pytest.param( + ASCENDING, + ASCENDING, + ASCENDING, + [10, "AAA", "COMP_1"], + [15, "BBB", "COMP_2"], + [20, "CCC", "COMP_3"], + ["user_1", "user_2", "user_3"], + id="ascending_simple", + ), + pytest.param( + DESCENDING, + DESCENDING, + DESCENDING, + [10, "AAA", "COMP_1"], + [15, "BBB", "COMP_2"], + [20, "CCC", "COMP_3"], + ["user_3", "user_2", "user_1"], + id="descending_simple", + ), + pytest.param( + ASCENDING, + ASCENDING, + ASCENDING, + [10, "AAA", "COMP_3"], + [15, "CCC", "COMP_2"], + [15, "BBB", "COMP_1"], + ["user_1", "user_3", "user_2"], + id="ascending_be_second_condition", + ), + 
pytest.param( + ASCENDING, + DESCENDING, + ASCENDING, + [10, "AAA", "COMP_3"], + [15, "BBB", "COMP_2"], + [15, "CCC", "COMP_1"], + ["user_1", "user_3", "user_2"], + id="descending_be_second_condition", + ), + pytest.param( + ASCENDING, + DESCENDING, + DESCENDING, + [10, "AAA", "COMP_3"], + [10, "AAA", "COMP_2"], + [10, "BBB", "COMP_1"], + ["user_3", "user_1", "user_2"], + id="last_condition", + ), + ], + ) + async def test_sorts_by_relationships( + self, + app: FastAPI, + client: AsyncClient, + async_session: AsyncSession, + age_order: str, + movie_order: str, + comp_order: str, + user_1: list[int, str], + user_2: list[int, str], + user_3: list[int, str], + expected_order: list[Literal["user_1", "user_2", "user_3"]], + ): + age, movie_1, computer_name = user_1 + user_1 = await create_user(async_session, age=age) + await create_user_bio(async_session, user_1, favourite_movies=movie_1) + await create_computer(async_session, name=computer_name, user=user_1) + + age, movie_2, computer_name = user_2 + user_2 = await create_user(async_session, age=age) + await create_user_bio(async_session, user_2, favourite_movies=movie_2) + await create_computer(async_session, name=computer_name, user=user_2) + + age, movie_3, computer_name = user_3 + user_3 = await create_user(async_session, age=age) + await create_user_bio(async_session, user_3, favourite_movies=movie_3) + await create_computer(async_session, name=computer_name, user=user_3) + + user_map = { + "user_1": user_1, + "user_2": user_2, + "user_3": user_3, + } + + params = { + "filter": json.dumps( + [ + # Note: hit filters to ensure there are no conflicts between sort and filter joins + {"name": "bio.favourite_movies", "op": "in", "val": [movie_1, movie_2, movie_3]}, + ], + ).decode(), + "sort": f"{age_order}age,{movie_order}bio.favourite_movies,{comp_order}computers.name", + } + url = app.url_path_for("get_user_list") + response = await client.get(url, params=params) + assert response.status_code == status.HTTP_200_OK, 
response.text + + response_json = response.json() + assert response_json + assert response_json["meta"] == {"count": 3, "totalPages": 1} + assert response_json["data"] == [ + { + "id": f"{user_map[user].id}", + "attributes": UserAttributesBaseSchema.model_validate(user_map[user]).model_dump(), + "type": "user", + } + for user in expected_order + ] + + @pytest.mark.parametrize( + "order, boris_position", + [ + (ASCENDING, -1), + (DESCENDING, 0), + ], + ) + async def test_register_free_sort( + self, + async_session: AsyncSession, + order: str, + boris_position: int, + ): + resource_type = "test_register_free_sort" + + # lexicographic order: Anton, Boris, anton + await create_user(async_session, name="Anton") + await create_user(async_session, name="anton") + target_user = await create_user(async_session, name="Boris") + + class UserWithNameFieldSortingSchema(UserAttributesBaseSchema): + name: Annotated[str, sql_register_free_sort] + + app = build_app_custom( + model=User, + schema=UserWithNameFieldSortingSchema, + resource_type=resource_type, + ) + params = {"sort": f"{order}name"} + url = app.url_path_for(f"get_{resource_type}_list") + + async with AsyncClient(app=app, base_url="http://test") as client: + response = await client.get(url, params=params) + assert response.status_code == status.HTTP_200_OK, response.text + response_json = response.json() + + expected_count = 3 + assert len(response_json["data"]) == expected_count + assert response_json["data"][boris_position] == { + "id": f"{target_user.id}", + "attributes": UserWithNameFieldSortingSchema.model_validate(target_user).model_dump(), + "type": resource_type, + } + class TestFilteringErrors: async def test_incorrect_field_name( @@ -3675,7 +3737,7 @@ async def test_incorrect_field_name( "val": "", }, ], - ), + ).decode(), } response = await client.get(url, params=params) assert response.status_code == status.HTTP_400_BAD_REQUEST, response.text diff --git a/tests/test_api/test_custom_body_dependency.py 
b/tests/test_api/test_custom_body_dependency.py index 5c6c212b..d56be4c2 100644 --- a/tests/test_api/test_custom_body_dependency.py +++ b/tests/test_api/test_custom_body_dependency.py @@ -1,4 +1,4 @@ -from typing import ClassVar, Dict, Literal +from typing import ClassVar, Literal import pytest from fastapi import Body, Depends, FastAPI, HTTPException, status @@ -6,11 +6,10 @@ from pytest_asyncio import fixture from sqlalchemy.ext.asyncio import AsyncSession -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric -from fastapi_jsonapi.views.utils import ( - HTTPMethod, - HTTPMethodConfig, -) +from examples.api_for_sqlalchemy.models import User +from examples.api_for_sqlalchemy.schemas import UserAttributesBaseSchema, UserSchema +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric +from fastapi_jsonapi.views import Operation, OperationConfig from tests.common_user_api_test import ( BaseGenericUserCreateUpdateWithBodyDependency, CustomNameAttributesJSONAPI, @@ -18,13 +17,6 @@ ) from tests.fixtures.app import build_app_custom from tests.fixtures.views import ArbitraryModelBase, SessionDependency, common_handler -from tests.models import User -from tests.schemas import ( - UserAttributesBaseSchema, - UserSchema, -) - -pytestmark = pytest.mark.asyncio def get_custom_name_from_body( @@ -71,25 +63,16 @@ class UserUpdateCustomDependency(ArbitraryModelBase): allow: bool = Depends(validator_update.validate) -class UserCustomListView(ListViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( +class UserCustomView(ViewBaseGeneric): + operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, ), - HTTPMethod.POST: HTTPMethodConfig( + Operation.CREATE: OperationConfig( dependencies=UserCreateCustomDependency, ), - } - - -class 
UserCustomDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=common_handler, - ), - HTTPMethod.PATCH: HTTPMethodConfig( + Operation.UPDATE: OperationConfig( dependencies=UserUpdateCustomDependency, ), } @@ -107,15 +90,13 @@ def resource_type(self): @pytest.fixture(scope="class") def app_w_deps(self, resource_type): - app = build_app_custom( + return build_app_custom( model=User, schema=UserSchema, resource_type=resource_type, - class_list=UserCustomListView, - class_detail=UserCustomDetailView, + view=UserCustomView, path=f"/path_{resource_type}", ) - return app @fixture(scope="class") async def client(self, app_w_deps: FastAPI): diff --git a/tests/test_api/test_filter_by_inner_json_schema.py b/tests/test_api/test_filter_by_inner_json_schema.py new file mode 100644 index 00000000..c865202e --- /dev/null +++ b/tests/test_api/test_filter_by_inner_json_schema.py @@ -0,0 +1,235 @@ +from typing import ClassVar + +import orjson as json +from fastapi import FastAPI, status +from httpx import AsyncClient + +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric +from fastapi_jsonapi.views import Operation, OperationConfig +from tests.common import is_postgres_tests +from tests.fixtures.models import Task +from tests.fixtures.views import SessionDependency, common_handler + + +class TaskJsonView(ViewBaseGeneric): + operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = { + Operation.ALL: OperationConfig( + dependencies=SessionDependency, + prepare_data_layer_kwargs=common_handler, + ), + } + + +async def test_filter_inner_json_field( + app: FastAPI, + client: AsyncClient, + task_1: Task, + task_2: Task, +): + query_params = { + "filter": json.dumps( + [ + { + "name": "task_ids_list_json", + "op": "pg_json_contains" if is_postgres_tests() else "sqlite_json_contains", + "val": [1, 2, 3], + }, + ], 
+ ).decode(), + } + url = app.url_path_for("get_task_list") + response = await client.get(url, params=query_params) + response_data = response.json() + + assert response.status_code == status.HTTP_200_OK + assert "data" in response_data, response_data + assert len(response_data["data"]) == 1 + assert response_data["data"][0]["id"] == f"{task_1.id}" + + +async def test_filter_inner_nested_json_field( + app: FastAPI, + client: AsyncClient, + task_1: Task, + task_2: Task, +): + query_params = { + "filter": json.dumps( + [ + { + "name": "task_ids_dict_json", + "op": "pg_json_ilike" if is_postgres_tests() else "sqlite_json_ilike", + "val": ["completed", [1, 2, 3]], + }, + ], + ).decode(), + } + url = app.url_path_for("get_task_list") + response = await client.get(url, params=query_params) + response_data = response.json() + + assert response.status_code == status.HTTP_200_OK + assert "data" in response_data, response_data + assert len(response_data["data"]) == 1 + assert response_data["data"][0]["id"] == f"{task_1.id}" + + +async def test_filter_inner_json_int_field( + app: FastAPI, + client: AsyncClient, + task_1: Task, + task_2: Task, +): + query_params = { + "filter": json.dumps( + [ + { + "name": "task_ids_dict_json", + "op": "pg_json_ilike" if is_postgres_tests() else "sqlite_json_ilike", + "val": ["count", 1], + }, + ], + ).decode(), + } + url = app.url_path_for("get_task_list") + response = await client.get(url, params=query_params) + response_data = response.json() + + assert response.status_code == status.HTTP_200_OK + assert "data" in response_data, response_data + assert len(response_data["data"]) == 1 + assert response_data["data"][0]["id"] == f"{task_1.id}" + + +async def test_filter_inner_json_bool_field( + app: FastAPI, + client: AsyncClient, + task_1: Task, + task_2: Task, +): + query_params = { + "filter": json.dumps( + [ + { + "name": "task_ids_dict_json", + "op": "pg_json_ilike" if is_postgres_tests() else "sqlite_json_ilike", + "val": ["is_complete", 
True], + }, + ], + ).decode(), + } + url = app.url_path_for("get_task_list") + response = await client.get(url, params=query_params) + response_data = response.json() + + assert response.status_code == status.HTTP_200_OK + assert "data" in response_data, response_data + assert len(response_data["data"]) == 1 + assert response_data["data"][0]["id"] == f"{task_1.id}" + + +if is_postgres_tests(): + + async def test_filter_inner_jsonb_field( + app: FastAPI, + client: AsyncClient, + task_1: Task, + task_2: Task, + ): + query_params = { + "filter": json.dumps( + [ + { + "name": "task_ids_list_jsonb", + "op": "pg_jsonb_contains", + "val": ["a", "b", "c"], + }, + ], + ).decode(), + } + url = app.url_path_for("get_task_list") + response = await client.get(url, params=query_params) + response_data = response.json() + + assert response.status_code == status.HTTP_200_OK + assert "data" in response_data, response_data + assert len(response_data["data"]) == 1 + assert response_data["data"][0]["id"] == f"{task_1.id}" + + async def test_filter_inner_nested_jsonb_field( + app: FastAPI, + client: AsyncClient, + task_1: Task, + task_2: Task, + ): + query_params = { + "filter": json.dumps( + [ + { + "name": "task_ids_dict_jsonb", + "op": "pg_jsonb_ilike", + "val": ["completed", ["a", "b", "c"]], + }, + ], + ).decode(), + } + url = app.url_path_for("get_task_list") + response = await client.get(url, params=query_params) + response_data = response.json() + + assert response.status_code == status.HTTP_200_OK + assert "data" in response_data, response_data + assert len(response_data["data"]) == 1 + assert response_data["data"][0]["id"] == f"{task_1.id}" + + async def test_filter_inner_jsonb_int_field( + app: FastAPI, + client: AsyncClient, + task_1: Task, + task_2: Task, + ): + query_params = { + "filter": json.dumps( + [ + { + "name": "task_ids_dict_jsonb", + "op": "pg_jsonb_ilike", + "val": ["count", 2], + }, + ], + ).decode(), + } + url = app.url_path_for("get_task_list") + response = 
await client.get(url, params=query_params) + response_data = response.json() + + assert response.status_code == status.HTTP_200_OK + assert "data" in response_data, response_data + assert len(response_data["data"]) == 1 + assert response_data["data"][0]["id"] == f"{task_1.id}" + + async def test_filter_inner_jsonb_bool_field( + app: FastAPI, + client: AsyncClient, + task_1: Task, + task_2: Task, + ): + query_params = { + "filter": json.dumps( + [ + { + "name": "task_ids_dict_jsonb", + "op": "pg_jsonb_ilike", + "val": ["is_complete", True], + }, + ], + ).decode(), + } + url = app.url_path_for("get_task_list") + response = await client.get(url, params=query_params) + response_data = response.json() + + assert response.status_code == status.HTTP_200_OK + assert "data" in response_data, response_data + assert len(response_data["data"]) == 1 + assert response_data["data"][0]["id"] == f"{task_1.id}" diff --git a/tests/test_api/test_routers.py b/tests/test_api/test_routers.py index c6b62250..da5f14ac 100644 --- a/tests/test_api/test_routers.py +++ b/tests/test_api/test_routers.py @@ -1,36 +1,28 @@ -from typing import ClassVar, Dict, Optional +from typing import ClassVar, Optional from fastapi import APIRouter, Depends, FastAPI, Header, Path, status from httpx import AsyncClient -from pydantic import BaseModel -from pytest import mark # noqa +from pydantic import BaseModel, ConfigDict from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from typing_extensions import Annotated -from fastapi_jsonapi import RoutersJSONAPI, init -from fastapi_jsonapi.exceptions import Forbidden, InternalServerError -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric -from fastapi_jsonapi.views.utils import ( - HTTPMethod, - HTTPMethodConfig, -) -from fastapi_jsonapi.views.view_base import ViewBase -from tests.fixtures.db_connection import async_session_dependency -from tests.fixtures.views import SessionDependency -from 
tests.misc.utils import fake -from tests.models import User -from tests.schemas import ( +from examples.api_for_sqlalchemy.models import User +from examples.api_for_sqlalchemy.schemas import ( UserAttributesBaseSchema, UserInSchema, UserPatchSchema, UserSchema, ) - -pytestmark = mark.asyncio +from fastapi_jsonapi import ApplicationBuilder +from fastapi_jsonapi.exceptions import Forbidden, InternalServerError +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric +from fastapi_jsonapi.views import Operation, OperationConfig, ViewBase +from tests.fixtures.db_connection import async_session_dependency +from tests.fixtures.views import SessionDependency -def build_app(detail_view, resource_type: str) -> FastAPI: +def build_app(view, resource_type: str) -> FastAPI: app = FastAPI( title="FastAPI and SQLAlchemy", debug=True, @@ -38,22 +30,21 @@ def build_app(detail_view, resource_type: str) -> FastAPI: docs_url="/docs", ) router: APIRouter = APIRouter() - - RoutersJSONAPI( + builder = ApplicationBuilder(app) + builder.add_resource( router=router, path="/users", tags=["User"], - class_detail=detail_view, - class_list=ListViewBaseGeneric, + view=view, schema=UserSchema, resource_type=resource_type, schema_in_patch=UserPatchSchema, schema_in_post=UserInSchema, model=User, ) + builder.initialize() app.include_router(router, prefix="") - init(app) return app @@ -69,13 +60,13 @@ class CustomDependencies(BaseModel): dependency_1: int = Depends(one) dependency_2: int = Depends(two) - async def dependencies_handler(view_base: ViewBase, dto: CustomDependencies) -> Optional[Dict]: + async def dependencies_handler(view_base: ViewBase, dto: CustomDependencies) -> Optional[dict]: raise InternalServerError( detail="hi", errors=[ InternalServerError( title="Check that dependency successfully passed", - detail=dto.dict(), + detail=dto.model_dump(), status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, ), InternalServerError( @@ -86,17 +77,17 @@ async def 
dependencies_handler(view_base: ViewBase, dto: CustomDependencies) ->
         ],
     )

-    class DependencyInjectionDetailView(DetailViewBaseGeneric):
-        method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = {
-            HTTPMethod.GET: HTTPMethodConfig(
+    class DependencyInjectionView(ViewBaseGeneric):
+        operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = {
+            Operation.GET: OperationConfig(
                 dependencies=CustomDependencies,
                 prepare_data_layer_kwargs=dependencies_handler,
             ),
         }

-    app = build_app(DependencyInjectionDetailView, resource_type="test_dependency_handler_call")
+    app = build_app(DependencyInjectionView, resource_type="test_dependency_handler_call")
     async with AsyncClient(app=app, base_url="http://test") as client:
-        res = await client.get("/users/1")
+        res = await client.get("/users/1/")

     assert res.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR, res.text
     assert res.json() == {
@@ -111,7 +102,7 @@ class DependencyInjectionDetailView(DetailViewBaseGeneric):
                 "title": "Check that dependency successfully passed",
             },
             {
-                "detail": DependencyInjectionDetailView.__name__,
+                "detail": DependencyInjectionView.__name__,
                 "source": {"pointer": ""},
                 "status_code": status.HTTP_500_INTERNAL_SERVER_ERROR,
                 "title": "Check caller class",
@@ -137,24 +128,26 @@ def get_path_obj_id(obj_id: int = Path(default=...)):
     class DetailGenericDependency(SessionDependency):
         custom_name_obj_id: int = Depends(get_path_obj_id)

-    def all_handler(view: ViewBase, dto: DetailGenericDependency) -> Dict:
+    def all_handler(view: ViewBase, dto: DetailGenericDependency) -> dict:
         # test inside handler
         assert dto.custom_name_obj_id == int(view.request.path_params["obj_id"])
-        return {"session": dto.session}
+        return {
+            "session": dto.session,
+        }

-    class DependencyInjectionDetailView(DetailViewBaseGeneric):
-        method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = {
-            HTTPMethod.GET: HTTPMethodConfig(dependencies=AdminOnlyPermission),
-            HTTPMethod.ALL: HTTPMethodConfig(
+ class DependencyInjectionView(ViewBaseGeneric): + operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = { + Operation.GET: OperationConfig(dependencies=AdminOnlyPermission), + Operation.ALL: OperationConfig( dependencies=DetailGenericDependency, prepare_data_layer_kwargs=all_handler, ), } - resource_type = fake.word() - app = build_app(DependencyInjectionDetailView, resource_type=resource_type) + resource_type = "test_dependencies_as_permissions" + app = build_app(DependencyInjectionView, resource_type=resource_type) async with AsyncClient(app=app, base_url="http://test") as client: - res = await client.get(f"/users/{user_1.id}", headers={"X-AUTH": "not_admin"}) + res = await client.get(f"/users/{user_1.id}/", headers={"X-AUTH": "not_admin"}) assert res.status_code == status.HTTP_403_FORBIDDEN, res.text assert res.json() == { @@ -168,11 +161,11 @@ class DependencyInjectionDetailView(DetailViewBaseGeneric): ], } - res = await client.get(f"/users/{user_1.id}", headers={"X-AUTH": "admin"}) + res = await client.get(f"/users/{user_1.id}/", headers={"X-AUTH": "admin"}) assert res.json() == { "data": { - "attributes": UserAttributesBaseSchema.from_orm(user_1).dict(), - "id": str(user_1.id), + "attributes": UserAttributesBaseSchema.model_validate(user_1).model_dump(), + "id": f"{user_1.id}", "type": resource_type, }, "jsonapi": {"version": "1.0"}, @@ -184,12 +177,13 @@ async def test_manipulate_data_layer_kwargs( user_1: User, ): class GetDetailDependencies(BaseModel): - session: AsyncSession = Depends(async_session_dependency) + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) - class Config: - arbitrary_types_allowed = True + session: AsyncSession = Depends(async_session_dependency) - async def set_session_and_ignore_user_1(view_base: ViewBase, dto: GetDetailDependencies) -> Dict: + async def set_session_and_ignore_user_1(view_base: ViewBase, dto: GetDetailDependencies) -> dict: query = select(User).where(User.id != user_1.id) return { @@ 
-197,24 +191,24 @@ async def set_session_and_ignore_user_1(view_base: ViewBase, dto: GetDetailDepen
             "query": query,
         }

-    class DependencyInjectionDetailView(DetailViewBaseGeneric):
-        method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = {
-            HTTPMethod.GET: HTTPMethodConfig(
+    class DependencyInjectionView(ViewBaseGeneric):
+        operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = {
+            Operation.GET: OperationConfig(
                 dependencies=GetDetailDependencies,
                 prepare_data_layer_kwargs=set_session_and_ignore_user_1,
             ),
         }

-    app = build_app(DependencyInjectionDetailView, resource_type="test_manipulate_data_layer_kwargs")
+    app = build_app(DependencyInjectionView, resource_type="test_manipulate_data_layer_kwargs")
     async with AsyncClient(app=app, base_url="http://test") as client:
-        res = await client.get(f"/users/{user_1.id}")
+        res = await client.get(f"/users/{user_1.id}/")

     assert res.status_code == status.HTTP_404_NOT_FOUND, res.text
     assert res.json() == {
         "errors": [
             {
-                "detail": f"Resource User `{user_1.id}` not found",
-                "meta": {"parameter": "id"},
+                "detail": f"Resource User `users.id = {user_1.id}` not found",
+                "meta": {"pointer": ""},
                 "status_code": status.HTTP_404_NOT_FOUND,
                 "title": "Resource not found.",
             },
diff --git a/tests/test_api/test_validators.py b/tests/test_api/test_validators.py
index ec0bba65..46682e88 100644
--- a/tests/test_api/test_validators.py
+++ b/tests/test_api/test_validators.py
@@ -1,41 +1,50 @@
-from copy import deepcopy
-from typing import Dict, List, Optional, Set, Type
+from copy import copy
+from typing import Annotated, Generator, Optional, Type

 import pytest
 from fastapi import FastAPI, status
+from fastapi.datastructures import QueryParams
 from httpx import AsyncClient
-from pydantic import BaseModel, Field, root_validator, validator
-from pytest import mark, param  # noqa: PT013
+from pydantic import (
+    AfterValidator,
+    BaseModel,
+    BeforeValidator,
+    ConfigDict,
+    PlainValidator,
ValidatorFunctionWrapHandler, + WrapValidator, + field_validator, + model_validator, +) from pytest_asyncio import fixture from sqlalchemy.ext.asyncio import AsyncSession -from fastapi_jsonapi import RoutersJSONAPI -from fastapi_jsonapi.exceptions import BadRequest -from fastapi_jsonapi.schema_builder import SchemaBuilder -from fastapi_jsonapi.validation_utils import extract_field_validators +from examples.api_for_sqlalchemy.models import User +from fastapi_jsonapi.storages.schemas_storage import schemas_storage +from fastapi_jsonapi.types_metadata import ClientCanSetId +from fastapi_jsonapi.validation_utils import extract_validators +from tests.common import is_postgres_tests from tests.fixtures.app import build_app_custom +from tests.fixtures.models import Task +from tests.fixtures.schemas import TaskBaseSchema from tests.misc.utils import fake -from tests.models import ( - Task, - User, -) -from tests.schemas import TaskBaseSchema - -pytestmark = pytest.mark.asyncio @fixture() async def task_with_none_ids( async_session: AsyncSession, ) -> Task: - task = Task(task_ids=None) + task = Task( + task_ids_dict_json=None, + task_ids_list_json=None, + ) async_session.add(task) await async_session.commit() return task -@pytest.fixture() +@pytest.fixture def resource_type(): return "task" @@ -48,7 +57,8 @@ async def test_base_model_validator_pre_true_get_one( resource_type: str, task_with_none_ids: Task, ): - assert task_with_none_ids.task_ids is None + assert task_with_none_ids.task_ids_dict_json is None + assert task_with_none_ids.task_ids_list_json is None url = app.url_path_for(f"get_{resource_type}_detail", obj_id=task_with_none_ids.id) res = await client.get(url) assert res.status_code == status.HTTP_200_OK, res.text @@ -56,41 +66,62 @@ async def test_base_model_validator_pre_true_get_one( attributes = response_data["data"].pop("attributes") assert response_data == { "data": { - "id": str(task_with_none_ids.id), + "id": f"{task_with_none_ids.id}", "type": 
resource_type, }, "jsonapi": {"version": "1.0"}, "meta": None, } - assert attributes == { - # not `None`! schema validator returns empty list `[]` - # "task_ids": None, - "task_ids": [], + res_attributes = { + # not `None`! schema validator returns empty dict `{}` and empty list `[]` + # "task_ids_dict_json": None, + # "task_ids_list_json": None, + "task_ids_dict_json": {}, + "task_ids_list_json": [], } - assert attributes == TaskBaseSchema.from_orm(task_with_none_ids) + if is_postgres_tests(): + res_attributes.update( + { + "task_ids_dict_jsonb": {}, + "task_ids_list_jsonb": [], + }, + ) + assert attributes == res_attributes + assert attributes == TaskBaseSchema.model_validate(task_with_none_ids).model_dump() - async def test_base_model_root_validator_get_list( + async def test_base_model_model_validator_get_list_and_dict( self, app: FastAPI, client: AsyncClient, resource_type: str, task_with_none_ids: Task, ): - assert task_with_none_ids.task_ids is None + assert task_with_none_ids.task_ids_dict_json is None + assert task_with_none_ids.task_ids_list_json is None url = app.url_path_for(f"get_{resource_type}_list") res = await client.get(url) assert res.status_code == status.HTTP_200_OK, res.text response_data = res.json() + attributes = { + # not `None`! schema validator returns empty dict `{}` and empty list `[]` + # "task_ids_dict_json": None, + # "task_ids_list_json": None, + "task_ids_dict_json": {}, + "task_ids_list_json": [], + } + if is_postgres_tests(): + attributes.update( + { + "task_ids_dict_jsonb": {}, + "task_ids_list_jsonb": [], + }, + ) assert response_data == { "data": [ { - "id": str(task_with_none_ids.id), + "id": f"{task_with_none_ids.id}", "type": resource_type, - "attributes": { - # not `None`! 
schema validator returns empty list `[]` - # "task_ids": None, - "task_ids": [], - }, + "attributes": attributes, }, ], "jsonapi": { @@ -102,72 +133,57 @@ async def test_base_model_root_validator_get_list( }, } - async def test_base_model_root_validator_create( + async def test_base_model_model_validator_create( self, app: FastAPI, client: AsyncClient, resource_type: str, async_session: AsyncSession, ): - task_data = { - # should be converted to [] by schema on create - "task_ids": None, + attributes = { + # should be converted to [] and {} by schema on create + "task_ids_dict_json": None, + "task_ids_list_json": None, } + if is_postgres_tests(): + attributes.update( + { + "task_ids_dict_jsonb": None, + "task_ids_list_jsonb": None, + }, + ) data_create = { "data": { "type": resource_type, - "attributes": task_data, + "attributes": attributes, }, } url = app.url_path_for(f"create_{resource_type}_list") res = await client.post(url, json=data_create) + assert res.status_code == status.HTTP_201_CREATED, res.text - response_data: dict = res.json() - task_id = response_data["data"].pop("id") + task_id = res.json()["data"].pop("id") task = await async_session.get(Task, int(task_id)) assert isinstance(task, Task) - assert task.task_ids == [] - # we sent request with `None`, but value in db is `[]` + # we sent request with `None`, but value in db is `[]` and `{}` # because validator converted data before object creation - assert task.task_ids == [] - assert response_data == { - "data": { - "type": resource_type, - "attributes": { - # should be empty list - "task_ids": [], - }, - }, - "jsonapi": {"version": "1.0"}, - "meta": None, - } + assert task.task_ids_dict_json == {} + assert task.task_ids_list_json == [] class TestValidators: resource_type = "validator" @fixture(autouse=True) - def _refresh_caches(self) -> None: - object_schemas_cache = deepcopy(SchemaBuilder.object_schemas_cache) - relationship_schema_cache = deepcopy(SchemaBuilder.relationship_schema_cache) - 
base_jsonapi_object_schemas_cache = deepcopy(SchemaBuilder.base_jsonapi_object_schemas_cache) - - all_jsonapi_routers = deepcopy(RoutersJSONAPI.all_jsonapi_routers) - + def _refresh_caches(self) -> Generator: + schemas_data = copy(schemas_storage._data) yield - - SchemaBuilder.object_schemas_cache = object_schemas_cache - SchemaBuilder.relationship_schema_cache = relationship_schema_cache - SchemaBuilder.base_jsonapi_object_schemas_cache = base_jsonapi_object_schemas_cache - - RoutersJSONAPI.all_jsonapi_routers = all_jsonapi_routers + schemas_storage._data = schemas_data def build_app(self, schema, resource_type: Optional[str] = None) -> FastAPI: return build_app_custom( model=User, schema=schema, - # schema_in_post=schema, - # schema_in_patch=schema, resource_type=resource_type or self.resource_type, ) @@ -180,30 +196,26 @@ class InheritedSchema(schema): async def execute_request_and_check_response( self, app: FastAPI, - body: Dict, + body: dict, expected_detail: str, resource_type: Optional[str] = None, ): resource_type = resource_type or self.resource_type async with AsyncClient(app=app, base_url="http://test") as client: - url = app.url_path_for(f"get_{resource_type}_list") + url = app.url_path_for(f"create_{resource_type}_list") res = await client.post(url, json=body) - assert res.status_code == status.HTTP_400_BAD_REQUEST, res.text - assert res.json() == { - "errors": [ - { - "detail": expected_detail, - "source": {"pointer": ""}, - "status_code": status.HTTP_400_BAD_REQUEST, - "title": "Bad Request", - }, - ], - } + assert res.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, res.text + response_json = res.json() + + assert response_json + assert "detail" in response_json, response_json + error = response_json["detail"][0] + assert error["msg"].endswith(expected_detail), (error, expected_detail) async def execute_request_twice_and_check_response( self, schema: Type[BaseModel], - body: Dict, + body: dict, expected_detail: str, ): """ @@ -228,21 +240,25 @@ 
async def test_field_validator_call(self): """ class UserSchemaWithValidator(BaseModel): - name: str - - @validator("name") - def validate_name(cls, v): - # checks that cls arg is not bound to the origin class - assert cls is not UserSchemaWithValidator - - raise BadRequest(detail="Check validator") + model_config = ConfigDict( + from_attributes=True, + ) - class Config: - orm_mode = True + name: str - attrs = {"name": fake.name()} - create_user_body = {"data": {"attributes": attrs}} + @field_validator("name") + @classmethod + def validate_name(cls, value): + msg = "Check validator" + raise ValueError(msg) + create_user_body = { + "data": { + "attributes": { + "name": fake.name(), + }, + }, + } await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, @@ -251,19 +267,27 @@ class Config: async def test_field_validator_each_item_arg(self): class UserSchemaWithValidator(BaseModel): - names: List[str] - - @validator("names", each_item=True) - def validate_name(cls, v): - if v == "bad_name": - raise BadRequest(detail="Bad name not allowed") + model_config = ConfigDict( + from_attributes=True, + ) - class Config: - orm_mode = True + names: list[str] - attrs = {"names": ["good_name", "bad_name"]} - create_user_body = {"data": {"attributes": attrs}} + @field_validator("names", mode="after") + @classmethod + def validate_name(cls, value): + for item in value: + if item == "bad_name": + msg = "Bad name not allowed" + raise ValueError(msg) + create_user_body = { + "data": { + "attributes": { + "names": ["good_name", "bad_name"], + }, + }, + } await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, @@ -272,71 +296,70 @@ class Config: async def test_field_validator_pre_arg(self): class UserSchemaWithValidator(BaseModel): - name: List[str] - - @validator("name", pre=True) - def validate_name_pre(cls, v): - raise BadRequest(detail="Pre validator called") + model_config = 
ConfigDict( + from_attributes=True, + ) - @validator("name") - def validate_name(cls, v): - raise BadRequest(detail="Not pre validator called") + name: list[str] - class Config: - orm_mode = True + @field_validator("name", mode="before") + @classmethod + def validate_name_pre(cls, value): + msg = "Pre validator called" + raise ValueError(msg) - attrs = {"name": fake.name()} - create_user_body = {"data": {"attributes": attrs}} + @field_validator("name", mode="after") + @classmethod + def validate_name(cls, value): + msg = "Not pre validator called" + raise ValueError(msg) + create_user_body = { + "data": { + "attributes": { + "name": fake.name(), + }, + }, + } await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, expected_detail="Pre validator called", ) - async def test_field_validator_always_arg(self): - class UserSchemaWithValidator(BaseModel): - name: str = None - - @validator("name", always=True) - def validate_name(cls, v): - raise BadRequest(detail="Called always validator") - - class Config: - orm_mode = True - - create_user_body = {"data": {"attributes": {}}} - - await self.execute_request_twice_and_check_response( - schema=UserSchemaWithValidator, - body=create_user_body, - expected_detail="Called always validator", - ) - async def test_field_validator_several_validators(self): class UserSchemaWithValidator(BaseModel): - field: str - - @validator("field") - def validator_1(cls, v): - if v == "check_validator_1": - raise BadRequest(detail="Called validator 1") + model_config = ConfigDict( + from_attributes=True, + ) - return v + field: str - @validator("field") - def validator_2(cls, v): - if v == "check_validator_2": - raise BadRequest(detail="Called validator 2") + @field_validator("field", mode="after") + @classmethod + def validator_1(cls, value): + if value == "check_validator_1": + msg = "Called validator 1" + raise ValueError(msg) - return v + return value - class Config: - orm_mode = True + 
@field_validator("field", mode="after") + @classmethod + def validator_2(cls, value): + if value == "check_validator_2": + msg = "Called validator 2" + raise ValueError(msg) - attrs = {"field": "check_validator_1"} - create_user_body = {"data": {"attributes": attrs}} + return value + create_user_body = { + "data": { + "attributes": { + "field": "check_validator_1", + }, + }, + } app = self.build_app(UserSchemaWithValidator) await self.execute_request_and_check_response( app=app, @@ -344,9 +367,13 @@ class Config: expected_detail="Called validator 1", ) - attrs = {"field": "check_validator_2"} - create_user_body = {"data": {"attributes": attrs}} - + create_user_body = { + "data": { + "attributes": { + "field": "check_validator_2", + }, + }, + } await self.execute_request_and_check_response( app=app, body=create_user_body, @@ -355,23 +382,29 @@ class Config: async def test_field_validator_asterisk(self): class UserSchemaWithValidator(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + field_1: str field_2: str - @validator("*", pre=True) - def validator(cls, v): - if v == "bad_value": - raise BadRequest(detail="Check validator") - - class Config: - orm_mode = True + @field_validator("*", mode="before") + @classmethod + def validator(cls, value): + if value == "bad_value": + msg = "Check validator" + raise ValueError(msg) + return value - attrs = { - "field_1": "bad_value", - "field_2": "", + create_user_body = { + "data": { + "attributes": { + "field_1": "bad_value", + "field_2": "", + }, + }, } - create_user_body = {"data": {"attributes": attrs}} - app = self.build_app(UserSchemaWithValidator) await self.execute_request_and_check_response( app=app, @@ -379,12 +412,14 @@ class Config: expected_detail="Check validator", ) - attrs = { - "field_1": "", - "field_2": "bad_value", + create_user_body = { + "data": { + "attributes": { + "field_1": "", + "field_2": "bad_value", + }, + }, } - create_user_body = {"data": {"attributes": attrs}} - await 
self.execute_request_and_check_response( app=app, body=create_user_body, @@ -397,48 +432,57 @@ async def test_check_validator_for_id_field(self): """ class UserSchemaWithValidator(BaseModel): - id: int = Field(client_can_set_id=True) + model_config = ConfigDict( + from_attributes=True, + ) - @validator("id") - def validate_id(cls, v): - raise BadRequest(detail="Check validator") + id: Annotated[int, ClientCanSetId()] - class Config: - orm_mode = True + @field_validator("id", mode="after") + @classmethod + def validate_id(cls, value): + msg = "Check validator" + raise ValueError(msg) create_user_body = { "data": { "attributes": {}, - "id": 42, + "id": "42", }, } - await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, expected_detail="Check validator", ) - @mark.parametrize( + @pytest.mark.parametrize( "inherit", [ - param(True, id="inherited_true"), - param(False, id="inherited_false"), + pytest.param(True, id="inherited_true"), + pytest.param(False, id="inherited_false"), ], ) async def test_field_validator_can_change_value(self, inherit: bool): class UserSchemaWithValidator(BaseModel): - name: str + model_config = ConfigDict( + from_attributes=True, + ) - @validator("name", allow_reuse=True) - def fix_title(cls, v): - return v.title() + name: str - class Config: - orm_mode = True + @field_validator("name", mode="after") + @classmethod + def fix_title(cls, value): + return value.title() - attrs = {"name": "john doe"} - create_user_body = {"data": {"attributes": attrs}} + create_user_body = { + "data": { + "attributes": { + "name": "john doe", + }, + }, + } if inherit: UserSchemaWithValidator = self.inherit(UserSchemaWithValidator) @@ -448,93 +492,113 @@ class Config: url = app.url_path_for(f"get_{self.resource_type}_list") res = await client.post(url, json=create_user_body) assert res.status_code == status.HTTP_201_CREATED, res.text - res_json = res.json() - assert res_json["data"] - assert 
res_json["data"].pop("id") - assert res_json == { - "data": { - "attributes": {"name": "John Doe"}, - "type": "validator", - }, - "jsonapi": {"version": "1.0"}, - "meta": None, - } - @mark.parametrize( + assert res_json["data"] + assert res_json["data"].pop("id") + assert res_json == { + "data": { + "attributes": {"name": "John Doe"}, + "type": "validator", + }, + "jsonapi": {"version": "1.0"}, + "meta": None, + } + + @pytest.mark.parametrize( ("name", "expected_detail"), [ - param("check_pre_1", "Raised 1 pre validator", id="check_1_pre_validator"), - param("check_pre_2", "Raised 2 pre validator", id="check_2_pre_validator"), - param("check_post_1", "Raised 1 post validator", id="check_1_post_validator"), - param("check_post_2", "Raised 2 post validator", id="check_2_post_validator"), + pytest.param("check_pre_1", "Raised 1 pre validator", id="check_1_pre_validator"), + pytest.param("check_pre_2", "Raised 2 pre validator", id="check_2_pre_validator"), + pytest.param("check_post_1", "Raised 1 post validator", id="check_1_post_validator"), + pytest.param("check_post_2", "Raised 2 post validator", id="check_2_post_validator"), ], ) - async def test_root_validator(self, name: str, expected_detail: str): + async def test_model_validator(self, name: str, expected_detail: str): class UserSchemaWithValidator(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + name: str - @root_validator(pre=True, allow_reuse=True) + @model_validator(mode="before") + @classmethod def validator_pre_1(cls, values): if values["name"] == "check_pre_1": - raise BadRequest(detail="Raised 1 pre validator") + msg = "Raised 1 pre validator" + raise ValueError(msg) return values - @root_validator(pre=True, allow_reuse=True) + @model_validator(mode="before") + @classmethod def validator_pre_2(cls, values): if values["name"] == "check_pre_2": - raise BadRequest(detail="Raised 2 pre validator") + msg = "Raised 2 pre validator" + raise ValueError(msg) return values - 
@root_validator(allow_reuse=True) + @model_validator(mode="after") + @classmethod def validator_post_1(cls, values): - if values["name"] == "check_post_1": - raise BadRequest(detail="Raised 1 post validator") + if values.name == "check_post_1": + msg = "Raised 1 post validator" + raise ValueError(msg) return values - @root_validator(allow_reuse=True) + @model_validator(mode="after") + @classmethod def validator_post_2(cls, values): - if values["name"] == "check_post_2": - raise BadRequest(detail="Raised 2 post validator") + if values.name == "check_post_2": + msg = "Raised 2 post validator" + raise ValueError(msg) return values - class Config: - orm_mode = True - - attrs = {"name": name} - create_user_body = {"data": {"attributes": attrs}} - + create_user_body = { + "data": { + "attributes": { + "name": name, + }, + }, + } await self.execute_request_twice_and_check_response( schema=UserSchemaWithValidator, body=create_user_body, expected_detail=expected_detail, ) - @mark.parametrize( + @pytest.mark.parametrize( "inherit", [ - param(True, id="inherited_true"), - param(False, id="inherited_false"), + pytest.param(True, id="inherited_true"), + pytest.param(False, id="inherited_false"), ], ) - async def test_root_validator_can_change_value(self, inherit: bool): + async def test_model_validator_can_change_value(self, inherit: bool): class UserSchemaWithValidator(BaseModel): - name: str + model_config = ConfigDict( + from_attributes=True, + ) - @root_validator(allow_reuse=True) - def fix_title(cls, v): - v["name"] = v["name"].title() - return v + name: str - class Config: - orm_mode = True + @model_validator(mode="after") + @classmethod + def fix_title(cls, value): + value.name = value.name.title() + return value - attrs = {"name": "john doe"} - create_user_body = {"data": {"attributes": attrs}} + create_user_body = { + "data": { + "attributes": { + "name": "john doe", + }, + }, + } if inherit: UserSchemaWithValidator = self.inherit(UserSchemaWithValidator) @@ -544,142 
+608,309 @@ class Config: url = app.url_path_for(f"get_{self.resource_type}_list") res = await client.post(url, json=create_user_body) assert res.status_code == status.HTTP_201_CREATED, res.text - res_json = res.json() - assert res_json["data"] - assert res_json["data"].pop("id") - assert res_json == { - "data": { - "attributes": {"name": "John Doe"}, - "type": "validator", + + assert res_json["data"] + assert res_json["data"].pop("id") + assert res_json == { + "data": { + "attributes": { + "name": "John Doe", }, - "jsonapi": {"version": "1.0"}, - "meta": None, - } + "type": "validator", + }, + "jsonapi": {"version": "1.0"}, + "meta": None, + } - @mark.parametrize( + @pytest.mark.parametrize( ("name", "expected_detail"), [ - param("check_pre_1", "check_pre_1", id="check_1_pre_validator"), - param("check_pre_2", "check_pre_2", id="check_2_pre_validator"), - param("check_post_1", "check_post_1", id="check_1_post_validator"), - param("check_post_2", "check_post_2", id="check_2_post_validator"), + pytest.param("check_pre_1", "check_pre_1", id="check_1_pre_validator"), + pytest.param("check_pre_2", "check_pre_2", id="check_2_pre_validator"), + pytest.param("check_post_1", "check_post_1", id="check_1_post_validator"), + pytest.param("check_post_2", "check_post_2", id="check_2_post_validator"), ], ) - async def test_root_validator_inheritance(self, name: str, expected_detail: str): + async def test_model_validator_inheritance(self, name: str, expected_detail: str): class UserSchemaWithValidatorBase(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + name: str - @root_validator(pre=True, allow_reuse=True) + @model_validator(mode="before") + @classmethod def validator_pre_1(cls, values): if values["name"] == "check_pre_1": - raise BadRequest(detail="Base check_pre_1") + msg = "Base check_pre_1" + raise ValueError(msg) return values - @root_validator(pre=True, allow_reuse=True) + @model_validator(mode="before") + @classmethod def validator_pre_2(cls, 
values): if values["name"] == "check_pre_2": - raise BadRequest(detail="Base check_pre_2") + msg = "Base check_pre_2" + raise ValueError(msg) return values - @root_validator(allow_reuse=True) + @model_validator(mode="after") + @classmethod def validator_post_1(cls, values): - if values["name"] == "check_post_1": - raise BadRequest(detail="Base check_post_1") + if values.name == "check_post_1": + msg = "Base check_post_1" + raise ValueError(msg) return values - @root_validator(allow_reuse=True) + @model_validator(mode="after") + @classmethod def validator_post_2(cls, values): - if values["name"] == "check_post_2": - raise BadRequest(detail="Base check_post_2") + if values.name == "check_post_2": + msg = "Base check_post_2" + raise ValueError(msg) return values - class Config: - orm_mode = True - class UserSchemaWithValidator(UserSchemaWithValidatorBase): + model_config = ConfigDict( + from_attributes=True, + ) + name: str - @root_validator(pre=True, allow_reuse=True) + @model_validator(mode="before") + @classmethod def validator_pre_1(cls, values): if values["name"] == "check_pre_1": - raise BadRequest(detail="check_pre_1") + msg = "check_pre_1" + raise ValueError(msg) return values - @root_validator(pre=True, allow_reuse=True) + @model_validator(mode="before") + @classmethod def validator_pre_2(cls, values): if values["name"] == "check_pre_2": - raise BadRequest(detail="check_pre_2") + msg = "check_pre_2" + raise ValueError(msg) return values - @root_validator(allow_reuse=True) + @model_validator(mode="after") + @classmethod def validator_post_1(cls, values): - if values["name"] == "check_post_1": - raise BadRequest(detail="check_post_1") + if values.name == "check_post_1": + msg = "check_post_1" + raise ValueError(msg) return values - @root_validator(allow_reuse=True) + @model_validator(mode="after") + @classmethod def validator_post_2(cls, values): - if values["name"] == "check_post_2": - raise BadRequest(detail="check_post_2") + if values.name == "check_post_2": 
+ msg = "check_post_2" + raise ValueError(msg) return values - class Config: - orm_mode = True - - attrs = {"name": name} - create_user_body = {"data": {"attributes": attrs}} - + create_user_body = { + "data": { + "attributes": { + "name": name, + }, + }, + } await self.execute_request_and_check_response( app=self.build_app(UserSchemaWithValidator), body=create_user_body, expected_detail=expected_detail, ) + async def test_validator_calls_for_field_requests(self, user_1: User): + def annotation_pre_validator(value: str) -> str: + return f"{value} (annotation_pre_field)" + + def annotation_post_validator(value: str) -> str: + return f"{value} (annotation_post_field)" + + class UserSchemaWithValidator(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + name: Annotated[ + str, + BeforeValidator(annotation_pre_validator), + AfterValidator(annotation_post_validator), + # WrapValidator(wrapp_validator), + ] + + @field_validator("name", mode="before") + @classmethod + def pre_field_validator(cls, value): + return f"{value} (pre_field)" + + @field_validator("name", mode="after") + @classmethod + def post_field_validator(cls, value): + return f"{value} (post_field)" + + @model_validator(mode="before") + @classmethod + def pre_model_validator(cls, data: dict): + name = data["name"] + data["name"] = f"{name} (pre_model)" + return data + + @model_validator(mode="after") + @classmethod + def post_model_validator(cls, value): + value.name = f"{value.name} (post_model)" + return value + + params = QueryParams( + [ + (f"fields[{self.resource_type}]", "name"), + ], + ) + + app = self.build_app(UserSchemaWithValidator) + + async with AsyncClient(app=app, base_url="http://test") as client: + url = app.url_path_for(f"get_{self.resource_type}_detail", obj_id=user_1.id) + res = await client.get(url, params=params) + assert res.status_code == status.HTTP_200_OK, res.text + res_json = res.json() + + assert res_json["data"] + assert res_json["data"].pop("id") + assert 
res_json == { + "data": { + "attributes": { + # check validators call order + "name": ( + f"{user_1.name} (pre_model) (pre_field) (annotation_pre_field) " + "(annotation_post_field) (post_field) (post_model)" + ), + }, + "type": self.resource_type, + }, + "jsonapi": {"version": "1.0"}, + "meta": None, + } + + async def test_wrapp_validator_for_field_requests(self, user_1: User): + def wrapp_validator(value: str, handler: ValidatorFunctionWrapHandler) -> str: + return f"{value} (wrapp_field)" + + class UserSchemaWithValidator(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + name: Annotated[str, WrapValidator(wrapp_validator)] + + params = QueryParams( + [ + (f"fields[{self.resource_type}]", "name"), + ], + ) + + app = self.build_app(UserSchemaWithValidator) + + async with AsyncClient(app=app, base_url="http://test") as client: + url = app.url_path_for(f"get_{self.resource_type}_detail", obj_id=user_1.id) + res = await client.get(url, params=params) + assert res.status_code == status.HTTP_200_OK, res.text + res_json = res.json() + + assert res_json["data"] + assert res_json["data"].pop("id") + assert res_json == { + "data": { + "attributes": {"name": (f"{user_1.name} (wrapp_field)")}, + "type": self.resource_type, + }, + "jsonapi": {"version": "1.0"}, + "meta": None, + } + + async def test_plain_validator_for_field_requests(self, user_1: User): + def plain_validator(value: str, handler: ValidatorFunctionWrapHandler) -> str: + return f"{value} (plain_field)" + + class UserSchemaWithValidator(BaseModel): + model_config = ConfigDict( + from_attributes=True, + ) + + name: Annotated[int, PlainValidator(plain_validator)] + + params = QueryParams( + [ + (f"fields[{self.resource_type}]", "name"), + ], + ) + + app = self.build_app(UserSchemaWithValidator) + + async with AsyncClient(app=app, base_url="http://test") as client: + url = app.url_path_for(f"get_{self.resource_type}_detail", obj_id=user_1.id) + res = await client.get(url, params=params) + 
assert res.status_code == status.HTTP_200_OK, res.text + res_json = res.json() + + assert res_json["data"] + assert res_json["data"].pop("id") + assert res_json == { + "data": { + "attributes": {"name": (f"{user_1.name} (plain_field)")}, + "type": self.resource_type, + }, + "jsonapi": {"version": "1.0"}, + "meta": None, + } + class TestValidationUtils: - @mark.parametrize( + @pytest.mark.parametrize( ("include", "exclude", "expected"), [ - param({"item_1"}, None, {"item_1_validator"}, id="include"), - param(None, {"item_1"}, {"item_2_validator"}, id="exclude"), - param(None, None, {"item_1_validator", "item_2_validator"}, id="empty_params"), - param({"item_1", "item_2"}, {"item_2"}, {"item_1_validator"}, id="intersection"), + pytest.param({"item_1"}, None, {"item_1_validator"}, id="include"), + pytest.param(None, {"item_1"}, {"item_2_validator"}, id="exclude"), + pytest.param(None, None, {"item_1_validator", "item_2_validator"}, id="empty_params"), + pytest.param({"item_1", "item_2"}, {"item_2"}, {"item_1_validator"}, id="intersection"), ], ) def test_extract_field_validators_args( self, - include: Set[str], - exclude: Set[str], - expected: Set[str], + include: set[str], + exclude: set[str], + expected: set[str], ): class ValidationSchema(BaseModel): item_1: str item_2: str - @validator("item_1", allow_reuse=True) - def item_1_validator(cls, v): - return v + @field_validator("item_1", mode="after") + @classmethod + def item_1_validator(cls, value): + return value - @validator("item_2", allow_reuse=True) - def item_2_validator(cls, v): - return v + @field_validator("item_2", mode="after") + @classmethod + def item_2_validator(cls, value): + return value - validators = extract_field_validators( + field_validators, model_validators = extract_validators( ValidationSchema, include_for_field_names=include, exclude_for_field_names=exclude, ) - validator_func_names = { - validator_item.__validator_config__[1].func.__name__ for validator_item in validators.values() - } - - 
assert expected == validator_func_names + assert {*field_validators.keys(), *model_validators.keys()} == expected diff --git a/tests/test_atomic/conftest.py b/tests/test_atomic/conftest.py index 7be3e8b2..3223a50d 100644 --- a/tests/test_atomic/conftest.py +++ b/tests/test_atomic/conftest.py @@ -1,15 +1,26 @@ -from __future__ import annotations +from collections.abc import Sequence import pytest from fastapi_jsonapi.atomic.schemas import AtomicOperationAction -@pytest.fixture() +@pytest.fixture def allowed_atomic_actions_list() -> list[str]: return [op.value for op in AtomicOperationAction] -@pytest.fixture() +def options_as_pydantic_choices_string(options: Sequence[str]) -> str: + if len(options) == 1: + return repr(options[0]) + return " or ".join( + ( + ", ".join(repr(op) for op in options[:-1]), + repr(options[-1]), + ), + ) + + +@pytest.fixture def allowed_atomic_actions_as_string(allowed_atomic_actions_list) -> str: - return ", ".join(repr(op) for op in allowed_atomic_actions_list) + return options_as_pydantic_choices_string(allowed_atomic_actions_list) diff --git a/tests/test_atomic/test_create_objects.py b/tests/test_atomic/test_create_objects.py index 3a076131..b1c8a62e 100644 --- a/tests/test_atomic/test_create_objects.py +++ b/tests/test_atomic/test_create_objects.py @@ -2,18 +2,16 @@ from typing import Callable import pytest +from fastapi import status from httpx import AsyncClient -from pytest import mark # noqa from sqlalchemy import and_, or_, select from sqlalchemy.engine import Result from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import joinedload from sqlalchemy.sql.functions import count -from starlette import status -from tests.misc.utils import fake -from tests.models import Child, Parent, ParentToChildAssociation, User, UserBio -from tests.schemas import ( +from examples.api_for_sqlalchemy.models import Child, Parent, ParentToChildAssociation, User, UserBio +from examples.api_for_sqlalchemy.schemas import ( 
ChildAttributesSchema, ComputerAttributesBaseSchema, ParentAttributesSchema, @@ -21,11 +19,9 @@ UserAttributesBaseSchema, UserBioAttributesBaseSchema, ) +from tests.misc.utils import fake COLUMN_CHARACTERS_LIMIT = 50 - -pytestmark = mark.asyncio - logging.basicConfig(level=logging.DEBUG) @@ -35,22 +31,15 @@ def random_sentence() -> str: class TestAtomicCreateObjects: async def test_operations_empty_list(self, client: AsyncClient): - data_atomic_request = { + data_atomic_request: dict[str, list] = { "atomic:operations": [], } response = await client.post("/operations", json=data_atomic_request) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text - assert response.json() == { - # TODO: JSON:API exception! - "detail": [ - { - "loc": ["body", "atomic:operations"], - "msg": "ensure this value has at least 1 items", - "type": "value_error.list.min_items", - "ctx": {"limit_value": 1}, - }, - ], - } + response_data = response.json() + detail, *_ = response_data["detail"] + assert detail["loc"] == ["body", "atomic:operations"] + assert detail["msg"] == "List should have at least 1 item after validation, not 0" async def test_create_one_object( self, @@ -58,14 +47,13 @@ async def test_create_one_object( async_session: AsyncSession, user_attributes: UserAttributesBaseSchema, ): - user = user_attributes data_atomic_request = { "atomic:operations": [ { "op": "add", "data": { "type": "user", - "attributes": user.dict(), + "attributes": user_attributes.model_dump(), }, }, ], @@ -78,17 +66,17 @@ async def test_create_one_object( assert results, results result: dict = results[0] stmt = select(User).where( - User.name == user.name, - User.age == user.age, - User.email == user.email, + User.name == user_attributes.name, + User.age == user_attributes.age, + User.email == user_attributes.email, ) db_result: Result = await async_session.execute(stmt) user_obj: User = db_result.scalar_one() assert result.pop("meta") is None assert result == { "data": { - 
"attributes": UserAttributesBaseSchema.from_orm(user_obj).dict(), - "id": str(user_obj.id), + "attributes": UserAttributesBaseSchema.model_validate(user_obj).model_dump(), + "id": f"{user_obj.id}", "type": "user", }, } @@ -108,7 +96,7 @@ async def test_create_two_objects( "op": "add", "data": { "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, } for user_data in users_data @@ -137,8 +125,8 @@ async def test_create_two_objects( assert result.pop("meta") is None assert result == { "data": { - "attributes": UserAttributesBaseSchema.from_orm(user).dict(), - "id": str(user.id), + "attributes": UserAttributesBaseSchema.model_validate(user).model_dump(), + "id": f"{user.id}", "type": "user", }, } @@ -171,7 +159,7 @@ async def test_atomic_rollback_on_create_error( "op": "add", "data": { "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, } for user_data in users_data @@ -185,7 +173,8 @@ async def test_atomic_rollback_on_create_error( assert errors, response_data error = errors[0] assert error == { - "detail": "Object creation error", + "detail": "Could not create object", + "meta": {"id": None, "type": "user"}, "source": {"pointer": "/data"}, "status_code": status.HTTP_400_BAD_REQUEST, "title": "Bad Request", @@ -219,11 +208,11 @@ async def test_create_bio_with_relationship_to_user_to_one( "op": "add", "data": { "type": "user_bio", - "attributes": user_bio.dict(), + "attributes": user_bio.model_dump(), "relationships": { "user": { "data": { - "id": user_1.id, + "id": f"{user_1.id}", "type": "user", }, }, @@ -241,12 +230,12 @@ async def test_create_bio_with_relationship_to_user_to_one( result_bio_data = results[0] res: Result = await async_session.execute(stmt_bio) user_bio_created: UserBio = res.scalar_one() - assert user_bio == UserBioAttributesBaseSchema.from_orm(user_bio_created) + assert user_bio == UserBioAttributesBaseSchema.model_validate(user_bio_created) assert result_bio_data == { 
"data": { - "attributes": user_bio.dict(), + "attributes": user_bio.model_dump(), "type": "user_bio", - "id": str(user_bio_created.id), + "id": f"{user_bio_created.id}", }, "meta": None, } @@ -301,14 +290,14 @@ async def test_create_user_and_user_bio_with_local_id( "data": { "type": "user", "lid": user_lid, - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, }, { "op": "add", "data": { "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), "relationships": { "user": { "data": { @@ -332,17 +321,17 @@ async def test_create_user_and_user_bio_with_local_id( "atomic:results": [ { "data": { - "id": str(user.id), + "id": f"{user.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, "meta": None, }, { "data": { - "id": str(user.bio.id), + "id": f"{user.bio.id}", "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), }, "meta": None, }, @@ -398,14 +387,14 @@ async def test_create_user_and_create_computer_for_user( "data": { "type": "user", "lid": user_lid, - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, }, { "op": "add", "data": { "type": "computer", - "attributes": computer_data.dict(), + "attributes": computer_data.model_dump(), "relationships": { "user": { "data": { @@ -430,17 +419,17 @@ async def test_create_user_and_create_computer_for_user( "atomic:results": [ { "data": { - "id": str(user.id), + "id": f"{user.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, "meta": None, }, { "data": { - "id": str(user.computers[0].id), + "id": f"{user.computers[0].id}", "type": "computer", - "attributes": computer_data.dict(), + "attributes": computer_data.model_dump(), }, "meta": None, }, @@ -502,14 +491,14 @@ async def test_create_user_and_create_bio_and_computer_for_user( "data": { "type": "user", "lid": user_lid, - "attributes": 
user_data.dict(), + "attributes": user_data.model_dump(), }, }, { "op": "add", "data": { "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), "relationships": { "user": { "data": { @@ -524,7 +513,7 @@ async def test_create_user_and_create_bio_and_computer_for_user( "op": "add", "data": { "type": "computer", - "attributes": computer_data.dict(), + "attributes": computer_data.model_dump(), "relationships": { "user": { "data": { @@ -550,25 +539,25 @@ async def test_create_user_and_create_bio_and_computer_for_user( "atomic:results": [ { "data": { - "id": str(user.id), + "id": f"{user.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, "meta": None, }, { "data": { - "id": str(user.bio.id), + "id": f"{user.bio.id}", "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), }, "meta": None, }, { "data": { - "id": str(user.computers[0].id), + "id": f"{user.computers[0].id}", "type": "computer", - "attributes": computer_data.dict(), + "attributes": computer_data.model_dump(), }, "meta": None, }, @@ -627,14 +616,14 @@ async def test_resource_type_with_local_id_not_found( "op": "add", "data": { "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, } action_2 = { "op": "add", "data": { "type": "computer", - "attributes": computer_data.dict(), + "attributes": computer_data.model_dump(), "relationships": { "user": { "data": relationship_info, @@ -659,9 +648,9 @@ async def test_resource_type_with_local_id_not_found( assert response.json() == { "detail": { "data": { - **action_2["data"], "id": None, "lid": None, + **action_2["data"], }, "error": expected_error_text, "message": f"Validation error on operation {action_1['op']}", @@ -726,14 +715,14 @@ async def test_local_id_not_found( "data": { "type": "user", "lid": user_lid, - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, } 
action_2 = { "op": "add", "data": { "type": "computer", - "attributes": computer_data.dict(), + "attributes": computer_data.model_dump(), "relationships": { "user": { "data": relationship_info, @@ -787,7 +776,7 @@ async def test_create_and_associate_many_to_many( "data": { "lid": "new-parent", "type": "parent", - "attributes": parent_data.dict(), + "attributes": parent_data.model_dump(), }, }, # create child @@ -796,7 +785,7 @@ async def test_create_and_associate_many_to_many( "data": { "lid": "new-child", "type": "child", - "attributes": child_data.dict(), + "attributes": child_data.model_dump(), }, }, # create parent-to-child association @@ -854,24 +843,24 @@ async def test_create_and_associate_many_to_many( "atomic:results": [ { "data": { - "attributes": ParentAttributesSchema.from_orm(assoc.parent).dict(), - "id": str(assoc.parent.id), + "attributes": ParentAttributesSchema.model_validate(assoc.parent).model_dump(), + "id": f"{assoc.parent.id}", "type": "parent", }, "meta": None, }, { "data": { - "attributes": ChildAttributesSchema.from_orm(assoc.child).dict(), - "id": str(assoc.child.id), + "attributes": ChildAttributesSchema.model_validate(assoc.child).model_dump(), + "id": f"{assoc.child.id}", "type": "child", }, "meta": None, }, { "data": { - "attributes": ParentToChildAssociationAttributesSchema.from_orm(assoc).dict(), - "id": str(assoc.id), + "attributes": ParentToChildAssociationAttributesSchema.model_validate(assoc).model_dump(), + "id": f"{assoc.id}", "type": "parent-to-child-association", }, "meta": None, @@ -900,19 +889,24 @@ async def test_create_object_schema_validation_error( response = await client.post("/operations", json=data_atomic_request) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text # TODO: json:api exception - assert response.json() == { + response_data = response.json() + for response_ in response_data["detail"]["errors"]: + response_.pop("url") + assert response_data == { "detail": { "data": { - 
**action_add["data"], + "attributes": {}, "id": None, "lid": None, "relationships": None, + "type": "user", }, "errors": [ { + "input": {}, "loc": ["data", "attributes", "name"], - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], "message": f"Validation error on operation {action_add['op']}", diff --git a/tests/test_atomic/test_current_atomic_operation.py b/tests/test_atomic/test_current_atomic_operation.py index 0e2ac734..06644eeb 100644 --- a/tests/test_atomic/test_current_atomic_operation.py +++ b/tests/test_atomic/test_current_atomic_operation.py @@ -1,6 +1,4 @@ -from __future__ import annotations - -from typing import ClassVar, Dict, Literal, Optional +from typing import ClassVar, Literal, Optional import pytest from fastapi import Body, Depends, FastAPI, HTTPException, status @@ -9,13 +7,12 @@ from pytest_asyncio import fixture from sqlalchemy.ext.asyncio import AsyncSession +from examples.api_for_sqlalchemy.models import User +from examples.api_for_sqlalchemy.schemas import UserAttributesBaseSchema, UserSchema from fastapi_jsonapi.atomic import current_atomic_operation -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric +from fastapi_jsonapi.misc.sqla.generics.base import ViewBaseGeneric from fastapi_jsonapi.utils.exceptions import handle_validation_error -from fastapi_jsonapi.views.utils import ( - HTTPMethod, - HTTPMethodConfig, -) +from fastapi_jsonapi.views import Operation, OperationConfig from tests.common_user_api_test import ( BaseGenericUserCreateUpdateWithBodyDependency, CustomNameAttributesJSONAPI, @@ -24,14 +21,6 @@ from tests.fixtures.app import build_app_custom from tests.fixtures.views import ArbitraryModelBase, SessionDependency, common_handler from tests.misc.utils import fake -from tests.models import User -from tests.schemas import ( - UserAttributesBaseSchema, - UserSchema, -) - -pytestmark = pytest.mark.asyncio - FIELD_CUSTOM_NAME = 
"custom_name" @@ -65,7 +54,7 @@ def get_validated_attribute_from_body(data: dict): # validated_data = CustomNameAttributesJSONAPI.parse_obj(data) # return validated_data.attributes.custom_name - validated_data = AttributesTopLevelBody.parse_obj({"body": {"data": data}}) + validated_data = AttributesTopLevelBody.model_validate({"body": {"data": data}}) # or # return get_custom_name_from_body_only_on_generic(data=validated_data) @@ -85,7 +74,7 @@ async def get_custom_name_from_body_universal( # dep_helper = DependencyHelper(request=request) # return await dep_helper.run(get_custom_name_from_body_only_on_generic) - return get_validated_attribute_from_body(atomic_operation.data.dict()) + return get_validated_attribute_from_body(atomic_operation.data.model_dump()) class ValidateCustomNameEquals(ValidateCustomNameEqualsBase): @@ -126,25 +115,16 @@ class UserUpdateCustomDependency(ArbitraryModelBase): allow: bool = Depends(validator_update.validate) -class UserCustomListView(ListViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( +class UserCustomView(ViewBaseGeneric): + operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, ), - HTTPMethod.POST: HTTPMethodConfig( + Operation.CREATE: OperationConfig( dependencies=UserCreateCustomDependency, ), - } - - -class UserCustomDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=common_handler, - ), - HTTPMethod.PATCH: HTTPMethodConfig( + Operation.UPDATE: OperationConfig( dependencies=UserUpdateCustomDependency, ), } @@ -166,8 +146,7 @@ def app_w_deps(self, resource_type): model=User, schema=UserSchema, resource_type=resource_type, - class_list=UserCustomListView, - 
class_detail=UserCustomDetailView, + view=UserCustomView, path=f"/path_{resource_type}", ) return app @@ -277,7 +256,7 @@ async def test_atomic_create_user_error_required_body_field_not_passed( resource_type: str, user_attributes: UserAttributesBaseSchema, ): - user_attributes_data = user_attributes.dict() + user_attributes_data = user_attributes.model_dump() assert self.FIELD_CUSTOM_NAME not in user_attributes_data data_atomic_request = { "atomic:operations": [ @@ -300,18 +279,17 @@ async def test_atomic_update_user_error_required_body_field_not_passed( user_attributes: UserAttributesBaseSchema, user_1: User, ): - attributes_data = user_attributes.dict() + attributes_data = user_attributes.model_dump() assert self.FIELD_CUSTOM_NAME not in attributes_data - data_user_update = { - "id": user_1.id, - "type": resource_type, - "attributes": attributes_data, - } data_atomic_request = { "atomic:operations": [ { "op": "update", - "data": data_user_update, + "data": { + "id": f"{user_1.id}", + "type": resource_type, + "attributes": attributes_data, + }, }, ], } @@ -324,7 +302,7 @@ async def test_atomic_create_user_error_required_body_field_passed_but_invalid( resource_type: str, user_attributes: UserAttributesBaseSchema, ): - user_attributes_data = user_attributes.dict() + user_attributes_data = user_attributes.model_dump() user_attributes_data[self.FIELD_CUSTOM_NAME] = fake.word() assert user_attributes_data[self.FIELD_CUSTOM_NAME] != self.validator_create.expected_value data_atomic_request = { @@ -348,18 +326,17 @@ async def test_atomic_update_user_error_required_body_field_passed_but_invalid( user_attributes: UserAttributesBaseSchema, user_1: User, ): - attributes_data = user_attributes.dict() + attributes_data = user_attributes.model_dump() attributes_data[self.FIELD_CUSTOM_NAME] = fake.word() - data_user_update = { - "id": user_1.id, - "type": resource_type, - "attributes": attributes_data, - } data_atomic_request = { "atomic:operations": [ { "op": "update", - "data": 
data_user_update, + "data": { + "id": f"{user_1.id}", + "type": resource_type, + "attributes": attributes_data, + }, }, ], } @@ -373,15 +350,14 @@ async def test_atomic_create_user_success_use_current_atomic_operation_during_va resource_type: str, user_attributes: UserAttributesBaseSchema, ): - data_user_create = self.prepare_user_create_data( - user_attributes=user_attributes, - resource_type=resource_type, - ) data_atomic_request = { "atomic:operations": [ { "op": "add", - "data": data_user_create, + "data": self.prepare_user_create_data( + user_attributes=user_attributes, + resource_type=resource_type, + ), }, ], } @@ -410,16 +386,15 @@ async def test_atomic_update_user_success_use_current_atomic_operation_during_va user_attributes: UserAttributesBaseSchema, user_1: User, ): - data_user_update = self.prepare_user_update_data( - user=user_1, - user_attributes=user_attributes, - resource_type=resource_type, - ) data_atomic_request = { "atomic:operations": [ { "op": "update", - "data": data_user_update, + "data": self.prepare_user_update_data( + user=user_1, + user_attributes=user_attributes, + resource_type=resource_type, + ), }, ], } diff --git a/tests/test_atomic/test_delete_objects.py b/tests/test_atomic/test_delete_objects.py index ac923b8d..1bf4e282 100644 --- a/tests/test_atomic/test_delete_objects.py +++ b/tests/test_atomic/test_delete_objects.py @@ -1,17 +1,14 @@ import logging from typing import Awaitable, Callable +from fastapi import status from httpx import AsyncClient -from pytest import mark # noqa from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.sql.functions import count -from starlette import status +from examples.api_for_sqlalchemy.models import Computer from fastapi_jsonapi.atomic.schemas import AtomicOperationAction -from tests.models import Computer - -pytestmark = mark.asyncio logging.basicConfig(level=logging.DEBUG) @@ -41,14 +38,14 @@ async def test_delete_two_objects( { "op": "remove", "ref": { 
- "id": str(computer_1.id), + "id": f"{computer_1.id}", "type": "computer", }, }, { "op": "remove", "ref": { - "id": str(computer_2.id), + "id": f"{computer_2.id}", "type": "computer", }, }, @@ -78,13 +75,7 @@ async def test_delete_no_ref( } response = await client.post("/operations", json=data_atomic_request) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text - assert response.json() == { - # TODO: json:api exception - "detail": [ - { - "loc": ["body", "atomic:operations", 0, "__root__"], - "msg": f"ref should be present for action {AtomicOperationAction.remove.value!r}", - "type": "value_error", - }, - ], - } + response_data = response.json() + detail, *_ = response_data["detail"] + assert detail["loc"] == ["body", "atomic:operations", 0] + assert detail["msg"] == f"Value error, ref should be present for action {AtomicOperationAction.remove.value!r}" diff --git a/tests/test_atomic/test_dependencies.py b/tests/test_atomic/test_dependencies.py index 8a09fb04..e30eb88b 100644 --- a/tests/test_atomic/test_dependencies.py +++ b/tests/test_atomic/test_dependencies.py @@ -1,27 +1,22 @@ -from typing import ClassVar, Dict +from typing import ClassVar import pytest from fastapi import Depends, Query, status from httpx import AsyncClient from pytest_asyncio import fixture -from fastapi_jsonapi.misc.sqla.generics.base import DetailViewBaseGeneric, ListViewBaseGeneric -from fastapi_jsonapi.views.utils import ( - HTTPMethod, - HTTPMethodConfig, -) -from tests.fixtures.app import build_app_custom -from tests.fixtures.views import ArbitraryModelBase, SessionDependency, common_handler -from tests.misc.utils import fake -from tests.models import User -from tests.schemas import ( +from examples.api_for_sqlalchemy.models import User +from examples.api_for_sqlalchemy.schemas import ( UserAttributesBaseSchema, UserInSchema, UserPatchSchema, UserSchema, ) - -pytestmark = pytest.mark.asyncio +from fastapi_jsonapi.misc.sqla.generics.base import 
ViewBaseGeneric +from fastapi_jsonapi.views import Operation, OperationConfig +from tests.fixtures.app import build_app_custom +from tests.fixtures.views import ArbitraryModelBase, SessionDependency, common_handler +from tests.misc.utils import fake class CustomDependencyForCreate: @@ -57,28 +52,19 @@ class UserDeleteCustomDependency(ArbitraryModelBase): dep: CustomDependencyForDelete = Depends() -class UserCustomListView(ListViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( +class UserCustomView(ViewBaseGeneric): + operation_dependencies: ClassVar[dict[Operation, OperationConfig]] = { + Operation.ALL: OperationConfig( dependencies=SessionDependency, prepare_data_layer_kwargs=common_handler, ), - HTTPMethod.POST: HTTPMethodConfig( + Operation.CREATE: OperationConfig( dependencies=UserCreateCustomDependency, ), - } - - -class UserCustomDetailView(DetailViewBaseGeneric): - method_dependencies: ClassVar[Dict[HTTPMethod, HTTPMethodConfig]] = { - HTTPMethod.ALL: HTTPMethodConfig( - dependencies=SessionDependency, - prepare_data_layer_kwargs=common_handler, - ), - HTTPMethod.PATCH: HTTPMethodConfig( + Operation.UPDATE: OperationConfig( dependencies=UserUpdateCustomDependency, ), - HTTPMethod.DELETE: HTTPMethodConfig( + Operation.DELETE: OperationConfig( dependencies=UserDeleteCustomDependency, ), } @@ -97,8 +83,7 @@ def app_w_deps(self, resource_type): schema_in_post=UserInSchema, schema_in_patch=UserPatchSchema, resource_type=resource_type, - class_list=UserCustomListView, - class_detail=UserCustomDetailView, + view=UserCustomView, ) return app @@ -118,7 +103,10 @@ async def send_and_validate_atomic( assert response.status_code == expected_status, response.text response_data = response.json() # TODO: JSON:API exception - assert response_data == expected_body + detail, *_ = response_data["detail"] + expected_detail, *_ = response_data["detail"] + assert detail["loc"] == expected_detail["loc"] + 
assert detail["msg"] == expected_detail["msg"] async def test_on_create_atomic( self, @@ -136,7 +124,7 @@ async def test_on_create_atomic( "op": "add", "data": { "type": resource_type, - "attributes": user.dict(), + "attributes": user.model_dump(), }, }, ], @@ -145,9 +133,10 @@ async def test_on_create_atomic( expected_response_data = { "detail": [ { + "input": None, "loc": ["query", CustomDependencyForCreate.KEY], - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], } @@ -172,20 +161,22 @@ async def test_on_update_atomic( "atomic:operations": [ { "op": "update", - "id": user_1.id, + "id": f"{user_1.id}", "data": { "type": resource_type, - "attributes": user.dict(), + "attributes": user.model_dump(), }, }, ], - } # TODO: JSON:API exception + } + # TODO: JSON:API exception expected_response_data = { "detail": [ { + "input": None, "loc": ["query", CustomDependencyForUpdate.KEY], - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], } @@ -206,7 +197,7 @@ async def test_on_delete_atomic( { "op": "remove", "ref": { - "id": user_1.id, + "id": f"{user_1.id}", "type": resource_type, }, }, @@ -216,9 +207,10 @@ async def test_on_delete_atomic( expected_response_data = { "detail": [ { + "input": None, "loc": ["query", CustomDependencyForDelete.KEY], - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], } diff --git a/tests/test_atomic/test_mixed_atomic.py b/tests/test_atomic/test_mixed_atomic.py index b28479ff..f679b817 100644 --- a/tests/test_atomic/test_mixed_atomic.py +++ b/tests/test_atomic/test_mixed_atomic.py @@ -1,21 +1,20 @@ -from __future__ import annotations - import logging -from typing import Awaitable, Callable +from typing import Awaitable, Callable, Optional import pytest +from fastapi import status from httpx import AsyncClient -from pytest import mark # noqa from sqlalchemy import 
select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.sql.functions import count -from starlette import status +from examples.api_for_sqlalchemy.models import Computer, User, UserBio +from examples.api_for_sqlalchemy.schemas import ( + ComputerAttributesBaseSchema, + UserAttributesBaseSchema, + UserBioAttributesBaseSchema, +) from tests.misc.utils import fake -from tests.models import Computer, User, UserBio -from tests.schemas import ComputerAttributesBaseSchema, UserAttributesBaseSchema, UserBioAttributesBaseSchema - -pytestmark = mark.asyncio logging.basicConfig(level=logging.DEBUG) @@ -45,20 +44,9 @@ async def test_schema_validation_error( response = await client.post("/operations", json=atomic_request_data) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text response_data = response.json() - - assert response_data == { - # TODO: jsonapi exception? - "detail": [ - { - "loc": ["body", "atomic:operations", 0, "op"], - "msg": f"value is not a valid enumeration member; permitted: {allowed_atomic_actions_as_string}", - "type": "type_error.enum", - "ctx": { - "enum_values": allowed_atomic_actions_list, - }, - }, - ], - } + detail, *_ = response_data["detail"] + assert detail["loc"] == ["body", "atomic:operations", 0, "op"] + assert detail["msg"] == f"Input should be {allowed_atomic_actions_as_string}" async def test_create_and_update_atomic_success( self, @@ -80,8 +68,8 @@ async def test_create_and_update_atomic_success( :param user_1_bio: :return: """ - user_data = UserAttributesBaseSchema.from_orm(user_1) - user_bio_data = UserBioAttributesBaseSchema.from_orm(user_1_bio) + user_data = UserAttributesBaseSchema.model_validate(user_1) + user_bio_data = UserBioAttributesBaseSchema.model_validate(user_1_bio) user_data.name = fake.name() user_bio_data.favourite_movies = fake.sentence() assert user_1.name != user_data.name @@ -95,11 +83,11 @@ async def test_create_and_update_atomic_success( "op": "add", "data": { "type": 
"computer", - "attributes": new_computer.dict(), + "attributes": new_computer.model_dump(), "relationships": { "user": { "data": { - "id": user_1.id, + "id": f"{user_1.id}", "type": "user", }, }, @@ -109,17 +97,17 @@ async def test_create_and_update_atomic_success( { "op": "update", "data": { - "id": str(user_1_bio.id), + "id": f"{user_1_bio.id}", "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), }, }, { "op": "update", "data": { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, }, ], @@ -138,25 +126,25 @@ async def test_create_and_update_atomic_success( assert results == [ { "data": { - "id": str(computer.id), + "id": f"{computer.id}", "type": "computer", - "attributes": new_computer.dict(), + "attributes": new_computer.model_dump(), }, "meta": None, }, { "data": { - "id": str(user_1_bio.id), + "id": f"{user_1_bio.id}", "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), }, "meta": None, }, { "data": { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, "meta": None, }, @@ -184,8 +172,8 @@ async def test_create_and_update_atomic_rollback( :param user_1_bio: :return: """ - user_data = UserAttributesBaseSchema.from_orm(user_1) - user_bio_data = UserBioAttributesBaseSchema.from_orm(user_1_bio) + user_data = UserAttributesBaseSchema.model_validate(user_1) + user_bio_data = UserBioAttributesBaseSchema.model_validate(user_1_bio) user_bio_data.favourite_movies = fake.sentence() assert user_1_bio.favourite_movies != user_bio_data.favourite_movies user_data.name = user_2.name @@ -201,11 +189,11 @@ async def test_create_and_update_atomic_rollback( "op": "add", "data": { "type": "computer", - "attributes": new_computer.dict(), + "attributes": new_computer.model_dump(), "relationships": { "user": { 
"data": { - "id": user_1.id, + "id": f"{user_1.id}", "type": "user", }, }, @@ -215,17 +203,17 @@ async def test_create_and_update_atomic_rollback( { "op": "update", "data": { - "id": str(user_1_bio.id), + "id": f"{user_1_bio.id}", "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), }, }, { "op": "update", "data": { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, }, ], @@ -248,12 +236,12 @@ async def test_create_and_update_atomic_rollback( assert cnt == 0, "no computers have to be created" assert errors == [ { - "detail": "Object update error", + "detail": "Could not update object", "source": {"pointer": "/data"}, "status_code": status.HTTP_400_BAD_REQUEST, "title": "Bad Request", "meta": { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", }, }, @@ -285,8 +273,8 @@ async def test_create_update_and_delete_atomic_success( :return: """ computer: Computer = await computer_factory() - user_data = UserAttributesBaseSchema.from_orm(user_1) - user_bio_data = UserBioAttributesBaseSchema.from_orm(user_1_bio) + user_data = UserAttributesBaseSchema.model_validate(user_1) + user_bio_data = UserBioAttributesBaseSchema.model_validate(user_1_bio) user_data.name = fake.name() user_bio_data.favourite_movies = fake.sentence() assert user_1.name != user_data.name @@ -300,11 +288,11 @@ async def test_create_update_and_delete_atomic_success( "op": "add", "data": { "type": "computer", - "attributes": new_computer.dict(), + "attributes": new_computer.model_dump(), "relationships": { "user": { "data": { - "id": user_1.id, + "id": f"{user_1.id}", "type": "user", }, }, @@ -314,23 +302,23 @@ async def test_create_update_and_delete_atomic_success( { "op": "update", "data": { - "id": user_1_bio.id, + "id": f"{user_1_bio.id}", "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), }, }, { "op": 
"update", "data": { - "id": user_1.id, + "id": f"{user_1.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, }, { "op": "remove", "ref": { - "id": computer.id, + "id": f"{computer.id}", "type": "computer", }, }, @@ -349,29 +337,29 @@ async def test_create_update_and_delete_atomic_success( await async_session.refresh(user_1_bio) assert user_1.name == user_data.name assert user_1_bio.favourite_movies == user_bio_data.favourite_movies - computer: Computer = await async_session.scalar(select(Computer).where(Computer.user_id == user_1.id)) + computer = await async_session.scalar(select(Computer).where(Computer.user_id == user_1.id)) assert results == [ { "data": { - "id": str(computer.id), + "id": f"{computer.id}", "type": "computer", - "attributes": new_computer.dict(), + "attributes": new_computer.model_dump(), }, "meta": None, }, { "data": { - "id": str(user_1_bio.id), + "id": f"{user_1_bio.id}", "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), }, "meta": None, }, { "data": { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": user_data.model_dump(), }, "meta": None, }, @@ -424,15 +412,15 @@ async def test_create_user_and_update_computer_and_link_to_user( "data": { "type": "user", "lid": user_lid, - "attributes": user_create.dict(), + "attributes": user_create.model_dump(), }, }, { "op": "update", "data": { - "id": str(computer_1.id), + "id": f"{computer_1.id}", "type": "computer", - "attributes": computer_update.dict(), + "attributes": computer_update.model_dump(), "relationships": { "user": { "data": { @@ -463,7 +451,7 @@ async def test_create_user_and_update_computer_and_link_to_user( ), ) ) - user: User | None = await async_session.scalar(user_stmt) + user: Optional[User] = await async_session.scalar(user_stmt) assert user await async_session.refresh(computer_1) assert computer_1.name == 
computer_update.name @@ -471,17 +459,17 @@ async def test_create_user_and_update_computer_and_link_to_user( assert results == [ { "data": { - "id": str(user.id), + "id": f"{user.id}", "type": "user", - "attributes": user_create.dict(), + "attributes": user_create.model_dump(), }, "meta": None, }, { "data": { - "id": str(computer_1.id), + "id": f"{computer_1.id}", "type": "computer", - "attributes": computer_update.dict(), + "attributes": computer_update.model_dump(), }, "meta": None, }, @@ -514,12 +502,12 @@ async def test_create_user_and_link_computer_one_operation( "op": "add", "data": { "type": "user", - "attributes": user_create.dict(), + "attributes": user_create.model_dump(), "relationships": { "computers": { "data": [ { - "id": computer_1.id, + "id": f"{computer_1.id}", "type": "computer", }, ], @@ -548,15 +536,15 @@ async def test_create_user_and_link_computer_one_operation( ), ) ) - new_user: User | None = await async_session.scalar(user_stmt) + new_user: Optional[User] = await async_session.scalar(user_stmt) assert isinstance(new_user, User) assert computer_1.user_id == new_user.id assert results == [ { "data": { - "id": str(new_user.id), + "id": f"{new_user.id}", "type": "user", - "attributes": user_create.dict(), + "attributes": user_create.model_dump(), }, "meta": None, }, @@ -583,7 +571,7 @@ async def create_user_and_link_existing_computer_to_user( "ref": { "type": "articles", "id": "13", - "relationship": "author" + "relationship": "user" }, "data": { "type": "people", diff --git a/tests/test_atomic/test_request.py b/tests/test_atomic/test_request.py index bbd68ccc..ae304455 100644 --- a/tests/test_atomic/test_request.py +++ b/tests/test_atomic/test_request.py @@ -54,7 +54,7 @@ class TestAtomicOperationRequest: "ref": { "type": "articles", "id": "13", - "relationship": "author", + "relationship": "user", }, "data": { "type": "people", @@ -71,7 +71,7 @@ class TestAtomicOperationRequest: "ref": { "type": "articles", "id": "13", - "relationship": 
"author", + "relationship": "user", }, "data": None, }, @@ -80,8 +80,8 @@ class TestAtomicOperationRequest: ], ) def test_request_data(self, operation_request: dict): - validated = AtomicOperationRequest.parse_obj(operation_request) - assert validated.dict(exclude_unset=True, by_alias=True) == operation_request + validated = AtomicOperationRequest.model_validate(operation_request) + assert validated.model_dump(exclude_unset=True, by_alias=True) == operation_request def test_not_supported_operation( self, @@ -103,10 +103,7 @@ def test_not_supported_operation( ], } with pytest.raises(ValidationError) as exc_info: - AtomicOperationRequest.parse_obj(atomic_request_data) + AtomicOperationRequest.model_validate(atomic_request_data) errors = exc_info.value.errors() error = errors[0] - assert ( - error.get("msg") - == f"value is not a valid enumeration member; permitted: {allowed_atomic_actions_as_string}" - ) + assert error.get("msg") == f"Input should be {allowed_atomic_actions_as_string}" diff --git a/tests/test_atomic/test_response.py b/tests/test_atomic/test_response.py index 6f3792f8..3802c91e 100644 --- a/tests/test_atomic/test_response.py +++ b/tests/test_atomic/test_response.py @@ -28,9 +28,9 @@ class TestAtomicResultResponse: { "data": { "links": { - "self": "https://example.com/authors/acb2ebd6-ed30-4877-80ce-52a14d77d470", + "self": "https://example.com/user/acb2ebd6-ed30-4877-80ce-52a14d77d470", }, - "type": "authors", + "type": "users", "id": "acb2ebd6-ed30-4877-80ce-52a14d77d470", "attributes": {"name": "dgeb"}, }, @@ -46,10 +46,10 @@ class TestAtomicResultResponse: "title": "JSON API paints my bikeshed!", }, "relationships": { - "author": { + "user": { "links": { - "self": "https://example.com/articles/bb3ad581-806f-4237-b748-f2ea0261845c/relationships/author", - "related": "https://example.com/articles/bb3ad581-806f-4237-b748-f2ea0261845c/author", + "self": "https://example.com/articles/bb3ad581-806f-4237-b748-f2ea0261845c/relationships/user", + "related": 
"https://example.com/articles/bb3ad581-806f-4237-b748-f2ea0261845c/user", }, }, }, @@ -60,5 +60,5 @@ class TestAtomicResultResponse: ], ) def test_response_data(self, operation_response: dict): - validated = AtomicResultResponse.parse_obj(operation_response) - assert validated.dict(exclude_unset=True, by_alias=True) == operation_response + validated = AtomicResultResponse.model_validate(operation_response) + assert validated.model_dump(exclude_unset=True, by_alias=True) == operation_response diff --git a/tests/test_atomic/test_update_objects.py b/tests/test_atomic/test_update_objects.py index aa29b467..e5069ecd 100644 --- a/tests/test_atomic/test_update_objects.py +++ b/tests/test_atomic/test_update_objects.py @@ -1,15 +1,13 @@ import logging import pytest +from fastapi import status from httpx import AsyncClient from sqlalchemy.ext.asyncio import AsyncSession -from starlette import status +from examples.api_for_sqlalchemy.models import Computer, User, UserBio +from examples.api_for_sqlalchemy.schemas import UserAttributesBaseSchema, UserBioAttributesBaseSchema from tests.misc.utils import fake -from tests.models import Computer, User, UserBio -from tests.schemas import UserAttributesBaseSchema, UserBioAttributesBaseSchema - -pytestmark = pytest.mark.asyncio logging.basicConfig(level=logging.DEBUG) @@ -22,8 +20,8 @@ async def test_update_two_objects( user_1: User, user_1_bio: UserBio, ): - user_data = UserAttributesBaseSchema.from_orm(user_1) - user_bio_data = UserBioAttributesBaseSchema.from_orm(user_1_bio) + user_data = UserAttributesBaseSchema.model_validate(user_1) + user_bio_data = UserBioAttributesBaseSchema.model_validate(user_1_bio) user_data.name = fake.name() user_bio_data.favourite_movies = fake.sentence() assert user_1.name != user_data.name @@ -33,17 +31,17 @@ async def test_update_two_objects( { "op": "update", "data": { - "id": str(user_1.id), + "id": f"{user_1.id}", "type": "user", - "attributes": user_data.dict(), + "attributes": 
user_data.model_dump(), }, }, { "op": "update", "data": { - "id": str(user_1_bio.id), + "id": f"{user_1_bio.id}", "type": "user_bio", - "attributes": user_bio_data.dict(), + "attributes": user_bio_data.model_dump(), }, }, ], @@ -62,16 +60,16 @@ async def test_update_two_objects( assert results == [ { "data": { - "attributes": user_data.dict(), - "id": str(user_1.id), + "attributes": user_data.model_dump(), + "id": f"{user_1.id}", "type": "user", }, "meta": None, }, { "data": { - "attributes": user_bio_data.dict(), - "id": str(user_1_bio.id), + "attributes": user_bio_data.model_dump(), + "id": f"{user_1_bio.id}", "type": "user_bio", }, "meta": None, @@ -96,7 +94,7 @@ async def test_update_to_one_relationship_atomic( "ref": { "type": "articles", "id": "13", - "relationship": "author" + "relationship": "user" }, "data": { "type": "people", @@ -118,12 +116,12 @@ async def test_update_to_one_relationship_atomic( "op": "update", "ref": { "type": "computer", - "id": computer_1.id, + "id": f"{computer_1.id}", "relationship": "user", }, "data": { "type": "user", - "id": user_1.id, + "id": f"{user_1.id}", }, }, ], @@ -154,7 +152,7 @@ async def test_update_to_one_relationship_clear_atomic( "ref": { "type": "articles", "id": "13", - "relationship": "author" + "relationship": "user" }, "data": null }] @@ -177,8 +175,8 @@ async def test_update_to_one_relationship_clear_atomic( "op": "update", "ref": { "type": "computer", - "id": computer_1.id, - "relationship": "author", + "id": f"{computer_1.id}", + "relationship": "user", }, "data": None, }, diff --git a/tests/test_data_layers/test_filtering/test_sqlalchemy.py b/tests/test_data_layers/test_filtering/test_sqlalchemy.py index ec27a528..36e37d0c 100644 --- a/tests/test_data_layers/test_filtering/test_sqlalchemy.py +++ b/tests/test_data_layers/test_filtering/test_sqlalchemy.py @@ -1,13 +1,11 @@ from typing import Any from unittest.mock import MagicMock, Mock +import pytest from fastapi import status -from pydantic import BaseModel 
-from pytest import raises # noqa PT013 +from pydantic import BaseModel, ConfigDict -from fastapi_jsonapi.data_layers.filtering.sqlalchemy import ( - build_filter_expression, -) +from fastapi_jsonapi.data_layers.sqla.query_building import build_filter_expression from fastapi_jsonapi.exceptions import InvalidType @@ -18,15 +16,16 @@ def __init__(self, *args, **kwargs): """This method is needed to handle incoming arguments""" class ModelSchema(BaseModel): - value: UserType + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) - class Config: - arbitrary_types_allowed = True + value: UserType model_column_mock = MagicMock() build_filter_expression( - schema_field=ModelSchema.__fields__["value"], + schema_field=ModelSchema.model_fields["value"], model_column=model_column_mock, operator="__eq__", value=Any, @@ -44,14 +43,15 @@ def __init__(self, *args, **kwargs): raise ValueError(msg) class ModelSchema(BaseModel): - user_type: UserType + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) - class Config: - arbitrary_types_allowed = True + user_type: UserType - with raises(InvalidType) as exc_info: + with pytest.raises(InvalidType) as exc_info: build_filter_expression( - schema_field=ModelSchema.__fields__["user_type"], + schema_field=ModelSchema.model_fields["user_type"], model_column=Mock(), operator=Mock(), value=Any, diff --git a/tests/test_fastapi_jsonapi/test_querystring.py b/tests/test_fastapi_jsonapi/test_querystring.py index dec04365..76a2b7fb 100644 --- a/tests/test_fastapi_jsonapi/test_querystring.py +++ b/tests/test_fastapi_jsonapi/test_querystring.py @@ -1,9 +1,9 @@ -import json from unittest.mock import MagicMock +import orjson as json import pytest from fastapi import status -from starlette.datastructures import QueryParams +from fastapi.datastructures import QueryParams from fastapi_jsonapi.exceptions import InvalidFilters from fastapi_jsonapi.exceptions.json_api import BadRequest @@ -14,11 +14,11 @@ def test__extract_item_key(): 
manager = QueryStringManager(MagicMock()) key = "fields[user]" - assert manager._extract_item_key(key) == "user" + assert manager.extract_item_key(key) == "user" with pytest.raises(BadRequest) as exc_info: # noqa: PT012 key = "fields[user" - manager._extract_item_key(key) + manager.extract_item_key(key) assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST assert exc_info.value.detail == { @@ -46,7 +46,7 @@ def test_filters__errors(): manager = QueryStringManager(request) with pytest.raises(InvalidFilters) as exc_info: - manager.filters + assert isinstance(manager.filters, list) assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST assert exc_info.value.detail == { @@ -70,14 +70,14 @@ def test_filters__errors(): "op": "", "val": "", }, - ), + ).decode(), ), ], ) manager = QueryStringManager(request) with pytest.raises(InvalidFilters) as exc_info: - manager.filters + assert isinstance(manager.filters, list) assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST assert exc_info.value.detail == { diff --git a/tests/test_utils/test_dependency_helper.py b/tests/test_utils/test_dependency_helper.py index e8ba5e93..07f05fb8 100644 --- a/tests/test_utils/test_dependency_helper.py +++ b/tests/test_utils/test_dependency_helper.py @@ -2,16 +2,10 @@ from string import ascii_letters from unittest.mock import AsyncMock -import pytest -from fastapi import ( - Depends, - Request, -) +from fastapi import Depends, Request from fastapi_jsonapi.utils.dependency_helper import DependencyHelper -pytestmark = pytest.mark.asyncio - class TestDependencyHelper: async def test_dependency_helper(self):