diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml index 1c2c021e..4a72f4bd 100644 --- a/.github/workflows/deploy-docs.yml +++ b/.github/workflows/deploy-docs.yml @@ -22,7 +22,7 @@ jobs: - uses: actions/setup-python@v5 with: - python-version: '3.12' + python-version: '3.13' - name: Install dependencies run: pip install -r requirements.txt diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5e917eb4..4c9acd2f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -21,14 +21,14 @@ jobs: python-version: - "pypy3.9" - "pypy3.10" - - "3.8" - "3.9" - "3.10" - "3.11" - "3.12" + - "3.13" experimental: [ false ] include: - - python-version: "~3.13.0-0" + - python-version: "~3.14.0-0" experimental: true steps: diff --git a/.gitignore b/.gitignore index 003553f3..88e4e8d6 100644 --- a/.gitignore +++ b/.gitignore @@ -37,7 +37,6 @@ pip-delete-this-directory.txt ## Unit test / coverage reports htmlcov/ -.tox/ .coverage .coverage.* .cache diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0c1cf53d..f08d8c9c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer diff --git a/examples/webapp.py b/examples/webapp.py index 889e088c..9ae23a45 100644 --- a/examples/webapp.py +++ b/examples/webapp.py @@ -1,5 +1,5 @@ # This is code for the tutorial in README.md -from typing import Any, List, MutableMapping +from typing import Any, List, MutableMapping, Optional from aiohttp.web import Application @@ -95,7 +95,7 @@ class App(Service): def __init__( self, web_port: int = 8000, - web_bind: str = None, + web_bind: Optional[str] = None, websocket_port: int = 8001, **kwargs: Any ) -> None: diff --git a/mode/__init__.py b/mode/__init__.py index d8ee11d3..9ef99bdc 100644 --- a/mode/__init__.py +++ b/mode/__init__.py @@ -4,11 +4,12 @@ import sys import typing +from collections.abc import Mapping, Sequence # Lazy loading. # - See werkzeug/__init__.py for the rationale behind this. 
from types import ModuleType -from typing import Any, Mapping, Sequence +from typing import Any # -eof meta- diff --git a/mode/debug.py b/mode/debug.py index 21f4b0e6..c18e1fb5 100644 --- a/mode/debug.py +++ b/mode/debug.py @@ -4,7 +4,7 @@ import signal import traceback from types import FrameType -from typing import Any, Type +from typing import Any from .services import Service from .utils.logging import get_logger @@ -59,11 +59,11 @@ class BlockingDetector(Service): def __init__( self, timeout: Seconds, - raises: Type[BaseException] = Blocking, + raises: type[BaseException] = Blocking, **kwargs: Any, ) -> None: self.timeout: float = want_seconds(timeout) - self.raises: Type[BaseException] = raises + self.raises: type[BaseException] = raises super().__init__(**kwargs) @Service.task diff --git a/mode/locals.py b/mode/locals.py index b2a8a826..fce04c8b 100644 --- a/mode/locals.py +++ b/mode/locals.py @@ -74,37 +74,34 @@ class XProxy(MutableMappingRole, AsyncContextManagerRole): import sys from collections import deque -from functools import wraps -from types import GetSetDescriptorType, TracebackType -from typing import ( - AbstractSet, - Any, - AsyncContextManager, +from collections.abc import ( AsyncGenerator, AsyncIterable, AsyncIterator, Awaitable, - Callable, - ClassVar, - ContextManager, Coroutine, - Dict, Generator, - Generic, Iterable, Iterator, - List, Mapping, MutableMapping, MutableSequence, MutableSet, - Optional, Sequence, - Tuple, - Type, + Set, + ValuesView, +) +from contextlib import AbstractAsyncContextManager, AbstractContextManager +from functools import wraps +from types import GetSetDescriptorType, TracebackType +from typing import ( + Any, + Callable, + ClassVar, + Generic, + Optional, TypeVar, Union, - ValuesView, cast, no_type_check, overload, @@ -161,20 +158,20 @@ class XProxy(MutableMappingRole, AsyncContextManagerRole): def _default_cls_attr( - name: str, type_: Type, cls_value: Any -) -> Callable[[Type], GetSetDescriptorType]: + name: str, type_: type, cls_value: Any +) -> Callable[[type], GetSetDescriptorType]: # Proxy uses properties to forward the standard # class attributes __module__, __name__ and __doc__ to the real # object, but these needs to be a string when accessed from # the Proxy class directly. This is a hack to make that work. # -- See Issue #1087. - def __new__(cls: Type, getter: Callable) -> Any: + def __new__(cls: type, getter: Callable) -> Any: instance = type_.__new__(cls, cls_value) instance.__getter = getter # type: ignore return instance - def __get__(self: Type, obj: Any, cls: Optional[Type] = None) -> Any: + def __get__(self: type, obj: Any, cls: Optional[type] = None) -> Any: return self.__getter(obj) if obj is not None else self return type(name, (type_,), {"__new__": __new__, "__get__": __get__}) @@ -183,7 +180,7 @@ def __get__(self: Type, obj: Any, cls: Optional[Type] = None) -> Any: class Proxy(Generic[T]): """Proxy to another object.""" - __proxy_source__: ClassVar[Optional[Type[T]]] = None + __proxy_source__: ClassVar[Optional[type[T]]] = None # Code initially stolen from werkzeug.local.Proxy. 
if not SLOTS_ISSUE_PRESENT and not PYPY: # pragma: no cover @@ -195,7 +192,7 @@ class Proxy(Generic[T]): "__local", ) - def __init_subclass__(self, source: Optional[Type[T]] = None) -> None: + def __init_subclass__(self, source: Optional[type[T]] = None) -> None: super().__init_subclass__() if source is not None: self._init_from_source(source) @@ -205,7 +202,7 @@ def __init_subclass__(self, source: Optional[Type[T]] = None) -> None: self._init_from_source(self.__proxy_source__) @classmethod - def _init_from_source(cls, source: Type[T]) -> None: + def _init_from_source(cls, source: type[T]) -> None: # source must have metaclass ABCMeta abstractmethods = getattr(source, "__abstractmethods__", None) if abstractmethods is None: @@ -219,7 +216,7 @@ def _init_from_source(cls, source: Type[T]) -> None: @classmethod def _generate_proxy_method( - cls, source: Type[T], method_name: str + cls, source: type[T], method_name: str ) -> Callable: @wraps(getattr(source, method_name)) def _classmethod(self: Proxy[T], *args: Any, **kwargs: Any) -> Any: @@ -233,8 +230,8 @@ def _classmethod(self: Proxy[T], *args: Any, **kwargs: Any) -> Any: def __init__( self, local: Callable[..., T], - args: Optional[Tuple] = None, - kwargs: Optional[Dict] = None, + args: Optional[tuple] = None, + kwargs: Optional[dict] = None, name: Optional[str] = None, cache: bool = False, __doc__: Optional[str] = None, @@ -277,7 +274,7 @@ def __module__(self) -> str: def __doc__(self) -> Optional[str]: return cast(str, self._get_current_object().__doc__) - def _get_class(self) -> Type[T]: + def _get_class(self) -> type[T]: return self._get_current_object().__class__ @property @@ -285,7 +282,7 @@ def __class__(self) -> Any: return self._get_class() @__class__.setter - def __class__(self, t: Type[T]) -> None: + def __class__(self, t: type[T]) -> None: raise NotImplementedError() def _get_current_object(self) -> T: @@ -302,7 +299,7 @@ def _get_current_object(self) -> T: def __evaluate__( self, - _clean: Tuple[str, ...] = ( + _clean: tuple[str, ...] 
= ( "_Proxy__local", "_Proxy__args", "_Proxy__kwargs", @@ -342,7 +339,7 @@ def __maybe_evaluate__(self) -> T: return self._get_current_object() @property - def __dict__(self) -> Dict[str, Any]: # type: ignore + def __dict__(self) -> dict[str, Any]: # type: ignore try: return self._get_current_object().__dict__ except RuntimeError as err: # pragma: no cover @@ -363,7 +360,7 @@ def __bool__(self) -> bool: __nonzero__ = __bool__ # Py2 - def __dir__(self) -> List[str]: + def __dir__(self) -> list[str]: try: return dir(self._get_current_object()) except RuntimeError: # pragma: no cover @@ -392,8 +389,8 @@ def __str__(self) -> str: def __hash__(self) -> int: return hash(self._get_current_object()) - def __reduce__(self) -> Tuple: - return cast(Tuple, self._get_current_object().__reduce__()) + def __reduce__(self) -> tuple: + return cast(tuple, self._get_current_object().__reduce__()) class AwaitableRole(Awaitable[T]): @@ -426,7 +423,7 @@ def send(self, value: T_contra) -> T_co: def throw( self, - typ: Type[BaseException], + typ: type[BaseException], val: Optional[BaseException] = None, tb: Optional[TracebackType] = None, ) -> T_co: @@ -482,21 +479,21 @@ def _get_generator(self) -> AsyncGenerator[T_co, T_contra]: obj = self._get_current_object() # type: ignore return cast(AsyncGenerator[T_co, T_contra], obj) - def __anext__(self) -> Awaitable[T_co]: + def __anext__(self) -> Coroutine[Any, Any, T_co]: return self._get_generator().__anext__() - def asend(self, value: T_contra) -> Awaitable[T_co]: + def asend(self, value: T_contra) -> Coroutine[Any, Any, T_co]: return self._get_generator().asend(value) def athrow( self, - typ: Type[BaseException], + typ: type[BaseException], val: Optional[BaseException] = None, tb: Optional[TracebackType] = None, - ) -> Awaitable[T_co]: + ) -> Coroutine[Any, Any, T_co]: return self._get_generator().athrow(typ, val, tb) - def aclose(self) -> Awaitable[None]: + def aclose(self) -> Coroutine[Any, Any, None]: return self._get_generator().aclose() def __aiter__(self) -> AsyncGenerator[T_co, T_contra]: @@ -517,7 +514,7 @@ def _get_sequence(self) -> Sequence[T_co]: return cast(Sequence[T_co], obj) @overload - def __getitem__(self, i: int) -> T_co: ... + def __getitem__(self, s: int) -> T_co: ... @overload def __getitem__(self, s: slice) -> MutableSequence[T_co]: ... @@ -559,13 +556,13 @@ def insert(self, index: int, object: T) -> None: self._get_sequence().insert(index, object) @overload - def __setitem__(self, i: int, o: T) -> None: ... + def __setitem__(self, s: int, o: T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[T]) -> None: ... - def __setitem__(self, index_or_slice: Any, o: Any) -> None: - self._get_sequence().__setitem__(index_or_slice, o) + def __setitem__(self, s: Any, o: Any) -> None: + self._get_sequence().__setitem__(s, o) @overload def __delitem__(self, i: int) -> None: ... 
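Most hunks in `mode/locals.py` follow a single pattern: `typing.List`/`Dict`/`Tuple`/`Type` become the builtin generics `list`/`dict`/`tuple`/`type` (PEP 585), and the container ABCs move to `collections.abc`. As an editorial aside (not part of the patch, function names made up for illustration), this is the target annotation style on Python 3.9+:

```python
# Editorial sketch of the annotation style this migration targets:
# PEP 585 builtin generics plus collections.abc ABCs.
from collections.abc import Mapping, Sequence
from typing import Optional


# Before (typing aliases, deprecated since Python 3.9):
#     def summarize(errors: List[Type[BaseException]]) -> Dict[str, int]: ...
def summarize(errors: list[type[BaseException]]) -> dict[str, int]:
    """Count exception classes by name using builtin generics only."""
    counts: dict[str, int] = {}
    for exc_type in errors:
        counts[exc_type.__name__] = counts.get(exc_type.__name__, 0) + 1
    return counts


def first_present(m: Mapping[str, int], keys: Sequence[str]) -> Optional[str]:
    """Return the first key found in ``m``; the ABCs keep the API duck-typed."""
    for key in keys:
        if key in m:
            return key
    return None


assert summarize([ValueError, KeyError, ValueError]) == {"ValueError": 2, "KeyError": 1}
assert first_present({"a": 1}, ["b", "a"]) == "a"
```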
@@ -601,35 +598,35 @@ class MutableSequenceProxy( """Proxy to `typing.MutableSequence` object.""" -class SetRole(AbstractSet[T_co]): - """Role/Mixin for `typing.AbstractSet` proxy methods.""" +class SetRole(Set[T_co]): + """Role/Mixin for `collections.abc.Set` proxy methods.""" - def _get_set(self) -> AbstractSet[T_co]: + def _get_set(self) -> Set[T_co]: obj = self._get_current_object() # type: ignore - return cast(AbstractSet[T_co], obj) + return cast(Set[T_co], obj) - def __le__(self, s: AbstractSet[Any]) -> bool: + def __le__(self, s: Set[Any]) -> bool: return self._get_set().__le__(s) - def __lt__(self, s: AbstractSet[Any]) -> bool: + def __lt__(self, s: Set[Any]) -> bool: return self._get_set().__lt__(s) - def __gt__(self, s: AbstractSet[Any]) -> bool: + def __gt__(self, s: Set[Any]) -> bool: return self._get_set().__gt__(s) - def __ge__(self, s: AbstractSet[Any]) -> bool: + def __ge__(self, s: Set[Any]) -> bool: return self._get_set().__ge__(s) - def __and__(self, s: AbstractSet[Any]) -> AbstractSet[T_co]: + def __and__(self, s: Set[Any]) -> Set[T_co]: return self._get_set().__and__(s) - def __or__(self, s: AbstractSet[T]) -> AbstractSet[Union[T_co, T]]: + def __or__(self, s: Set[T]) -> Set[Union[T_co, T]]: return self._get_set().__or__(s) - def __sub__(self, s: AbstractSet[Any]) -> AbstractSet[T_co]: + def __sub__(self, s: Set[Any]) -> Set[T_co]: return self._get_set().__sub__(s) - def __xor__(self, s: AbstractSet[T]) -> AbstractSet[Union[T_co, T]]: + def __xor__(self, s: Set[T]) -> Set[Union[T_co, T]]: return self._get_set().__xor__(s) def isdisjoint(self, s: Iterable[Any]) -> bool: @@ -645,8 +642,8 @@ def __len__(self) -> int: return self._get_set().__len__() -class SetProxy(Proxy[AbstractSet[T_co]], SetRole[T_co]): - """Proxy to `typing.AbstractSet` object.""" +class SetProxy(Proxy[Set[T_co]], SetRole[T_co]): + """Proxy to `collections.abc.Set` object.""" class MutableSetRole(SetRole[T], MutableSet[T]): @@ -671,16 +668,16 @@ def pop(self) -> T: def remove(self, element: T) -> None: self._get_set().remove(element) - def __ior__(self, s: AbstractSet[S]) -> MutableSet[Union[T, S]]: + def __ior__(self, s: Set[S]) -> MutableSet[Union[T, S]]: return self._get_set().__ior__(s) - def __iand__(self, s: AbstractSet[Any]) -> MutableSet[T]: + def __iand__(self, s: Set[Any]) -> MutableSet[T]: return self._get_set().__iand__(s) - def __ixor__(self, s: AbstractSet[S]) -> MutableSet[Union[T, S]]: + def __ixor__(self, s: Set[S]) -> MutableSet[Union[T, S]]: return self._get_set().__ixor__(s) - def __isub__(self, s: AbstractSet[Any]) -> MutableSet[T]: + def __isub__(self, s: Set[Any]) -> MutableSet[T]: return self._get_set().__isub__(s) @@ -688,12 +685,12 @@ class MutableSetProxy(Proxy[MutableSet[T_co]], MutableSetRole[T_co]): """Proxy to `typing.MutableSet` object.""" -class ContextManagerRole(ContextManager[T]): +class ContextManagerRole(AbstractContextManager[T]): """Role/Mixin for `typing.ContextManager` proxy methods.""" - def _get_context(self) -> ContextManager[T]: + def _get_context(self) -> AbstractContextManager[T]: obj = self._get_current_object() # type: ignore - return cast(ContextManager[T], obj) + return cast(AbstractContextManager[T], obj) def __enter__(self) -> Any: return self._get_context().__enter__() @@ -702,30 +699,32 @@ def __exit__(self, *exc_info: Any) -> Any: return self._get_context().__exit__(*exc_info) -class ContextManagerProxy(Proxy[ContextManager[T]], ContextManagerRole[T]): - """Proxy to `typing.ContextManager` object.""" +class ContextManagerProxy( + 
Proxy[AbstractContextManager[T]], ContextManagerRole[T] +): + """Proxy to `contextlib.AbstractContextManager` object.""" -class AsyncContextManagerRole(AsyncContextManager[T_co]): - """Role/Mixin for `typing.AsyncContextManager` proxy methods.""" +class AsyncContextManagerRole(AbstractAsyncContextManager[T_co]): + """Role/Mixin for `contextlib.AbstractAsyncContextManager` proxy methods.""" - def __aenter__(self) -> Awaitable[T_co]: + def __aenter__(self) -> Coroutine[Any, Any, T_co]: obj = self._get_current_object() # type: ignore - return cast(Awaitable[T_co], obj.__aenter__()) + return obj.__aenter__() def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], - ) -> Awaitable[Optional[bool]]: + ) -> Coroutine[Any, Any, Optional[bool]]: obj = self._get_current_object() # type: ignore val = obj.__aexit__(exc_type, exc_value, traceback) - return cast(Awaitable[Optional[bool]], val) + return cast(Coroutine[Any, Any, Optional[bool]], val) class AsyncContextManagerProxy( - Proxy[AsyncContextManager[T_co]], AsyncContextManagerRole[T_co] + Proxy[AbstractAsyncContextManager[T_co]], AsyncContextManagerRole[T_co] ): """Proxy to `typing.AsyncContextManager` object.""" @@ -749,10 +748,10 @@ def get(self, k: KT, default: Union[VT_co, T]) -> Union[VT_co, T]: ... def get(self, *args: Any, **kwargs: Any) -> Any: return self._get_mapping().get(*args, **kwargs) - def items(self) -> AbstractSet[Tuple[KT, VT_co]]: + def items(self) -> Set[tuple[KT, VT_co]]: return self._get_mapping().items() - def keys(self) -> AbstractSet[KT]: + def keys(self) -> Set[KT]: return self._get_mapping().keys() def values(self) -> ValuesView[VT_co]: @@ -797,7 +796,7 @@ def pop(self, k: KT, default: Union[VT, T] = ...) -> Union[VT, T]: ... def pop(self, *args: Any, **kwargs: Any) -> Any: return self._get_mapping().pop(*args, **kwargs) - def popitem(self) -> Tuple[KT, VT]: + def popitem(self) -> tuple[KT, VT]: return self._get_mapping().popitem() def setdefault(self, k: KT, *args: Any) -> VT: @@ -807,7 +806,7 @@ def setdefault(self, k: KT, *args: Any) -> VT: def update(self, __m: Mapping[KT, VT], **kwargs: VT) -> None: ... @overload - def update(self, __m: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: ... + def update(self, __m: Iterable[tuple[KT, VT]], **kwargs: VT) -> None: ... @overload def update(self, **kwargs: VT) -> None: ... 
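The `AsyncGeneratorRole` and `AsyncContextManagerRole` hunks above also tighten return annotations from `Awaitable[...]` to `Coroutine[Any, Any, ...]`, matching the coroutine objects the delegated `__anext__`/`asend`/`__aenter__` calls actually return. Because `Coroutine` is a subtype of `Awaitable`, callers that only `await` the result are unaffected. A hedged sketch with illustrative names (not from the patch):

```python
import asyncio
from collections.abc import AsyncGenerator, Awaitable, Coroutine
from typing import Any


async def numbers() -> AsyncGenerator[int, None]:
    for i in range(3):
        yield i


def next_of(gen: AsyncGenerator[int, None]) -> Coroutine[Any, Any, int]:
    # Delegation keeps the precise coroutine type instead of widening to
    # Awaitable, mirroring the proxy methods above.
    return gen.__anext__()


async def main() -> None:
    gen = numbers()
    assert await next_of(gen) == 0          # a Coroutine is awaitable
    pending: Awaitable[int] = next_of(gen)  # and still assignable to Awaitable
    assert await pending == 1
    await gen.aclose()


asyncio.run(main())
```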
diff --git a/mode/loop/__init__.py b/mode/loop/__init__.py index d964cbdf..744119d1 100644 --- a/mode/loop/__init__.py +++ b/mode/loop/__init__.py @@ -59,7 +59,8 @@ """ import importlib -from typing import Mapping, Optional +from collections.abc import Mapping +from typing import Optional __all__ = ["LOOPS", "use"] diff --git a/mode/proxy.py b/mode/proxy.py index 88d97840..b5adfda0 100644 --- a/mode/proxy.py +++ b/mode/proxy.py @@ -4,7 +4,8 @@ """ import abc -from typing import Any, AsyncContextManager, ContextManager, Optional +from contextlib import AbstractAsyncContextManager, AbstractContextManager +from typing import Any, Optional from .services import ServiceBase from .types import ServiceT @@ -44,10 +45,12 @@ def add_dependency(self, service: ServiceT) -> ServiceT: async def add_runtime_dependency(self, service: ServiceT) -> ServiceT: return await self._service.add_runtime_dependency(service) - async def add_async_context(self, context: AsyncContextManager) -> Any: + async def add_async_context( + self, context: AbstractAsyncContextManager + ) -> Any: return await self._service.add_async_context(context) - def add_context(self, context: ContextManager) -> Any: + def add_context(self, context: AbstractContextManager) -> Any: return self._service.add_context(context) async def start(self) -> None: diff --git a/mode/services.py b/mode/services.py index eacd7187..5f77b838 100644 --- a/mode/services.py +++ b/mode/services.py @@ -3,33 +3,26 @@ import asyncio import logging import sys -from contextlib import AsyncExitStack, ExitStack -from datetime import tzinfo -from functools import wraps -from time import monotonic, perf_counter -from types import TracebackType -from typing import ( - Any, - AsyncContextManager, +from collections.abc import ( AsyncIterator, Awaitable, - Callable, - ClassVar, - ContextManager, Coroutine, - Dict, Iterable, - List, Mapping, MutableSequence, - NamedTuple, - Optional, Sequence, - Set, - Type, - Union, - cast, ) +from contextlib import ( + AbstractAsyncContextManager, + AbstractContextManager, + AsyncExitStack, + ExitStack, +) +from datetime import tzinfo +from functools import wraps +from time import monotonic, perf_counter +from types import TracebackType +from typing import Any, Callable, ClassVar, NamedTuple, Optional, Union, cast from .timers import Timer from .types import DiagT, ServiceT @@ -56,8 +49,8 @@ class WaitResults(NamedTuple): - done: List[WaitArgT] - results: List[Any] + done: list[WaitArgT] + results: list[Any] stopped: bool @@ -119,7 +112,7 @@ async def __aenter__(self) -> ServiceT: async def __aexit__( self, - exc_type: Optional[Type[BaseException]] = None, + exc_type: Optional[type[BaseException]] = None, exc_val: Optional[BaseException] = None, exc_tb: Optional[TracebackType] = None, ) -> Optional[bool]: @@ -357,7 +350,7 @@ class Service(ServiceBase, ServiceCallbacks): """ abstract: ClassVar[bool] = True - Diag: Type[DiagT] = Diag + Diag: type[DiagT] = Diag #: Set to True if .stop must wait for the shutdown flag to be set. wait_for_shutdown = False @@ -401,11 +394,11 @@ class Service(ServiceBase, ServiceCallbacks): #: Note: Unlike ``add_dependency`` these futures will not be # restarted with the service: if you want that to happen make sure # calling service.start() again will add the future again. - _futures: Set[asyncio.Future] + _futures: set[asyncio.Future] #: The ``@Service.task`` decorator adds names of attributes #: that are ServiceTasks to this list (which is a class variable). 
- _tasks: ClassVar[Optional[Dict[str, Set[str]]]] = None + _tasks: ClassVar[Optional[dict[str, set[str]]]] = None @classmethod def from_awaitable( @@ -548,7 +541,7 @@ def _init_subclass_tasks(cls) -> None: clsid = cls._get_class_id() if cls._tasks is None: cls._tasks = {} - tasks: Set[str] = set() + tasks: set[str] = set() for base in iter_mro_reversed(cls, stop=Service): tasks |= { attr_name @@ -558,7 +551,7 @@ def _init_subclass_tasks(cls) -> None: cls._tasks[clsid] = tasks def _get_tasks(self) -> Iterable[ServiceTask]: - seen: Set[ServiceTask] = set() + seen: set[ServiceTask] = set() cls = type(self) if cls._tasks: for attr_name in cls._tasks[cls._get_class_id()]: @@ -640,21 +633,23 @@ async def remove_dependency(self, service: ServiceT) -> ServiceT: service.beacon.detach(self.beacon) return service - async def add_async_context(self, context: AsyncContextManager) -> Any: - if isinstance(context, AsyncContextManager): + async def add_async_context( + self, context: AbstractAsyncContextManager + ) -> Any: + if isinstance(context, AbstractAsyncContextManager): return await self.async_exit_stack.enter_async_context(context) - elif isinstance(context, ContextManager): # type: ignore + elif isinstance(context, AbstractContextManager): # type: ignore raise TypeError( "Use `self.add_context(ctx)` for non-async context" ) raise TypeError(f"Not a context/async context: {type(context)!r}") - def add_context(self, context: ContextManager) -> Any: - if isinstance(context, AsyncContextManager): + def add_context(self, context: AbstractContextManager) -> Any: + if isinstance(context, AbstractAsyncContextManager): raise TypeError( "Use `await self.add_async_context(ctx)` for async context" ) - elif isinstance(context, ContextManager): + elif isinstance(context, AbstractContextManager): return self.exit_stack.enter_context(context) raise TypeError(f"Not a context/async context: {type(context)!r}") @@ -774,10 +769,10 @@ async def wait_first( } futures[stopped] = asyncio.ensure_future(stopped.wait(), loop=loop) futures[crashed] = asyncio.ensure_future(crashed.wait(), loop=loop) - done: Set[asyncio.Future] - pending: Set[asyncio.Future] + done: set[asyncio.Future] + _pending: set[asyncio.Future] try: - done, pending = await asyncio.wait( + done, _pending = await asyncio.wait( futures.values(), return_when=asyncio.FIRST_COMPLETED, timeout=timeout, @@ -785,8 +780,8 @@ async def wait_first( for f in done: if f.done() and f.exception() is not None: f.result() # propagate exceptions - winners: List[WaitArgT] = [] - results: List[Any] = [] + winners: list[WaitArgT] = [] + results: list[Any] = [] for coro, fut in futures.items(): if fut.done(): winners.append(coro) @@ -905,7 +900,7 @@ async def crash(self, reason: BaseException) -> None: # if the service has no supervisor we go ahead # and mark parent nodes as crashed as well. 
root = self.beacon.root - seen: Set[NodeT] = set() + seen: set[NodeT] = set() for node in self.beacon.walk(): if node in seen: self.log.warning( diff --git a/mode/signals.py b/mode/signals.py index c95bbafc..96712ea1 100644 --- a/mode/signals.py +++ b/mode/signals.py @@ -2,21 +2,10 @@ import asyncio from collections import defaultdict +from collections.abc import Iterable, Mapping, MutableSet from functools import partial from types import MethodType -from typing import ( - Any, - Callable, - Iterable, - Mapping, - MutableSet, - Optional, - Set, - Tuple, - Type, - cast, - no_type_check, -) +from typing import Any, Callable, Optional, Union, cast, no_type_check from weakref import ReferenceType, WeakMethod, ref from .types.signals import ( @@ -27,7 +16,6 @@ SignalT, SyncSignalT, T, - T_contra, ) from .utils.futures import maybe_async @@ -44,11 +32,11 @@ def __init__( self, *, name: Optional[str] = None, - owner: Optional[Type] = None, + owner: Optional[type] = None, loop: Optional[asyncio.AbstractEventLoop] = None, default_sender: Any = None, receivers: Optional[MutableSet[SignalHandlerRefT]] = None, - filter_receivers: FilterReceiverMapping = None, + filter_receivers: Union[FilterReceiverMapping, None] = None, ) -> None: self.name = name or "" self.owner = owner @@ -85,7 +73,7 @@ def _with_default_sender(self, sender: Any = None) -> BaseSignalT: filter_receivers=self._filter_receivers, ) - def __set_name__(self, owner: Type, name: str) -> None: + def __set_name__(self, owner: type, name: str) -> None: """If signal is an attribute of a class, we use __set_name__ to show the location of the signal in __repr__. Examples: @@ -95,25 +83,28 @@ class X(Service): starting = Signal() >>> X.starting - + ``` """ if not self.name: self.name = name self.owner = owner - def unpack_sender_from_args(self, *args: Any) -> Tuple[T, Tuple[Any, ...]]: + def unpack_sender_from_args(self, *args: Any) -> tuple[T, tuple[Any, ...]]: sender = self.default_sender if sender is None: - if not args: + if not args or len(args) == 0: raise TypeError("Signal.send requires at least one argument") + if len(args) > 1: sender, *args = args # type: ignore else: sender, args = args[0], () return sender, args - def connect(self, fun: SignalHandlerT = None, **kwargs: Any) -> Callable: + def connect( + self, fun: Union[SignalHandlerT, None] = None, **kwargs: Any + ) -> Callable: if fun is not None: return self._connect(fun, **kwargs) return partial(self._connect, **kwargs) @@ -145,7 +136,7 @@ def disconnect( except ValueError: pass - def iter_receivers(self, sender: T_contra) -> Iterable[SignalHandlerT]: + def iter_receivers(self, sender: object) -> Iterable[SignalHandlerT]: if self._receivers or self._filter_receivers: r = self._update_receivers(self._receivers) if sender is not None: @@ -157,7 +148,7 @@ def iter_receivers(self, sender: T_contra) -> Iterable[SignalHandlerT]: def _update_receivers( self, r: MutableSet[SignalHandlerRefT] - ) -> Set[SignalHandlerT]: + ) -> set[SignalHandlerT]: live_receivers, dead_refs = self._get_live_receivers(r) for href in dead_refs: r.discard(href) @@ -165,9 +156,9 @@ def _update_receivers( def _get_live_receivers( self, r: MutableSet[SignalHandlerRefT] - ) -> Tuple[Set[SignalHandlerT], Set[SignalHandlerRefT]]: - live_receivers: Set[SignalHandlerT] = set() - dead_refs: Set[SignalHandlerRefT] = set() + ) -> tuple[set[SignalHandlerT], set[SignalHandlerRefT]]: + live_receivers: set[SignalHandlerT] = set() + dead_refs: set[SignalHandlerRefT] = set() for href in r: alive, value = self._is_alive(href) 
if alive and value is not None: @@ -178,7 +169,7 @@ def _get_live_receivers( def _is_alive( self, ref: SignalHandlerRefT - ) -> Tuple[bool, Optional[SignalHandlerT]]: + ) -> tuple[bool, Optional[SignalHandlerT]]: if isinstance(ref, ReferenceType): value = ref() return value is not None, value diff --git a/mode/supervisors.py b/mode/supervisors.py index 58b8a02b..679d31ec 100644 --- a/mode/supervisors.py +++ b/mode/supervisors.py @@ -8,7 +8,8 @@ """ import asyncio -from typing import Any, Awaitable, Callable, Dict, List, Optional, Type +from collections.abc import Awaitable +from typing import Any, Callable, Optional from .exceptions import MaxRestartsExceeded from .services import Service @@ -35,7 +36,7 @@ class SupervisorStrategy(Service, SupervisorStrategyT): _please_wakeup: Optional[asyncio.Future] #: the services we manage - _services: List[ServiceT] + _services: list[ServiceT] # rate limit state _bucket: Bucket @@ -44,14 +45,14 @@ class SupervisorStrategy(Service, SupervisorStrategyT): # if we have 10 services for example, and one of the crash, # we want to know the position of the service we are restarting. # This is needed for Faust and the @app.agent(concurrency=n) feature. - _index: Dict[ServiceT, int] + _index: dict[ServiceT, int] def __init__( self, *services: ServiceT, max_restarts: Seconds = 100.0, over: Seconds = 1.0, - raises: Type[BaseException] = MaxRestartsExceeded, + raises: type[BaseException] = MaxRestartsExceeded, replacement: Optional[ Callable[[ServiceT, int], Awaitable[ServiceT]] ] = None, @@ -131,8 +132,8 @@ async def _supervisor(self) -> None: finally: self._please_wakeup = None if not self.should_stop: - to_start: List[ServiceT] = [] - to_restart: List[ServiceT] = [] + to_start: list[ServiceT] = [] + to_restart: list[ServiceT] = [] for service in services: if service.started: if not self.service_operational(service): @@ -158,18 +159,18 @@ async def on_stop(self) -> None: "Unable to stop service %r: %r", service, exc ) - async def start_services(self, services: List[ServiceT]) -> None: + async def start_services(self, services: list[ServiceT]) -> None: for service in services: await self.start_service(service) async def start_service(self, service: ServiceT) -> None: await service.maybe_start() - async def restart_services(self, services: List[ServiceT]) -> None: + async def restart_services(self, services: list[ServiceT]) -> None: for service in services: await self.restart_service(service) - async def stop_services(self, services: List[ServiceT]) -> None: + async def stop_services(self, services: list[ServiceT]) -> None: # Stop them all simultaneously. 
await asyncio.gather(*[service.stop() for service in services]) @@ -205,7 +206,7 @@ class OneForOneSupervisor(SupervisorStrategy): class OneForAllSupervisor(SupervisorStrategy): """Supervisor that restarts all services when a service crashes.""" - async def restart_services(self, services: List[ServiceT]) -> None: + async def restart_services(self, services: list[ServiceT]) -> None: # we ignore the list of actual crashed services, # and restart all of them if services: @@ -219,7 +220,7 @@ async def restart_services(self, services: List[ServiceT]) -> None: class ForfeitOneForOneSupervisor(SupervisorStrategy): """Supervisor that if a service crashes, we do not restart it.""" - async def restart_services(self, services: List[ServiceT]) -> None: + async def restart_services(self, services: list[ServiceT]) -> None: if services: self.log.critical("Giving up on crashed services: %r", services) await self.stop_services(services) @@ -228,7 +229,7 @@ async def restart_services(self, services: List[ServiceT]) -> None: class ForfeitOneForAllSupervisor(SupervisorStrategy): """If one service in the group crashes, we give up on all of them.""" - async def restart_services(self, services: List[ServiceT]) -> None: + async def restart_services(self, services: list[ServiceT]) -> None: if services: self.log.critical( "Giving up on all services in group because %r crashed", diff --git a/mode/threads.py b/mode/threads.py index 429740fa..aa6354ba 100644 --- a/mode/threads.py +++ b/mode/threads.py @@ -12,18 +12,9 @@ import sys import threading import traceback +from collections.abc import Awaitable from time import monotonic -from typing import ( - Any, - Awaitable, - Callable, - Dict, - List, - NamedTuple, - Optional, - Tuple, - Type, -) +from typing import Any, Callable, NamedTuple, Optional from .services import Service from .utils.futures import ( @@ -47,8 +38,8 @@ class QueuedMethod(NamedTuple): promise: asyncio.Future method: Callable[..., Awaitable[Any]] - args: Tuple[Any, ...] - kwargs: Dict[str, Any] + args: tuple[Any, ...] 
+ kwargs: dict[str, Any] class WorkerThread(threading.Thread): @@ -86,7 +77,7 @@ def stop(self) -> None: class ServiceThread(Service): """Service subclass running within a dedicated thread.""" - Worker: Type[WorkerThread] = WorkerThread + Worker: type[WorkerThread] = WorkerThread abstract = True wait_for_shutdown = True @@ -107,7 +98,7 @@ def __init__( executor: Any = None, loop: Optional[asyncio.AbstractEventLoop] = None, thread_loop: Optional[asyncio.AbstractEventLoop] = None, - Worker: Optional[Type[WorkerThread]] = None, + Worker: Optional[type[WorkerThread]] = None, **kwargs: Any, ) -> None: # cannot share loop between threads, so create a new one @@ -325,11 +316,11 @@ def label(self) -> str: class MethodQueue(Service): - Worker: Type[MethodQueueWorker] = MethodQueueWorker + Worker: type[MethodQueueWorker] = MethodQueueWorker _queue: asyncio.Queue _queue_ready: Event - _workers: List[MethodQueueWorker] + _workers: list[MethodQueueWorker] mundane_level = "debug" diff --git a/mode/timers.py b/mode/timers.py index 4d7a6138..1b6afdb3 100644 --- a/mode/timers.py +++ b/mode/timers.py @@ -1,9 +1,10 @@ """AsyncIO Timers.""" import asyncio +from collections.abc import AsyncIterator, Awaitable, Iterator from itertools import count from time import perf_counter -from typing import AsyncIterator, Awaitable, Callable, Iterator +from typing import Callable from .utils.logging import get_logger from .utils.times import Seconds, want_seconds diff --git a/mode/types/services.py b/mode/types/services.py index 7f69a5f4..f5fb964c 100644 --- a/mode/types/services.py +++ b/mode/types/services.py @@ -2,20 +2,14 @@ import abc import asyncio -from contextlib import AsyncExitStack, ExitStack -from typing import ( - Any, - AsyncContextManager, - Awaitable, - ContextManager, - Coroutine, - MutableMapping, - Optional, - Set, - Type, - TypeVar, - Union, +from collections.abc import Awaitable, Coroutine, MutableMapping +from contextlib import ( + AbstractAsyncContextManager, + AbstractContextManager, + AsyncExitStack, + ExitStack, ) +from typing import Any, Optional, TypeVar, Union from mode.utils.types.trees import NodeT @@ -31,7 +25,7 @@ class DiagT(abc.ABC): """Diag keeps track of a services diagnostic flags.""" - flags: Set[str] + flags: set[str] last_transition: MutableMapping[str, float] @abc.abstractmethod @@ -44,14 +38,14 @@ def set_flag(self, flag: str) -> None: ... def unset_flag(self, flag: str) -> None: ... -class ServiceT(AsyncContextManager): +class ServiceT(AbstractAsyncContextManager): """Abstract type for an asynchronous service that can be started/stopped. See Also: `mode.Service`. """ - Diag: Type[DiagT] + Diag: type[DiagT] diag: DiagT async_exit_stack: AsyncExitStack exit_stack: ExitStack @@ -66,7 +60,7 @@ class ServiceT(AsyncContextManager): def __init__( self, *, - beacon: NodeT = None, + beacon: Optional[NodeT] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> None: ... @@ -79,10 +73,12 @@ async def add_runtime_dependency( ) -> "ServiceT": ... @abc.abstractmethod - async def add_async_context(self, context: AsyncContextManager) -> Any: ... + async def add_async_context( + self, context: AbstractAsyncContextManager + ) -> Any: ... @abc.abstractmethod - def add_context(self, context: ContextManager) -> Any: ... + def add_context(self, context: AbstractContextManager) -> Any: ... @abc.abstractmethod async def start(self) -> None: ... 
diff --git a/mode/types/signals.py b/mode/types/signals.py index 40446cb9..a67122c1 100644 --- a/mode/types/signals.py +++ b/mode/types/signals.py @@ -3,18 +3,8 @@ import abc import asyncio import typing -from typing import ( - Any, - Awaitable, - Callable, - Generic, - MutableMapping, - MutableSet, - Optional, - Type, - TypeVar, - Union, -) +from collections.abc import Awaitable, MutableMapping, MutableSet +from typing import Any, Callable, Generic, Optional, TypeVar, Union from mypy_extensions import KwArg, NamedArg, VarArg @@ -50,14 +40,14 @@ class BaseSignalT(Generic[T]): """Base type for all signals.""" name: str - owner: Optional[Type] + owner: Optional[type] @abc.abstractmethod def __init__( self, *, name: Optional[str] = None, - owner: Optional[Type] = None, + owner: Optional[type] = None, loop: Optional[asyncio.AbstractEventLoop] = None, default_sender: Any = None, receivers: Optional[MutableSet[SignalHandlerRefT]] = None, diff --git a/mode/types/supervisors.py b/mode/types/supervisors.py index f1c00996..0428a52a 100644 --- a/mode/types/supervisors.py +++ b/mode/types/supervisors.py @@ -2,7 +2,8 @@ import abc import typing -from typing import Any, Awaitable, Callable, Optional, Type +from collections.abc import Awaitable +from typing import Any, Callable, Optional from mode.utils.times import Seconds @@ -23,7 +24,7 @@ class SupervisorStrategyT(ServiceT): max_restarts: float over: float - raises: Type[BaseException] + raises: type[BaseException] @abc.abstractmethod def __init__( @@ -31,8 +32,8 @@ def __init__( *services: ServiceT, max_restarts: Seconds = 100.0, over: Seconds = 1.0, - raises: Optional[Type[BaseException]] = None, - replacement: ReplacementT = None, + raises: Optional[type[BaseException]] = None, + replacement: Optional[ReplacementT] = None, **kwargs: Any, ) -> None: self.replacement: Optional[ReplacementT] = replacement diff --git a/mode/utils/aiter.py b/mode/utils/aiter.py index f7cf0dd5..c2c5c9b0 100644 --- a/mode/utils/aiter.py +++ b/mode/utils/aiter.py @@ -2,19 +2,9 @@ import collections.abc import sys +from collections.abc import AsyncIterable, AsyncIterator, Iterable, Iterator from functools import singledispatch -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Iterable, - Iterator, - List, - Optional, - Tuple, - TypeVar, - cast, -) +from typing import Any, Optional, TypeVar, cast __all__ = [ "aenumerate", @@ -31,7 +21,7 @@ async def aenumerate( it: AsyncIterable[T], start: int = 0 -) -> AsyncIterator[Tuple[int, T]]: +) -> AsyncIterator[tuple[int, T]]: """``async for`` version of ``enumerate``.""" i = start async for item in it: @@ -59,7 +49,7 @@ def __repr__(self) -> str: @singledispatch -def aiter(it: Any) -> AsyncIterator[T]: +def aiter(it: Any) -> AsyncIterator[object]: """Create iterator from iterable. Notes: @@ -136,7 +126,7 @@ def __aiter__(self) -> AsyncIterator[int]: return _ARangeIterator(self, iter(self._range)) -async def alist(ait: AsyncIterator[T]) -> List[T]: +async def alist(ait: AsyncIterator[T]) -> list[T]: """Convert async generator to list.""" return [x async for x in ait] @@ -158,7 +148,7 @@ async def aslice(ait: AsyncIterator[T], *slice_args: int) -> AsyncIterator[T]: return -async def chunks(it: AsyncIterable[T], n: int) -> AsyncIterable[List[T]]: +async def chunks(it: AsyncIterable[T], n: int) -> AsyncIterable[list[T]]: """Split an async iterator into chunks with `n` elements each. 
Example: diff --git a/mode/utils/collections.py b/mode/utils/collections.py index 01ecc25f..43174b6c 100644 --- a/mode/utils/collections.py +++ b/mode/utils/collections.py @@ -5,7 +5,20 @@ import threading import typing from collections import OrderedDict, UserList -from contextlib import nullcontext +from collections.abc import ( + ItemsView, + Iterable, + Iterator, + KeysView, + Mapping, + MutableMapping, + MutableSequence, + MutableSet, + Sequence, + Set, + ValuesView, +) +from contextlib import AbstractContextManager, nullcontext from heapq import ( heapify, heappop, @@ -16,28 +29,12 @@ nsmallest, ) from typing import ( - AbstractSet, Any, Callable, - ContextManager, - Dict, Generic, - ItemsView, - Iterable, - Iterator, - KeysView, - List, - Mapping, - MutableMapping, - MutableSequence, - MutableSet, Optional, - Sequence, - Set, - Tuple, TypeVar, Union, - ValuesView, cast, overload, ) @@ -74,7 +71,7 @@ class LazySettings: ... VT = TypeVar("VT") _S = TypeVar("_S") -_Setlike = Union[AbstractSet[T], Iterable[T]] +_Setlike = Union[Set[T], Iterable[T]] class Heap(MutableSequence[T]): @@ -125,22 +122,22 @@ def replace(self, item: T) -> T: """ return heapreplace(self.data, item) - def nlargest(self, n: int, key: Optional[Callable] = None) -> List[T]: + def nlargest(self, n: int, key: Optional[Callable] = None) -> list[T]: """Find the n largest elements in the dataset.""" if key is not None: return nlargest(n, self.data, key=key) else: return nlargest(n, self.data) - def nsmallest(self, n: int, key: Optional[Callable] = None) -> List[T]: + def nsmallest(self, n: int, key: Optional[Callable] = None) -> list[T]: """Find the n smallest elements in the dataset.""" if key is not None: return nsmallest(n, self.data, key=key) else: return nsmallest(n, self.data) - def insert(self, index: int, object: T) -> None: - self.data.insert(index, object) + def insert(self, index: int, value: T) -> None: + self.data.insert(index, value) def __str__(self) -> str: return str(self.data) @@ -149,7 +146,7 @@ def __repr__(self) -> str: return repr(self.data) @overload - def __getitem__(self, i: int) -> T: ... + def __getitem__(self, s: int) -> T: ... @overload def __getitem__(self, s: slice) -> MutableSequence[T]: ... @@ -158,13 +155,13 @@ def __getitem__(self, s: Any) -> Any: return self.data.__getitem__(s) @overload - def __setitem__(self, i: int, o: T) -> None: ... + def __setitem__(self, s: int, o: T) -> None: ... @overload def __setitem__(self, s: slice, o: Iterable[T]) -> None: ... - def __setitem__(self, index_or_slice: Any, o: Any) -> None: - self.data.__setitem__(index_or_slice, o) + def __setitem__(self, s: Any, o: Any) -> None: + self.data.__setitem__(s, o) @overload def __delitem__(self, i: int) -> None: ... 
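Throughout this module's hunks, `typing.AbstractSet` becomes `collections.abc.Set` (the read-only set ABC) while `typing.Set` becomes the builtin `set`; the two are not interchangeable, since the ABC accepts any set-like object whereas the builtin is a concrete type. A clarifying sketch (editorial, not from the patch):

```python
from collections.abc import Set


def common_keys(a: Set[str], b: Set[str]) -> set[str]:
    # Accept anything set-like (frozenset, dict key views, ...) but always
    # hand back a concrete builtin set.
    return {x for x in a if x in b}


assert common_keys(frozenset({"x", "y"}), {"y": 1, "z": 2}.keys()) == {"y"}
```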
@@ -249,28 +246,28 @@ class FastUserSet(MutableSet[T]): # -- Immutable Methods -- - def __and__(self, other: AbstractSet[Any]) -> MutableSet[T]: + def __and__(self, other: Set[Any]) -> MutableSet[T]: return cast(MutableSet, self.data.__and__(other)) def __contains__(self, key: Any) -> bool: return self.data.__contains__(key) - def __ge__(self, other: AbstractSet[T]) -> bool: + def __ge__(self, other: Set[T]) -> bool: return self.data.__ge__(other) def __iter__(self) -> Iterator[T]: return iter(self.data) - def __le__(self, other: AbstractSet[T]) -> bool: + def __le__(self, other: Set[T]) -> bool: return self.data.__le__(other) def __len__(self) -> int: return len(self.data) - def __or__(self, other: AbstractSet) -> AbstractSet[Union[T, T_co]]: + def __or__(self, other: Set) -> Set[Union[T, object]]: return self.data.__or__(other) - def __rand__(self, other: AbstractSet[T]) -> MutableSet[T]: + def __rand__(self, other: Set[T]) -> MutableSet[T]: return self.__and__(other) def __reduce__(self) -> tuple: @@ -282,13 +279,13 @@ def __reduce_ex__(self, protocol: Any) -> tuple: def __repr__(self) -> str: return repr(self.data) - def __ror__(self, other: AbstractSet[T]) -> MutableSet[T]: + def __ror__(self, other: Set[T]) -> MutableSet[T]: return cast(MutableSet, self.data.__ror__(other)) # type: ignore - def __rsub__(self, other: AbstractSet[T]) -> MutableSet[T]: + def __rsub__(self, other: Set[T]) -> MutableSet[T]: return cast(MutableSet, other.__rsub__(self.data)) # type: ignore - def __rxor__(self, other: AbstractSet[T]) -> MutableSet[T]: + def __rxor__(self, other: Set[T]) -> MutableSet[T]: return cast(MutableSet, other.__rxor__(self.data)) # type: ignore def __sizeof__(self) -> int: @@ -297,10 +294,10 @@ def __sizeof__(self) -> int: def __str__(self) -> str: return str(self.data) - def __sub__(self, other: AbstractSet[Any]) -> MutableSet[T_co]: + def __sub__(self, other: Set[Any]) -> MutableSet[object]: return cast(MutableSet, self.data.__sub__(other)) - def __xor__(self, other: AbstractSet) -> MutableSet[T]: + def __xor__(self, other: Set) -> MutableSet[T]: return cast(MutableSet, self.data.__xor__(other)) def copy(self) -> MutableSet[T]: @@ -315,10 +312,10 @@ def intersection(self, other: _Setlike[T]) -> MutableSet[T]: def isdisjoint(self, other: Iterable[T]) -> bool: return self.data.isdisjoint(other) - def issubset(self, other: AbstractSet[T]) -> bool: + def issubset(self, other: Set[T]) -> bool: return self.data.issubset(other) # type: ignore - def issuperset(self, other: AbstractSet[T]) -> bool: + def issuperset(self, other: Set[T]) -> bool: return self.data.issuperset(other) # type: ignore def symmetric_difference(self, other: _Setlike[T]) -> MutableSet[T]: @@ -329,24 +326,24 @@ def union(self, other: _Setlike[T]) -> MutableSet[T]: # -- Mutable Methods -- - def __iand__(self, other: AbstractSet[Any]) -> "FastUserSet": + def __iand__(self, other: Set[Any]) -> "FastUserSet": self.data.__iand__(other) return self - def __ior__(self, other: AbstractSet[_S]) -> "FastUserSet": + def __ior__(self, other: Set[_S]) -> "FastUserSet": self.data.__ior__(other) return self - def __isub__(self, other: AbstractSet[Any]) -> "FastUserSet[T]": + def __isub__(self, other: Set[Any]) -> "FastUserSet[T]": self.data.__isub__(other) return self - def __ixor__(self, other: AbstractSet[_S]) -> "FastUserSet": + def __ixor__(self, other: Set[_S]) -> "FastUserSet": self.data.__ixor__(other) return self - def add(self, element: T) -> None: - self.data.add(element) + def add(self, value: T) -> None: + 
self.data.add(value) def clear(self) -> None: self.data.clear() @@ -354,8 +351,8 @@ def clear(self) -> None: def difference_update(self, other: _Setlike[T]) -> None: self.data.difference_update(other) # type: ignore - def discard(self, element: T) -> None: - self.data.discard(element) + def discard(self, value: T) -> None: + self.data.discard(value) def intersection_update(self, other: _Setlike[T]) -> None: self.data.intersection_update(other) # type: ignore @@ -363,8 +360,8 @@ def intersection_update(self, other: _Setlike[T]) -> None: def pop(self) -> T: return self.data.pop() - def remove(self, element: T) -> None: - self.data.remove(element) + def remove(self, value: T) -> None: + self.data.remove(value) def symmetric_difference_update(self, other: _Setlike[T]) -> None: self.data.symmetric_difference_update(other) # type: ignore @@ -385,7 +382,7 @@ def _keys(self) -> Iterator[KT]: ... def _values(self) -> Iterator[VT]: ... @abc.abstractmethod - def _items(self) -> Iterator[Tuple[KT, VT]]: ... + def _items(self) -> Iterator[tuple[KT, VT]]: ... class ProxyKeysView(KeysView[KT]): @@ -408,7 +405,7 @@ class ProxyItemsView(ItemsView): def __init__(self, mapping: MappingViewProxy) -> None: self._mapping = mapping - def __iter__(self) -> Iterator[Tuple[KT, VT]]: + def __iter__(self) -> Iterator[tuple[KT, VT]]: yield from self._mapping._items() @@ -426,7 +423,7 @@ class LRUCache(FastUserDict, MutableMapping[KT, VT], MappingViewProxy): limit: Optional[int] thread_safety: bool - _mutex: ContextManager + _mutex: AbstractContextManager data: OrderedDict def __init__( @@ -451,7 +448,7 @@ def update(self, *args: Any, **kwargs: Any) -> None: for _ in range(len(data) - limit): data.popitem(last=False) - def popitem(self, *, last: bool = True) -> Tuple[KT, VT]: + def popitem(self, *, last: bool = True) -> tuple[KT, VT]: with self._mutex: return self.data.popitem(last) @@ -487,7 +484,7 @@ def _values(self) -> Iterator[VT]: def items(self) -> ItemsView[KT, VT]: return ProxyItemsView(self) - def _items(self) -> Iterator[Tuple[KT, VT]]: + def _items(self) -> Iterator[tuple[KT, VT]]: with self._mutex: for k in self: try: @@ -503,17 +500,17 @@ def incr(self, key: KT, delta: int = 1) -> int: self[key] = cast(VT, str(newval)) return newval - def _new_lock(self) -> ContextManager: + def _new_lock(self) -> AbstractContextManager: if self.thread_safety: - return cast(ContextManager, threading.RLock()) - return cast(ContextManager, nullcontext()) + return cast(AbstractContextManager, threading.RLock()) + return cast(AbstractContextManager, nullcontext()) def __getstate__(self) -> Mapping[str, Any]: d = dict(vars(self)) d.pop("_mutex") return d - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: self.__dict__ = state self._mutex = self._new_lock() @@ -527,7 +524,7 @@ def on_discard(self, value: T) -> None: ... def on_clear(self) -> None: ... - def on_change(self, added: Set[T], removed: Set[T]) -> None: ... + def on_change(self, added: set[T], removed: set[T]) -> None: ... 
def add(self, element: T) -> None: if element not in self.data: @@ -551,53 +548,53 @@ def pop(self) -> T: def raw_update(self, *args: Any, **kwargs: Any) -> None: self.data.update(*args, **kwargs) # type: ignore - def __iand__(self, other: AbstractSet[Any]) -> "FastUserSet": + def __iand__(self, other: Set[Any]) -> "FastUserSet": self.on_change(added=set(), removed=cast(Set, self).difference(other)) self.data.__iand__(other) return self - def __ior__(self, other: AbstractSet[_S]) -> "FastUserSet": - self.on_change(added=cast(Set, other).difference(self), removed=set()) + def __ior__(self, other: Set[_S]) -> "FastUserSet": + self.on_change(added=cast(set, other).difference(self), removed=set()) self.data.__ior__(other) return self - def __isub__(self, other: AbstractSet[Any]) -> "FastUserSet": + def __isub__(self, other: Set[Any]) -> "FastUserSet": self.on_change( - added=set(), removed=cast(Set, self.data).intersection(other) + added=set(), removed=cast(set, self.data).intersection(other) ) self.data.__isub__(other) return self - def __ixor__(self, other: AbstractSet[_S]) -> "FastUserSet": + def __ixor__(self, other: Set[_S]) -> "FastUserSet": self.on_change( - added=cast(Set, other).difference(self.data), - removed=cast(Set, self.data).intersection(other), + added=cast(set, other).difference(self.data), + removed=cast(set, self.data).intersection(other), ) self.data.__ixor__(other) return self def difference_update(self, other: _Setlike[T]) -> None: - data = cast(Set, self.data) + data = cast(set, self.data) self.on_change(added=set(), removed=data.intersection(other)) data.difference_update(other) def intersection_update(self, other: _Setlike[T]) -> None: - data = cast(Set, self.data) - self.on_change(added=set(), removed=cast(Set, self).difference(other)) + data = cast(set, self.data) + self.on_change(added=set(), removed=cast(set, self).difference(other)) data.intersection_update(other) def symmetric_difference_update(self, other: _Setlike[T]) -> None: - data = cast(Set, self.data) + data = cast(set, self.data) self.on_change( - added=cast(Set, other).difference(self.data), + added=cast(set, other).difference(self.data), removed=data.intersection(other), ) data.symmetric_difference_update(other) def update(self, other: _Setlike[T]) -> None: # union update - self.on_change(added=cast(Set, other).difference(self), removed=set()) - cast(Set, self.data).update(other) + self.on_change(added=cast(set, other).difference(self), removed=set()) + cast(set, self.data).update(other) class ManagedUserDict(FastUserDict[KT, VT]): @@ -736,7 +733,7 @@ def _values(self) -> Iterator[str]: for key in self: yield getattr(obj, key) - def _items(self) -> Iterator[Tuple[str, VT]]: + def _items(self) -> Iterator[tuple[str, VT]]: obj = self.obj for key in self: yield key, getattr(obj, key) diff --git a/mode/utils/contexts.py b/mode/utils/contexts.py index 0410caee..b505dfa3 100644 --- a/mode/utils/contexts.py +++ b/mode/utils/contexts.py @@ -2,7 +2,7 @@ from contextlib import AbstractAsyncContextManager from types import TracebackType -from typing import Any, Optional, Type +from typing import Any, Optional __all__ = ["asyncnullcontext"] @@ -20,7 +20,7 @@ async def __aenter__(self) -> Any: async def __aexit__( self, - exc_type: Optional[Type[BaseException]] = None, + exc_type: Optional[type[BaseException]] = None, exc_val: Optional[BaseException] = None, exc_tb: Optional[TracebackType] = None, ) -> None: ... 
diff --git a/mode/utils/futures.py b/mode/utils/futures.py index 0888e351..e1bfd4bf 100644 --- a/mode/utils/futures.py +++ b/mode/utils/futures.py @@ -2,7 +2,7 @@ import asyncio from inspect import isawaitable -from typing import Any, Callable, NoReturn, Optional, Set, Type, Union +from typing import Any, Callable, NoReturn, Optional, Union # These used to be here, now moved to .queues from .queues import FlowControlEvent, FlowControlQueue # noqa: F401 @@ -11,7 +11,7 @@ from asyncio import all_tasks # type: ignore except ImportError: # pragma: no cover - def all_tasks(loop: asyncio.AbstractEventLoop) -> Set[asyncio.Task]: + def all_tasks(loop: asyncio.AbstractEventLoop) -> set[asyncio.Task]: return asyncio.Task.all_tasks(loop=loop) @@ -108,7 +108,7 @@ def __call__(self, *args: Any, **kwargs: Any) -> NoReturn: # here to support inspect.signature raise NotImplementedError() - def __get__(self, obj: Any, type: Optional[Type] = None) -> Any: + def __get__(self, obj: Any, type: Optional[type] = None) -> Any: if obj is None: return self try: diff --git a/mode/utils/graphs/formatter.py b/mode/utils/graphs/formatter.py index 53fed310..ac20880c 100644 --- a/mode/utils/graphs/formatter.py +++ b/mode/utils/graphs/formatter.py @@ -1,6 +1,7 @@ """Formatting graphs.""" -from typing import Any, ClassVar, Dict, Mapping, Optional +from collections.abc import Mapping +from typing import Any, ClassVar, Optional from mode.utils.objects import label from mode.utils.types.graphs import _T, GraphFormatterT @@ -29,7 +30,7 @@ class DOT: NODE = '{INp}"{0}" [{attrs}]' EDGE = '{INp}"{0}" {dir} "{1}" [{attrs}]' ATTRSEP = ", " - DIRS: ClassVar[Dict] = {"graph": "--", "digraph": "->"} + DIRS: ClassVar[dict] = {"graph": "--", "digraph": "->"} TAIL = "{IN}}}" @@ -42,7 +43,7 @@ class GraphFormatter(GraphFormatterT): _head = DOT.HEAD.strip() _tail = DOT.TAIL.strip() _attrsep = DOT.ATTRSEP - _dirs: ClassVar[Dict] = dict(DOT.DIRS) + _dirs: ClassVar[dict] = dict(DOT.DIRS) scheme: Mapping[str, Any] = { "shape": "box", @@ -104,13 +105,13 @@ def head(self, **attrs: Any) -> str: def tail(self) -> str: return self.FMT(self._tail) - def label(self, obj: _T) -> str: + def label(self, obj: object) -> str: return label(obj) - def node(self, obj: _T, **attrs: Any) -> str: + def node(self, obj: object, **attrs: Any) -> str: return self.draw_node(obj, self.node_scheme, attrs) - def terminal_node(self, obj: _T, **attrs: Any) -> str: + def terminal_node(self, obj: object, **attrs: Any) -> str: return self.draw_node(obj, self.term_scheme, attrs) def edge(self, a: _T, b: _T, **attrs: Any) -> str: @@ -141,7 +142,7 @@ def draw_edge( def draw_node( self, - obj: _T, + obj: object, scheme: Optional[Mapping] = None, attrs: Optional[Mapping] = None, ) -> str: diff --git a/mode/utils/graphs/graph.py b/mode/utils/graphs/graph.py index 17a84ffa..5e787d38 100644 --- a/mode/utils/graphs/graph.py +++ b/mode/utils/graphs/graph.py @@ -1,21 +1,15 @@ """Data structure: Dependency graph.""" -from functools import partial -from typing import ( - IO, - Any, - Callable, - Counter, +from collections import Counter +from collections.abc import ( ItemsView, Iterable, Iterator, - List, MutableMapping, - Optional, Sequence, - Set, - cast, ) +from functools import partial +from typing import IO, Any, Callable, Optional, cast from mode.utils.types.graphs import _T, DependencyGraphT, GraphFormatterT @@ -44,14 +38,14 @@ class DependencyGraph(DependencyGraphT): def __init__( self, it: Optional[Iterable] = None, - formatter: GraphFormatterT[_T] = None, + formatter: 
Optional[GraphFormatterT[_T]] = None, ) -> None: self.formatter = formatter or GraphFormatter() self.adjacent = {} if it is not None: self.update(it) - def add_arc(self, obj: _T) -> None: + def add_arc(self, obj: object) -> None: """Add an object to the graph.""" self.adjacent.setdefault(obj, []) @@ -88,7 +82,7 @@ def topsort(self) -> Sequence: graph.add_edge(node_c, successor_c) return [t[0] for t in graph._khan62()] - def valency_of(self, obj: _T) -> int: + def valency_of(self, obj: object) -> int: """Return the valency (degree) of a vertex in the graph.""" try: sizes = [len(self[obj])] @@ -141,9 +135,9 @@ def _tarjan72(self) -> Sequence: See Also: :wikipedia:`Tarjan%27s_strongly_connected_components_algorithm` """ - result: List = [] - stack: List = [] - low: List = [] + result: list = [] + stack: list = [] + low: list = [] def visit(node: Any) -> None: if node in low: @@ -169,7 +163,9 @@ def visit(node: Any) -> None: return result - def to_dot(self, fh: IO, *, formatter: GraphFormatterT[_T] = None) -> None: + def to_dot( + self, fh: IO, *, formatter: Optional[GraphFormatterT[_T]] = None + ) -> None: """Convert the graph to DOT format. Arguments: @@ -177,7 +173,7 @@ def to_dot(self, fh: IO, *, formatter: GraphFormatterT[_T] = None) -> None: formatter (celery.utils.graph.GraphFormatter): Custom graph formatter to use. """ - seen: Set = set() + seen: set = set() draw = formatter or self.formatter write = partial(print, file=fh) @@ -199,13 +195,13 @@ def if_not_seen(fun: Callable[[Any], str], obj: Any) -> None: def __iter__(self) -> Iterator: return iter(self.adjacent) - def __getitem__(self, node: _T) -> Any: + def __getitem__(self, node: object) -> Any: return self.adjacent[node] def __len__(self) -> int: return len(self.adjacent) - def __contains__(self, obj: _T) -> bool: + def __contains__(self, obj: object) -> bool: return obj in self.adjacent def items(self) -> ItemsView: @@ -215,7 +211,7 @@ def __repr__(self) -> str: return "\n".join(self._repr_node(N) for N in self) def _repr_node( - self, obj: _T, level: int = 1, fmt: str = "{0}({1})" + self, obj: object, level: int = 1, fmt: str = "{0}({1})" ) -> str: output = [fmt.format(obj, self.valency_of(obj))] if obj in self: diff --git a/mode/utils/imports.py b/mode/utils/imports.py index 5a0bd5b5..36d0422f 100644 --- a/mode/utils/imports.py +++ b/mode/utils/imports.py @@ -5,26 +5,32 @@ import sys import typing import warnings +from collections.abc import ( + Generator, + Iterable, + Iterator, + Mapping, + MutableMapping, +) from contextlib import contextmanager, suppress from types import ModuleType from typing import ( Any, Callable, - Generator, Generic, - Iterable, - Iterator, - Mapping, - MutableMapping, NamedTuple, Optional, - Set, - Type, TypeVar, Union, cast, ) +try: + from importlib.metadata import entry_points # Python >= 3.10 +except ImportError: + from importlib_metadata import entry_points # type: ignore # Python < 3.10 + + from .collections import FastUserDict from .objects import cached_property from .text import didyoumean @@ -85,7 +91,7 @@ class FactoryMapping(FastUserDict, Generic[_T]): """ aliases: MutableMapping[str, str] - namespaces: Set + namespaces: set _finalized: bool = False def __init__(self, *args: Mapping, **kwargs: str) -> None: @@ -293,6 +299,7 @@ def symbol_by_name( raise ValueError(f"Cannot import {name!r}: {exc}").with_traceback( sys.exc_info()[2] ) from exc + if attribute_name: return cast(_T, getattr(module, attribute_name)) else: @@ -300,12 +307,12 @@ def symbol_by_name( except (ImportError, 
AttributeError): if default is None: raise - return default + return default class EntrypointExtension(NamedTuple): name: str - type: Type + type: type class RawEntrypointExtension(NamedTuple): @@ -336,7 +343,7 @@ def load_extension_classes(namespace: str) -> Iterable[EntrypointExtension]: """ for name, cls_name in load_extension_class_names(namespace): try: - cls: Type = symbol_by_name(cls_name) + cls: type = symbol_by_name(cls_name) except (ImportError, SyntaxError) as exc: warnings.warn( f"Cannot load {namespace} extension {cls_name!r}: {exc!r}", @@ -367,20 +374,19 @@ def load_extension_class_names( [('msgpack', 'faust_msgpack:msgpack')] ``` """ - try: - from importlib.metadata import entry_points - except ImportError: - return - eps = entry_points() - # For Python 3.10+, entry_points() returns an object with .select() + # Python 3.10+ if hasattr(eps, "select"): for ep in eps.select(group=namespace): - yield RawEntrypointExtension(ep.name, f"{ep.module}:{ep.attr}") + yield RawEntrypointExtension( + ep.name, ":".join([ep.module, ep.attr]) + ) + # Python <3.10 else: - # For Python 3.8/3.9, entry_points is a dict for ep in eps.get(namespace, []): - yield RawEntrypointExtension(ep.name, f"{ep.module}:{ep.attr}") + yield RawEntrypointExtension( + ep.name, ":".join([ep.module, ep.attr]) + ) @contextmanager diff --git a/mode/utils/locals.py b/mode/utils/locals.py index 955b1f1a..54f364a5 100644 --- a/mode/utils/locals.py +++ b/mode/utils/locals.py @@ -10,9 +10,10 @@ ``` """ +from collections.abc import Generator, Sequence from contextlib import contextmanager from contextvars import ContextVar -from typing import Generator, Generic, List, Optional, Sequence, TypeVar +from typing import Generic, Optional, TypeVar __all__ = ["LocalStack"] @@ -35,7 +36,7 @@ class LocalStack(Generic[T]): request object. """ - _stack: ContextVar[Optional[List[T]]] + _stack: ContextVar[Optional[list[T]]] def __init__(self) -> None: self._stack = ContextVar("_stack") diff --git a/mode/utils/locks.py b/mode/utils/locks.py index be89882c..241c2567 100644 --- a/mode/utils/locks.py +++ b/mode/utils/locks.py @@ -7,7 +7,7 @@ import asyncio from collections import deque -from typing import Deque, Optional +from typing import Optional class Event: @@ -19,7 +19,7 @@ class Event: false. 
""" - _waiters: Deque[asyncio.Future] + _waiters: deque[asyncio.Future] _value: bool _loop: Optional[asyncio.AbstractEventLoop] diff --git a/mode/utils/logging.py b/mode/utils/logging.py index bf8a4c38..2adcf092 100644 --- a/mode/utils/logging.py +++ b/mode/utils/logging.py @@ -8,7 +8,8 @@ import sys import threading import traceback -from contextlib import ExitStack, contextmanager +from collections.abc import Iterable, Iterator, Mapping +from contextlib import AbstractContextManager, ExitStack, contextmanager from functools import singledispatch, wraps from itertools import count from logging import Logger @@ -22,19 +23,10 @@ BinaryIO, Callable, ClassVar, - ContextManager, - Dict, - Iterable, - Iterator, - List, - Mapping, NamedTuple, Optional, Protocol, - Set, TextIO, - Tuple, - Type, Union, cast, ) @@ -108,18 +100,18 @@ def current_flight_recorder() -> Optional["flight_recorder"]: def _logger_config( - handlers: List[str], level: Union[str, int] = "INFO" -) -> Dict: + handlers: list[str], level: Union[str, int] = "INFO" +) -> dict: return {"handlers": handlers, "level": level} def create_logconfig( version: int = 1, disable_existing_loggers: bool = False, - formatters: Dict = DEFAULT_FORMATTERS, - handlers: Optional[Dict] = None, - root: Optional[Dict] = None, -) -> Dict: + formatters: dict = DEFAULT_FORMATTERS, + handlers: Optional[dict] = None, + root: Optional[dict] = None, +) -> dict: return { "version": version, # do not disable existing loggers from other modules. @@ -139,8 +131,8 @@ def create_logconfig( FormatterHandler2 = Callable[[Any, logging.LogRecord], Any] Severity = Union[int, str] -_formatter_registry: Set[FormatterHandler] = set() -_formatter_registry2: Set[FormatterHandler2] = set() +_formatter_registry: set[FormatterHandler] = set() +_formatter_registry2: set[FormatterHandler2] = set() def get_logger(name: str) -> Logger: @@ -405,8 +397,8 @@ def setup_logging( *, loglevel: Optional[Union[str, int]] = None, logfile: Optional[Union[str, IO]] = None, - loghandlers: Optional[List[logging.Handler]] = None, - logging_config: Optional[Dict] = None, + loghandlers: Optional[list[logging.Handler]] = None, + logging_config: Optional[dict] = None, ) -> int: """Configure logging subsystem.""" stream: Optional[IO] = None @@ -438,8 +430,8 @@ def _setup_logging( level: Optional[Union[int, str]] = None, filename: Optional[str] = None, stream: Optional[IO] = None, - loghandlers: Optional[List[logging.Handler]] = None, - logging_config: Optional[Dict] = None, + loghandlers: Optional[list[logging.Handler]] = None, + logging_config: Optional[dict] = None, ) -> None: handlers = {} if filename: @@ -489,7 +481,7 @@ class Logwrapped: severity: int ident: str - _ignore: ClassVar[Set[str]] = {"__enter__", "__exit__"} + _ignore: ClassVar[set[str]] = {"__enter__", "__exit__"} def __init__( self, @@ -534,7 +526,7 @@ def __wrapped(*args: Any, **kwargs: Any) -> Any: def __repr__(self) -> str: return repr(self.obj) - def __dir__(self) -> List[str]: + def __dir__(self) -> list[str]: return dir(self.obj) @@ -605,11 +597,11 @@ class LogMessage(NamedTuple): severity: int message: str asctime: str - args: Tuple[Any, ...] - kwargs: Dict[str, Any] + args: tuple[Any, ...] + kwargs: dict[str, Any] -class flight_recorder(ContextManager, LogSeverityMixin): +class flight_recorder(AbstractContextManager, LogSeverityMixin): """Flight Recorder context for use with `with` statement. 
This is a logging utility to log stuff only when something @@ -681,11 +673,11 @@ def _background_refresh(self) -> None: loop: asyncio.AbstractEventLoop started_at_date: Optional[str] enabled_by: Optional[asyncio.Task] - extra_context: Dict[str, Any] + extra_context: dict[str, Any] _fut: Optional[asyncio.Future] - _logs: List[LogMessage] - _default_context: Dict[str, Any] + _logs: list[LogMessage] + _default_context: dict[str, Any] def __init__( self, @@ -802,7 +794,7 @@ def flush_logs(self, ident: Optional[str] = None) -> None: finally: logs.clear() - def _fill_extra_context(self, kwargs: Dict) -> None: + def _fill_extra_context(self, kwargs: dict) -> None: if self.extra_context: extra = kwargs["extra"] = kwargs.get("extra") or {} extra["data"] = {**self.extra_context, **(extra.get("data") or {})} @@ -821,7 +813,7 @@ def __enter__(self) -> "flight_recorder": def __exit__( self, - exc_type: Optional[Type[BaseException]] = None, + exc_type: Optional[type[BaseException]] = None, exc_val: Optional[BaseException] = None, exc_tb: Optional[TracebackType] = None, ) -> Optional[bool]: @@ -940,7 +932,7 @@ def readable(self) -> bool: def readline(self, limit: int = -1) -> AnyStr: raise NotImplementedError() - def readlines(self, hint: int = -1) -> List[AnyStr]: + def readlines(self, hint: int = -1) -> list[AnyStr]: raise NotImplementedError() def seek(self, offset: int, whence: int = 0) -> int: @@ -969,7 +961,7 @@ def __enter__(self) -> "FileLogProxy": def __exit__( self, - exc_type: Optional[Type[BaseException]] = None, + exc_type: Optional[type[BaseException]] = None, exc_val: Optional[BaseException] = None, exc_tb: Optional[TracebackType] = None, ) -> Optional[bool]: ... diff --git a/mode/utils/mocks.py b/mode/utils/mocks.py index 5cd4eb32..55eb4cfb 100644 --- a/mode/utils/mocks.py +++ b/mode/utils/mocks.py @@ -4,9 +4,10 @@ import sys import types import unittest.mock +from collections.abc import Iterator from contextlib import contextmanager from types import ModuleType -from typing import Any, Iterator, cast +from typing import Any, cast from unittest.mock import MagicMock __all__ = ["IN", "call", "mask_module", "patch_module"] diff --git a/mode/utils/objects.py b/mode/utils/objects.py index 70c73c35..87d3af55 100644 --- a/mode/utils/objects.py +++ b/mode/utils/objects.py @@ -4,32 +4,31 @@ import collections.abc import sys import typing +from collections.abc import ( + Iterable, + Mapping, + MutableMapping, + MutableSequence, + MutableSet, + Sequence, + Set, +) from contextlib import suppress from decimal import Decimal from functools import total_ordering from pathlib import Path from typing import ( - AbstractSet, Any, Callable, ClassVar, - Dict, ForwardRef, - FrozenSet, Generic, - Iterable, - List, - Mapping, - MutableMapping, - MutableSequence, - MutableSet, Optional, - Sequence, - Set, - Tuple, - Type, TypeVar, cast, + get_args, + get_origin, + get_type_hints, ) try: @@ -48,19 +47,16 @@ def _type_check(arg, msg, is_argument=True, module=None): # type: ignore return arg -try: - from typing import _ClassVar # type: ignore -except ImportError: # pragma: no cover - # CPython 3.7 - from typing import _GenericAlias # type: ignore +def _is_class_var(typ): + # Works for typing.ClassVar and types.GenericAlias (Python 3.9+) + origin = getattr(typ, "__origin__", None) + return origin is ClassVar - def _is_class_var(x: Any) -> bool: - return isinstance(x, _GenericAlias) and x.__origin__ is ClassVar -else: # pragma: no cover - # CPython 3.6 - def _is_class_var(x: Any) -> bool: - return type(x) is 
_ClassVar +def _get_globalns(cls): + # Get the global namespace for a class + module = sys.modules.get(cls.__module__) + return module.__dict__ if module else {} __all__ = [ @@ -108,33 +104,41 @@ class _UsingKwargsInNew(_InitSubclassCheck, ident=909): ... RT = TypeVar("RT") #: Mapping of attribute name to attribute type. -FieldMapping = Mapping[str, Type] +FieldMapping = Mapping[str, type] #: Mapping of attribute name to attributes default value. DefaultsMapping = Mapping[str, Any] -SET_TYPES: Tuple[Type, ...] = ( - AbstractSet, - FrozenSet, - MutableSet, +SET_TYPES: tuple[type, ...] = ( Set, + frozenset, + MutableSet, + set, collections.abc.Set, ) -LIST_TYPES: Tuple[Type, ...] = ( - List, +LIST_TYPES: tuple[type, ...] = ( + list, Sequence, MutableSequence, collections.abc.Sequence, ) -DICT_TYPES: Tuple[Type, ...] = ( - Dict, +DICT_TYPES: tuple[type, ...] = ( + dict, Mapping, MutableMapping, collections.abc.Mapping, ) # XXX cast required for mypy bug # "expression has type Tuple[_SpecialForm]" -TUPLE_TYPES: Tuple[Type, ...] = cast(Tuple[Type, ...], (Tuple,)) +TUPLE_TYPES: tuple[type, ...] = cast(tuple[type, ...], (tuple,)) + + +if sys.version_info >= (3, 10): + import types + + UNION_TYPES = (typing.Union, types.UnionType) +else: + UNION_TYPES = (typing.Union,) class InvalidAnnotation(Exception): @@ -158,7 +162,7 @@ def __repr__(self) -> str: return f"<{type(self).__name__}: {self.value!r}>" -def _restore_from_keywords(typ: Type, kwargs: Dict) -> Any: +def _restore_from_keywords(typ: type, kwargs: dict) -> Any: # This function is used to restore pickled KeywordReduce object. return typ(**kwargs) @@ -206,7 +210,7 @@ def __reduce_keywords__(self) -> Mapping[str, Any]: def __reduce_keywords__(self) -> Mapping: raise NotImplementedError() - def __reduce__(self) -> Tuple: + def __reduce__(self) -> tuple: return _restore_from_keywords, (type(self), self.__reduce_keywords__()) @@ -261,16 +265,30 @@ def _detect_main_name() -> str: # pragma: no cover return ".".join([*seen, path.stem]) +def _normalize_forwardref(t): + if isinstance(t, str): + return t + origin = getattr(t, "__origin__", None) + args = getattr(t, "__args__", None) + if origin and args: + if origin is ClassVar: + return origin[_normalize_forwardref(args[0])] + return origin[tuple(_normalize_forwardref(a) for a in args)] + if hasattr(t, "__qualname__") and "" in t.__qualname__: + return t.__name__ + return t + + def annotations( - cls: Type, + cls: type, *, - stop: Type = object, - invalid_types: Optional[Set] = None, + stop: type = object, + invalid_types: Optional[set] = None, alias_types: Optional[Mapping] = None, skip_classvar: bool = False, - globalns: Optional[Dict[str, Any]] = None, - localns: Optional[Dict[str, Any]] = None, -) -> Tuple[FieldMapping, DefaultsMapping]: + globalns: Optional[dict[str, Any]] = None, + localns: Optional[dict[str, Any]] = None, +) -> tuple[FieldMapping, DefaultsMapping]: """Get class field definition in MRO order. 
Arguments: @@ -314,8 +332,8 @@ def annotations( {'z': 0.0} ``` """ - fields: Dict[str, Type] = {} - defaults: Dict[str, Any] = {} + fields: dict[str, type] = {} + defaults: dict[str, Any] = {} for subcls in iter_mro_reversed(cls, stop=stop): defaults.update(subcls.__dict__) with suppress(AttributeError): @@ -329,20 +347,28 @@ def annotations( localns=localns, ) ) - return fields, defaults + + # Normalize all field types for forward refs + normalized_fields = { + k: _normalize_forwardref(v) for k, v in fields.items() + } + return normalized_fields, defaults def local_annotations( - cls: Type, + cls: type, *, - invalid_types: Optional[Set] = None, + invalid_types: Optional[set] = None, alias_types: Optional[Mapping] = None, skip_classvar: bool = False, - globalns: Optional[Dict[str, Any]] = None, - localns: Optional[Dict[str, Any]] = None, -) -> Iterable[Tuple[str, Type]]: + globalns: Optional[dict[str, Any]] = None, + localns: Optional[dict[str, Any]] = None, +) -> Iterable[tuple[str, type]]: + d = get_type_hints( + cls, globalns if globalns is not None else _get_globalns(cls), localns + ) return _resolve_refs( - cls.__annotations__, + d, globalns if globalns is not None else _get_globalns(cls), localns, invalid_types or set(), @@ -352,13 +378,13 @@ def local_annotations( def _resolve_refs( - d: Dict[str, Any], - globalns: Optional[Dict[str, Any]] = None, - localns: Optional[Dict[str, Any]] = None, - invalid_types: Optional[Set] = None, + d: dict[str, Any], + globalns: Optional[dict[str, Any]] = None, + localns: Optional[dict[str, Any]] = None, + invalid_types: Optional[set] = None, alias_types: Optional[Mapping] = None, skip_classvar: bool = False, -) -> Iterable[Tuple[str, Type]]: +) -> Iterable[tuple[str, type]]: invalid_types = invalid_types or set() alias_types = alias_types or {} for k, v in d.items(): @@ -371,11 +397,11 @@ def _resolve_refs( def eval_type( typ: Any, - globalns: Optional[Dict[str, Any]] = None, - localns: Optional[Dict[str, Any]] = None, - invalid_types: Optional[Set] = None, + globalns: Optional[dict[str, Any]] = None, + localns: Optional[dict[str, Any]] = None, + invalid_types: Optional[set] = None, alias_types: Optional[Mapping] = None, -) -> Type: +) -> type: """Convert (possible) string annotation to actual type. Examples: @@ -399,9 +425,9 @@ def eval_type( def _ForwardRef_safe_eval( ref: ForwardRef, - globalns: Optional[Dict[str, Any]] = None, - localns: Optional[Dict[str, Any]] = None, -) -> Type: + globalns: Optional[dict[str, Any]] = None, + localns: Optional[dict[str, Any]] = None, +) -> type: # On 3.6/3.7 ForwardRef._evaluate crashes if str references ClassVar if not ref.__forward_evaluated__: if globalns is None and localns is None: @@ -420,11 +446,7 @@ def _ForwardRef_safe_eval( return ref.__forward_value__ -def _get_globalns(typ: Type) -> Dict[str, Any]: - return sys.modules[typ.__module__].__dict__ - - -def iter_mro_reversed(cls: Type, stop: Type) -> Iterable[Type]: +def iter_mro_reversed(cls: type, stop: type) -> Iterable[type]: """Iterate over superclasses, in reverse Method Resolution Order. 
The stop argument specifies a base class that when seen will @@ -458,81 +480,66 @@ def iter_mro_reversed(cls: Type, stop: Type) -> Iterable[Type]: wanted = False for subcls in reversed(cls.__mro__): if wanted: - yield cast(Type, subcls) + yield cast(type, subcls) else: wanted = subcls == stop -def remove_optional(typ: Type) -> Type: +def remove_optional(typ: type) -> type: _, typ = _remove_optional(typ) return typ -def is_union(typ: Type) -> bool: - name = typ.__class__.__name__ - return any( - [ - name == "_UnionGenericAlias", # 3.9 - name == "_GenericAlias" and typ.__origin__ is typing.Union, # 3.7 - name == "_Union", # 3.6 - ] - ) +def is_union(typ: type) -> bool: + return get_origin(typ) in UNION_TYPES -def is_optional(typ: Type) -> bool: - if is_union(typ): - args = getattr(typ, "__args__", ()) - return any(True for arg in args if arg is None or arg is type(None)) +def is_optional(typ: type) -> bool: + origin = get_origin(typ) + if origin in UNION_TYPES: + args = get_args(typ) + return any(arg is type(None) for arg in args) return False -def _remove_optional( - typ: Type, *, find_origin: bool = False -) -> Tuple[List[Any], Type]: - args = getattr(typ, "__args__", ()) - if is_union(typ): - # Optional[List[int]] -> Union[List[int], NoneType] - # returns: ((int,), list) - found_None = False - union_type_args: Optional[List] = None - union_type: Optional[Type] = None - for arg in args: - if arg is None or arg is type(None): - found_None = True - else: - union_type_args = getattr(arg, "__args__", ()) - union_type = arg - if find_origin: - if union_type is not None and sys.version_info.minor == 6: - union_type = _py36_maybe_unwrap_GenericMeta(union_type) - else: - union_type = getattr(arg, "__origin__", arg) - if union_type is not None and found_None: - return cast(List, union_type_args), union_type +def _remove_optional(typ: type, *, find_origin: bool = False) -> Any: + origin = get_origin(typ) + args = get_args(typ) + if origin in UNION_TYPES: + non_none_args = [arg for arg in args if arg is not type(None)] + if len(non_none_args) == 1: + typ = non_none_args[0] + origin = get_origin(typ) + args = get_args(typ) + else: + # If multiple non-None args, treat as a union + typ = typing.Union[tuple(non_none_args)] + origin = get_origin(typ) + args = get_args(typ) if find_origin: - if hasattr(typ, "__origin__"): - # List[int] -> ((int,), list) - typ = _py36_maybe_unwrap_GenericMeta(typ) - + if origin is None: + return (), typ + else: + return args, origin return args, typ -def _py36_maybe_unwrap_GenericMeta(typ: Type) -> Type: +def _py36_maybe_unwrap_GenericMeta(typ: type) -> type: if typ.__class__.__name__ == "GenericMeta": # Py3.6 orig_bases = typ.__orig_bases__ if orig_bases and orig_bases[0] in (list, tuple, dict, set): - return cast(Type, orig_bases[0]) - return cast(Type, getattr(typ, "__origin__", typ)) + return cast(type, orig_bases[0]) + return cast(type, getattr(typ, "__origin__", typ)) def guess_polymorphic_type( - typ: Type, + typ: type, *, - set_types: Tuple[Type, ...] = SET_TYPES, - list_types: Tuple[Type, ...] = LIST_TYPES, - tuple_types: Tuple[Type, ...] = TUPLE_TYPES, - dict_types: Tuple[Type, ...] = DICT_TYPES, -) -> Tuple[Type, Type]: + set_types: tuple[type, ...] = SET_TYPES, + list_types: tuple[type, ...] = LIST_TYPES, + tuple_types: tuple[type, ...] = TUPLE_TYPES, + dict_types: tuple[type, ...] = DICT_TYPES, +) -> tuple[type, type]: """Try to find the polymorphic and concrete type of an abstract type. Returns tuple of `(polymorphic_type, concrete_type)`. 
@@ -543,7 +550,7 @@ def guess_polymorphic_type( >>> guess_polymorphic_type(List[int]) (list, int) - >>> guess_polymorphic_type(Optional[List[int]]) + >>> guess_polymorphic_type(Optional[list[int]]) (list, int) >>> guess_polymorphic_type(MutableMapping[int, str]) @@ -559,7 +566,7 @@ def guess_polymorphic_type( # Set[x] return set, _unary_type_arg(args) elif issubclass(typ, list_types): - # List[x] + # list[x] return list, _unary_type_arg(args) elif issubclass(typ, dict_types): # Dict[_, x] @@ -570,7 +577,7 @@ def guess_polymorphic_type( guess_concrete_type = guess_polymorphic_type # XXX compat -def _unary_type_arg(args: List[Type]) -> Any: +def _unary_type_arg(args: list[type]) -> Any: return args[0] if args else Any @@ -587,8 +594,8 @@ def shortlabel(s: Any) -> str: def _label( label_attr: str, s: Any, - pass_types: Tuple[Type, ...] = (str,), - str_types: Tuple[Type, ...] = (str, int, float, Decimal), + pass_types: tuple[type, ...] = (str,), + str_types: tuple[type, ...] = (str, int, float, Decimal), ) -> str: if isinstance(s, pass_types): return cast(str, s) @@ -650,7 +657,7 @@ def __init__( def is_set(self, obj: Any) -> bool: return self.__name__ in obj.__dict__ - def __get__(self, obj: Any, type: Optional[Type] = None) -> RT: + def __get__(self, obj: Any, type: Optional[type] = None) -> RT: if obj is None: if type is not None and self.class_attribute: return cast(RT, getattr(type, self.class_attribute)) diff --git a/mode/utils/queues.py b/mode/utils/queues.py index c09bca75..c4de8b5a 100644 --- a/mode/utils/queues.py +++ b/mode/utils/queues.py @@ -4,17 +4,7 @@ import math import typing from collections import deque -from typing import ( - Any, - Callable, - Deque, - List, - Optional, - Set, - TypeVar, - cast, - no_type_check, -) +from typing import Any, Callable, Optional, TypeVar, cast, no_type_check from weakref import WeakSet from .locks import Event @@ -124,7 +114,7 @@ class FlowControlQueue(asyncio.Queue): pressure_high_ratio = 1.25 # divided by pressure_drop_ratio = 0.40 # multiplied by - _pending_pressure_drop_callbacks: Set[Callable] + _pending_pressure_drop_callbacks: set[Callable] def __init__( self, @@ -225,9 +215,9 @@ class ThrowableQueue(FlowControlQueue): """Queue that can be notified of errors.""" def __init__(self, *args: Any, **kwargs: Any) -> None: - self._putters: List[asyncio.Future] + self._putters: list[asyncio.Future] super().__init__(*args, **kwargs) - self._errors: Deque[BaseException] = deque() + self._errors: deque[BaseException] = deque() @typing.no_type_check async def get(self) -> _T: diff --git a/mode/utils/text.py b/mode/utils/text.py index 41c04541..c536500a 100644 --- a/mode/utils/text.py +++ b/mode/utils/text.py @@ -1,7 +1,8 @@ """Text and string manipulation utilities.""" +from collections.abc import Iterable, Iterator from difflib import SequenceMatcher -from typing import IO, AnyStr, Iterable, Iterator, NamedTuple, Optional, Union +from typing import IO, AnyStr, NamedTuple, Optional, Union __all__ = [ "FuzzyMatch", diff --git a/mode/utils/times.py b/mode/utils/times.py index f92d8420..faed160d 100644 --- a/mode/utils/times.py +++ b/mode/utils/times.py @@ -4,19 +4,12 @@ import asyncio import sys import time +from collections.abc import Mapping +from contextlib import AbstractAsyncContextManager from datetime import timedelta from functools import singledispatch from types import TracebackType -from typing import ( - AsyncContextManager, - Callable, - List, - Mapping, - NamedTuple, - Optional, - Type, - Union, -) +from typing import Callable, 
NamedTuple, Optional, Union from .text import pluralize @@ -47,7 +40,7 @@ class Unit(NamedTuple): format: Callable[[float], str] -TIME_UNITS: List[Unit] = [ +TIME_UNITS: list[Unit] = [ Unit("day", 60 * 60 * 24.0, lambda n: format(n, ".2f")), Unit("hour", 60 * 60.0, lambda n: format(n, ".2f")), Unit("minute", 60.0, lambda n: format(n, ".2f")), @@ -64,7 +57,7 @@ class Unit(NamedTuple): } -class Bucket(AsyncContextManager): +class Bucket(AbstractAsyncContextManager): """Rate limiting state. A bucket "pours" tokens at a rate of ``rate`` per second (or over'). @@ -125,9 +118,9 @@ def __init__( rate: Seconds, over: Seconds = 1.0, *, - fill_rate: Seconds = None, - capacity: Seconds = None, - raises: Optional[Type[BaseException]] = None, + fill_rate: Optional[Seconds] = None, + capacity: Optional[Seconds] = None, + raises: Optional[type[BaseException]] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> None: self.rate = want_seconds(rate) @@ -165,7 +158,7 @@ async def __aenter__(self) -> "Bucket": async def __aexit__( self, - exc_type: Optional[Type[BaseException]] = None, + exc_type: Optional[type[BaseException]] = None, exc_val: Optional[BaseException] = None, exc_tb: Optional[TracebackType] = None, ) -> Optional[bool]: @@ -233,8 +226,8 @@ def rate_limit( rate: float, over: Seconds = 1.0, *, - bucket_type: Type[Bucket] = TokenBucket, - raises: Optional[Type[BaseException]] = None, + bucket_type: type[Bucket] = TokenBucket, + raises: Optional[type[BaseException]] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Bucket: """Create rate limiting manager.""" diff --git a/mode/utils/tracebacks.py b/mode/utils/tracebacks.py index eccaa3e7..e7dab4c2 100644 --- a/mode/utils/tracebacks.py +++ b/mode/utils/tracebacks.py @@ -4,19 +4,10 @@ import inspect import io import sys +from collections.abc import AsyncGenerator, Coroutine, Generator, Mapping from traceback import StackSummary, print_list, walk_tb from types import FrameType, TracebackType -from typing import ( - IO, - Any, - AsyncGenerator, - Coroutine, - Generator, - Mapping, - Optional, - Union, - cast, -) +from typing import IO, Any, Optional, Union, cast __all__ = ["Traceback", "format_task_stack", "print_task_stack"] diff --git a/mode/utils/trees.py b/mode/utils/trees.py index b2a8645f..f5682969 100644 --- a/mode/utils/trees.py +++ b/mode/utils/trees.py @@ -1,7 +1,9 @@ """Data structure: Trees.""" +from collections import deque +from collections.abc import Iterator from contextlib import suppress -from typing import Any, Deque, Iterator, List, Optional, TypeVar, Union, cast +from typing import Any, Optional, TypeVar, Union, cast from .graphs import DependencyGraph from .objects import shortlabel @@ -46,9 +48,9 @@ def __init__( self, data: T, *, - root: NodeT = None, - parent: NodeT = None, - children: Optional[List[NodeT[T]]] = None, + root: Optional[NodeT] = None, + parent: Optional[NodeT] = None, + children: Optional[list[NodeT[T]]] = None, ) -> None: self.data = data if root is not None: @@ -98,7 +100,7 @@ def discard(self, data: T) -> None: def traverse(self) -> Iterator[NodeT[T]]: """Iterate over the tree in BFS order.""" - stack: Deque[NodeT[T]] = Deque([self]) + stack: deque[NodeT[T]] = deque([self]) while stack: node = stack.popleft() yield node @@ -121,7 +123,7 @@ def walk(self) -> Iterator[NodeT[T]]: def as_graph(self) -> DependencyGraphT: """Convert to `~mode.utils.graphs.DependencyGraph`.""" graph = DependencyGraph() - stack: Deque[NodeT] = Deque([self]) + stack: deque[NodeT] = deque([self]) while stack: node 
= stack.popleft() for child in node.children: diff --git a/mode/utils/types/graphs.py b/mode/utils/types/graphs.py index 6ebc64e0..270b46df 100644 --- a/mode/utils/types/graphs.py +++ b/mode/utils/types/graphs.py @@ -1,17 +1,8 @@ """Type classes for `mode.utils.graphs`.""" import abc -from typing import ( - IO, - Any, - Generic, - Iterable, - Mapping, - MutableMapping, - Optional, - Sequence, - TypeVar, -) +from collections.abc import Iterable, Mapping, MutableMapping, Sequence +from typing import IO, Any, Generic, Optional, TypeVar __all__ = ["DependencyGraphT", "GraphFormatterT"] @@ -120,5 +111,5 @@ def edges(self) -> Iterable: ... @abc.abstractmethod def to_dot( - self, fh: IO, *, formatter: GraphFormatterT[_T] = None + self, fh: IO, *, formatter: Optional[GraphFormatterT[_T]] = None ) -> None: ... diff --git a/mode/utils/types/trees.py b/mode/utils/types/trees.py index 5d38672d..d2a02242 100644 --- a/mode/utils/types/trees.py +++ b/mode/utils/types/trees.py @@ -1,7 +1,8 @@ """Type classes for `mode.utils.trees`.""" import abc -from typing import Any, Generic, Iterator, List, Optional, TypeVar, Union +from collections.abc import Iterator +from typing import Any, Generic, Optional, TypeVar, Union from .graphs import DependencyGraphT @@ -13,7 +14,7 @@ class NodeT(Generic[_T]): """Node in a tree data structure.""" - children: List[Any] + children: list[Any] data: Any = None @classmethod diff --git a/mode/utils/typing.py b/mode/utils/typing.py deleted file mode 100644 index f270e491..00000000 --- a/mode/utils/typing.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Backport of :mod:`typing` additions in Python 3.7.""" - -# pragma: no cover -import typing - -__all__ = [ - "AsyncContextManager", - "AsyncGenerator", - "ChainMap", - "Counter", - "Deque", - "NoReturn", - "Protocol", -] - -if typing.TYPE_CHECKING: - from typing import AsyncContextManager -else: - try: - from typing import AsyncContextManager - except ImportError: - from typing import AsyncContextManager - -if typing.TYPE_CHECKING: - from typing import AsyncGenerator -else: - try: - from typing import AsyncGenerator - except ImportError: # Python 3.6.0 - from typing import AsyncGenerator - - -if typing.TYPE_CHECKING: - from typing import ChainMap -else: - try: - from typing import ChainMap - except ImportError: - from typing import ChainMap - - -if typing.TYPE_CHECKING: - from typing import Counter -else: - try: - from typing import Counter - except ImportError: - from typing import Counter - - -if typing.TYPE_CHECKING: - from typing import Deque -else: - try: - from typing import Deque - except ImportError: - from typing import Deque - - -if typing.TYPE_CHECKING: - from typing import NoReturn -else: - try: - from typing import NoReturn - except ImportError: - from typing import NoReturn - - -if typing.TYPE_CHECKING: - from typing import Protocol -else: - try: - from typing import Protocol - except ImportError: - from typing_extensions import Protocol diff --git a/mode/worker.py b/mode/worker.py index 10f5c606..da2e7984 100644 --- a/mode/worker.py +++ b/mode/worker.py @@ -11,23 +11,10 @@ import signal import sys import traceback +from collections.abc import Iterable, Iterator from contextlib import contextmanager, suppress from logging import Handler, Logger -from typing import ( - IO, - Any, - Callable, - ClassVar, - Dict, - Iterable, - Iterator, - List, - NoReturn, - Optional, - Tuple, - Union, - cast, -) +from typing import IO, Any, Callable, ClassVar, NoReturn, Optional, Union, cast from .debug import BlockingDetector from .services 
import Service @@ -48,7 +35,7 @@ class _TupleAsListRepr(reprlib.Repr): - def repr_tuple(self, x: Tuple, level: int) -> str: + def repr_tuple(self, x: tuple, level: int) -> str: return self.repr_list(cast(list, x), level) @@ -83,11 +70,11 @@ class Worker(Service): debug: bool quiet: bool blocking_timeout: Seconds - logging_config: Optional[Dict] + logging_config: Optional[dict] loglevel: Optional[Union[str, int]] logfile: Optional[Union[str, IO]] console_port: int - loghandlers: List[Handler] + loghandlers: list[Handler] redirect_stdouts: bool redirect_stdouts_level: int @@ -107,15 +94,15 @@ def __init__( *services: ServiceT, debug: bool = False, quiet: bool = False, - logging_config: Optional[Dict] = None, + logging_config: Optional[dict] = None, loglevel: Optional[Union[str, int]] = None, logfile: Optional[Union[str, IO]] = None, redirect_stdouts: bool = True, - redirect_stdouts_level: logging.Severity = None, + redirect_stdouts_level: Optional[logging.Severity] = None, stdout: Optional[IO] = sys.stdout, stderr: Optional[IO] = sys.stderr, console_port: int = 50101, - loghandlers: Optional[List[Handler]] = None, + loghandlers: Optional[list[Handler]] = None, blocking_timeout: Seconds = 10.0, loop: Optional[asyncio.AbstractEventLoop] = None, override_logging: bool = True, @@ -345,7 +332,7 @@ async def on_started(self) -> None: async def _add_monitor(self) -> Any: try: - import aiomonitor + import aiomonitor # type: ignore except ImportError: self.log.warning( "Cannot start console: aiomonitor is not installed" diff --git a/pyproject.toml b/pyproject.toml index d193c06c..d38155fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,12 +22,11 @@ dynamic = [ ] description = "AsyncIO Service-based programming" readme = "README.md" -requires-python = ">=3.8" +requires-python = ">=3.9" keywords = ["asyncio", "service", "bootsteps", "graph", "coroutine"] authors = [ { name = "Ask Solem Hoel", email= "ask@robinhood.com" }, ] - maintainers = [ { name = "William Barnhart", email = "williambbarnhart@gmail.com" }, ] @@ -36,11 +35,11 @@ classifiers = [ "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: BSD License", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Operating System :: POSIX", @@ -53,7 +52,8 @@ classifiers = [ ] dependencies = [ "colorlog>=6.0.0,<7.0.0", - "croniter>=2.0.0,<3.0.0", + "croniter>=2.0.0,<6.0.0", + "importlib-metadata; python_version < '3.10'", "mypy_extensions", ] @@ -150,7 +150,7 @@ warn_unused_configs = true warn_unused_ignores = true [tool.ruff] -target-version = "py38" +target-version = "py39" line-length = 79 exclude = [ ".bzr", @@ -168,7 +168,6 @@ exclude = [ ".pytype", ".ruff_cache", ".svn", - ".tox", ".venv", ".vscode", "__pypackages__", diff --git a/requirements-tests.txt b/requirements-tests.txt index dbed0d42..4cb11bb4 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -16,5 +16,4 @@ vulture yarl # Conditional -pre-commit>=3.6.2; python_version >= '3.9' -pre-commit>=3.5.0; python_version < '3.9' +pre-commit>=4.0.1 diff --git a/tests/functional/test_service.py b/tests/functional/test_service.py index 3e011dc7..955e0d60 100644 --- 
a/tests/functional/test_service.py +++ b/tests/functional/test_service.py @@ -1,6 +1,6 @@ import asyncio import logging -from typing import AsyncContextManager, ContextManager +from contextlib import AbstractAsyncContextManager, AbstractContextManager from unittest.mock import Mock import pytest @@ -12,7 +12,7 @@ class X(mode.Service): ... -class Context(ContextManager): +class Context(AbstractContextManager): acquires = 0 releases = 0 @@ -24,7 +24,7 @@ def __exit__(self, *exc_info): self.releases += 1 -class AsyncContext(AsyncContextManager): +class AsyncContext(AbstractAsyncContextManager): acquires = 0 releases = 0 diff --git a/tests/functional/test_signals.py b/tests/functional/test_signals.py index 8925d2a7..f14c994f 100644 --- a/tests/functional/test_signals.py +++ b/tests/functional/test_signals.py @@ -191,7 +191,7 @@ def test_disconnect_raises(self, sig): sig.disconnect(Mock(), sender=Mock()) def test_iter_receivers(self, sig): - receivers, alive_refs, dead_refs = self.create_refs(sig) + receivers, alive_refs, _dead_refs = self.create_refs(sig) sig._receivers = receivers sig._live_receivers = set() sig._update_receivers = Mock(return_value=alive_refs) @@ -203,8 +203,8 @@ def test_iter_receivers_no_receivers(self, sig): assert list(sig.iter_receivers(None)) == [] def test__get_live_receivers(self, sig): - receivers, alive_refs, dead_refs = self.create_refs(sig) - alive, dead = sig._get_live_receivers(receivers) + receivers, alive_refs, _dead_refs = self.create_refs(sig) + sig._get_live_receivers(receivers) sig._update_receivers(receivers) assert receivers == set(alive_refs) diff --git a/tests/functional/test_timers.py b/tests/functional/test_timers.py index a7fac2e3..f4f7772d 100644 --- a/tests/functional/test_timers.py +++ b/tests/functional/test_timers.py @@ -2,7 +2,7 @@ from contextlib import asynccontextmanager from functools import reduce from itertools import chain -from typing import List, NamedTuple, Optional, Tuple +from typing import NamedTuple, Optional from unittest.mock import ANY, AsyncMock, Mock, patch import pytest @@ -170,21 +170,21 @@ def to_next_interval( expected_new_interval=expected_new_interval, ) - def interval_to_clock_sequence(self, interval: Interval) -> List[float]: + def interval_to_clock_sequence(self, interval: Interval) -> list[float]: # Timer calls clock() twice per iteration, # so in an interval this provides the clock for wakeup time # and the yield time. return [interval.wakeup_time, interval.yield_time] - def to_clock_values(self, *intervals: Interval) -> List[float]: + def to_clock_values(self, *intervals: Interval) -> list[float]: return list(chain(*map(self.interval_to_clock_sequence, intervals))) def build_intervals( self, timer: Timer, first_interval: Interval, - *values: Tuple[float, float], - ) -> List[Interval]: + *values: tuple[float, float], + ) -> list[Interval]: """Build intervals from tuples of ``(sleep_time, yield_time)``. 
If a tuple is missing (is None), then default values @@ -194,7 +194,7 @@ def build_intervals( intervals = [first_interval] def on_reduce( - previous_interval: Interval, tup: Tuple[float, float] + previous_interval: Interval, tup: tuple[float, float] ) -> Interval: sleep_time, yield_time = tup next_interval = self.to_next_interval( @@ -210,13 +210,13 @@ def on_reduce( return intervals async def assert_intervals( - self, timer: Timer, intervals: List[Interval] + self, timer: Timer, intervals: list[Interval] ) -> None: assert await self.consume_timer(timer, limit=len(intervals)) == [ interval.expected_new_interval for interval in intervals ] - async def consume_timer(self, timer: Timer, limit: int) -> List[float]: + async def consume_timer(self, timer: Timer, limit: int) -> list[float]: return [sleep_time async for sleep_time in aslice(timer, 0, limit)] diff --git a/tests/functional/utils/test_aiter.py b/tests/functional/utils/test_aiter.py index 040b3ba9..85f07261 100644 --- a/tests/functional/utils/test_aiter.py +++ b/tests/functional/utils/test_aiter.py @@ -1,4 +1,4 @@ -from typing import AsyncIterable +from collections.abc import AsyncIterable import pytest diff --git a/tests/unit/test_locals.py b/tests/unit/test_locals.py index c7bd4bd3..c2358803 100644 --- a/tests/unit/test_locals.py +++ b/tests/unit/test_locals.py @@ -1,6 +1,5 @@ import abc -from typing import ( - AbstractSet, +from collections.abc import ( AsyncGenerator, AsyncIterable, AsyncIterator, @@ -10,6 +9,7 @@ MutableSequence, MutableSet, Sequence, + Set, ) from unittest.mock import MagicMock, Mock @@ -61,7 +61,8 @@ def real(): assert x.__doc__ == "real function" - assert x.__class__ is type(real) + assert x.__class__ == type(real) # noqa: E721 + assert x.__dict__ == real.__dict__ assert repr(x) == repr(real) assert x.__module__ @@ -467,7 +468,7 @@ def s(self, *, orig): return SetProxy(lambda: orig) def test_type(self, *, s): - assert isinstance(s, AbstractSet) + assert isinstance(s, Set) def test_contains(self, *, s): assert 1 in s diff --git a/tests/unit/test_services.py b/tests/unit/test_services.py index 1122a39f..07f730be 100644 --- a/tests/unit/test_services.py +++ b/tests/unit/test_services.py @@ -1,6 +1,7 @@ import asyncio +from contextlib import AbstractAsyncContextManager, AbstractContextManager from functools import partial -from typing import AsyncContextManager, ClassVar, ContextManager, Dict +from typing import ClassVar from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, patch import pytest @@ -110,7 +111,7 @@ def test_repr(): @pytest.mark.asyncio async def test_subclass_can_override_Service_task(): class ATaskService(Service): - values: ClassVar[Dict] = [] + values: ClassVar[dict] = [] def __post_init__(self): self.event = asyncio.Event() @@ -334,11 +335,11 @@ async def test_remove_dependency__no_beacon(self, *, service): @pytest.mark.asyncio async def test_add_async_context__non_async(self, *, service): - class Cx(ContextManager): + class Cx(AbstractContextManager): def __exit__(self, *args): return None - assert isinstance(Cx(), ContextManager) + assert isinstance(Cx(), AbstractContextManager) with pytest.raises(TypeError): await service.add_async_context(Cx()) @@ -349,11 +350,11 @@ async def test_add_async_context__not_context(self, *, service): await service.add_async_context(object()) def test_add_context__is_async(self, *, service): - class Cx(AsyncContextManager): + class Cx(AbstractAsyncContextManager): async def __aexit__(self, *args): return None - assert isinstance(Cx(), 
AsyncContextManager) + assert isinstance(Cx(), AbstractAsyncContextManager) with pytest.raises(TypeError): service.add_context(Cx()) @@ -367,9 +368,10 @@ async def test__wait_stopped(self, *, service): service._stopped = Mock() service._crashed = Mock() - with patch("asyncio.wait", AsyncMock()) as wait, patch( - "asyncio.ensure_future", Mock() - ) as ensure_future: + with ( + patch("asyncio.wait", AsyncMock()) as wait, + patch("asyncio.ensure_future", Mock()) as ensure_future, + ): f1 = Mock() f2 = Mock() f3 = Mock() @@ -585,9 +587,10 @@ async def test_wait__one(self, *, service): @pytest.mark.asyncio async def test_wait_many(self, *, service): - with patch("asyncio.wait", AsyncMock()) as wait, patch( - "asyncio.ensure_future", Mock() - ) as ensure_future: + with ( + patch("asyncio.wait", AsyncMock()) as wait, + patch("asyncio.ensure_future", Mock()) as ensure_future, + ): service._wait_one = AsyncMock() m1 = AsyncMock() m2 = AsyncMock() diff --git a/tests/unit/test_worker.py b/tests/unit/test_worker.py index 56b1d553..172aeb85 100644 --- a/tests/unit/test_worker.py +++ b/tests/unit/test_worker.py @@ -379,7 +379,7 @@ async def test_on_started(self, worker): async def test__add_monitor(self, worker): worker.add_context = Mock() with patch_module("aiomonitor"): - import aiomonitor + import aiomonitor # type: ignore await worker._add_monitor() diff --git a/tests/unit/utils/test_cron.py b/tests/unit/utils/test_cron.py index 8c88c0ef..c29cf610 100644 --- a/tests/unit/utils/test_cron.py +++ b/tests/unit/utils/test_cron.py @@ -1,13 +1,4 @@ -import sys - -if sys.version_info >= (3, 9): - from zoneinfo import ZoneInfo - - make_tz = ZoneInfo -else: - import pytz - - make_tz = pytz.timezone +from zoneinfo import ZoneInfo from freezegun import freeze_time @@ -34,7 +25,7 @@ def test_secs_for_next(): @freeze_time("2000-01-01 00:00:00") def test_secs_for_next_with_tz(): - pacific = make_tz("US/Pacific") + pacific = ZoneInfo("America/Los_Angeles") every_8pm_cron_format = "0 20 * * *" # In Pacific time it's 16:00 so only 4 hours until 8:00pm diff --git a/tests/unit/utils/test_imports.py b/tests/unit/utils/test_imports.py index 0162d790..dcc6df94 100644 --- a/tests/unit/utils/test_imports.py +++ b/tests/unit/utils/test_imports.py @@ -187,7 +187,7 @@ def test_load_extension_class_names(): @contextmanager def patch_importlib_metadata_entry_points(): with patch( - "importlib.metadata.entry_points" + "mode.utils.imports.entry_points" ) as importlib_metadata_entry_points: ep1 = Mock(name="ep1") ep1.name = "ep1" @@ -197,8 +197,11 @@ def patch_importlib_metadata_entry_points(): ep2.name = "ep2" ep2.module = "bar" ep2.attr = "c" + # For Python >=3.10 mock_entry_points = Mock() mock_entry_points.select.return_value = [ep1, ep2] + # For Python <3.10 + mock_entry_points.get.return_value = [ep1, ep2] importlib_metadata_entry_points.return_value = mock_entry_points yield diff --git a/tests/unit/utils/test_objects.py b/tests/unit/utils/test_objects.py index 0e384af3..05074c1a 100644 --- a/tests/unit/utils/test_objects.py +++ b/tests/unit/utils/test_objects.py @@ -2,22 +2,8 @@ import collections.abc import pickle import typing -from typing import ( - AbstractSet, - ClassVar, - Dict, - FrozenSet, - Generic, - List, - Mapping, - MutableMapping, - MutableSet, - Optional, - Sequence, - Set, - Tuple, - Union, -) +from collections.abc import Mapping, MutableMapping, MutableSet, Sequence, Set +from typing import ClassVar, Optional, Union from unittest.mock import ANY, Mock import pytest @@ -29,6 +15,7 @@ InvalidAnnotation, 
KeywordReduce, Unordered, + _normalize_forwardref, _remove_optional, _restore_from_keywords, annotations, @@ -73,7 +60,6 @@ class A(B): ... ( [ ServiceCallbacks, - Generic, *EXTRA_GENERIC_INHERITS_FROM, ANY, ServiceT, @@ -187,27 +173,32 @@ class Y: ... @pytest.mark.skip(reason="Needs fixing, typing.List eval does not work") def test_eval_type(): - assert eval_type("list") is list - assert eval_type("typing.List") is typing.List + assert eval_type("list") == list # noqa: E721 + assert eval_type("typing.List") == list # noqa: E721 def test_annotations(): class X: Foo: ClassVar[int] = 3 foo: "int" - bar: List["X"] - baz: Union[List["X"], str] + bar: list["X"] + baz: Union[list["X"], str] mas: int = 3 fields, defaults = annotations(X, globalns=globals(), localns=locals()) - assert fields == { + expected = { "Foo": ClassVar[int], "foo": int, - "bar": List[X], - "baz": Union[List[X], str], + "bar": list[X], + "baz": Union[list[X], str], "mas": int, } + + norm_fields = {k: _normalize_forwardref(v) for k, v in fields.items()} + norm_expected = {k: _normalize_forwardref(v) for k, v in expected.items()} + assert len(norm_fields) == 5 + assert norm_fields == norm_expected assert defaults["mas"] == 3 @@ -215,33 +206,36 @@ def test_annotations__skip_classvar(): class X: Foo: ClassVar[int] = 3 foo: "int" - bar: List["X"] - baz: Union[List["X"], str] + bar: list["X"] + baz: Union[list["X"], str] mas: int = 3 fields, defaults = annotations( X, globalns=globals(), localns=locals(), skip_classvar=True ) - assert fields == { + expected = { "foo": int, - "bar": List[X], - "baz": Union[List[X], str], + "bar": list[X], + "baz": Union[list[X], str], "mas": int, } + norm_fields = {k: _normalize_forwardref(v) for k, v in fields.items()} + norm_expected = {k: _normalize_forwardref(v) for k, v in expected.items()} + assert norm_fields == norm_expected assert defaults["mas"] == 3 def test_annotations__invalid_type(): class X: - foo: List + foo: list with pytest.raises(InvalidAnnotation): annotations( X, globalns=globals(), localns=locals(), - invalid_types={List}, + invalid_types={list}, skip_classvar=True, ) @@ -256,29 +250,22 @@ class X: annotations(X, globalns=None, localns=None) -# Union[type(None)] actually returns None -# so we have to construct this object to test condition in code. 
-WeirdNoneUnion = Union[str, int] -WeirdNoneUnion.__args__ = (type(None), type(None)) - - @pytest.mark.parametrize( "input,expected", [ (Optional[str], str), (Union[str, None], str), (Union[str, type(None)], str), - (Optional[List[str]], List[str]), + (Optional[list[str]], list[str]), (Optional[Mapping[int, str]], Mapping[int, str]), - (Optional[AbstractSet[int]], AbstractSet[int]), (Optional[Set[int]], Set[int]), - (Optional[Tuple[int, ...]], Tuple[int, ...]), - (Optional[Dict[int, str]], Dict[int, str]), - (Optional[List[int]], List[int]), + (Optional[set[int]], set[int]), + (Optional[tuple[int, ...]], tuple[int, ...]), + (Optional[dict[int, str]], dict[int, str]), + (Optional[list[int]], list[int]), (str, str), - (List[str], List[str]), + (list[str], list[str]), (Union[str, int, float], Union[str, int, float]), - (WeirdNoneUnion, WeirdNoneUnion), ], ) def test_remove_optional(input, expected): @@ -291,25 +278,18 @@ def test_remove_optional(input, expected): (Optional[str], ((), str)), (Union[str, None], ((), str)), (Union[str, type(None)], ((), str)), - (Optional[List[str]], ((str,), list)), + (Optional[list[str]], ((str,), list)), ( Optional[Mapping[int, str]], ((int, str), IN(dict, collections.abc.Mapping, typing.Mapping)), ), - ( - Optional[AbstractSet[int]], - ((int,), IN(set, collections.abc.Set, typing.AbstractSet)), - ), - ( - Optional[Set[int]], - ((int,), IN(set, collections.abc.Set, typing.AbstractSet)), - ), - (Optional[Tuple[int, ...]], ((int, ...), IN(tuple, typing.Tuple))), - (Optional[Dict[int, str]], ((int, str), dict)), - (Optional[List[int]], ((int,), list)), + (Optional[Set[int]], ((int,), IN(set, collections.abc.Set))), + (Optional[set[int]], ((int,), IN(set, collections.abc.Set))), + (Optional[tuple[int, ...]], ((int, ...), IN(tuple, tuple))), + (Optional[dict[int, str]], ((int, str), dict)), + (Optional[list[int]], ((int,), list)), (str, ((), str)), - (List[str], ((str,), list)), - (WeirdNoneUnion, ((type(None), type(None)), Union)), + (list[str], ((str,), list)), ], ) def test__remove_optional__find_origin(input, expected): @@ -332,7 +312,7 @@ def test__remove_optional_edgecase(): (Union[str, None], True), (Union[str, type(None)], True), (str, False), - (List[str], False), + (list[str], False), (Union[str, int, float], False), ], ) @@ -343,15 +323,15 @@ def test_is_optional(input, expected): @pytest.mark.parametrize( "input,expected", [ - (Tuple[int, ...], (tuple, int)), - (List[int], (list, int)), + (tuple[int, ...], (tuple, int)), + (list[int], (list, int)), (Mapping[str, int], (dict, int)), - (Dict[str, int], (dict, int)), + (dict[str, int], (dict, int)), (MutableMapping[str, int], (dict, int)), - (Set[str], (set, str)), - (FrozenSet[str], (set, str)), + (set[str], (set, str)), + (frozenset[str], (set, str)), (MutableSet[str], (set, str)), - (AbstractSet[str], (set, str)), + (Set[str], (set, str)), (Sequence[str], (list, str)), ], ) diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 1ad5241f..00000000 --- a/tox.ini +++ /dev/null @@ -1,37 +0,0 @@ -[tox] -envlist = 3.12,3.11,3.10,3.9,3.8,ruff,apicheck,typecheck,docstyle,bandit - -[testenv] -deps= - -r{toxinidir}/requirements/default.txt - -r{toxinidir}/requirements/test.txt - -r{toxinidir}/requirements/ci.txt - - linkcheck,apicheck: -r{toxinidir}/requirements/docs.txt - ruff,docstyle: -r{toxinidir}/requirements/dist.txt - bandit: bandit -sitepackages = False -recreate = False -commands = py.test --random-order --open-files -xvv --cov=mode --cov-branch -basepython = - 3.12: python3.12 - 3.11: python3.11 
- 3.10: python3.10 - 3.9: python3.9 - 3.8,ruff,typecheck,apicheck,linkcheck,docstyle,bandit: python3.8 - -[testenv:apicheck] -commands = - sphinx-build -b apicheck -d {envtmpdir}/doctrees docs docs/_build/apicheck - -[testenv:linkcheck] -commands = - sphinx-build -b linkcheck -d {envtmpdir}/doctrees docs docs/_build/linkcheck - -[testenv:ruff] -commands = - ruff {toxinidir}/mode - -[testenv:typecheck] -commands = - mypy -p mode
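
Two behavioral notes on the patch above, each with a minimal sketch (Python, illustrative only; the helper names introduced here are assumptions, not part of the patch).

The rewritten is_union()/is_optional()/_remove_optional() helpers in mode/utils/objects.py lean on typing.get_origin()/get_args() instead of inspecting private typing class names, and on 3.10+ they also recognize PEP 604 unions (X | None) via types.UnionType. A minimal sketch of that detection logic, assuming the same UNION_TYPES tuple shape as the patch:

    import sys
    from typing import Optional, Union, get_args, get_origin

    UNION_TYPES: tuple = (Union,)
    if sys.version_info >= (3, 10):
        import types

        UNION_TYPES = (Union, types.UnionType)


    def is_optional(typ) -> bool:
        # Optional[X] is just Union[X, None]; get_origin() normalizes both the
        # typing.Union and the 3.10+ types.UnionType spellings.
        if get_origin(typ) in UNION_TYPES:
            return any(arg is type(None) for arg in get_args(typ))
        return False


    assert is_optional(Optional[list[int]])
    assert is_optional(Union[str, None])
    assert not is_optional(list[str])
    if sys.version_info >= (3, 10):
        assert is_optional(int | None)  # PEP 604 unions hit the same path

The entry-point loading in mode/utils/imports.py now imports entry_points() at module level (falling back to the importlib-metadata backport declared in pyproject.toml for Python < 3.10) and branches on whether the returned object has .select(). A simplified analogue of load_extension_class_names(), using the hypothetical name extension_names:

    try:
        from importlib.metadata import entry_points
    except ImportError:  # pragma: no cover - backport for Python < 3.10
        from importlib_metadata import entry_points  # type: ignore


    def extension_names(namespace: str):
        eps = entry_points()
        if hasattr(eps, "select"):  # Python 3.10+: selectable EntryPoints
            selected = eps.select(group=namespace)
        else:  # Python 3.9 / older backports: dict-like group mapping
            selected = eps.get(namespace, [])
        for ep in selected:
            yield ep.name, f"{ep.module}:{ep.attr}"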