diff --git a/aiocache/decorators.pyi b/aiocache/decorators.pyi
new file mode 100644
index 00000000..1a45cac8
--- /dev/null
+++ b/aiocache/decorators.pyi
@@ -0,0 +1,217 @@
+from typing import (
+    Callable,
+    Concatenate,
+    Mapping,
+    ParamSpec,
+    Protocol,
+    Sequence,
+    Type,
+    TypeVar,
+    overload,
+)
+
+from aiocache import BaseCache, Cache
+from aiocache.plugins import BasePlugin
+from aiocache.serializers import BaseSerializer
+
+Params = ParamSpec("Params")
+ReturnType = TypeVar("ReturnType")
+DecoratorKWArgs = TypeVar("DecoratorKWArgs")
+SerializerType = TypeVar("SerializerType", bound=BaseSerializer)
+CacheType = TypeVar("CacheType", bound=BaseCache)
+MCReturnType = TypeVar("MCReturnType", bound=Mapping)
+MCKey = TypeVar("MCKey")
+MCVal = TypeVar("MCVal")
+
+class CachedBaseProtocol(Protocol[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]):
+    ttl: int | None
+    namespace: str | None
+    serializer: SerializerType | None
+    plugins: Sequence[BasePlugin] | None
+    alias: str | None
+
+    cache: CacheType
+    _cache: CacheType
+    _serializer: SerializerType
+    _namespace: str | None
+    _plugins: Sequence[BasePlugin] | None
+    _kwargs: dict[str, DecoratorKWArgs]
+
+class CachedDecorated(
+    CachedBaseProtocol[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]
+):
+    key_builder: Callable[Params, str] | None
+    skip_cache_func: Callable[[ReturnType], bool] | None
+    noself: bool
+
+    def __call__(self, *args: Params.args, **kwargs: Params.kwargs) -> ReturnType: ...
+    def get_cache_key(self, *args: Params.args, **kwargs: Params.kwargs) -> str: ...
+    async def get_from_cache(self, key: str) -> ReturnType | None: ...
+    async def set_in_cache(self, key: str, value: ReturnType) -> None: ...
+
+class CachedStampedeDecorated(
+    CachedDecorated[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]
+):
+    lease: int
+
+class MultiCachedDecorated(
+    CachedBaseProtocol[
+        CacheType, Params, MCReturnType[MCKey, MCVal], SerializerType, DecoratorKWArgs
+    ]
+):
+    keys_from_attr: str
+    key_builder: (
+        Callable[
+            Concatenate[
+                MCKey,
+                MultiCachedDecorated[
+                    CacheType,
+                    Params,
+                    MCReturnType[MCKey, MCVal],
+                    SerializerType,
+                    DecoratorKWArgs,
+                ],
+                Params,
+            ],
+            str,
+        ]
+        | None
+    )
+    skip_cache_func: Callable[[MCKey, MCVal], bool] | None
+
+    def __call__(
+        self,
+        *args: Params.args,
+        cache_read: bool = True,
+        cache_write: bool = True,
+        aiocache_wait_for_write: bool = True,
+        **kwargs: Params.kwargs,
+    ) -> MCReturnType: ...
+    def get_cache_keys(
+        self,
+        f: MultiCachedDecorated[
+            CacheType, Params, MCReturnType[MCKey, MCVal], SerializerType, DecoratorKWArgs
+        ],
+        *args: Params.args,
+        **kwargs: Params.kwargs,
+    ) -> str: ...
+    async def get_from_cache(self, *keys: MCKey) -> list[MCVal | None]: ...
+    async def set_in_cache(
+        self,
+        result: MCReturnType[MCKey, MCVal],
+        fn: MultiCachedDecorated[
+            CacheType, Params, MCReturnType[MCKey, MCVal], SerializerType, DecoratorKWArgs
+        ],
+        fn_args: Params.args,
+        fn_kwargs: Params.kwargs,
+    ) -> None: ...
+
+@overload
+def cached(
+    ttl: int | None = None,
+    *,
+    key_builder: Callable[Params, str] | None = None,
+    skip_cache_func: Callable[[ReturnType], bool] | None = None,
+    cache: Type[CacheType] = Cache.MEMORY,
+    noself: bool = False,
+    alias: str,
+    **kwargs: DecoratorKWArgs,
+) -> CachedDecorated[CacheType, Params, ReturnType, None, DecoratorKWArgs]: ...
+@overload
+def cached(
+    ttl: int | None = None,
+    *,
+    key_builder: Callable[Params, str] | None = None,
+    skip_cache_func: Callable[[ReturnType], bool] | None = None,
+    cache: Type[CacheType] = Cache.MEMORY,
+    noself: bool = False,
+    namespace: str | None = None,
+    serializer: SerializerType | None = None,
+    plugins: Sequence[BasePlugin] | None = None,
+    alias: None = None,
+    **kwargs: DecoratorKWArgs,
+) -> CachedDecorated[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]: ...
+@overload
+def cached_stampede(
+    lease: int = 2,
+    ttl: int | None = None,
+    *,
+    key_builder: Callable[Params, str] | None = None,
+    skip_cache_func: Callable[[ReturnType], bool] | None = None,
+    cache: Type[CacheType] = Cache.MEMORY,
+    noself: bool = False,
+    alias: str,
+    **kwargs: DecoratorKWArgs,
+) -> CachedStampedeDecorated[CacheType, Params, ReturnType, None, DecoratorKWArgs]: ...
+@overload
+def cached_stampede(
+    lease: int = 2,
+    ttl: int | None = None,
+    *,
+    key_builder: Callable[Params, str] | None = None,
+    skip_cache_func: Callable[[ReturnType], bool] | None = None,
+    cache: Type[CacheType] = Cache.MEMORY,
+    noself: bool = False,
+    namespace: str | None = None,
+    serializer: SerializerType | None = None,
+    plugins: Sequence[BasePlugin] | None = None,
+    alias: None = None,
+    **kwargs: DecoratorKWArgs,
+) -> CachedStampedeDecorated[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]: ...
+@overload
+def multi_cached(
+    keys_from_attr: str,
+    *,
+    key_builder: (
+        Callable[
+            Concatenate[
+                MCKey,
+                MultiCachedDecorated[
+                    CacheType,
+                    Params,
+                    MCReturnType[MCKey, MCVal],
+                    SerializerType,
+                    DecoratorKWArgs,
+                ],
+                Params,
+            ],
+            str,
+        ]
+        | None
+    ) = None,
+    skip_cache_func: Callable[[MCKey, MCVal], bool] | None = None,
+    ttl: int | None = None,
+    cache: Type[CacheType] = Cache.MEMORY,
+    alias: str,
+    **kwargs: DecoratorKWArgs,
+) -> MultiCachedDecorated[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]: ...
+@overload
+def multi_cached(
+    keys_from_attr: str,
+    *,
+    namespace: str | None = None,
+    key_builder: (
+        Callable[
+            Concatenate[
+                MCKey,
+                MultiCachedDecorated[
+                    CacheType,
+                    Params,
+                    MCReturnType[MCKey, MCVal],
+                    SerializerType,
+                    DecoratorKWArgs,
+                ],
+                Params,
+            ],
+            str,
+        ]
+        | None
+    ) = None,
+    skip_cache_func: Callable[[MCKey, MCVal], bool] | None = None,
+    ttl: int | None = None,
+    cache: Type[CacheType] = Cache.MEMORY,
+    serializer: SerializerType | None = None,
+    plugins: Sequence[BasePlugin] | None = None,
+    alias: None = None,
+    **kwargs: DecoratorKWArgs,
+) -> MultiCachedDecorated[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]: ...
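For context, a minimal sketch (not part of the patch) of how these stubs are meant to be consumed once a PEP 561-aware checker such as mypy picks up decorators.pyi. The fetch_user function, its user_id parameter, and the "default" alias are invented for illustration, and the exact types a given checker infers may differ:

    from aiocache import cached

    # Hypothetical usage: per the stub, the decorated function is modelled as a
    # CachedDecorated object, so the wrapped call signature is carried through
    # ParamSpec and the cache helpers become visible to the type checker.
    @cached(ttl=60, alias="default")
    async def fetch_user(user_id: int) -> dict:
        return {"id": user_id}

    async def demo() -> None:
        await fetch_user(1)                    # arguments checked against Params
        key = fetch_user.get_cache_key(1)      # declared on CachedDecorated, returns str
        await fetch_user.get_from_cache(key)   # declared as ReturnType | None in the stub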
diff --git a/setup.cfg b/setup.cfg
index 2e23f1ae..8bcf169c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -25,3 +25,6 @@ source = aiocache
 [coverage:report]
 show_missing = true
 skip_covered = true
+
+[options.package_data]
+aiocache = py.typed, *.pyi
diff --git a/tests/ut/test_decorators.py b/tests/ut/test_decorators.py
index cfa81e1b..e26e1dcd 100644
--- a/tests/ut/test_decorators.py
+++ b/tests/ut/test_decorators.py
@@ -60,7 +60,7 @@ def test_init(self):
     def test_fails_at_instantiation(self):
         with pytest.raises(TypeError):

-            @cached(wrong_param=1)
+            @cached(wrong_param=1)  # type: ignore[misc]
             async def fn() -> None:
                 """Dummy function."""

@@ -373,7 +373,7 @@ def f():
     def test_fails_at_instantiation(self):
         with pytest.raises(TypeError):

-            @multi_cached(wrong_param=1)
+            @multi_cached(wrong_param=1)  # type: ignore[misc]
             async def fn() -> None:
                 """Dummy function."""

@@ -476,8 +476,9 @@ async def test_cache_write_doesnt_wait_for_future(self, mocker, decorator, decor
         mocker.spy(decorator, "set_in_cache")
         with patch.object(decorator, "get_from_cache", autospec=True, return_value=[None, None]):
             with patch("aiocache.decorators.asyncio.ensure_future", autospec=True):
-                await decorator_call(1, keys=["a", "b"], value="value",
-                                     aiocache_wait_for_write=False)
+                await decorator_call(
+                    1, keys=["a", "b"], value="value", aiocache_wait_for_write=False
+                )

         decorator.set_in_cache.assert_not_awaited()
         decorator.set_in_cache.assert_called_once_with({"a": ANY, "b": ANY}, stub_dict, ANY, ANY)
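Similarly, a hedged sketch of the multi_cached side of the stubs; get_users, its ids parameter, and the returned mapping are made up for illustration, and the runtime cache configuration is omitted:

    from aiocache import multi_cached

    # Hypothetical usage: cache keys are read from the "ids" argument, and the
    # stub declares cache_read/cache_write/aiocache_wait_for_write as keyword
    # arguments on MultiCachedDecorated.__call__.
    @multi_cached(keys_from_attr="ids", ttl=60)
    async def get_users(ids: list[int]) -> dict[int, str]:
        return {i: f"user-{i}" for i in ids}

    async def demo() -> None:
        await get_users(ids=[1, 2])
        await get_users(ids=[3], cache_read=False)  # skip reads, still write results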