Skip to content

Commit

Permalink
Adds typing to the decorators
Browse files Browse the repository at this point in the history
  • Loading branch information
fabiob committed Apr 3, 2024
1 parent 63fb637 commit 87cc6a3
Show file tree
Hide file tree
Showing 3 changed files with 225 additions and 4 deletions.
217 changes: 217 additions & 0 deletions aiocache/decorators.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,217 @@
from typing import (
Callable,
Concatenate,
Mapping,
ParamSpec,
Protocol,
Sequence,
Type,
TypeVar,
overload,
)

from aiocache import BaseCache, Cache
from aiocache.plugins import BasePlugin
from aiocache.serializers import BaseSerializer

# Type variables shared by all decorator stubs in this file.
Params = ParamSpec("Params")  # parameter specification of the decorated function
ReturnType = TypeVar("ReturnType")  # return type of the decorated function
DecoratorKWArgs = TypeVar("DecoratorKWArgs")  # value type of extra **kwargs forwarded to the cache
SerializerType = TypeVar("SerializerType", bound=BaseSerializer)
CacheType = TypeVar("CacheType", bound=BaseCache)
# NOTE(review): MCReturnType is later subscripted as MCReturnType[MCKey, MCVal];
# subscripting a plain TypeVar is not valid typing — confirm with a type checker.
MCReturnType = TypeVar("MCReturnType", bound=Mapping)  # mapping returned by multi_cached functions
MCKey = TypeVar("MCKey")  # key type of that mapping
MCVal = TypeVar("MCVal")  # value type of that mapping

class CachedBaseProtocol(Protocol[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]):
    """Attributes common to every function wrapped by an aiocache decorator.

    ``Params`` and ``ReturnType`` are carried here only so the subclasses
    below can reuse them in their call signatures.
    """

    # Decorator configuration as passed by the user.
    ttl: int | None
    namespace: str | None
    serializer: SerializerType | None
    plugins: Sequence[BasePlugin] | None
    alias: str | None

    cache: CacheType
    # Private state the decorator resolves at wrap time.
    _cache: CacheType
    _serializer: SerializerType
    _namespace: str | None
    _plugins: Sequence[BasePlugin] | None
    _kwargs: dict[str, DecoratorKWArgs]

class CachedDecorated(
    CachedBaseProtocol[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]
):
    """Protocol of a function wrapped by ``@cached``."""

    key_builder: Callable[Params, str] | None
    skip_cache_func: Callable[[ReturnType], bool] | None
    noself: bool

    # NOTE(review): declared to return ReturnType directly; the runtime wrapper
    # is a coroutine function — confirm whether Awaitable[ReturnType] was intended.
    def __call__(self, *args: Params.args, **kwargs: Params.kwargs) -> ReturnType: ...
    def get_cache_key(self, *args: Params.args, **kwargs: Params.kwargs) -> str: ...
    async def get_from_cache(self, key: str) -> ReturnType | None: ...
    async def set_in_cache(self, key: str, value: ReturnType) -> None: ...

class CachedStampedeDecorated(
    CachedDecorated[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]
):
    """Protocol of a function wrapped by ``@cached_stampede``."""

    # Lease duration used to prevent a cache stampede on concurrent misses.
    lease: int

class MultiCachedDecorated(
    CachedBaseProtocol[
        # NOTE(review): MCReturnType[MCKey, MCVal] subscripts a TypeVar, which
        # is not valid typing syntax — confirm with a type checker.
        CacheType, Params, MCReturnType[MCKey, MCVal], SerializerType, DecoratorKWArgs
    ]
):
    """Protocol of a function wrapped by ``@multi_cached``."""

    # Name of the argument that holds the keys to read/write.
    keys_from_attr: str
    # Builds a cache key from one mapping key, the decorated function and its args.
    key_builder: (
        Callable[
            Concatenate[
                MCKey,
                MultiCachedDecorated[
                    CacheType,
                    Params,
                    MCReturnType[MCKey, MCVal],
                    SerializerType,
                    DecoratorKWArgs,
                ],
                Params,
            ],
            str,
        ]
        | None
    )
    skip_cache_func: Callable[[MCKey, MCVal], bool] | None

    # NOTE(review): PEP 612 does not allow extra keyword parameters between
    # *args: P.args and **kwargs: P.kwargs — confirm with a type checker.
    def __call__(
        self,
        *args: Params.args,
        cache_read: bool = True,
        cache_write: bool = True,
        aiocache_wait_for_write: bool = True,
        **kwargs: Params.kwargs,
    ) -> MCReturnType: ...
    # NOTE(review): returns ``str`` although the name suggests multiple keys —
    # verify against the runtime implementation.
    def get_cache_keys(
        self,
        f: MultiCachedDecorated[
            CacheType, Params, MCReturnType[MCKey, MCVal], SerializerType, DecoratorKWArgs
        ],
        *args: Params.args,
        **kwargs: Params.kwargs,
    ) -> str: ...
    async def get_from_cache(self, *keys: MCKey) -> list[MCVal | None]: ...
    async def set_in_cache(
        self,
        result: MCReturnType[MCKey, MCVal],
        fn: MultiCachedDecorated[
            CacheType, Params, MCReturnType[MCKey, MCVal], SerializerType, DecoratorKWArgs
        ],
        # NOTE(review): ``Params.args``/``Params.kwargs`` are only valid on
        # *args/**kwargs parameters, not plain annotations — confirm.
        fn_args: Params.args,
        fn_kwargs: Params.kwargs,
    ) -> None: ...

# ``alias`` form: the cache is looked up from a named, preconfigured alias, so
# namespace/serializer/plugins cannot be passed and the serializer slot of the
# returned protocol is fixed to ``None``.
@overload
def cached(
    ttl: int | None = None,
    *,
    key_builder: Callable[Params, str] | None = None,
    skip_cache_func: Callable[[ReturnType], bool] | None = None,
    cache: Type[CacheType] = Cache.MEMORY,
    noself: bool = False,
    alias: str,
    **kwargs: DecoratorKWArgs,
) -> CachedDecorated[CacheType, Params, ReturnType, None, DecoratorKWArgs]: ...
# Explicit-configuration form: cache settings are given inline and ``alias``
# must be absent. NOTE(review): declared to return the decorated protocol
# directly rather than a decorator callable — this types the applied
# ``@cached(...)`` result; confirm intent.
@overload
def cached(
    ttl: int | None = None,
    *,
    key_builder: Callable[Params, str] | None = None,
    skip_cache_func: Callable[[ReturnType], bool] | None = None,
    cache: Type[CacheType] = Cache.MEMORY,
    noself: bool = False,
    namespace: str | None = None,
    serializer: SerializerType | None = None,
    plugins: Sequence[BasePlugin] | None = None,
    alias: None = None,
    **kwargs: DecoratorKWArgs,
) -> CachedDecorated[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]: ...
# ``alias`` form of ``cached_stampede``; mirrors the ``cached`` alias overload,
# with ``lease`` controlling the stampede-protection window.
@overload
def cached_stampede(
    lease: int = 2,
    ttl: int | None = None,
    *,
    key_builder: Callable[Params, str] | None = None,
    skip_cache_func: Callable[[ReturnType], bool] | None = None,
    cache: Type[CacheType] = Cache.MEMORY,
    noself: bool = False,
    alias: str,
    **kwargs: DecoratorKWArgs,
) -> CachedStampedeDecorated[CacheType, Params, ReturnType, None, DecoratorKWArgs]: ...
# Explicit-configuration form of ``cached_stampede``; ``alias`` must be absent.
@overload
def cached_stampede(
    lease: int = 2,
    ttl: int | None = None,
    *,
    key_builder: Callable[Params, str] | None = None,
    skip_cache_func: Callable[[ReturnType], bool] | None = None,
    cache: Type[CacheType] = Cache.MEMORY,
    noself: bool = False,
    namespace: str | None = None,
    serializer: SerializerType | None = None,
    plugins: Sequence[BasePlugin] | None = None,
    alias: None = None,
    **kwargs: DecoratorKWArgs,
) -> CachedStampedeDecorated[CacheType, Params, ReturnType, SerializerType, DecoratorKWArgs]: ...
# ``alias`` form: the cache is looked up from a named, preconfigured alias, so
# the serializer slot of the returned protocol is fixed to ``None``, mirroring
# the ``cached``/``cached_stampede`` alias overloads above.
# Fixes: the return type previously used the unrelated ``ReturnType`` TypeVar
# (bound to no parameter, hence unsolvable) instead of the mapping type
# ``MCReturnType[MCKey, MCVal]`` used by ``MultiCachedDecorated`` itself, and
# leaked ``SerializerType`` into the serializer slot despite no ``serializer``
# parameter existing in this overload.
@overload
def multi_cached(
    keys_from_attr: str,
    *,
    key_builder: (
        Callable[
            Concatenate[
                MCKey,
                MultiCachedDecorated[
                    CacheType,
                    Params,
                    MCReturnType[MCKey, MCVal],
                    SerializerType,
                    DecoratorKWArgs,
                ],
                Params,
            ],
            str,
        ]
        | None
    ) = None,
    skip_cache_func: Callable[[MCKey, MCVal], bool] | None = None,
    ttl: int | None = None,
    cache: Type[CacheType] = Cache.MEMORY,
    alias: str,
    **kwargs: DecoratorKWArgs,
) -> MultiCachedDecorated[
    CacheType, Params, MCReturnType[MCKey, MCVal], None, DecoratorKWArgs
]: ...
# Explicit-configuration form of ``multi_cached``; ``alias`` must be absent.
# Fixes: the return type previously used the unrelated ``ReturnType`` TypeVar
# (bound to no parameter, hence unsolvable) instead of the mapping type
# ``MCReturnType[MCKey, MCVal]`` used by ``MultiCachedDecorated`` itself.
@overload
def multi_cached(
    keys_from_attr: str,
    *,
    namespace: str | None = None,
    key_builder: (
        Callable[
            Concatenate[
                MCKey,
                MultiCachedDecorated[
                    CacheType,
                    Params,
                    MCReturnType[MCKey, MCVal],
                    SerializerType,
                    DecoratorKWArgs,
                ],
                Params,
            ],
            str,
        ]
        | None
    ) = None,
    skip_cache_func: Callable[[MCKey, MCVal], bool] | None = None,
    ttl: int | None = None,
    cache: Type[CacheType] = Cache.MEMORY,
    serializer: SerializerType | None = None,
    plugins: Sequence[BasePlugin] | None = None,
    alias: None = None,
    **kwargs: DecoratorKWArgs,
) -> MultiCachedDecorated[
    CacheType, Params, MCReturnType[MCKey, MCVal], SerializerType, DecoratorKWArgs
]: ...
3 changes: 3 additions & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,6 @@ source = aiocache
[coverage:report]
show_missing = true
skip_covered = true

[options.package_data]
aiocache = py.typed, *.pyi
9 changes: 5 additions & 4 deletions tests/ut/test_decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def test_init(self):
def test_fails_at_instantiation(self):
with pytest.raises(TypeError):

@cached(wrong_param=1)
@cached(wrong_param=1) # type: ignore[misc]
async def fn() -> None:
"""Dummy function."""

Expand Down Expand Up @@ -373,7 +373,7 @@ def f():
def test_fails_at_instantiation(self):
with pytest.raises(TypeError):

@multi_cached(wrong_param=1)
@multi_cached(wrong_param=1) # type: ignore[misc]
async def fn() -> None:
"""Dummy function."""

Expand Down Expand Up @@ -476,8 +476,9 @@ async def test_cache_write_doesnt_wait_for_future(self, mocker, decorator, decor
mocker.spy(decorator, "set_in_cache")
with patch.object(decorator, "get_from_cache", autospec=True, return_value=[None, None]):
with patch("aiocache.decorators.asyncio.ensure_future", autospec=True):
await decorator_call(1, keys=["a", "b"], value="value",
aiocache_wait_for_write=False)
await decorator_call(
1, keys=["a", "b"], value="value", aiocache_wait_for_write=False
)

decorator.set_in_cache.assert_not_awaited()
decorator.set_in_cache.assert_called_once_with({"a": ANY, "b": ANY}, stub_dict, ANY, ANY)
Expand Down

0 comments on commit 87cc6a3

Please sign in to comment.