
Remove unused @lru_cache decorator #13595

Merged
1 change: 1 addition & 0 deletions changelog.d/13595.misc
@@ -0,0 +1 @@
Remove unused `@lru_cache` decorator.
104 changes: 0 additions & 104 deletions synapse/util/caches/descriptors.py
@@ -12,7 +12,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import functools
import inspect
import logging
@@ -146,109 +145,6 @@ def __init__(
)


class _LruCachedFunction(Generic[F]):
cache: LruCache[CacheKey, Any]
__call__: F


def lru_cache(
*, max_entries: int = 1000, cache_context: bool = False
) -> Callable[[F], _LruCachedFunction[F]]:
"""A method decorator that applies a memoizing cache around the function.

This is more-or-less a drop-in equivalent to functools.lru_cache, although note
that the signature is slightly different.

The main differences with functools.lru_cache are:
(a) the size of the cache can be controlled via the cache_factor mechanism
(b) the wrapped function can request a "cache_context" which provides a
callback mechanism to indicate that the result is no longer valid
(c) prometheus metrics are exposed automatically.

The function should take zero or more arguments, which are used as the key for the
cache. Single-argument functions use that argument as the cache key; otherwise the
arguments are built into a tuple.

Cached functions can be "chained" (i.e. a cached function can call other cached
functions and get appropriately invalidated when the caches they call are
invalidated) by adding a special "cache_context" argument to the function
and passing that as a kwarg to all caches called. For example:

@lru_cache(cache_context=True)
def foo(self, key, cache_context):
r1 = self.bar1(key, on_invalidate=cache_context.invalidate)
r2 = self.bar2(key, on_invalidate=cache_context.invalidate)
return r1 + r2

The wrapped function also has a 'cache' property which offers direct access to the
underlying LruCache.
"""

def func(orig: F) -> _LruCachedFunction[F]:
desc = LruCacheDescriptor(
orig,
max_entries=max_entries,
cache_context=cache_context,
)
return cast(_LruCachedFunction[F], desc)

return func


class LruCacheDescriptor(_CacheDescriptorBase):
"""Helper for @lru_cache"""

class _Sentinel(enum.Enum):
sentinel = object()

def __init__(
self,
orig: Callable[..., Any],
max_entries: int = 1000,
cache_context: bool = False,
):
super().__init__(
orig, num_args=None, uncached_args=None, cache_context=cache_context
)
self.max_entries = max_entries

def __get__(self, obj: Optional[Any], owner: Optional[Type]) -> Callable[..., Any]:
cache: LruCache[CacheKey, Any] = LruCache(
cache_name=self.name,
max_size=self.max_entries,
)

get_cache_key = self.cache_key_builder
sentinel = LruCacheDescriptor._Sentinel.sentinel

@functools.wraps(self.orig)
def _wrapped(*args: Any, **kwargs: Any) -> Any:
invalidate_callback = kwargs.pop("on_invalidate", None)
callbacks = (invalidate_callback,) if invalidate_callback else ()

cache_key = get_cache_key(args, kwargs)

ret = cache.get(cache_key, default=sentinel, callbacks=callbacks)
if ret != sentinel:
return ret

# Add our own `cache_context` to argument list if the wrapped function
# has asked for one
if self.add_cache_context:
kwargs["cache_context"] = _CacheContext.get_instance(cache, cache_key)

ret2 = self.orig(obj, *args, **kwargs)
cache.set(cache_key, ret2, callbacks=callbacks)

return ret2

wrapped = cast(CachedFunction, _wrapped)
wrapped.cache = cache
obj.__dict__[self.name] = wrapped

return wrapped


class DeferredCacheDescriptor(_CacheDescriptorBase):
"""A method decorator that applies a memoizing cache around the function.

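For orientation, a hypothetical usage sketch of the decorator being deleted above (the handler and method names are invented, not taken from the codebase): `lru_cache` memoised plain synchronous methods in an LruCache, exposed that cache via a `.cache` attribute on the wrapped method, and accepted an `on_invalidate` callback, mirroring `@cached` but with no async handling. This import only exists before this PR.

# Illustrative sketch only; ExampleHandler and _flag_names are made-up names.
from synapse.util.caches.descriptors import lru_cache

class ExampleHandler:
    @lru_cache(max_entries=500)
    def _flag_names(self, flags: int) -> str:
        # Pure, synchronous computation; the result is memoised per `flags`.
        return format(flags, "b")

handler = ExampleHandler()
handler._flag_names(6)  # computes and caches
handler._flag_names(6)  # served from the LruCache
# Per the docstring above, single-argument functions are keyed on the bare
# argument, and the wrapped method exposes the underlying LruCache directly:
handler._flag_names.cache.invalidate(6)
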
40 changes: 4 additions & 36 deletions tests/util/caches/test_descriptors.py
@@ -28,46 +28,14 @@
make_deferred_yieldable,
)
from synapse.util.caches import descriptors
-from synapse.util.caches.descriptors import cached, cachedList, lru_cache
+from synapse.util.caches.descriptors import cached, cachedList

from tests import unittest
from tests.test_utils import get_awaitable_result

logger = logging.getLogger(__name__)


class LruCacheDecoratorTestCase(unittest.TestCase):
def test_base(self):
class Cls:
def __init__(self):
self.mock = mock.Mock()

@lru_cache()
def fn(self, arg1, arg2):
return self.mock(arg1, arg2)

obj = Cls()
obj.mock.return_value = "fish"
r = obj.fn(1, 2)
self.assertEqual(r, "fish")
obj.mock.assert_called_once_with(1, 2)
obj.mock.reset_mock()

# a call with different params should call the mock again
obj.mock.return_value = "chips"
r = obj.fn(1, 3)
self.assertEqual(r, "chips")
obj.mock.assert_called_once_with(1, 3)
obj.mock.reset_mock()

# the two values should now be cached
r = obj.fn(1, 2)
self.assertEqual(r, "fish")
r = obj.fn(1, 3)
self.assertEqual(r, "chips")
obj.mock.assert_not_called()


def run_on_reactor():
d = defer.Deferred()
reactor.callLater(0, d.callback, 0)
Expand Down Expand Up @@ -478,10 +446,10 @@ async def func1(self, key, cache_context):

@cached(cache_context=True)
async def func2(self, key, cache_context):
-return self.func3(key, on_invalidate=cache_context.invalidate)
+return await self.func3(key, on_invalidate=cache_context.invalidate)
Member:
I am a bit confused why these changes are needed, given we seem to use maybeDeferred:

https://github.com/matrix-org/synapse/blob/19c0e55ef7742d67cff1cb6fb7c3e862b86ea788/synapse/util/caches/descriptors.py

Contributor:
I would love to understand it more. Traceback for completeness.

I spent some time staring at this in a debugger and couldn't divine any insight.

==============================================================================
Error: 
Traceback (most recent call last):
  File "/home/runner/work/synapse/synapse/tests/util/caches/test_descriptors.py", line 459, in test_invalidate_cascade
    r = get_awaitable_result(obj.func1("k1", on_invalidate=top_invalidate))
  File "/home/runner/work/synapse/synapse/tests/test_utils/__init__.py", line 40, in get_awaitable_result
    next(i)
  File "/home/runner/work/synapse/synapse/tests/util/caches/test_descriptors.py", line 445, in func1
    return await self.func2(key, on_invalidate=cache_context.invalidate)
  File "/home/runner/.cache/pypoetry/virtualenvs/matrix-synapse-pswDeSvb-py3.7/lib/python3.7/site-packages/twisted/internet/defer.py", line 151, in maybeDeferred
    result = f(*args, **kw)
  File "/home/runner/work/synapse/synapse/synapse/logging/context.py", line 753, in g
    return run_in_background(f, *args, **kwargs)
  File "/home/runner/work/synapse/synapse/synapse/logging/context.py", line 814, in run_in_background
    res = defer.ensureDeferred(res)
  File "/home/runner/.cache/pypoetry/virtualenvs/matrix-synapse-pswDeSvb-py3.7/lib/python3.7/site-packages/twisted/internet/defer.py", line 911, in ensureDeferred
    return _cancellableInlineCallbacks(coro)
  File "/home/runner/.cache/pypoetry/virtualenvs/matrix-synapse-pswDeSvb-py3.7/lib/python3.7/site-packages/twisted/internet/defer.py", line 1529, in _cancellableInlineCallbacks
    _inlineCallbacks(None, g, status)
  File "/home/runner/.cache/pypoetry/virtualenvs/matrix-synapse-pswDeSvb-py3.7/lib/python3.7/site-packages/twisted/internet/defer.py", line 1421, in _inlineCallbacks
    status.deferred.callback(getattr(e, "value", None))
  File "/home/runner/.cache/pypoetry/virtualenvs/matrix-synapse-pswDeSvb-py3.7/lib/python3.7/site-packages/twisted/internet/defer.py", line 459, in callback
    assert not isinstance(result, Deferred)
builtins.AssertionError: 

tests.util.caches.test_descriptors.DescriptorTestCase.test_invalidate_cascade
-------------------------------------------------------------------------------
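
To make the failure mode concrete, a minimal sketch — not code from this PR, and assuming the Twisted behaviour shown in the traceback above. Once `func3` is wrapped with `@cached`, calling it returns a Deferred; a coroutine that returns that Deferred without awaiting it finishes with a Deferred as its own result, so Twisted ends up firing `Deferred.callback()` with a Deferred and hits `assert not isinstance(result, Deferred)` before `maybeDeferred` ever gets a chance to flatten anything.

# Sketch only: reproduces the AssertionError above with plain Twisted.
from twisted.internet import defer

async def forgets_the_await():
    # A `@cached`-wrapped method returns a Deferred, so without `await`
    # the coroutine's own return value *is* a Deferred.
    return defer.succeed(42)

# ensureDeferred drives the coroutine to completion and then fires its result
# through Deferred.callback(); handing callback() a Deferred trips the assertion.
defer.ensureDeferred(forgets_the_await())  # raises AssertionError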


-@lru_cache(cache_context=True)
-def func3(self, key, cache_context):
+@cached(cache_context=True)
+async def func3(self, key, cache_context):
self.invalidate = cache_context.invalidate
return 42
