Fix LruCache callback deduplication (#6213)
parent a6ebef1bfd
commit affcc2cc36
changelog.d/6213.bugfix (new file)
@@ -0,0 +1 @@
+Fix LruCache callback deduplication.
@@ -17,8 +17,8 @@ import functools
 import inspect
 import logging
 import threading
 from collections import namedtuple
-from typing import Any, cast
+from typing import Any, Tuple, Union, cast
+from weakref import WeakValueDictionary
 
 from six import itervalues
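The WeakValueDictionary import added here is the mechanism that the rewritten _CacheContext further down in this diff relies on. As a quick refresher (a standalone sketch, not Synapse code; the Entry class and the key are made up for illustration), an entry in a WeakValueDictionary disappears as soon as nothing else holds a strong reference to its value:

    import gc
    from weakref import WeakValueDictionary


    class Entry:
        """Plain class instances can be weakly referenced (tuples and namedtuples cannot)."""


    registry = WeakValueDictionary()
    value = Entry()
    registry["key"] = value
    print("key" in registry)  # True: a strong reference to the value still exists

    del value
    gc.collect()  # redundant on CPython (refcounting), but makes the point explicit
    print("key" in registry)  # False: the entry vanished along with its value

In effect, this is what lets the new _CacheContext keep one context object per (cache, key) without also keeping contexts alive after everything that referenced them is gone.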
@@ -38,6 +38,8 @@ from . import register_cache
 
 logger = logging.getLogger(__name__)
 
+CacheKey = Union[Tuple, Any]
+
 
 class _CachedFunction(Protocol):
     invalidate = None  # type: Any
@@ -430,7 +432,7 @@ class CacheDescriptor(_CacheDescriptorBase):
             # Add our own `cache_context` to argument list if the wrapped function
             # has asked for one
             if self.add_cache_context:
-                kwargs["cache_context"] = _CacheContext(cache, cache_key)
+                kwargs["cache_context"] = _CacheContext.get_instance(cache, cache_key)
 
             try:
                 cached_result_d = cache.get(cache_key, callback=invalidate_callback)
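For context on how the changed line is exercised, here is a hedged sketch of a consumer; the store class and method names are hypothetical and not part of this diff, and the import path assumes the decorator lives in synapse.util.caches.descriptors. A cached function that asks for a cache_context hands the context's bound invalidate method to nested cached calls as on_invalidate, and that bound method is the invalidate_callback registered with cache.get() above.

    from synapse.util.caches.descriptors import cached  # assumed import path


    class ExampleStore:
        """Illustrative only; not a class touched by this commit."""

        @cached()
        def get_thing(self, key):
            ...

        @cached(cache_context=True)
        def get_summary(self, key, cache_context):
            # If get_thing's entry for `key` is later invalidated, the callback
            # below fires and invalidates get_summary's entry for `key` as well.
            return self.get_thing(key, on_invalidate=cache_context.invalidate)

The same context's callback can end up registered against many LruCache nodes, so it matters that repeated registrations deduplicate, which is what the _CacheContext rewrite below is about.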
@@ -624,14 +626,38 @@ class CacheListDescriptor(_CacheDescriptorBase):
         return wrapped
 
 
-class _CacheContext(namedtuple("_CacheContext", ("cache", "key"))):
-    # We rely on _CacheContext implementing __eq__ and __hash__ sensibly,
-    # which namedtuple does for us (i.e. two _CacheContext are the same if
-    # their caches and keys match). This is important in particular to
-    # dedupe when we add callbacks to lru cache nodes, otherwise the number
-    # of callbacks would grow.
-    def invalidate(self):
-        self.cache.invalidate(self.key)
+class _CacheContext:
+    """Holds cache information from the cached function higher in the calling order.
+
+    Can be used to invalidate the higher level cache entry if something changes
+    on a lower level.
+    """
+
+    _cache_context_objects = (
+        WeakValueDictionary()
+    )  # type: WeakValueDictionary[Tuple[Cache, CacheKey], _CacheContext]
+
+    def __init__(self, cache, cache_key):  # type: (Cache, CacheKey) -> None
+        self._cache = cache
+        self._cache_key = cache_key
+
+    def invalidate(self):  # type: () -> None
+        """Invalidates the cache entry referred to by the context."""
+        self._cache.invalidate(self._cache_key)
+
+    @classmethod
+    def get_instance(cls, cache, cache_key):  # type: (Cache, CacheKey) -> _CacheContext
+        """Returns an instance constructed with the given arguments.
+
+        A new instance is only created if none already exists.
+        """
+
+        # We make sure there are no identical _CacheContext instances. This is
+        # important in particular to dedupe when we add callbacks to lru cache
+        # nodes, otherwise the number of callbacks would grow.
+        return cls._cache_context_objects.setdefault(
+            (cache, cache_key), cls(cache, cache_key)
+        )
 
 
 def cached(
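To make the deduplication fix concrete, here is a minimal, self-contained sketch. Everything in it is illustrative: FakeCache stands in for the real Cache, and a plain set stands in for the callback set on an LruCache node; only the two _CacheContext shapes mirror the diff above.

    from collections import namedtuple
    from weakref import WeakValueDictionary


    class FakeCache:
        """Hypothetical stand-in; only invalidate() is needed for this demo."""

        def invalidate(self, key):
            pass


    class OldContext(namedtuple("OldContext", ("cache", "key"))):
        # The pre-change shape: equality and hashing come from namedtuple.
        def invalidate(self):
            self.cache.invalidate(self.key)


    class NewContext:
        # The post-change shape, reduced to its essentials.
        _instances = WeakValueDictionary()

        def __init__(self, cache, key):
            self._cache = cache
            self._key = key

        def invalidate(self):
            self._cache.invalidate(self._key)

        @classmethod
        def get_instance(cls, cache, key):
            # Reuse the live context for this (cache, key), creating one if needed.
            return cls._instances.setdefault((cache, key), cls(cache, key))


    cache = FakeCache()
    callbacks = set()  # stands in for an LruCache node's callback set

    # Old behaviour: the two contexts compare equal but are distinct objects, and
    # CPython 3 only treats bound methods as equal when they are bound to the very
    # same object, so the set keeps both callbacks and they accumulate over time.
    callbacks.add(OldContext(cache, "k").invalidate)
    callbacks.add(OldContext(cache, "k").invalidate)
    print(len(callbacks))  # 2

    callbacks.clear()

    # New behaviour: get_instance returns the identical object for the same
    # (cache, key), so the bound methods compare equal and the set keeps one.
    ctx = NewContext.get_instance(cache, "k")  # strong reference keeps it alive
    callbacks.add(ctx.invalidate)
    callbacks.add(NewContext.get_instance(cache, "k").invalidate)
    print(len(callbacks))  # 1

Keeping the contexts in a WeakValueDictionary rather than a plain dict means a context disappears once nothing (such as a node's callback set) references it any more, so the registry of contexts cannot grow without bound.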