Fix LruCache callback deduplication (#6213)

This commit is contained in:
V02460 2019-11-07 10:43:51 +01:00 committed by Richard van der Hoff
parent a6ebef1bfd
commit affcc2cc36
2 changed files with 38 additions and 11 deletions

1
changelog.d/6213.bugfix Normal file
View File

@ -0,0 +1 @@
Fix LruCache callback deduplication.

View File

@ -17,8 +17,8 @@ import functools
 import inspect
 import logging
 import threading
-from collections import namedtuple
-from typing import Any, cast
+from typing import Any, Tuple, Union, cast
+from weakref import WeakValueDictionary

 from six import itervalues
@ -38,6 +38,8 @@ from . import register_cache
 logger = logging.getLogger(__name__)

+CacheKey = Union[Tuple, Any]


 class _CachedFunction(Protocol):
     invalidate = None  # type: Any
@ -430,7 +432,7 @@ class CacheDescriptor(_CacheDescriptorBase):
         # Add our own `cache_context` to argument list if the wrapped function
         # has asked for one
         if self.add_cache_context:
-            kwargs["cache_context"] = _CacheContext(cache, cache_key)
+            kwargs["cache_context"] = _CacheContext.get_instance(cache, cache_key)

         try:
             cached_result_d = cache.get(cache_key, callback=invalidate_callback)
@ -624,14 +626,38 @@ class CacheListDescriptor(_CacheDescriptorBase):
return wrapped return wrapped
class _CacheContext(namedtuple("_CacheContext", ("cache", "key"))): class _CacheContext:
# We rely on _CacheContext implementing __eq__ and __hash__ sensibly, """Holds cache information from the cached function higher in the calling order.
# which namedtuple does for us (i.e. two _CacheContext are the same if
# their caches and keys match). This is important in particular to Can be used to invalidate the higher level cache entry if something changes
# dedupe when we add callbacks to lru cache nodes, otherwise the number on a lower level.
# of callbacks would grow. """
def invalidate(self):
self.cache.invalidate(self.key) _cache_context_objects = (
WeakValueDictionary()
) # type: WeakValueDictionary[Tuple[Cache, CacheKey], _CacheContext]
def __init__(self, cache, cache_key): # type: (Cache, CacheKey) -> None
self._cache = cache
self._cache_key = cache_key
def invalidate(self): # type: () -> None
"""Invalidates the cache entry referred to by the context."""
self._cache.invalidate(self._cache_key)
@classmethod
def get_instance(cls, cache, cache_key): # type: (Cache, CacheKey) -> _CacheContext
"""Returns an instance constructed with the given arguments.
A new instance is only created if none already exists.
"""
# We make sure there are no identical _CacheContext instances. This is
# important in particular to dedupe when we add callbacks to lru cache
# nodes, otherwise the number of callbacks would grow.
return cls._cache_context_objects.setdefault(
(cache, cache_key), cls(cache, cache_key)
)
 def cached(