Ensure invalidation list does not grow unboundedly
This commit is contained in:
parent c0d7d9d642
commit 45fd2c8942
4 changed files with 104 additions and 20 deletions
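
The problem, in outline: every cache access that supplies an invalidation
callback re-registers that callback on the entry it touched, and with a
per-entry list those registrations accumulate on every hit. Storing the
callbacks in a set keeps at most one copy of each distinct callback. A minimal
standalone sketch of the difference (illustrative names, not Synapse code):

    class Node(object):
        def __init__(self):
            self.callbacks_list = []    # pre-commit behaviour
            self.callbacks_set = set()  # post-commit behaviour


    def on_invalidate():
        pass


    node = Node()
    for _ in range(1000):
        # The same callback is handed in on every cache hit.
        node.callbacks_list.append(on_invalidate)
        node.callbacks_set.add(on_invalidate)

    print(len(node.callbacks_list))  # 1000 -- one entry per access
    print(len(node.callbacks_set))   # 1    -- deduplicated

The diffs below make two coordinated changes: _CacheContext becomes a value
type so that equivalent invalidation callbacks can compare equal, and
LruCache's per-node callback store becomes a set so repeated registrations
collapse into one entry.
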
@@ -25,6 +25,7 @@ from synapse.util.logcontext import (
 from . import DEBUG_CACHES, register_cache
 
 from twisted.internet import defer
+from collections import namedtuple
 
 import os
 import functools
@@ -210,16 +211,17 @@ class CacheDescriptor(object):
             # whenever we are invalidated
             invalidate_callback = kwargs.pop("on_invalidate", None)
 
-            # Add our own `cache_context` to argument list if the wrapped function
-            # has asked for one
-            self_context = _CacheContext(cache, None)
+            # Add temp cache_context so inspect.getcallargs doesn't explode
             if self.add_cache_context:
-                kwargs["cache_context"] = self_context
+                kwargs["cache_context"] = None
 
             arg_dict = inspect.getcallargs(self.orig, obj, *args, **kwargs)
             cache_key = tuple(arg_dict[arg_nm] for arg_nm in self.arg_names)
 
-            self_context.key = cache_key
+            # Add our own `cache_context` to argument list if the wrapped function
+            # has asked for one
+            if self.add_cache_context:
+                kwargs["cache_context"] = _CacheContext(cache, cache_key)
 
             try:
                 cached_result_d = cache.get(cache_key, callback=invalidate_callback)
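
For orientation, this is roughly how a cached method consumes the injected
cache_context: it forwards cache_context.invalidate as the on_invalidate
callback of another cached call, so its own entry is dropped whenever the
entry it depends on is invalidated. A hedged usage sketch, not code from this
commit; the store and method names are invented, while the cached decorator,
the cache_context=True option and the on_invalidate keyword are Synapse's own:

    from twisted.internet import defer

    from synapse.util.caches.descriptors import cached


    class ExampleStore(object):
        @cached()
        def get_thing(self, thing_id):
            # Stand-in for a database lookup.
            return defer.succeed({"id": thing_id})

        @cached(cache_context=True)
        def get_derived_thing(self, thing_id, cache_context):
            # Forwarding cache_context.invalidate ties this entry to
            # get_thing's: when get_thing(thing_id) is invalidated, this
            # entry is dropped too. The callback is re-registered on every
            # call that reaches this line, which is why the per-entry
            # callback store has to deduplicate.
            return self.get_thing(thing_id, on_invalidate=cache_context.invalidate)

Note also that the hunk above defers construction of _CacheContext until the
cache key is known: the namedtuple introduced below is immutable, so the key
can no longer be filled in after the fact as self_context.key was.
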
@@ -414,13 +416,7 @@ class CacheListDescriptor(object):
         return wrapped
 
 
-class _CacheContext(object):
-    __slots__ = ["cache", "key"]
-
-    def __init__(self, cache, key):
-        self.cache = cache
-        self.key = key
-
+class _CacheContext(namedtuple("_CacheContext", ("cache", "key"))):
     def invalidate(self):
         self.cache.invalidate(self.key)
 
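
Turning _CacheContext into a namedtuple is what makes the deduplication
workable: the old __slots__ class compared by identity, so two contexts
created for the same cache entry were always distinct, whereas namedtuples
compare and hash by value. Under Python 2, which Synapse targeted at the time,
bound methods of equal objects also compare and hash equal, so repeated
ctx.invalidate registrations can collapse to a single set entry. A small
standalone check of the value-semantics property (not Synapse code):

    from collections import namedtuple


    class _OldContext(object):
        """Pre-commit style: identity semantics."""
        __slots__ = ["cache", "key"]

        def __init__(self, cache, key):
            self.cache = cache
            self.key = key


    class _NewContext(namedtuple("_NewContext", ("cache", "key"))):
        """Post-commit style: value semantics."""


    cache = "cache-stand-in"  # the real field is the cache object to invalidate

    old_a, old_b = _OldContext(cache, ("k",)), _OldContext(cache, ("k",))
    new_a, new_b = _NewContext(cache, ("k",)), _NewContext(cache, ("k",))

    print(old_a == old_b)       # False: distinct objects, identity comparison
    print(new_a == new_b)       # True:  namedtuples compare field by field
    print(len({new_a, new_b}))  # 1:     they hash equally, so sets collapse them
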
@@ -32,7 +32,7 @@ def enumerate_leaves(node, depth):
 class _Node(object):
     __slots__ = ["prev_node", "next_node", "key", "value", "callbacks"]
 
-    def __init__(self, prev_node, next_node, key, value, callbacks=[]):
+    def __init__(self, prev_node, next_node, key, value, callbacks=set()):
         self.prev_node = prev_node
         self.next_node = next_node
         self.key = key
@@ -66,7 +66,7 @@ class LruCache(object):
 
         return inner
 
-    def add_node(key, value, callbacks=[]):
+    def add_node(key, value, callbacks=set()):
         prev_node = list_root
         next_node = prev_node.next_node
         node = _Node(prev_node, next_node, key, value, callbacks)
@@ -94,7 +94,7 @@ class LruCache(object):
 
         for cb in node.callbacks:
             cb()
-        node.callbacks = []
+        node.callbacks.clear()
 
     @synchronized
     def cache_get(key, default=None, callback=None):
@@ -102,7 +102,7 @@ class LruCache(object):
         if node is not None:
             move_node_to_front(node)
             if callback:
-                node.callbacks.append(callback)
+                node.callbacks.add(callback)
             return node.value
         else:
             return default
@@ -114,18 +114,18 @@ class LruCache(object):
             if value != node.value:
                 for cb in node.callbacks:
                     cb()
-                node.callbacks = []
+                node.callbacks.clear()
 
             if callback:
-                node.callbacks.append(callback)
+                node.callbacks.add(callback)
 
             move_node_to_front(node)
             node.value = value
         else:
             if callback:
-                callbacks = [callback]
+                callbacks = set([callback])
             else:
-                callbacks = []
+                callbacks = set()
             add_node(key, value, callbacks)
             if len(cache) > max_size:
                 todelete = list_root.prev_node
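
To tie the LruCache changes together, the sketch below re-creates the
cache_set behaviour from the last hunk in standalone form (illustrative only;
the real functions are closures inside LruCache operating on the linked-list
_Node objects): overwriting an entry with a different value fires the existing
callbacks and clears the set in place, while re-registering the same callback
is a no-op.

    class _MiniNode(object):
        def __init__(self, value, callbacks):
            self.value = value
            self.callbacks = callbacks


    def mini_cache_set(cache, key, value, callback=None):
        node = cache.get(key, None)
        if node is not None:
            if value != node.value:
                # Overwriting with a different value counts as invalidation:
                # fire the existing callbacks, then clear the set in place.
                for cb in node.callbacks:
                    cb()
                node.callbacks.clear()

            if callback:
                node.callbacks.add(callback)  # a set, so re-registration is a no-op

            node.value = value
        else:
            callbacks = set([callback]) if callback else set()
            cache[key] = _MiniNode(value, callbacks)


    fired = []


    def on_invalidate():
        fired.append("invalidated")


    cache = {}
    mini_cache_set(cache, "k", 1, callback=on_invalidate)
    mini_cache_set(cache, "k", 1, callback=on_invalidate)  # same value, same callback
    print(len(cache["k"].callbacks))                       # 1: deduplicated
    mini_cache_set(cache, "k", 2)                          # value changed
    print(fired)                                           # ['invalidated']: fired once
    print(len(cache["k"].callbacks))                       # 0: cleared after firing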