Optionally track memory usage of each LruCache (#9881)
This will double count slightly in the presence of interned strings. It's off by default as it can consume a lot of resources.
parent: 1fb9a2d0bf
commit: ef889c98a6

8 changed files with 97 additions and 1 deletion
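The per-entry size estimate introduced by this commit is built on pympler's Asizer, as shown in the first hunk below. A minimal standalone sketch of the same sizing approach, assuming pympler is installed (the name estimate_size and the example values are illustrative, not taken from the commit):

from typing import Any

try:
    from pympler.asizeof import Asizer  # optional dependency

    def estimate_size(val: Any, *, recurse: bool = True) -> int:
        """Estimate the size in bytes of ``val``."""
        # Shared singletons would be counted against every entry, so skip them.
        if val in ((), None, ""):
            return 0

        sizer = Asizer()
        sizer.exclude_refs((), None, "")
        # ``limit`` bounds how deeply referenced objects are followed.
        return sizer.asizeof(val, limit=100 if recurse else 0)

except ImportError:
    # Without pympler the estimate degrades to a no-op.
    def estimate_size(val: Any, *, recurse: bool = True) -> int:
        return 0


if __name__ == "__main__":
    entry = ("!room:example.org", list(range(100)))
    print(estimate_size(entry))  # approximate bytes for key + value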
synapse/util/caches/lrucache.py

@@ -32,9 +32,36 @@ from typing import (
 
 from typing_extensions import Literal
 
 from synapse.config import cache as cache_config
+from synapse.util import caches
 from synapse.util.caches import CacheMetric, register_cache
 from synapse.util.caches.treecache import TreeCache
 
+try:
+    from pympler.asizeof import Asizer
+
+    def _get_size_of(val: Any, *, recurse=True) -> int:
+        """Get an estimate of the size in bytes of the object.
+
+        Args:
+            val: The object to size.
+            recurse: If true will include referenced values in the size,
+                otherwise only sizes the given object.
+        """
+        # Ignore singleton values when calculating memory usage.
+        if val in ((), None, ""):
+            return 0
+
+        sizer = Asizer()
+        sizer.exclude_refs((), None, "")
+        return sizer.asizeof(val, limit=100 if recurse else 0)
+
+
+except ImportError:
+
+    def _get_size_of(val: Any, *, recurse=True) -> int:
+        return 0
+
+
 # Function type: the type used for invalidation callbacks
 FT = TypeVar("FT", bound=Callable[..., Any])
@@ -56,7 +83,7 @@ def enumerate_leaves(node, depth):
 
 
 class _Node:
-    __slots__ = ["prev_node", "next_node", "key", "value", "callbacks"]
+    __slots__ = ["prev_node", "next_node", "key", "value", "callbacks", "memory"]
 
     def __init__(
         self,
@@ -84,6 +111,16 @@ class _Node:
 
         self.add_callbacks(callbacks)
 
+        self.memory = 0
+        if caches.TRACK_MEMORY_USAGE:
+            self.memory = (
+                _get_size_of(key)
+                + _get_size_of(value)
+                + _get_size_of(self.callbacks, recurse=False)
+                + _get_size_of(self, recurse=False)
+            )
+            self.memory += _get_size_of(self.memory, recurse=False)
+
     def add_callbacks(self, callbacks: Collection[Callable[[], None]]) -> None:
         """Add to stored list of callbacks, removing duplicates."""
 
@@ -233,6 +270,9 @@ class LruCache(Generic[KT, VT]):
             if size_callback:
                 cached_cache_len[0] += size_callback(node.value)
 
+            if caches.TRACK_MEMORY_USAGE and metrics:
+                metrics.inc_memory_usage(node.memory)
+
         def move_node_to_front(node):
             prev_node = node.prev_node
             next_node = node.next_node
@@ -258,6 +298,9 @@ class LruCache(Generic[KT, VT]):
 
             node.run_and_clear_callbacks()
 
+            if caches.TRACK_MEMORY_USAGE and metrics:
+                metrics.dec_memory_usage(node.memory)
+
             return deleted_len
 
         @overload
@@ -373,6 +416,9 @@ class LruCache(Generic[KT, VT]):
             if size_callback:
                 cached_cache_len[0] = 0
 
+            if caches.TRACK_MEMORY_USAGE and metrics:
+                metrics.clear_memory_usage()
+
         @synchronized
         def cache_contains(key: KT) -> bool:
             return key in cache
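The hunks above call metrics.inc_memory_usage, metrics.dec_memory_usage and metrics.clear_memory_usage; the counter itself is part of CacheMetric (imported from synapse.util.caches, one of the other changed files) and is not shown in this excerpt. A rough sketch of what such per-cache accounting amounts to; the CacheMemoryTracker class is an illustrative stand-in, not the commit's CacheMetric code:

class CacheMemoryTracker:
    """Illustrative per-cache memory accounting; not the actual CacheMetric code."""

    def __init__(self) -> None:
        self.memory_usage = 0  # estimated bytes held by live cache entries

    def inc_memory_usage(self, memory: int) -> None:
        # A node was added: account for its estimated footprint.
        self.memory_usage += memory

    def dec_memory_usage(self, memory: int) -> None:
        # A node was evicted or deleted: release its contribution.
        self.memory_usage -= memory

    def clear_memory_usage(self) -> None:
        # The whole cache was cleared.
        self.memory_usage = 0


if __name__ == "__main__":
    tracker = CacheMemoryTracker()
    tracker.inc_memory_usage(2048)  # add_node
    tracker.inc_memory_usage(512)   # add_node
    tracker.dec_memory_usage(2048)  # delete_node
    print(tracker.memory_usage)     # 512

In the actual diff every such call site is guarded by caches.TRACK_MEMORY_USAGE, which keeps the feature off by default as noted in the commit message.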