forked-synapse/synapse/util/caches/dictionary_cache.py

Ignoring revisions in .git-blame-ignore-revs. Click here to bypass and see the normal blame view.

145 lines
4.8 KiB
Python
Raw Normal View History

2015-08-04 10:56:56 -04:00
# -*- coding: utf-8 -*-
2016-01-06 23:26:29 -05:00
# Copyright 2015, 2016 OpenMarket Ltd
2015-08-04 10:56:56 -04:00
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
2020-10-16 10:56:39 -04:00
import enum
import logging
2018-07-09 02:09:20 -04:00
import threading
from collections import namedtuple
2020-10-16 10:56:39 -04:00
from typing import Any
2018-07-09 02:09:20 -04:00
from synapse.util.caches.lrucache import LruCache
logger = logging.getLogger(__name__)
2015-08-04 10:56:56 -04:00
class DictionaryEntry(namedtuple("DictionaryEntry", ("full", "known_absent", "value"))):
    """Returned when getting an entry from the cache

    Attributes:
        full (bool): Whether the cache has the full dict or just some keys.
            If not full then not all requested keys will necessarily be present
            in `value`
        known_absent (set): Keys that were looked up in the dict and were not
            there.
        value (dict): The full or partial dict value
    """

    def __len__(self):
        # Size of the entry is the number of cached dict keys; this is used
        # as the LruCache size_callback so eviction is weighted by entry size.
        return len(self.value)
2015-08-04 10:56:56 -04:00
2020-10-16 10:56:39 -04:00
class _Sentinel(enum.Enum):
    # Sentinel used to distinguish "key absent from cache" from any cached
    # value.  Defining the sentinel as an enum member (rather than a bare
    # module-level object()) allows mypy to correctly narrow the type of a
    # dictionary/cache lookup that may return it.
    sentinel = object()
2020-09-04 06:54:56 -04:00
class DictionaryCache:
    """Caches key -> dictionary lookups, supporting caching partial dicts, i.e.
    fetching a subset of dictionary keys for a particular key.
    """

    def __init__(self, name, max_entries=1000):
        # Sized by number of cached dict keys: size_callback=len counts the
        # keys in each DictionaryEntry via its __len__.
        self.cache = LruCache(
            max_size=max_entries, cache_name=name, size_callback=len
        )  # type: LruCache[Any, DictionaryEntry]

        self.name = name
        # Sequence number used to detect invalidations that race with
        # in-flight database reads (see update()).
        self.sequence = 0
        # The first thread to mutate the cache; all later mutations must come
        # from the same thread (see check_thread()).
        self.thread = None

    def check_thread(self):
        """Assert that cache mutations all happen on a single thread.

        The first caller's thread is recorded; any mutation from a different
        thread afterwards raises.

        Raises:
            ValueError: if called from a thread other than the recorded one.
        """
        expected_thread = self.thread
        if expected_thread is None:
            self.thread = threading.current_thread()
        else:
            if expected_thread is not threading.current_thread():
                raise ValueError(
                    "Cache objects can only be accessed from the main thread"
                )

    def get(self, key, dict_keys=None):
        """Fetch an entry out of the cache

        Args:
            key: The cache key to look up.
            dict_keys (list|None): If given, return only those of these keys
                that exist in the cached dict.

        Returns:
            DictionaryEntry: a cache miss is signalled by an entry with
                ``full=False``, empty ``known_absent`` and empty ``value``.
        """
        entry = self.cache.get(key, _Sentinel.sentinel)
        if entry is not _Sentinel.sentinel:
            if dict_keys is None:
                # Copy the dict so callers cannot mutate the cached value.
                return DictionaryEntry(
                    entry.full, entry.known_absent, dict(entry.value)
                )
            else:
                return DictionaryEntry(
                    entry.full,
                    entry.known_absent,
                    {k: entry.value[k] for k in dict_keys if k in entry.value},
                )

        return DictionaryEntry(False, set(), {})

    def invalidate(self, key):
        """Drop the entry for `key` (if any) and bump the sequence number."""
        self.check_thread()

        # Increment the sequence number so that any SELECT statements that
        # raced with the INSERT don't update the cache (SYN-369)
        self.sequence += 1
        self.cache.pop(key, None)

    def invalidate_all(self):
        """Drop every entry and bump the sequence number."""
        self.check_thread()
        self.sequence += 1
        self.cache.clear()

    def update(self, sequence, key, value, fetched_keys=None):
        """Updates the entry in the cache

        The update is ignored if `sequence` no longer matches the cache's
        current sequence number, i.e. the cache was invalidated while the
        caller's database read was in flight.

        Args:
            sequence: The sequence number observed before the read started.
            key (K): The cache key.
            value (dict[X,Y]): The value to update the cache with.
            fetched_keys (None|set[X]): All of the dictionary keys which were
                fetched from the database.

                If None, this is the complete value for key K. Otherwise, it
                is used to infer a list of keys which we know don't exist in
                the full dict.
        """
        self.check_thread()
        if self.sequence == sequence:
            # Only update the cache if the cache's sequence number matches the
            # number that the cache had before the SELECT was started (SYN-369)
            if fetched_keys is None:
                self._insert(key, value, set())
            else:
                self._update_or_insert(key, value, fetched_keys)

    def _update_or_insert(self, key, value, known_absent):
        # We pop and reinsert as we need to tell the cache the size may have
        # changed
        entry = self.cache.pop(key, DictionaryEntry(False, set(), {}))
        entry.value.update(value)
        entry.known_absent.update(known_absent)
        self.cache[key] = entry

    def _insert(self, key, value, known_absent):
        # A complete fetch: the entry is marked full.
        self.cache[key] = DictionaryEntry(True, known_absent, value)