Merge pull request #3334 from matrix-org/rav/cache_factor_override

Cache factor override system for specific caches
Richard van der Hoff, 2018-06-04 17:19:33 +01:00 (committed by GitHub)
commit 28b0490dfd
4 changed files with 18 additions and 4 deletions


@@ -171,6 +171,10 @@ def main():
     if cache_factor:
         os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor)
 
+    cache_factors = config.get("synctl_cache_factors", {})
+    for cache_name, factor in cache_factors.iteritems():
+        os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor)
+
     worker_configfiles = []
     if options.worker:
         start_stop_synapse = False

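The hunk above (from what looks like the synctl launcher script, given the surrounding main() code) exports one environment variable per entry of a synctl_cache_factors mapping in the config, so each named cache can be given its own factor before Synapse starts. A minimal sketch of that effect, with an illustrative config dict and Python 3's items() in place of the Python 2 iteritems() used in the diff:

import os

# Illustrative config; in Synapse this mapping would come from the
# "synctl_cache_factors" section of the config that synctl reads.
config = {"synctl_cache_factors": {"stateGroupCache": 2.0, "get_users_in_room": 1.0}}

cache_factors = config.get("synctl_cache_factors", {})
for cache_name, factor in cache_factors.items():  # the diff uses iteritems() (Python 2)
    os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor)

print(os.environ["SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE"])     # "2.0"
print(os.environ["SYNAPSE_CACHE_FACTOR_GET_USERS_IN_ROOM"])   # "1.0"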

@@ -23,7 +23,7 @@ from twisted.internet import defer
 
 from synapse.storage.background_updates import BackgroundUpdateStore
 from synapse.storage.engines import PostgresEngine
-from synapse.util.caches import intern_string, CACHE_SIZE_FACTOR
+from synapse.util.caches import intern_string, get_cache_factor_for
 from synapse.util.caches.descriptors import cached, cachedList
 from synapse.util.caches.dictionary_cache import DictionaryCache
 from synapse.util.stringutils import to_ascii
@@ -57,7 +57,7 @@ class StateGroupWorkerStore(SQLBaseStore):
         super(StateGroupWorkerStore, self).__init__(db_conn, hs)
 
         self._state_group_cache = DictionaryCache(
-            "*stateGroupCache*", 100000 * CACHE_SIZE_FACTOR
+            "*stateGroupCache*", 500000 * get_cache_factor_for("stateGroupCache")
         )
 
     @cached(max_entries=100000, iterable=True)

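The two hunks above come from the state store (StateGroupWorkerStore): besides switching to the new helper, they raise the base size of the state group dictionary cache from 100000 to 500000 entries before scaling. A rough worked example of the resulting sizing, assuming get_cache_factor_for behaves as defined later in this diff:

import os

base = 500000  # base entry count from the hunk above

# With no overrides set, the global factor falls back to 0.5 (the
# SYNAPSE_CACHE_FACTOR default):
global_factor = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5))
print(base * global_factor)  # 250000.0 entries with the defaults

# With e.g. SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE=2.0 exported,
# get_cache_factor_for("stateGroupCache") would return 2.0 instead:
print(base * 2.0)  # 1000000.0 entries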

@@ -22,6 +22,16 @@ import six
 
 CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5))
 
+
+def get_cache_factor_for(cache_name):
+    env_var = "SYNAPSE_CACHE_FACTOR_" + cache_name.upper()
+    factor = os.environ.get(env_var)
+    if factor:
+        return float(factor)
+
+    return CACHE_SIZE_FACTOR
+
+
 
 caches_by_name = {}
 collectors_by_name = {}

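The helper added above (presumably in synapse/util/caches/__init__.py, where CACHE_SIZE_FACTOR is defined) checks for a per-cache environment variable first and only then falls back to the global factor. A self-contained sketch of that resolution order, re-declaring the helper rather than importing Synapse:

import os

# Global factor, as in the hunk above: SYNAPSE_CACHE_FACTOR, defaulting to 0.5.
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5))


def get_cache_factor_for(cache_name):
    # Per-cache override, e.g. "stateGroupCache" -> SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE.
    env_var = "SYNAPSE_CACHE_FACTOR_" + cache_name.upper()
    factor = os.environ.get(env_var)
    if factor:
        return float(factor)
    return CACHE_SIZE_FACTOR


os.environ["SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE"] = "2.0"
print(get_cache_factor_for("stateGroupCache"))    # 2.0: the per-cache variable wins
print(get_cache_factor_for("get_users_in_room"))  # no override: falls back to CACHE_SIZE_FACTOR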

@@ -17,7 +17,7 @@ import logging
 
 from synapse.util.async import ObservableDeferred
 from synapse.util import unwrapFirstError, logcontext
-from synapse.util.caches import CACHE_SIZE_FACTOR
+from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry
 from synapse.util.stringutils import to_ascii
@@ -313,7 +313,7 @@ class CacheDescriptor(_CacheDescriptorBase):
             orig, num_args=num_args, inlineCallbacks=inlineCallbacks,
             cache_context=cache_context)
 
-        max_entries = int(max_entries * CACHE_SIZE_FACTOR)
+        max_entries = int(max_entries * get_cache_factor_for(orig.__name__))
 
         self.max_entries = max_entries
         self.tree = tree