Mirror of https://git.anonymousland.org/anonymousland/synapse.git (synced 2024-10-01 11:49:51 -04:00)
Wire up the dictionarycache to the metrics
commit 4807616e16
parent 2df8dd9b37
@@ -12,3 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
+import synapse.metrics
+
+DEBUG_CACHES = False
+
+metrics = synapse.metrics.get_metrics_for("synapse.util.caches")
+
+caches_by_name = {}
+cache_counter = metrics.register_cache(
+    "cache",
+    lambda: {(name,): len(caches_by_name[name]) for name in caches_by_name.keys()},
+    labels=["name"],
+)
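This first hunk gives the caches package a single shared registry: each cache is expected to store itself in caches_by_name under its name, and the lambda handed to register_cache reports one labelled size sample per registered cache whenever metrics are collected. The snippet below is a minimal stand-alone sketch of that pattern; CacheMetric and its method names are assumptions made for illustration, not the real synapse.metrics API.

# Stand-in for the object returned by metrics.register_cache(); the real
# synapse.metrics implementation is not part of this diff.
class CacheMetric(object):
    def __init__(self, size_callback):
        self._size_callback = size_callback  # e.g. the lambda over caches_by_name
        self.hits = {}
        self.misses = {}

    def inc_hits(self, name):
        self.hits[name] = self.hits.get(name, 0) + 1

    def inc_misses(self, name):
        self.misses[name] = self.misses.get(name, 0) + 1

    def collect_sizes(self):
        # Called at collection time: one sample per registered cache name.
        return self._size_callback()


caches_by_name = {}
cache_counter = CacheMetric(
    lambda: {(name,): len(caches_by_name[name]) for name in caches_by_name.keys()}
)

caches_by_name["dictionary_cache"] = {"some_key": "some_value"}  # a cache registering itself
cache_counter.inc_hits("dictionary_cache")
print(cache_counter.collect_sizes())  # {('dictionary_cache',): 1}
print(cache_counter.hits)             # {'dictionary_cache': 1}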
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014, 2015 OpenMarket Ltd
+# Copyright 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,7 +17,8 @@ import logging
 from synapse.util.async import ObservableDeferred
 from synapse.util import unwrapFirstError
 from synapse.util.caches.lrucache import LruCache
-import synapse.metrics
+
+from . import caches_by_name, DEBUG_CACHES, cache_counter
 
 from twisted.internet import defer
 
@@ -30,18 +31,6 @@ import threading
 logger = logging.getLogger(__name__)
 
 
-DEBUG_CACHES = False
-
-metrics = synapse.metrics.get_metrics_for("synapse.util.caches")
-
-caches_by_name = {}
-cache_counter = metrics.register_cache(
-    "cache",
-    lambda: {(name,): len(caches_by_name[name]) for name in caches_by_name.keys()},
-    labels=["name"],
-)
-
-
 _CacheSentinel = object()
 
 
@@ -15,6 +15,7 @@
 
 from synapse.util.caches.lrucache import LruCache
 from collections import namedtuple
+from . import caches_by_name, cache_counter
 import threading
 import logging
 
@@ -42,6 +43,7 @@ class DictionaryCache(object):
             __slots__ = []
 
         self.sentinel = Sentinel()
+        caches_by_name[name] = self.cache
 
     def check_thread(self):
         expected_thread = self.thread
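This one-line addition is what makes each DictionaryCache instance visible to the shared size lambda registered in the first hunk. A hypothetical sketch of the effect (TinyCache and the cache name are invented for illustration; the real constructor is not shown in this hunk):

caches_by_name = {}

class TinyCache(object):
    def __init__(self, name):
        self.cache = {}
        # Same idea as the added line: register the backing store by name.
        caches_by_name[name] = self.cache

tiny = TinyCache("dictionary_cache")
tiny.cache["room_id"] = "state"
print({(name,): len(c) for name, c in caches_by_name.items()})  # {('dictionary_cache',): 1}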
@@ -54,25 +56,21 @@ class DictionaryCache(object):
         )
 
     def get(self, key, dict_keys=None):
-        try:
-            entry = self.cache.get(key, self.sentinel)
-            if entry is not self.sentinel:
-                # cache_counter.inc_hits(self.name)
-
-                if dict_keys is None:
-                    return DictionaryEntry(entry.full, dict(entry.value))
-                else:
-                    return DictionaryEntry(entry.full, {
-                        k: entry.value[k]
-                        for k in dict_keys
-                        if k in entry.value
-                    })
-
-            # cache_counter.inc_misses(self.name)
-            return DictionaryEntry(False, {})
-        except:
-            logger.exception("get failed")
-            raise
+        entry = self.cache.get(key, self.sentinel)
+        if entry is not self.sentinel:
+            cache_counter.inc_hits(self.name)
+
+            if dict_keys is None:
+                return DictionaryEntry(entry.full, dict(entry.value))
+            else:
+                return DictionaryEntry(entry.full, {
+                    k: entry.value[k]
+                    for k in dict_keys
+                    if k in entry.value
+                })
+
+        cache_counter.inc_misses(self.name)
+        return DictionaryEntry(False, {})
 
     def invalidate(self, key):
         self.check_thread()
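A hedged usage sketch of the rewritten get() above, with plain Python objects standing in for the LruCache and the metrics counter. Only the DictionaryEntry shape (full, value) and the lookup logic are taken from the diff; the keys and values are invented.

from collections import namedtuple

DictionaryEntry = namedtuple("DictionaryEntry", ("full", "value"))

cache = {}            # stands in for the LruCache held in self.cache
sentinel = object()   # stands in for self.sentinel

def get(key, dict_keys=None):
    entry = cache.get(key, sentinel)
    if entry is not sentinel:
        # cache_counter.inc_hits(self.name) fires here in the new code
        if dict_keys is None:
            return DictionaryEntry(entry.full, dict(entry.value))
        return DictionaryEntry(entry.full, {
            k: entry.value[k] for k in dict_keys if k in entry.value
        })
    # cache_counter.inc_misses(self.name) fires here in the new code
    return DictionaryEntry(False, {})

cache["state_group_1"] = DictionaryEntry(True, {"a": 1, "b": 2})
print(get("state_group_1", dict_keys=["a", "z"]))  # DictionaryEntry(full=True, value={'a': 1})
print(get("unknown_key"))                          # DictionaryEntry(full=False, value={})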
@@ -88,18 +86,14 @@ class DictionaryCache(object):
         self.cache.clear()
 
     def update(self, sequence, key, value, full=False):
-        try:
-            self.check_thread()
-            if self.sequence == sequence:
-                # Only update the cache if the caches sequence number matches the
-                # number that the cache had before the SELECT was started (SYN-369)
-                if full:
-                    self._insert(key, value)
-                else:
-                    self._update_or_insert(key, value)
-        except:
-            logger.exception("update failed")
-            raise
+        self.check_thread()
+        if self.sequence == sequence:
+            # Only update the cache if the caches sequence number matches the
+            # number that the cache had before the SELECT was started (SYN-369)
+            if full:
+                self._insert(key, value)
+            else:
+                self._update_or_insert(key, value)
 
     def _update_or_insert(self, key, value):
         entry = self.cache.setdefault(key, DictionaryEntry(False, {}))