# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import synapse.metrics

from lrucache import LruCache

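# Scaling factor for the caches sized against it; for example,
# SYNAPSE_CACHE_FACTOR=1.0 would make the string cache below ten times larger
# than with the 0.1 default.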
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))

DEBUG_CACHES = False

metrics = synapse.metrics.get_metrics_for("synapse.util.caches")

caches_by_name = {}
# cache_counter = metrics.register_cache(
#     "cache",
#     lambda: {(name,): len(caches_by_name[name]) for name in caches_by_name.keys()},
#     labels=["name"],
# )


def register_cache(name, cache):
    caches_by_name[name] = cache
    return metrics.register_cache(
        "cache",
        lambda: len(cache),
        name,
    )
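
# Illustrative usage (a hedged sketch, not executed here; the names are
# hypothetical): a caller would typically size its cache off
# CACHE_SIZE_FACTOR and register it so the cache's length is exported under
# its own name in the "cache" metric:
#
#     my_cache = LruCache(int(1000 * CACHE_SIZE_FACTOR))
#     my_cache_metric = register_cache("my_cache", my_cache)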


_string_cache = LruCache(int(5000 * CACHE_SIZE_FACTOR))
caches_by_name["string_cache"] = _string_cache


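# Canonical copies of the keys commonly found in event dicts, so that dicts
# rebuilt by intern_dict below share a single string object per well-known key.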
KNOWN_KEYS = {
    key: key for key in
    (
        "auth_events",
        "content",
        "depth",
        "event_id",
        "hashes",
        "origin",
        "origin_server_ts",
        "prev_events",
        "room_id",
        "sender",
        "signatures",
        "state_key",
        "type",
        "unsigned",
        "user_id",
    )
}


def intern_string(string):
    """Takes a (potentially) unicode string and interns it using a custom cache
    """
    return _string_cache.setdefault(string, string)
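
# Illustrative behaviour (hypothetical values): interning two equal strings
# returns the same cached object, so repeated identifiers share memory:
#
#     a = intern_string(u"m.room.member")
#     b = intern_string(u"m.room.member")
#     assert a is b    # same object, assuming neither entry was evicted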


def intern_dict(dictionary):
    """Takes a dictionary and interns well known keys and their values
    """
    return {
        KNOWN_KEYS.get(key, key): _intern_known_values(key, value)
        for key, value in dictionary.items()
    }
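
# Sketch of the intended use (the event contents here are hypothetical):
# running an event's dict through intern_dict swaps well-known keys and
# identifier values for shared canonical objects, leaving other values as-is:
#
#     ev = intern_dict({
#         u"type": u"m.room.message",
#         u"room_id": u"!abc:example.com",
#         u"content": {u"body": u"hello"},
#     })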


def _intern_known_values(key, value):
    intern_str_keys = ("event_id", "room_id")
    intern_unicode_keys = ("sender", "user_id", "type", "state_key")
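
    # event_id / room_id values are assumed to be ASCII here, so (on Python 2)
    # they can be byte-encoded and handed to the builtin intern().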
    if key in intern_str_keys:
        return intern(value.encode('ascii'))
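
    # These keys carry unicode values, so they go through the LruCache-backed
    # intern_string() above rather than the builtin intern().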
    if key in intern_unicode_keys:
        return intern_string(value)

    return value