Stop sub-classing object (#8249)

Patrick Cloke 2020-09-04 06:54:56 -04:00 committed by GitHub
parent 9f8abdcc38
commit c619253db8
GPG key ID: 4AEE18F83AFDEB23
168 changed files with 293 additions and 292 deletions
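
The change is purely cosmetic in Python 3: every class is already "new-style", so an explicit `object` base adds nothing. A minimal sketch of the equivalence (the class names here are illustrative, not from the codebase):

class WithBase(object):
    pass


class WithoutBase:
    pass


# Both are ordinary new-style classes in Python 3: same metaclass,
# and both ultimately inherit from object.
assert type(WithBase) is type(WithoutBase) is type
assert WithBase.__mro__[-1] is object
assert WithoutBase.__mro__[-1] is object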

View file

@@ -43,7 +43,7 @@ response_cache_total = Gauge("synapse_util_caches_response_cache:total", "", ["n
 @attr.s
-class CacheMetric(object):
+class CacheMetric:
     _cache = attr.ib()
     _cache_type = attr.ib(type=str)

View file

@@ -64,7 +64,7 @@ cache_pending_metric = Gauge(
 _CacheSentinel = object()
-class CacheEntry(object):
+class CacheEntry:
     __slots__ = ["deferred", "callbacks", "invalidated"]
     def __init__(self, deferred, callbacks):
@@ -80,7 +80,7 @@ class CacheEntry(object):
         self.callbacks.clear()
-class Cache(object):
+class Cache:
     __slots__ = (
         "cache",
         "name",
@@ -288,7 +288,7 @@ class Cache(object):
         self._pending_deferred_cache.clear()
-class _CacheDescriptorBase(object):
+class _CacheDescriptorBase:
     def __init__(self, orig: _CachedFunction, num_args, cache_context=False):
         self.orig = orig
@@ -705,7 +705,7 @@ def cachedList(
     Example:
-        class Example(object):
+        class Example:
            @cached(num_args=2)
            def do_something(self, first_arg):
                ...
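
For readers unfamiliar with the decorator shown in the docstring example above, here is a rough, generic sketch of what a cached-method decorator does. This is only an illustration of the memoization pattern, not Synapse's implementation (which is asynchronous and Deferred-based), and the key semantics are simplified:

import functools


def cached(num_args):
    """Illustrative memoizing decorator: caches a method's result keyed on
    its first num_args positional arguments (excluding self)."""

    def decorator(func):
        cache = {}

        @functools.wraps(func)
        def wrapper(self, *args):
            key = args[:num_args]
            if key not in cache:
                cache[key] = func(self, *args)
            return cache[key]

        return wrapper

    return decorator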

View file

@@ -40,7 +40,7 @@ class DictionaryEntry(namedtuple("DictionaryEntry", ("full", "known_absent", "va
         return len(self.value)
-class DictionaryCache(object):
+class DictionaryCache:
     """Caches key -> dictionary lookups, supporting caching partial dicts, i.e.
     fetching a subset of dictionary keys for a particular key.
     """
@@ -53,7 +53,7 @@ class DictionaryCache(object):
         self.thread = None
         # caches_by_name[name] = self.cache
-        class Sentinel(object):
+        class Sentinel:
             __slots__ = []
         self.sentinel = Sentinel()
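
The docstring above describes partial-dict caching: a lookup may ask for only a subset of a dictionary's keys, and the cache hands back whichever of those it already knows. A minimal sketch of that idea, assuming a hypothetical PartialDictCache rather than Synapse's actual API:

class PartialDictCache:
    """Illustrative partial-dict cache: stores whatever subset of a
    dictionary's entries has been fetched so far, per top-level key."""

    def __init__(self):
        self._data = {}  # key -> partial dict of known entries

    def update(self, key, entries):
        # Merge newly fetched entries into whatever is already known.
        self._data.setdefault(key, {}).update(entries)

    def get(self, key, dict_keys):
        # Return only the requested entries that are already cached;
        # the caller must fetch the rest and call update().
        known = self._data.get(key, {})
        return {k: known[k] for k in dict_keys if k in known}


cache = PartialDictCache()
cache.update("@user:example.com", {"displayname": "Alice"})
assert cache.get("@user:example.com", ["displayname", "avatar_url"]) == {
    "displayname": "Alice"
}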

View file

@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
 SENTINEL = object()
-class ExpiringCache(object):
+class ExpiringCache:
     def __init__(
         self,
         cache_name,
@@ -190,7 +190,7 @@ class ExpiringCache(object):
         return False
-class _CacheEntry(object):
+class _CacheEntry:
     __slots__ = ["time", "value"]
     def __init__(self, time, value):

View file

@@ -30,7 +30,7 @@ def enumerate_leaves(node, depth):
                 yield m
-class _Node(object):
+class _Node:
     __slots__ = ["prev_node", "next_node", "key", "value", "callbacks"]
     def __init__(self, prev_node, next_node, key, value, callbacks=set()):
@@ -41,7 +41,7 @@ class _Node(object):
         self.callbacks = callbacks
-class LruCache(object):
+class LruCache:
     """
     Least-recently-used cache.
     Supports del_multi only if cache_type=TreeCache
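
LruCache's docstring is terse; the underlying idea is a bounded mapping that evicts the least-recently-used entry once full. A minimal, generic sketch using OrderedDict (Synapse's implementation uses its own doubly-linked _Node list and an optional TreeCache backing, which this does not reproduce):

from collections import OrderedDict


class SimpleLruCache:
    """Illustrative LRU cache: evicts the least-recently-used entry
    once max_size is exceeded."""

    def __init__(self, max_size):
        self.max_size = max_size
        self._data = OrderedDict()

    def get(self, key, default=None):
        if key not in self._data:
            return default
        self._data.move_to_end(key)  # mark as most recently used
        return self._data[key]

    def set(self, key, value):
        self._data[key] = value
        self._data.move_to_end(key)
        if len(self._data) > self.max_size:
            self._data.popitem(last=False)  # drop the oldest entry


cache = SimpleLruCache(max_size=2)
cache.set("a", 1)
cache.set("b", 2)
cache.get("a")
cache.set("c", 3)  # evicts "b", the least recently used
assert cache.get("b") is None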

View file

@@ -23,7 +23,7 @@ from synapse.util.caches import register_cache
 logger = logging.getLogger(__name__)
-class ResponseCache(object):
+class ResponseCache:
     """
     This caches a deferred response. Until the deferred completes it will be
     returned from the cache. This means that if the client retries the request
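
The docstring describes deduplication of in-flight work: while a response is still being computed, later callers for the same key await the same pending result instead of starting the work again. A hedged asyncio sketch of that idea (Synapse's real class is built on Twisted Deferreds and timed eviction, which this omits):

import asyncio


class SimpleResponseCache:
    """Illustrative in-flight request deduplication: concurrent callers
    for the same key share one pending task and its eventual result."""

    def __init__(self):
        self._pending = {}

    async def wrap(self, key, callback, *args):
        task = self._pending.get(key)
        if task is None:
            task = asyncio.ensure_future(callback(*args))
            self._pending[key] = task
        return await task


async def main():
    calls = 0

    async def expensive():
        nonlocal calls
        calls += 1
        await asyncio.sleep(0.01)
        return "result"

    cache = SimpleResponseCache()
    results = await asyncio.gather(
        cache.wrap("req", expensive), cache.wrap("req", expensive)
    )
    assert results == ["result", "result"] and calls == 1


asyncio.run(main())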

View file

@@ -3,7 +3,7 @@ from typing import Dict
 SENTINEL = object()
-class TreeCache(object):
+class TreeCache:
     """
     Tree-based backing store for LruCache. Allows subtrees of data to be deleted
     efficiently.
@@ -89,7 +89,7 @@ def iterate_tree_cache_entry(d):
         yield d
-class _Entry(object):
+class _Entry:
     __slots__ = ["value"]
     def __init__(self, value):
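
The idea behind a tree-backed cache is that keys are tuples stored as nested dicts keyed on each tuple element, so deleting a key prefix drops every entry under it in one operation. A minimal sketch of that structure (illustrative only, not Synapse's implementation):

class SimpleTreeCache:
    """Illustrative tree cache: tuple keys are stored as nested dicts so a
    whole subtree can be dropped by deleting its prefix node."""

    def __init__(self):
        self._root = {}

    def set(self, key, value):
        node = self._root
        for part in key[:-1]:
            node = node.setdefault(part, {})
        node[key[-1]] = value

    def get(self, key, default=None):
        node = self._root
        for part in key:
            if part not in node:
                return default
            node = node[part]
        return node

    def del_prefix(self, prefix):
        # Remove every entry whose key starts with `prefix`.
        node = self._root
        for part in prefix[:-1]:
            node = node.get(part, {})
        node.pop(prefix[-1], None)


cache = SimpleTreeCache()
cache.set(("room1", "alice"), 1)
cache.set(("room1", "bob"), 2)
cache.set(("room2", "carol"), 3)
cache.del_prefix(("room1",))  # drops both room1 entries at once
assert cache.get(("room1", "alice")) is None
assert cache.get(("room2", "carol")) == 3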

View file

@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
 SENTINEL = object()
-class TTLCache(object):
+class TTLCache:
     """A key/value cache implementation where each entry has its own TTL"""
     def __init__(self, cache_name, timer=time.time):
@@ -154,7 +154,7 @@ class TTLCache(object):
 @attr.s(frozen=True, slots=True)
-class _CacheEntry(object):
+class _CacheEntry:
     """TTLCache entry"""
     # expiry_time is the first attribute, so that entries are sorted by expiry.
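
As the docstring says, each TTLCache entry carries its own time-to-live, which is also why expiry_time comes first in _CacheEntry: keeping entries ordered by expiry makes purging cheap. A hedged sketch of per-entry TTL expiry (illustrative, not Synapse's implementation, which additionally keeps a sorted container for eviction):

import time


class SimpleTTLCache:
    """Illustrative per-entry TTL cache: each value remembers its own
    expiry time and is dropped lazily on access once it has expired."""

    def __init__(self, timer=time.time):
        self._timer = timer
        self._data = {}  # key -> (expiry_time, value)

    def set(self, key, value, ttl):
        self._data[key] = (self._timer() + ttl, value)

    def get(self, key, default=None):
        entry = self._data.get(key)
        if entry is None:
            return default
        expiry_time, value = entry
        if self._timer() >= expiry_time:
            del self._data[key]  # entry has outlived its TTL
            return default
        return value


now = [0.0]
cache = SimpleTTLCache(timer=lambda: now[0])
cache.set("token", "abc", ttl=10)
assert cache.get("token") == "abc"
now[0] = 11.0
assert cache.get("token") is None  # expired after its own TTL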