Mirror of https://git.anonymousland.org/anonymousland/synapse.git (synced 2024-10-01 11:49:51 -04:00)
Limit number of entries to prefill from cache
Some tables, like device_inbox, take a long time to query at startup for the stream change cache prefills. This is likely because they are slower-growing streams and so are more fragmented on disk. For now, let's pull fewer entries out to make startup quicker. In future, we should add a better index to make it even faster.
parent 9898bbd9dc
commit dd52d4de4c
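For context, these prefills work by scanning a stream-ordered table for the most recent stream position per entity and seeding a StreamChangeCache with the result; the limit bounds how far back that scan looks. The sketch below is a simplified, standalone illustration of the pattern, not Synapse's actual implementation: it uses sqlite3 and a plain dict, and the function name prefill_stream_cache is hypothetical.

import sqlite3

def prefill_stream_cache(conn, table, entity_column, stream_column,
                         max_value, limit=100000):
    """Return ({entity: last stream id}, min prefetched position).

    Only rows whose stream id lies within `limit` of `max_value` are
    scanned, which keeps the startup query cheap on large, slowly
    growing (and therefore fragmented) tables.
    """
    sql = (
        "SELECT %(entity)s, MAX(%(stream)s) FROM %(table)s"
        " WHERE %(stream)s > ? - %(limit)s"
        " GROUP BY %(entity)s"
    ) % {
        "table": table,
        "entity": entity_column,
        "stream": stream_column,
        "limit": limit,
    }

    rows = conn.execute(sql, (max_value,)).fetchall()
    cache = {entity: int(stream_id) for entity, stream_id in rows}
    # The minimum prefetched position tells the cache how far back it can
    # definitively answer "has this entity changed since X?" queries.
    min_value = min(cache.values(), default=max_value)
    return cache, min_value

# e.g. prefill_stream_cache(conn, "device_inbox", "user_id", "stream_id",
#                           max_device_inbox_id, limit=1000)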
@@ -189,7 +189,8 @@ class DataStore(RoomMemberStore, RoomStore,
             db_conn, "device_inbox",
             entity_column="user_id",
             stream_column="stream_id",
-            max_value=max_device_inbox_id
+            max_value=max_device_inbox_id,
+            limit=1000,
         )
         self._device_inbox_stream_cache = StreamChangeCache(
             "DeviceInboxStreamChangeCache", min_device_inbox_id,
@@ -202,6 +203,7 @@ class DataStore(RoomMemberStore, RoomStore,
             entity_column="destination",
             stream_column="stream_id",
             max_value=max_device_inbox_id,
+            limit=1000,
         )
         self._device_federation_outbox_stream_cache = StreamChangeCache(
             "DeviceFederationOutboxStreamChangeCache", min_device_outbox_id,
@@ -838,18 +838,19 @@ class SQLBaseStore(object):
         return txn.execute(sql, keyvalues.values())
 
     def _get_cache_dict(self, db_conn, table, entity_column, stream_column,
-                        max_value):
+                        max_value, limit=100000):
         # Fetch a mapping of room_id -> max stream position for "recent" rooms.
         # It doesn't really matter how many we get, the StreamChangeCache will
         # do the right thing to ensure it respects the max size of cache.
         sql = (
             "SELECT %(entity)s, MAX(%(stream)s) FROM %(table)s"
-            " WHERE %(stream)s > ? - 100000"
+            " WHERE %(stream)s > ? - %(limit)s"
             " GROUP BY %(entity)s"
         ) % {
             "table": table,
             "entity": entity_column,
             "stream": stream_column,
+            "limit": limit,
         }
 
         sql = self.database_engine.convert_param_style(sql)
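As a quick sanity check (not part of the commit), the snippet below expands the new %(limit)s template with the device_inbox values used above, showing the query that now runs at startup with limit=1000; the trailing ? remains the bind parameter for max_value.

sql = (
    "SELECT %(entity)s, MAX(%(stream)s) FROM %(table)s"
    " WHERE %(stream)s > ? - %(limit)s"
    " GROUP BY %(entity)s"
) % {
    "table": "device_inbox",
    "entity": "user_id",
    "stream": "stream_id",
    "limit": 1000,
}
print(sql)
# SELECT user_id, MAX(stream_id) FROM device_inbox WHERE stream_id > ? - 1000 GROUP BY user_id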