Optimise backfill calculation (#12522)
Try to avoid an OOM by checking fewer extremities. Generally this is a big rewrite of _maybe_backfill, to try and fix some of the TODOs and other problems in it. It's best reviewed commit-by-commit.
parent e75c7e3b6d
commit 17d99f758a

5 changed files with 168 additions and 106 deletions
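The commit message frames the fix as checking fewer extremities. As a rough sketch of that idea only (the cap, function name, and sorting key below are illustrative assumptions, not code from this commit), the List[Tuple[str, int]] return shape introduced in the hunks below lets the _maybe_backfill rewrite order extremities by depth and bound how many it inspects:

from typing import List, Tuple

# Hypothetical cap; the real limit (and the real logic) live in the
# _maybe_backfill rewrite, which this only gestures at.
MAX_EXTREMITIES_TO_CHECK = 5

def pick_extremities_to_check(
    extremities: List[Tuple[str, int]],
) -> List[Tuple[str, int]]:
    # Lowest depth first: the oldest extremities are the most useful
    # backfill points, and capping the list avoids checking every
    # extremity in a large room at once.
    return sorted(extremities, key=lambda e: e[1])[:MAX_EXTREMITIES_TO_CHECK]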
synapse/storage/databases/main/event_federation.py

@@ -695,7 +695,9 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
         # Return all events where not all sets can reach them.
         return {eid for eid, n in event_to_missing_sets.items() if n}
 
-    async def get_oldest_event_ids_with_depth_in_room(self, room_id) -> Dict[str, int]:
+    async def get_oldest_event_ids_with_depth_in_room(
+        self, room_id
+    ) -> List[Tuple[str, int]]:
         """Gets the oldest events(backwards extremities) in the room along with the
         aproximate depth.
 
@@ -708,7 +710,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
             room_id: Room where we want to find the oldest events
 
         Returns:
-            Map from event_id to depth
+            List of (event_id, depth) tuples
         """
 
         def get_oldest_event_ids_with_depth_in_room_txn(txn, room_id):
@@ -741,7 +743,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
 
             txn.execute(sql, (room_id, False))
 
-            return dict(txn)
+            return txn.fetchall()
 
         return await self.db_pool.runInteraction(
             "get_oldest_event_ids_with_depth_in_room",
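The three hunks above change get_oldest_event_ids_with_depth_in_room to hand back (event_id, depth) tuples instead of a dict. A minimal sketch of how a caller adapts, assuming a hypothetical store handle and helper name (neither is from this commit):

from typing import List, Tuple

async def oldest_extremity(store, room_id: str) -> Tuple[str, int]:
    # Old shape: Dict[str, int], so callers sorted result.items().
    # New shape: the rows come back as tuples already, so no intermediate
    # dict is built just to iterate depth-ordered extremities.
    extremities: List[Tuple[str, int]] = (
        await store.get_oldest_event_ids_with_depth_in_room(room_id)
    )
    # Lowest depth is the oldest point in the room's history.
    return min(extremities, key=lambda e: e[1])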
@@ -751,7 +753,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
 
     async def get_insertion_event_backward_extremities_in_room(
         self, room_id
-    ) -> Dict[str, int]:
+    ) -> List[Tuple[str, int]]:
         """Get the insertion events we know about that we haven't backfilled yet.
 
         We use this function so that we can compare and see if someones current
@@ -763,7 +765,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
             room_id: Room where we want to find the oldest events
 
         Returns:
-            Map from event_id to depth
+            List of (event_id, depth) tuples
         """
 
         def get_insertion_event_backward_extremities_in_room_txn(txn, room_id):
@@ -778,8 +780,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
             """
 
             txn.execute(sql, (room_id,))
-
-            return dict(txn)
+            return txn.fetchall()
 
         return await self.db_pool.runInteraction(
             "get_insertion_event_backward_extremities_in_room",
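Both dict(txn) → txn.fetchall() hunks rely on the same DB-API cursor behaviour: iterating a cursor over a two-column SELECT yields (event_id, depth) pairs, which dict() collected into a mapping, while fetchall() returns the very same rows as a list of tuples, matching the new annotation. A standalone demonstration with sqlite3 (Synapse's transaction object wraps a DB-API cursor with this interface):

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE extremities (event_id TEXT, depth INTEGER)")
cur.executemany(
    "INSERT INTO extremities VALUES (?, ?)",
    [("$event_a", 1), ("$event_b", 7)],
)

cur.execute("SELECT event_id, depth FROM extremities ORDER BY depth")
# Old style: dict(cur) drains the cursor into {"$event_a": 1, "$event_b": 7}.
# New style: fetchall() returns the rows as a list of tuples directly.
rows = cur.fetchall()
assert rows == [("$event_a", 1), ("$event_b", 7)]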
@@ -1295,22 +1296,19 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
         event_results.reverse()
         return event_results
 
-    async def get_successor_events(self, event_ids: Iterable[str]) -> List[str]:
-        """Fetch all events that have the given events as a prev event
+    async def get_successor_events(self, event_id: str) -> List[str]:
+        """Fetch all events that have the given event as a prev event
 
         Args:
-            event_ids: The events to use as the previous events.
+            event_id: The event to search for as a prev_event.
         """
-        rows = await self.db_pool.simple_select_many_batch(
+        return await self.db_pool.simple_select_onecol(
             table="event_edges",
-            column="prev_event_id",
-            iterable=event_ids,
-            retcols=("event_id",),
+            keyvalues={"prev_event_id": event_id},
+            retcol="event_id",
             desc="get_successor_events",
         )
-
-        return [row["event_id"] for row in rows]
 
     @wrap_as_background_process("delete_old_forward_extrem_cache")
     async def _delete_old_forward_extrem_cache(self) -> None:
         def _delete_old_forward_extrem_cache_txn(txn):
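The last hunk narrows get_successor_events from an iterable of event IDs to a single one, and the simple_select_many_batch call plus list comprehension collapses into a single simple_select_onecol call. A sketch of how a hypothetical batch caller adapts (successors_of and the store handle are illustrative, not names from this commit):

from typing import Iterable, List

async def successors_of(store, event_ids: Iterable[str]) -> List[str]:
    # Before: await store.get_successor_events(event_ids) took the whole
    # iterable. After this commit the method takes one event at a time,
    # so batch callers query per event and flatten the results.
    successors: List[str] = []
    for event_id in event_ids:
        successors.extend(await store.get_successor_events(event_id))
    return successors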