Small speed up joining large remote rooms (#9825)

There are a couple of points in `persist_events` where we are doing a
query per event in series, which we can replace with batched queries.
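
Both changes have the same shape: gather the per-event parameters in Python and hand them to the database in one batched call, rather than a round trip per event. A minimal sketch of that pattern, assuming a plain sqlite3 connection and an illustrative table layout rather than Synapse's `db_pool`/`txn` wrappers:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE redactions (redacts TEXT, have_censored BOOLEAN)")
conn.executemany(
    "INSERT INTO redactions VALUES (?, ?)",
    [("$event1:hs", True), ("$event2:hs", True)],
)

unredacted_event_ids = ["$event1:hs", "$event2:hs"]

# Before: one UPDATE per event, executed in series.
for event_id in unredacted_event_ids:
    conn.execute(
        "UPDATE redactions SET have_censored = ? WHERE redacts = ?",
        (False, event_id),
    )

# After: give the driver the whole parameter sequence in a single call,
# analogous to the txn.execute_batch call in the diff below.
conn.executemany(
    "UPDATE redactions SET have_censored = ? WHERE redacts = ?",
    ((False, event_id) for event_id in unredacted_event_ids),
)
conn.commit()
```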
Erik Johnston 2021-04-16 14:44:55 +01:00 committed by GitHub
parent 5a153772c1
commit 601b893352
2 changed files with 34 additions and 21 deletions

changelog.d/9825.misc (new file, 1 addition)

@@ -0,0 +1 @@
+Small speed up for joining large remote rooms.

synapse/storage/databases/main/events.py

@@ -1378,16 +1378,20 @@ class PersistEventsStore:
             ],
         )
 
-        for event, _ in events_and_contexts:
-            if not event.internal_metadata.is_redacted():
-                # If we're persisting an unredacted event we go and ensure
-                # that we mark any redactions that reference this event as
-                # requiring censoring.
-                self.db_pool.simple_update_txn(
-                    txn,
-                    table="redactions",
-                    keyvalues={"redacts": event.event_id},
-                    updatevalues={"have_censored": False},
-                )
+        # If we're persisting an unredacted event we go and ensure
+        # that we mark any redactions that reference this event as
+        # requiring censoring.
+        sql = "UPDATE redactions SET have_censored = ? WHERE redacts = ?"
+        txn.execute_batch(
+            sql,
+            (
+                (
+                    False,
+                    event.event_id,
+                )
+                for event, _ in events_and_contexts
+                if not event.internal_metadata.is_redacted()
+            ),
+        )
 
         state_events_and_contexts = [
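
The hunk above replaces the per-event `simple_update_txn` loop with a single `txn.execute_batch` call fed by a generator of `(False, event_id)` parameter tuples, filtered to events that are not already redacted; the explanatory comment moves up because the per-event loop no longer exists.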
@@ -1881,18 +1885,26 @@ class PersistEventsStore:
             ),
         )
 
-        for event, _ in events_and_contexts:
-            user_ids = self.db_pool.simple_select_onecol_txn(
+        room_to_event_ids = {}  # type: Dict[str, List[str]]
+        for e, _ in events_and_contexts:
+            room_to_event_ids.setdefault(e.room_id, []).append(e.event_id)
+
+        for room_id, event_ids in room_to_event_ids.items():
+            rows = self.db_pool.simple_select_many_txn(
                 txn,
                 table="event_push_actions_staging",
-                keyvalues={"event_id": event.event_id},
-                retcol="user_id",
+                column="event_id",
+                iterable=event_ids,
+                keyvalues={},
+                retcols=("user_id",),
             )
 
-            for uid in user_ids:
+            user_ids = {row["user_id"] for row in rows}
+
+            for user_id in user_ids:
                 txn.call_after(
                     self.store.get_unread_event_push_actions_by_room_for_user.invalidate_many,
-                    (event.room_id, uid),
+                    (room_id, user_id),
                 )
 
         # Now we delete the staging area for *all* events that were being
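
The second hunk applies the same idea to the push-actions lookup: instead of one `simple_select_onecol_txn` query per event, event IDs are bucketed by room and a single `simple_select_many_txn` (IN-list) query is issued per room, with the returned user IDs deduplicated before the cache invalidations are scheduled. A minimal sketch of that bucketing pattern, again assuming a plain sqlite3 connection and illustrative identifiers rather than Synapse's database helpers:

```python
import sqlite3
from typing import Dict, List

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE event_push_actions_staging (event_id TEXT, user_id TEXT)"
)
conn.executemany(
    "INSERT INTO event_push_actions_staging VALUES (?, ?)",
    [("$a:hs", "@alice:hs"), ("$b:hs", "@bob:hs"), ("$c:hs", "@alice:hs")],
)

# (event_id, room_id) pairs standing in for events_and_contexts.
events = [("$a:hs", "!room1:hs"), ("$b:hs", "!room1:hs"), ("$c:hs", "!room2:hs")]

# Bucket event IDs by room so we issue one query per room, not per event.
room_to_event_ids: Dict[str, List[str]] = {}
for event_id, room_id in events:
    room_to_event_ids.setdefault(room_id, []).append(event_id)

for room_id, event_ids in room_to_event_ids.items():
    placeholders = ", ".join("?" for _ in event_ids)
    rows = conn.execute(
        "SELECT user_id FROM event_push_actions_staging"
        f" WHERE event_id IN ({placeholders})",
        event_ids,
    ).fetchall()
    # Deduplicate: each (room_id, user_id) pair only needs one cache
    # invalidation, however many of the room's events it appears against.
    user_ids = {user_id for (user_id,) in rows}
    print(room_id, sorted(user_ids))
```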