Merge remote-tracking branch 'origin/release-v1.62' into develop

Patrick Cloke 2022-06-30 13:27:24 -04:00
commit b0366853ca
6 changed files with 64 additions and 33 deletions

changelog.d/13140.bugfix (new file)

@@ -0,0 +1 @@
Fix unread counts for users on large servers. Introduced in v1.62.0rc1.

changelog.d/13141.bugfix (new file)

@@ -0,0 +1 @@
Fix DB performance when deleting old push notifications. Introduced in v1.62.0rc1.

@@ -58,9 +58,7 @@ from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore
 from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore
 from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore
 from synapse.storage.databases.main.end_to_end_keys import EndToEndKeyBackgroundStore
-from synapse.storage.databases.main.event_push_actions import (
-    EventPushActionsWorkerStore,
-)
+from synapse.storage.databases.main.event_push_actions import EventPushActionsStore
 from synapse.storage.databases.main.events_bg_updates import (
     EventsBackgroundUpdatesStore,
 )
@@ -202,7 +200,7 @@ R = TypeVar("R")
 class Store(
-    EventPushActionsWorkerStore,
+    EventPushActionsStore,
     ClientIpBackgroundUpdateStore,
     DeviceInboxBackgroundUpdateStore,
    DeviceBackgroundUpdateStore,

@@ -864,18 +864,20 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBaseStore):
         limit = 100

-        min_stream_id = self.db_pool.simple_select_one_onecol_txn(
+        min_receipts_stream_id = self.db_pool.simple_select_one_onecol_txn(
             txn,
             table="event_push_summary_last_receipt_stream_id",
             keyvalues={},
             retcol="stream_id",
         )
+        max_receipts_stream_id = self._receipts_id_gen.get_current_token()

         sql = """
             SELECT r.stream_id, r.room_id, r.user_id, e.stream_ordering
             FROM receipts_linearized AS r
             INNER JOIN events AS e USING (event_id)
-            WHERE r.stream_id > ? AND user_id LIKE ?
+            WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ?
             ORDER BY r.stream_id ASC
             LIMIT ?
         """
@@ -887,13 +889,21 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBaseStore):
         txn.execute(
             sql,
             (
-                min_stream_id,
+                min_receipts_stream_id,
+                max_receipts_stream_id,
                 user_filter,
                 limit,
             ),
         )
         rows = txn.fetchall()

+        old_rotate_stream_ordering = self.db_pool.simple_select_one_onecol_txn(
+            txn,
+            table="event_push_summary_stream_ordering",
+            keyvalues={},
+            retcol="stream_ordering",
+        )
+
         # For each new read receipt we delete push actions from before it and
         # recalculate the summary.
         for _, room_id, user_id, stream_ordering in rows:
@@ -912,13 +922,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBaseStore):
                 (room_id, user_id, stream_ordering),
             )

-            old_rotate_stream_ordering = self.db_pool.simple_select_one_onecol_txn(
-                txn,
-                table="event_push_summary_stream_ordering",
-                keyvalues={},
-                retcol="stream_ordering",
-            )
-
             notif_count, unread_count = self._get_notif_unread_count_for_user_room(
                 txn, room_id, user_id, stream_ordering, old_rotate_stream_ordering
             )
@@ -937,18 +940,19 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBaseStore):
         # We always update `event_push_summary_last_receipt_stream_id` to
         # ensure that we don't rescan the same receipts for remote users.
-        #
-        # This requires repeatable read to be safe, as we need the
-        # `MAX(stream_id)` to not include any new rows that have been committed
-        # since the start of the transaction (since those rows won't have been
-        # returned by the query above). Alternatively we could query the max
-        # stream ID at the start of the transaction and bound everything by
-        # that.
-        txn.execute(
-            """
-            UPDATE event_push_summary_last_receipt_stream_id
-            SET stream_id = (SELECT COALESCE(MAX(stream_id), 0) FROM receipts_linearized)
-            """
-        )
+        upper_limit = max_receipts_stream_id
+        if len(rows) >= limit:
+            # If we pulled out a limited number of rows we only update the
+            # position to the last receipt we processed, so we continue
+            # processing the rest next iteration.
+            upper_limit = rows[-1][0]
+
+        self.db_pool.simple_update_txn(
+            txn,
+            table="event_push_summary_last_receipt_stream_id",
+            keyvalues={},
+            updatevalues={"stream_id": upper_limit},
+        )

         return len(rows) < limit
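The rewritten bookkeeping above bounds each pass to at most `limit` receipts between the stored position and the receipt stream's current maximum, and only advances the stored position past rows that were actually processed, so correctness no longer depends on repeatable-read isolation. A minimal standalone sketch of that cursor-advance pattern (hypothetical names, an in-memory list standing in for `receipts_linearized`; not code from this commit):

from typing import List, Tuple


def process_batch(
    receipts: List[int], cursor: int, max_id: int, limit: int
) -> Tuple[int, bool]:
    """One iteration: handle up to `limit` receipts in (cursor, max_id]."""
    rows = sorted(r for r in receipts if cursor < r <= max_id)[:limit]
    # ... delete old push actions and recalculate summaries for each row ...
    upper_limit = max_id
    if len(rows) >= limit:
        # A full page means the window may not be drained yet, so only move
        # the cursor past the last receipt actually handled; the remainder is
        # picked up on the next iteration.
        upper_limit = rows[-1]
    return upper_limit, len(rows) < limit


# Repeated calls drain the window without skipping unprocessed receipts.
cursor, done = 0, False
while not done:
    cursor, done = process_batch(list(range(1, 251)), cursor, max_id=250, limit=100)
assert cursor == 250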
@@ -1199,6 +1203,16 @@ class EventPushActionsStore(EventPushActionsWorkerStore):
             where_clause="highlight=1",
         )

+        # Add index to make deleting old push actions faster.
+        self.db_pool.updates.register_background_index_update(
+            "event_push_actions_stream_highlight_index",
+            index_name="event_push_actions_stream_highlight_index",
+            table="event_push_actions",
+            columns=["highlight", "stream_ordering"],
+            where_clause="highlight=0",
+            psql_only=True,
+        )
+
     async def get_push_actions_for_user(
         self,
         user_id: str,

@@ -0,0 +1,19 @@
/* Copyright 2022 The Matrix.org Foundation C.I.C
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-- Add an index to `event_push_actions` to make deleting old non-highlight push
-- actions faster.
INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
(7202, 'event_push_actions_stream_highlight_index', '{}');
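The schema delta above only queues the background update; the index itself is built later by the background updater, and only on PostgreSQL (`psql_only=True`). Because it is partial (`WHERE highlight = 0`), it covers just the non-highlight rows that the old-push-action deletion scans. Roughly what that build amounts to, as an illustrative standalone sketch with psycopg2 (placeholder connection string; not Synapse's updater code):

import psycopg2


def create_push_actions_index(conninfo: str) -> None:
    conn = psycopg2.connect(conninfo)
    # CREATE INDEX CONCURRENTLY cannot run inside a transaction block.
    conn.autocommit = True
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                CREATE INDEX CONCURRENTLY IF NOT EXISTS
                    event_push_actions_stream_highlight_index
                ON event_push_actions (highlight, stream_ordering)
                WHERE highlight = 0
                """
            )
    finally:
        conn.close()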

@@ -136,15 +136,12 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
             last_read_stream_ordering[0] = stream

             self.get_success(
-                self.store.db_pool.runInteraction(
-                    "",
-                    self.store._insert_linearized_receipt_txn,
+                self.store.insert_receipt(
                     room_id,
                     "m.read",
-                    user_id,
-                    f"$test{stream}:example.com",
-                    {},
-                    stream,
+                    user_id=user_id,
+                    event_ids=[f"$test{stream}:example.com"],
+                    data={},
                 )
             )
@@ -168,6 +165,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase):
         _inject_actions(6, PlAIN_NOTIF)
         _rotate(7)
+        _assert_counts(1, 0)

         self.get_success(
             self.store.db_pool.simple_delete(