2020-07-01 06:08:25 -04:00
|
|
|
# Copyright 2015 OpenMarket Ltd
|
|
|
|
# Copyright 2018 New Vector Ltd
|
2015-12-10 12:51:15 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
import logging
|
2020-09-02 12:19:37 -04:00
|
|
|
from typing import Dict, List, Optional, Tuple, Union
|
|
|
|
|
|
|
|
import attr
|
2018-06-28 09:49:57 -04:00
|
|
|
|
2020-10-09 07:37:51 -04:00
|
|
|
from synapse.metrics.background_process_metrics import wrap_as_background_process
|
2020-10-02 10:20:45 -04:00
|
|
|
from synapse.storage._base import SQLBaseStore, db_to_json
|
2020-10-09 07:37:51 -04:00
|
|
|
from synapse.storage.database import DatabasePool, LoggingTransaction
|
2020-08-07 08:02:55 -04:00
|
|
|
from synapse.util import json_encoder
|
2020-08-14 07:24:26 -04:00
|
|
|
from synapse.util.caches.descriptors import cached
|
2018-04-28 07:19:12 -04:00
|
|
|
|
2015-12-10 12:51:15 -05:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2017-02-14 11:54:37 -05:00
|
|
|
DEFAULT_NOTIF_ACTION = ["notify", {"set_tweak": "highlight", "value": False}]
|
|
|
|
DEFAULT_HIGHLIGHT_ACTION = [
|
2019-04-03 05:07:29 -04:00
|
|
|
"notify",
|
|
|
|
{"set_tweak": "sound", "value": "default"},
|
|
|
|
{"set_tweak": "highlight"},
|
2017-02-14 11:37:25 -05:00
|
|
|
]
|
|
|
|
|
|
|
|
|
|
|
|
def _serialize_action(actions, is_highlight):
|
2017-02-14 11:54:37 -05:00
|
|
|
"""Custom serializer for actions. This allows us to "compress" common actions.
|
|
|
|
|
|
|
|
We use the fact that most users have the same actions for notifs (and for
|
2017-02-16 10:03:36 -05:00
|
|
|
highlights).
|
|
|
|
We store these default actions as the empty string rather than the full JSON.
|
|
|
|
Since the empty string isn't valid JSON there is no risk of this clashing with
|
|
|
|
any real JSON actions
|
2017-02-14 11:54:37 -05:00
|
|
|
"""
|
2017-02-14 11:37:25 -05:00
|
|
|
if is_highlight:
|
2017-02-14 11:54:37 -05:00
|
|
|
if actions == DEFAULT_HIGHLIGHT_ACTION:
|
|
|
|
return "" # We use empty string as the column is non-NULL
|
2017-02-14 11:37:25 -05:00
|
|
|
else:
|
2017-02-14 11:54:37 -05:00
|
|
|
if actions == DEFAULT_NOTIF_ACTION:
|
2017-02-14 11:37:25 -05:00
|
|
|
return ""
|
2020-08-07 08:02:55 -04:00
|
|
|
return json_encoder.encode(actions)
|
2017-02-14 11:37:25 -05:00
|
|
|
|
|
|
|
|
|
|
|
def _deserialize_action(actions, is_highlight):
|
2017-02-14 11:54:37 -05:00
|
|
|
"""Custom deserializer for actions. This allows us to "compress" common actions"""
|
2017-02-14 11:37:25 -05:00
|
|
|
if actions:
|
2020-07-16 11:32:19 -04:00
|
|
|
return db_to_json(actions)
|
2017-02-14 11:37:25 -05:00
|
|
|
|
|
|
|
if is_highlight:
|
2017-02-14 11:54:37 -05:00
|
|
|
return DEFAULT_HIGHLIGHT_ACTION
|
2017-02-14 11:37:25 -05:00
|
|
|
else:
|
2017-02-14 11:54:37 -05:00
|
|
|
return DEFAULT_NOTIF_ACTION
|
2017-02-14 11:37:25 -05:00
|
|
|
|
|
|
|
|
2018-02-21 06:01:13 -05:00
|
|
|
class EventPushActionsWorkerStore(SQLBaseStore):
|
2020-08-05 16:38:57 -04:00
|
|
|
    def __init__(self, database: DatabasePool, db_conn, hs):
        """Initialise stream-ordering markers and periodic background jobs.

        Args:
            database: the database pool this store runs against.
            db_conn: a startup-time connection, usable before the pool is live.
            hs: the HomeServer; config and clock are read from it.
        """
        super().__init__(database, db_conn, hs)

        # These get correctly set by _find_stream_orderings_for_times_txn
        self.stream_ordering_month_ago = None
        self.stream_ordering_day_ago = None

        # Compute the markers synchronously once at startup, so they are
        # populated before the first looping call fires.
        cur = db_conn.cursor(txn_name="_find_stream_orderings_for_times_txn")
        self._find_stream_orderings_for_times_txn(cur)
        cur.close()

        # Refresh the day/month-ago markers every 10 minutes.
        self.find_stream_orderings_looping_call = self._clock.looping_call(
            self._find_stream_orderings_for_times, 10 * 60 * 1000
        )

        # Delay between rotation batches (passed to clock.sleep in
        # _rotate_notifs — presumably seconds; TODO confirm against Clock).
        self._rotate_delay = 3
        # Batch size for rotation — consumed by _rotate_notifs_txn (not shown
        # in this chunk).
        self._rotate_count = 10000
        # Guards against overlapping rotation runs (see _rotate_notifs).
        self._doing_notif_rotation = False
        if hs.config.run_background_tasks:
            # Rotate old push actions into event_push_summary every 30 minutes.
            self._rotate_notif_loop = self._clock.looping_call(
                self._rotate_notifs, 30 * 60 * 1000
            )
|
2018-03-01 09:05:41 -05:00
|
|
|
|
2020-08-14 07:24:26 -04:00
|
|
|
@cached(num_args=3, tree=True, max_entries=5000)
|
|
|
|
async def get_unread_event_push_actions_by_room_for_user(
|
2020-09-02 12:19:37 -04:00
|
|
|
self,
|
|
|
|
room_id: str,
|
|
|
|
user_id: str,
|
|
|
|
last_read_event_id: Optional[str],
|
|
|
|
) -> Dict[str, int]:
|
|
|
|
"""Get the notification count, the highlight count and the unread message count
|
|
|
|
for a given user in a given room after the given read receipt.
|
|
|
|
|
|
|
|
Note that this function assumes the user to be a current member of the room,
|
|
|
|
since it's either called by the sync handler to handle joined room entries, or by
|
|
|
|
the HTTP pusher to calculate the badge of unread joined rooms.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
room_id: The room to retrieve the counts in.
|
|
|
|
user_id: The user to retrieve the counts for.
|
|
|
|
last_read_event_id: The event associated with the latest read receipt for
|
|
|
|
this user in this room. None if no receipt for this user in this room.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
A dict containing the counts mentioned earlier in this docstring,
|
|
|
|
respectively under the keys "notify_count", "highlight_count" and
|
|
|
|
"unread_count".
|
|
|
|
"""
|
2020-08-14 07:24:26 -04:00
|
|
|
return await self.db_pool.runInteraction(
|
2017-02-03 13:12:53 -05:00
|
|
|
"get_unread_event_push_actions_by_room",
|
|
|
|
self._get_unread_counts_by_receipt_txn,
|
2019-04-03 05:07:29 -04:00
|
|
|
room_id,
|
|
|
|
user_id,
|
|
|
|
last_read_event_id,
|
2017-02-03 13:12:53 -05:00
|
|
|
)
|
2016-07-04 14:44:55 -04:00
|
|
|
|
2019-04-03 05:07:29 -04:00
|
|
|
def _get_unread_counts_by_receipt_txn(
|
2020-09-02 12:19:37 -04:00
|
|
|
self,
|
|
|
|
txn,
|
|
|
|
room_id,
|
|
|
|
user_id,
|
|
|
|
last_read_event_id,
|
2019-04-03 05:07:29 -04:00
|
|
|
):
|
2020-09-02 12:19:37 -04:00
|
|
|
stream_ordering = None
|
|
|
|
|
|
|
|
if last_read_event_id is not None:
|
|
|
|
stream_ordering = self.get_stream_id_for_event_txn(
|
|
|
|
txn,
|
|
|
|
last_read_event_id,
|
|
|
|
allow_none=True,
|
|
|
|
)
|
|
|
|
|
|
|
|
if stream_ordering is None:
|
|
|
|
# Either last_read_event_id is None, or it's an event we don't have (e.g.
|
|
|
|
# because it's been purged), in which case retrieve the stream ordering for
|
|
|
|
# the latest membership event from this user in this room (which we assume is
|
|
|
|
# a join).
|
|
|
|
event_id = self.db_pool.simple_select_one_onecol_txn(
|
|
|
|
txn=txn,
|
|
|
|
table="local_current_membership",
|
|
|
|
keyvalues={"room_id": room_id, "user_id": user_id},
|
|
|
|
retcol="event_id",
|
|
|
|
)
|
2016-11-23 10:57:04 -05:00
|
|
|
|
2020-09-02 12:19:37 -04:00
|
|
|
stream_ordering = self.get_stream_id_for_event_txn(txn, event_id)
|
2016-11-23 10:57:04 -05:00
|
|
|
|
2017-02-03 13:12:53 -05:00
|
|
|
return self._get_unread_counts_by_pos_txn(
|
2018-05-11 10:30:11 -04:00
|
|
|
txn, room_id, user_id, stream_ordering
|
2017-02-03 13:12:53 -05:00
|
|
|
)
|
2016-11-23 10:57:04 -05:00
|
|
|
|
2018-05-11 10:30:11 -04:00
|
|
|
def _get_unread_counts_by_pos_txn(self, txn, room_id, user_id, stream_ordering):
|
2017-02-03 13:12:53 -05:00
|
|
|
sql = (
|
2020-09-02 12:19:37 -04:00
|
|
|
"SELECT"
|
|
|
|
" COUNT(CASE WHEN notif = 1 THEN 1 END),"
|
|
|
|
" COUNT(CASE WHEN highlight = 1 THEN 1 END),"
|
|
|
|
" COUNT(CASE WHEN unread = 1 THEN 1 END)"
|
2017-02-03 13:12:53 -05:00
|
|
|
" FROM event_push_actions ea"
|
2020-09-02 12:19:37 -04:00
|
|
|
" WHERE user_id = ?"
|
|
|
|
" AND room_id = ?"
|
|
|
|
" AND stream_ordering > ?"
|
2018-05-11 10:30:11 -04:00
|
|
|
)
|
2020-06-10 12:54:33 -04:00
|
|
|
|
2020-07-01 06:08:25 -04:00
|
|
|
txn.execute(sql, (user_id, room_id, stream_ordering))
|
|
|
|
row = txn.fetchone()
|
2020-09-02 12:19:37 -04:00
|
|
|
|
|
|
|
(notif_count, highlight_count, unread_count) = (0, 0, 0)
|
|
|
|
|
|
|
|
if row:
|
|
|
|
(notif_count, highlight_count, unread_count) = row
|
2017-02-03 13:12:53 -05:00
|
|
|
|
2019-04-03 05:07:29 -04:00
|
|
|
txn.execute(
|
|
|
|
"""
|
2020-09-02 12:19:37 -04:00
|
|
|
SELECT notif_count, unread_count FROM event_push_summary
|
|
|
|
WHERE room_id = ? AND user_id = ? AND stream_ordering > ?
|
|
|
|
""",
|
2019-04-03 05:07:29 -04:00
|
|
|
(room_id, user_id, stream_ordering),
|
|
|
|
)
|
2017-02-03 13:12:53 -05:00
|
|
|
row = txn.fetchone()
|
|
|
|
|
2020-09-02 12:19:37 -04:00
|
|
|
if row:
|
|
|
|
notif_count += row[0]
|
2020-09-07 10:15:06 -04:00
|
|
|
|
|
|
|
if row[1] is not None:
|
|
|
|
# The unread_count column of event_push_summary is NULLable, so we need
|
|
|
|
# to make sure we don't try increasing the unread counts if it's NULL
|
|
|
|
# for this row.
|
|
|
|
unread_count += row[1]
|
2020-09-02 12:19:37 -04:00
|
|
|
|
|
|
|
return {
|
|
|
|
"notify_count": notif_count,
|
|
|
|
"unread_count": unread_count,
|
|
|
|
"highlight_count": highlight_count,
|
|
|
|
}
|
2015-12-10 12:51:15 -05:00
|
|
|
|
2020-07-30 07:20:41 -04:00
|
|
|
async def get_push_action_users_in_range(
|
|
|
|
self, min_stream_ordering, max_stream_ordering
|
|
|
|
):
|
2016-04-06 10:42:15 -04:00
|
|
|
def f(txn):
|
|
|
|
sql = (
|
|
|
|
"SELECT DISTINCT(user_id) FROM event_push_actions WHERE"
|
2020-09-08 10:26:06 -04:00
|
|
|
" stream_ordering >= ? AND stream_ordering <= ? AND notif = 1"
|
2016-04-06 10:42:15 -04:00
|
|
|
)
|
|
|
|
txn.execute(sql, (min_stream_ordering, max_stream_ordering))
|
2017-03-23 13:53:49 -04:00
|
|
|
return [r[0] for r in txn]
|
2019-04-03 05:07:29 -04:00
|
|
|
|
2020-08-05 16:38:57 -04:00
|
|
|
ret = await self.db_pool.runInteraction("get_push_action_users_in_range", f)
|
2019-07-23 09:00:55 -04:00
|
|
|
return ret
|
2016-04-06 10:42:15 -04:00
|
|
|
|
2020-07-30 07:20:41 -04:00
|
|
|
    async def get_unread_push_actions_for_user_in_range_for_http(
        self,
        user_id: str,
        min_stream_ordering: int,
        max_stream_ordering: int,
        limit: int = 20,
    ) -> List[dict]:
        """Get a list of the most recent unread push actions for a given user,
        within the given stream ordering range. Called by the httppusher.

        Args:
            user_id: The user to fetch push actions for.
            min_stream_ordering: The exclusive lower bound on the
                stream ordering of event push actions to fetch.
            max_stream_ordering: The inclusive upper bound on the
                stream ordering of event push actions to fetch.
            limit: The maximum number of rows to return.
        Returns:
            A list of dicts with the keys "event_id", "room_id", "stream_ordering", "actions".
            The list will be ordered by ascending stream_ordering.
            The list will have between 0~limit entries.
        """
        # find rooms that have a read receipt in them and return the next
        # push actions
        def get_after_receipt(txn):
            # find rooms that have a read receipt in them and return the next
            # push actions
            sql = (
                "SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions,"
                " ep.highlight "
                " FROM ("
                "   SELECT room_id,"
                "       MAX(stream_ordering) as stream_ordering"
                "   FROM events"
                "   INNER JOIN receipts_linearized USING (room_id, event_id)"
                "   WHERE receipt_type = 'm.read' AND user_id = ?"
                "   GROUP BY room_id"
                ") AS rl,"
                " event_push_actions AS ep"
                " WHERE"
                "   ep.room_id = rl.room_id"
                "   AND ep.stream_ordering > rl.stream_ordering"
                "   AND ep.user_id = ?"
                "   AND ep.stream_ordering > ?"
                "   AND ep.stream_ordering <= ?"
                "   AND ep.notif = 1"
                " ORDER BY ep.stream_ordering ASC LIMIT ?"
            )
            args = [user_id, user_id, min_stream_ordering, max_stream_ordering, limit]
            txn.execute(sql, args)
            return txn.fetchall()

        after_read_receipt = await self.db_pool.runInteraction(
            "get_unread_push_actions_for_user_in_range_http_arr", get_after_receipt
        )

        # There are rooms with push actions in them but you don't have a read receipt in
        # them e.g. rooms you've been invited to, so get push actions for rooms which do
        # not have read receipts in them too.
        def get_no_receipt(txn):
            sql = (
                "SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions,"
                " ep.highlight "
                " FROM event_push_actions AS ep"
                " INNER JOIN events AS e USING (room_id, event_id)"
                " WHERE"
                "   ep.room_id NOT IN ("
                "     SELECT room_id FROM receipts_linearized"
                "       WHERE receipt_type = 'm.read' AND user_id = ?"
                "       GROUP BY room_id"
                "   )"
                "   AND ep.user_id = ?"
                "   AND ep.stream_ordering > ?"
                "   AND ep.stream_ordering <= ?"
                "   AND ep.notif = 1"
                " ORDER BY ep.stream_ordering ASC LIMIT ?"
            )
            args = [user_id, user_id, min_stream_ordering, max_stream_ordering, limit]
            txn.execute(sql, args)
            return txn.fetchall()

        no_read_receipt = await self.db_pool.runInteraction(
            "get_unread_push_actions_for_user_in_range_http_nrr", get_no_receipt
        )

        notifs = [
            {
                "event_id": row[0],
                "room_id": row[1],
                "stream_ordering": row[2],
                # row[4] is the highlight flag; it selects which default action
                # set applies when the stored actions are the "" sentinel
                # (see _deserialize_action).
                "actions": _deserialize_action(row[3], row[4]),
            }
            for row in after_read_receipt + no_read_receipt
        ]

        # Now sort it so it's ordered correctly, since currently it will
        # contain results from the first query, correctly ordered, followed
        # by results from the second query, but we want them all ordered
        # by stream_ordering, oldest first.
        notifs.sort(key=lambda r: r["stream_ordering"])

        # Take only up to the limit. We have to stop at the limit because
        # one of the subqueries may have hit the limit.
        return notifs[:limit]
|
2016-07-28 15:24:24 -04:00
|
|
|
|
2020-07-30 07:20:41 -04:00
|
|
|
    async def get_unread_push_actions_for_user_in_range_for_email(
        self,
        user_id: str,
        min_stream_ordering: int,
        max_stream_ordering: int,
        limit: int = 20,
    ) -> List[dict]:
        """Get a list of the most recent unread push actions for a given user,
        within the given stream ordering range. Called by the emailpusher

        Args:
            user_id: The user to fetch push actions for.
            min_stream_ordering: The exclusive lower bound on the
                stream ordering of event push actions to fetch.
            max_stream_ordering: The inclusive upper bound on the
                stream ordering of event push actions to fetch.
            limit: The maximum number of rows to return.
        Returns:
            A list of dicts with the keys "event_id", "room_id", "stream_ordering", "actions", "received_ts".
            The list will be ordered by descending received_ts.
            The list will have between 0~limit entries.
        """
        # find rooms that have a read receipt in them and return the most recent
        # push actions
        def get_after_receipt(txn):
            sql = (
                "SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions,"
                " ep.highlight, e.received_ts"
                " FROM ("
                "   SELECT room_id,"
                "       MAX(stream_ordering) as stream_ordering"
                "   FROM events"
                "   INNER JOIN receipts_linearized USING (room_id, event_id)"
                "   WHERE receipt_type = 'm.read' AND user_id = ?"
                "   GROUP BY room_id"
                ") AS rl,"
                " event_push_actions AS ep"
                " INNER JOIN events AS e USING (room_id, event_id)"
                " WHERE"
                "   ep.room_id = rl.room_id"
                "   AND ep.stream_ordering > rl.stream_ordering"
                "   AND ep.user_id = ?"
                "   AND ep.stream_ordering > ?"
                "   AND ep.stream_ordering <= ?"
                "   AND ep.notif = 1"
                " ORDER BY ep.stream_ordering DESC LIMIT ?"
            )
            args = [user_id, user_id, min_stream_ordering, max_stream_ordering, limit]
            txn.execute(sql, args)
            return txn.fetchall()

        after_read_receipt = await self.db_pool.runInteraction(
            "get_unread_push_actions_for_user_in_range_email_arr", get_after_receipt
        )

        # There are rooms with push actions in them but you don't have a read receipt in
        # them e.g. rooms you've been invited to, so get push actions for rooms which do
        # not have read receipts in them too.
        def get_no_receipt(txn):
            sql = (
                "SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions,"
                " ep.highlight, e.received_ts"
                " FROM event_push_actions AS ep"
                " INNER JOIN events AS e USING (room_id, event_id)"
                " WHERE"
                "   ep.room_id NOT IN ("
                "     SELECT room_id FROM receipts_linearized"
                "       WHERE receipt_type = 'm.read' AND user_id = ?"
                "       GROUP BY room_id"
                "   )"
                "   AND ep.user_id = ?"
                "   AND ep.stream_ordering > ?"
                "   AND ep.stream_ordering <= ?"
                "   AND ep.notif = 1"
                " ORDER BY ep.stream_ordering DESC LIMIT ?"
            )
            args = [user_id, user_id, min_stream_ordering, max_stream_ordering, limit]
            txn.execute(sql, args)
            return txn.fetchall()

        no_read_receipt = await self.db_pool.runInteraction(
            "get_unread_push_actions_for_user_in_range_email_nrr", get_no_receipt
        )

        # Make a list of dicts from the two sets of results.
        notifs = [
            {
                "event_id": row[0],
                "room_id": row[1],
                "stream_ordering": row[2],
                # row[4] is the highlight flag; it selects which default action
                # set applies when the stored actions are the "" sentinel
                # (see _deserialize_action).
                "actions": _deserialize_action(row[3], row[4]),
                "received_ts": row[5],
            }
            for row in after_read_receipt + no_read_receipt
        ]

        # Now sort it so it's ordered correctly, since currently it will
        # contain results from the first query, correctly ordered, followed
        # by results from the second query, but we want them all ordered
        # by received_ts (most recent first)
        notifs.sort(key=lambda r: -(r["received_ts"] or 0))

        # Now return the first `limit`
        return notifs[:limit]
|
2016-04-06 10:42:15 -04:00
|
|
|
|
2020-08-28 11:34:50 -04:00
|
|
|
async def get_if_maybe_push_in_range_for_user(
|
|
|
|
self, user_id: str, min_stream_ordering: int
|
|
|
|
) -> bool:
|
2019-04-02 11:45:33 -04:00
|
|
|
"""A fast check to see if there might be something to push for the
|
|
|
|
user since the given stream ordering. May return false positives.
|
|
|
|
|
|
|
|
Useful to know whether to bother starting a pusher on start up or not.
|
|
|
|
|
|
|
|
Args:
|
2020-08-28 11:34:50 -04:00
|
|
|
user_id
|
|
|
|
min_stream_ordering
|
2019-04-02 11:45:33 -04:00
|
|
|
|
|
|
|
Returns:
|
2020-08-28 11:34:50 -04:00
|
|
|
True if there may be push to process, False if there definitely isn't.
|
2019-04-02 11:45:33 -04:00
|
|
|
"""
|
|
|
|
|
|
|
|
def _get_if_maybe_push_in_range_for_user_txn(txn):
|
|
|
|
sql = """
|
|
|
|
SELECT 1 FROM event_push_actions
|
2020-09-02 12:19:37 -04:00
|
|
|
WHERE user_id = ? AND stream_ordering > ? AND notif = 1
|
2019-04-02 11:45:33 -04:00
|
|
|
LIMIT 1
|
|
|
|
"""
|
|
|
|
|
2019-04-03 05:07:29 -04:00
|
|
|
txn.execute(sql, (user_id, min_stream_ordering))
|
2019-04-02 11:45:33 -04:00
|
|
|
return bool(txn.fetchone())
|
|
|
|
|
2020-08-28 11:34:50 -04:00
|
|
|
return await self.db_pool.runInteraction(
|
2019-04-02 11:45:33 -04:00
|
|
|
"get_if_maybe_push_in_range_for_user",
|
|
|
|
_get_if_maybe_push_in_range_for_user_txn,
|
|
|
|
)
|
|
|
|
|
2020-08-28 11:34:50 -04:00
|
|
|
async def add_push_actions_to_staging(
|
2020-09-02 12:19:37 -04:00
|
|
|
self,
|
|
|
|
event_id: str,
|
|
|
|
user_id_actions: Dict[str, List[Union[dict, str]]],
|
|
|
|
count_as_unread: bool,
|
2020-08-28 11:34:50 -04:00
|
|
|
) -> None:
|
2018-02-27 07:01:36 -05:00
|
|
|
"""Add the push actions for the event to the push action staging area.
|
|
|
|
|
|
|
|
Args:
|
2020-08-28 11:34:50 -04:00
|
|
|
event_id
|
|
|
|
user_id_actions: A mapping of user_id to list of push actions, where
|
|
|
|
an action can either be a string or dict.
|
2020-09-02 12:19:37 -04:00
|
|
|
count_as_unread: Whether this event should increment unread counts.
|
2018-02-27 07:01:36 -05:00
|
|
|
"""
|
|
|
|
if not user_id_actions:
|
|
|
|
return
|
|
|
|
|
|
|
|
# This is a helper function for generating the necessary tuple that
|
2020-09-02 12:19:37 -04:00
|
|
|
# can be used to insert into the `event_push_actions_staging` table.
|
2018-02-27 07:01:36 -05:00
|
|
|
def _gen_entry(user_id, actions):
|
|
|
|
is_highlight = 1 if _action_has_highlight(actions) else 0
|
2020-09-02 12:19:37 -04:00
|
|
|
notif = 1 if "notify" in actions else 0
|
2018-02-27 07:01:36 -05:00
|
|
|
return (
|
|
|
|
event_id, # event_id column
|
|
|
|
user_id, # user_id column
|
|
|
|
_serialize_action(actions, is_highlight), # actions column
|
2020-09-02 12:19:37 -04:00
|
|
|
notif, # notif column
|
2018-02-27 07:01:36 -05:00
|
|
|
is_highlight, # highlight column
|
2020-09-02 12:19:37 -04:00
|
|
|
int(count_as_unread), # unread column
|
2018-02-27 07:01:36 -05:00
|
|
|
)
|
|
|
|
|
|
|
|
def _add_push_actions_to_staging_txn(txn):
|
2019-12-04 05:15:55 -05:00
|
|
|
# We don't use simple_insert_many here to avoid the overhead
|
2018-02-27 07:01:36 -05:00
|
|
|
# of generating lists of dicts.
|
|
|
|
|
|
|
|
sql = """
|
|
|
|
INSERT INTO event_push_actions_staging
|
2020-09-02 12:19:37 -04:00
|
|
|
(event_id, user_id, actions, notif, highlight, unread)
|
|
|
|
VALUES (?, ?, ?, ?, ?, ?)
|
2018-02-27 07:01:36 -05:00
|
|
|
"""
|
|
|
|
|
2021-01-21 09:44:12 -05:00
|
|
|
txn.execute_batch(
|
2019-04-03 05:07:29 -04:00
|
|
|
sql,
|
|
|
|
(
|
|
|
|
_gen_entry(user_id, actions)
|
2020-06-15 07:03:36 -04:00
|
|
|
for user_id, actions in user_id_actions.items()
|
2019-04-03 05:07:29 -04:00
|
|
|
),
|
|
|
|
)
|
2018-02-27 07:01:36 -05:00
|
|
|
|
2020-08-05 16:38:57 -04:00
|
|
|
return await self.db_pool.runInteraction(
|
2018-02-27 07:01:36 -05:00
|
|
|
"add_push_actions_to_staging", _add_push_actions_to_staging_txn
|
|
|
|
)
|
|
|
|
|
2020-07-30 07:20:41 -04:00
|
|
|
async def remove_push_actions_from_staging(self, event_id: str) -> None:
|
2018-02-27 07:01:36 -05:00
|
|
|
"""Called if we failed to persist the event to ensure that stale push
|
|
|
|
actions don't build up in the DB
|
|
|
|
"""
|
|
|
|
|
2018-04-27 06:07:40 -04:00
|
|
|
try:
|
2020-08-05 16:38:57 -04:00
|
|
|
res = await self.db_pool.simple_delete(
|
2018-04-27 06:07:40 -04:00
|
|
|
table="event_push_actions_staging",
|
2019-04-03 05:07:29 -04:00
|
|
|
keyvalues={"event_id": event_id},
|
2018-04-27 06:07:40 -04:00
|
|
|
desc="remove_push_actions_from_staging",
|
|
|
|
)
|
2019-07-23 09:00:55 -04:00
|
|
|
return res
|
2018-04-27 06:07:40 -04:00
|
|
|
except Exception:
|
|
|
|
# this method is called from an exception handler, so propagating
|
|
|
|
# another exception here really isn't helpful - there's nothing
|
|
|
|
# the caller can do about it. Just log the exception and move on.
|
|
|
|
logger.exception(
|
2019-04-03 05:07:29 -04:00
|
|
|
"Error removing push actions after event persistence failure"
|
2018-04-27 06:07:40 -04:00
|
|
|
)
|
2018-02-27 07:01:36 -05:00
|
|
|
|
2020-10-09 07:37:51 -04:00
|
|
|
@wrap_as_background_process("event_push_action_stream_orderings")
|
|
|
|
async def _find_stream_orderings_for_times(self) -> None:
|
|
|
|
await self.db_pool.runInteraction(
|
2018-03-01 09:05:41 -05:00
|
|
|
"_find_stream_orderings_for_times",
|
2018-07-25 04:41:12 -04:00
|
|
|
self._find_stream_orderings_for_times_txn,
|
2018-03-01 09:05:41 -05:00
|
|
|
)
|
|
|
|
|
2020-10-09 07:37:51 -04:00
|
|
|
    def _find_stream_orderings_for_times_txn(self, txn: LoggingTransaction) -> None:
        """Recompute `self.stream_ordering_month_ago` and
        `self.stream_ordering_day_ago` relative to the current clock time,
        via a binary search on the events table
        (see _find_first_stream_ordering_after_ts_txn).
        """
        logger.info("Searching for stream ordering 1 month ago")
        # 30 days expressed in milliseconds.
        self.stream_ordering_month_ago = self._find_first_stream_ordering_after_ts_txn(
            txn, self._clock.time_msec() - 30 * 24 * 60 * 60 * 1000
        )
        logger.info(
            "Found stream ordering 1 month ago: it's %d", self.stream_ordering_month_ago
        )
        logger.info("Searching for stream ordering 1 day ago")
        # 24 hours expressed in milliseconds.
        self.stream_ordering_day_ago = self._find_first_stream_ordering_after_ts_txn(
            txn, self._clock.time_msec() - 24 * 60 * 60 * 1000
        )
        logger.info(
            "Found stream ordering 1 day ago: it's %d", self.stream_ordering_day_ago
        )
|
|
|
|
|
2020-08-28 11:34:50 -04:00
|
|
|
async def find_first_stream_ordering_after_ts(self, ts: int) -> int:
|
2018-03-05 06:53:39 -05:00
|
|
|
"""Gets the stream ordering corresponding to a given timestamp.
|
|
|
|
|
|
|
|
Specifically, finds the stream_ordering of the first event that was
|
2018-03-05 06:47:48 -05:00
|
|
|
received on or after the timestamp. This is done by a binary search on
|
|
|
|
the events table, since there is no index on received_ts, so is
|
2018-03-05 06:53:39 -05:00
|
|
|
relatively slow.
|
|
|
|
|
|
|
|
Args:
|
2020-08-28 11:34:50 -04:00
|
|
|
ts: timestamp in millis
|
2018-03-05 06:53:39 -05:00
|
|
|
|
|
|
|
Returns:
|
2020-08-28 11:34:50 -04:00
|
|
|
stream ordering of the first event received on/after the timestamp
|
2018-03-01 09:05:41 -05:00
|
|
|
"""
|
2020-08-28 11:34:50 -04:00
|
|
|
return await self.db_pool.runInteraction(
|
2018-03-05 06:53:39 -05:00
|
|
|
"_find_first_stream_ordering_after_ts_txn",
|
|
|
|
self._find_first_stream_ordering_after_ts_txn,
|
|
|
|
ts,
|
|
|
|
)
|
|
|
|
|
2018-03-05 06:47:48 -05:00
|
|
|
@staticmethod
|
|
|
|
def _find_first_stream_ordering_after_ts_txn(txn, ts):
|
2018-03-01 09:05:41 -05:00
|
|
|
"""
|
2018-03-05 06:47:48 -05:00
|
|
|
Find the stream_ordering of the first event that was received on or
|
|
|
|
after a given timestamp. This is relatively slow as there is no index
|
|
|
|
on received_ts but we can then use this to delete push actions before
|
2018-03-01 09:05:41 -05:00
|
|
|
this.
|
|
|
|
|
|
|
|
received_ts must necessarily be in the same order as stream_ordering
|
|
|
|
and stream_ordering is indexed, so we manually binary search using
|
|
|
|
stream_ordering
|
2018-03-05 06:47:48 -05:00
|
|
|
|
|
|
|
Args:
|
|
|
|
txn (twisted.enterprise.adbapi.Transaction):
|
|
|
|
ts (int): timestamp to search for
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
int: stream ordering
|
2018-03-01 09:05:41 -05:00
|
|
|
"""
|
|
|
|
txn.execute("SELECT MAX(stream_ordering) FROM events")
|
|
|
|
max_stream_ordering = txn.fetchone()[0]
|
|
|
|
|
|
|
|
if max_stream_ordering is None:
|
|
|
|
return 0
|
|
|
|
|
2018-03-05 06:47:48 -05:00
|
|
|
# We want the first stream_ordering in which received_ts is greater
|
|
|
|
# than or equal to ts. Call this point X.
|
|
|
|
#
|
|
|
|
# We maintain the invariants:
|
|
|
|
#
|
|
|
|
# range_start <= X <= range_end
|
|
|
|
#
|
2018-03-01 09:05:41 -05:00
|
|
|
range_start = 0
|
2018-03-05 06:47:48 -05:00
|
|
|
range_end = max_stream_ordering + 1
|
|
|
|
|
|
|
|
# Given a stream_ordering, look up the timestamp at that
|
|
|
|
# stream_ordering.
|
|
|
|
#
|
|
|
|
# The array may be sparse (we may be missing some stream_orderings).
|
|
|
|
# We treat the gaps as the same as having the same value as the
|
|
|
|
# preceding entry, because we will pick the lowest stream_ordering
|
|
|
|
# which satisfies our requirement of received_ts >= ts.
|
|
|
|
#
|
|
|
|
# For example, if our array of events indexed by stream_ordering is
|
|
|
|
# [10, <none>, 20], we should treat this as being equivalent to
|
|
|
|
# [10, 10, 20].
|
|
|
|
#
|
2018-03-01 09:05:41 -05:00
|
|
|
sql = (
|
|
|
|
"SELECT received_ts FROM events"
|
2018-03-05 06:47:48 -05:00
|
|
|
" WHERE stream_ordering <= ?"
|
|
|
|
" ORDER BY stream_ordering DESC"
|
2018-03-01 09:05:41 -05:00
|
|
|
" LIMIT 1"
|
|
|
|
)
|
|
|
|
|
2018-03-05 06:47:48 -05:00
|
|
|
while range_end - range_start > 0:
|
|
|
|
middle = (range_end + range_start) // 2
|
2018-03-01 09:05:41 -05:00
|
|
|
txn.execute(sql, (middle,))
|
2018-03-05 06:47:48 -05:00
|
|
|
row = txn.fetchone()
|
|
|
|
if row is None:
|
|
|
|
# no rows with stream_ordering<=middle
|
|
|
|
range_start = middle + 1
|
|
|
|
continue
|
|
|
|
|
|
|
|
middle_ts = row[0]
|
2018-03-01 09:05:41 -05:00
|
|
|
if ts > middle_ts:
|
2018-03-05 06:47:48 -05:00
|
|
|
# we got a timestamp lower than the one we were looking for.
|
|
|
|
# definitely need to look higher: X > middle.
|
|
|
|
range_start = middle + 1
|
2018-03-01 09:05:41 -05:00
|
|
|
else:
|
2018-03-05 06:47:48 -05:00
|
|
|
# we got a timestamp higher than (or the same as) the one we
|
|
|
|
# were looking for. We aren't yet sure about the point we
|
|
|
|
# looked up, but we can be sure that X <= middle.
|
2018-03-01 09:05:41 -05:00
|
|
|
range_end = middle
|
|
|
|
|
|
|
|
return range_end
|
|
|
|
|
2020-07-30 07:20:41 -04:00
|
|
|
async def get_time_of_last_push_action_before(self, stream_ordering):
    """Return the received_ts of the first notifying push action strictly
    after the given stream ordering, or None if there is none.

    Args:
        stream_ordering: the stream ordering to look after.

    Returns:
        Optional[int]: the event's received_ts in milliseconds, or None.
    """

    def last_push_txn(txn):
        # Join back to `events` to recover the receive timestamp of the
        # event the push action was generated for.
        sql = (
            "SELECT e.received_ts"
            " FROM event_push_actions AS ep"
            " JOIN events e ON ep.room_id = e.room_id AND ep.event_id = e.event_id"
            " WHERE ep.stream_ordering > ? AND notif = 1"
            " ORDER BY ep.stream_ordering ASC"
            " LIMIT 1"
        )
        txn.execute(sql, (stream_ordering,))
        return txn.fetchone()

    row = await self.db_pool.runInteraction(
        "get_time_of_last_push_action_before", last_push_txn
    )
    if not row:
        return None
    return row[0]
|
|
|
|
|
2020-10-09 07:37:51 -04:00
|
|
|
@wrap_as_background_process("rotate_notifs")
|
2020-07-30 07:20:41 -04:00
|
|
|
async def _rotate_notifs(self):
|
2017-02-03 13:12:53 -05:00
|
|
|
if self._doing_notif_rotation or self.stream_ordering_day_ago is None:
|
|
|
|
return
|
|
|
|
self._doing_notif_rotation = True
|
|
|
|
|
|
|
|
try:
|
|
|
|
while True:
|
|
|
|
logger.info("Rotating notifications")
|
|
|
|
|
2020-08-05 16:38:57 -04:00
|
|
|
caught_up = await self.db_pool.runInteraction(
|
2019-04-03 05:07:29 -04:00
|
|
|
"_rotate_notifs", self._rotate_notifs_txn
|
2017-02-03 13:12:53 -05:00
|
|
|
)
|
|
|
|
if caught_up:
|
|
|
|
break
|
2020-07-30 07:20:41 -04:00
|
|
|
await self.hs.get_clock().sleep(self._rotate_delay)
|
2017-02-03 13:12:53 -05:00
|
|
|
finally:
|
|
|
|
self._doing_notif_rotation = False
|
|
|
|
|
|
|
|
def _rotate_notifs_txn(self, txn):
    """Archives older notifications into event_push_summary. Returns whether
    the archiving process has caught up or not.
    """
    # Stream ordering everything below which has already been rotated.
    last_rotated = self.db_pool.simple_select_one_onecol_txn(
        txn,
        table="event_push_summary_stream_ordering",
        keyvalues={},
        retcol="stream_ordering",
    )

    # We don't want to try and rotate millions of rows at once, so look up
    # the stream ordering `self._rotate_count` rows ahead and use that as a
    # cap for this batch.
    txn.execute(
        """
        SELECT stream_ordering FROM event_push_actions
        WHERE stream_ordering > ?
        ORDER BY stream_ordering ASC LIMIT 1 OFFSET ?
        """,
        (last_rotated, self._rotate_count),
    )
    row = txn.fetchone()

    if row is None:
        # Fewer than `_rotate_count` rows remain: rotate everything up to
        # a day ago in one go.
        rotate_to = self.stream_ordering_day_ago
        caught_up = True
    else:
        batch_limit = row[0]
        rotate_to = min(self.stream_ordering_day_ago, batch_limit)
        caught_up = batch_limit >= self.stream_ordering_day_ago

    logger.info("Rotating notifications up to: %s", rotate_to)

    self._rotate_notifs_before_txn(txn, rotate_to)

    # We have caught up iff we were limited by `stream_ordering_day_ago`
    return caught_up
|
|
|
|
|
|
|
|
def _rotate_notifs_before_txn(self, txn, rotate_to_stream_ordering):
    """Archives all push actions with stream ordering in
    [old rotation point, rotate_to_stream_ordering) into event_push_summary,
    then deletes the archived (non-highlight) rows and advances the rotation
    point.

    Args:
        txn: The database transaction.
        rotate_to_stream_ordering: The (exclusive) upper bound of stream
            orderings to rotate in this batch.
    """
    # The stream ordering we last rotated up to; this batch covers
    # [old_rotate_stream_ordering, rotate_to_stream_ordering).
    old_rotate_stream_ordering = self.db_pool.simple_select_one_onecol_txn(
        txn,
        table="event_push_summary_stream_ordering",
        keyvalues={},
        retcol="stream_ordering",
    )

    # Calculate the new counts that should be upserted into event_push_summary.
    # The query is templated twice (via the two `%s` slots below): once for
    # the unread count and once for the notification count. `old.user_id`
    # being NULL tells us there is no pre-existing summary row to update.
    sql = """
        SELECT user_id, room_id,
            coalesce(old.%s, 0) + upd.cnt,
            upd.stream_ordering,
            old.user_id
        FROM (
            SELECT user_id, room_id, count(*) as cnt,
                max(stream_ordering) as stream_ordering
            FROM event_push_actions
            WHERE ? <= stream_ordering AND stream_ordering < ?
                AND highlight = 0
                AND %s = 1
            GROUP BY user_id, room_id
        ) AS upd
        LEFT JOIN event_push_summary AS old USING (user_id, room_id)
    """

    # First get the count of unread messages.
    txn.execute(
        sql % ("unread_count", "unread"),
        (old_rotate_stream_ordering, rotate_to_stream_ordering),
    )

    # We need to merge results from the two requests (the one that retrieves the
    # unread count and the one that retrieves the notifications count) into a single
    # object because we might not have the same amount of rows in each of them. To do
    # this, we use a dict indexed on the user ID and room ID to make it easier to
    # populate.
    summaries = {}  # type: Dict[Tuple[str, str], _EventPushSummary]
    for row in txn:
        # Row layout: (user_id, room_id, count, max stream_ordering,
        # old.user_id-or-None) — see the SELECT above.
        summaries[(row[0], row[1])] = _EventPushSummary(
            unread_count=row[2],
            stream_ordering=row[3],
            old_user_id=row[4],
            notif_count=0,
        )

    # Then get the count of notifications.
    txn.execute(
        sql % ("notif_count", "notif"),
        (old_rotate_stream_ordering, rotate_to_stream_ordering),
    )

    for row in txn:
        if (row[0], row[1]) in summaries:
            summaries[(row[0], row[1])].notif_count = row[2]
        else:
            # Because the rules on notifying are different than the rules on marking
            # a message unread, we might end up with messages that notify but aren't
            # marked unread, so we might not have a summary for this (user, room)
            # tuple to complete.
            summaries[(row[0], row[1])] = _EventPushSummary(
                unread_count=0,
                stream_ordering=row[3],
                old_user_id=row[4],
                notif_count=row[2],
            )

    logger.info("Rotating notifications, handling %d rows", len(summaries))

    # If the `old.user_id` above is NULL then we know there isn't already an
    # entry in the table, so we simply insert it. Otherwise we update the
    # existing table.
    self.db_pool.simple_insert_many_txn(
        txn,
        table="event_push_summary",
        values=[
            {
                "user_id": user_id,
                "room_id": room_id,
                "notif_count": summary.notif_count,
                "unread_count": summary.unread_count,
                "stream_ordering": summary.stream_ordering,
            }
            for ((user_id, room_id), summary) in summaries.items()
            if summary.old_user_id is None
        ],
    )

    # Update the summaries that already had a row.
    txn.execute_batch(
        """
            UPDATE event_push_summary
            SET notif_count = ?, unread_count = ?, stream_ordering = ?
            WHERE user_id = ? AND room_id = ?
        """,
        (
            (
                summary.notif_count,
                summary.unread_count,
                summary.stream_ordering,
                user_id,
                room_id,
            )
            for ((user_id, room_id), summary) in summaries.items()
            if summary.old_user_id is not None
        ),
    )

    # The rotated actions are now represented in event_push_summary, so
    # delete them (highlights are kept — they are excluded from rotation).
    txn.execute(
        "DELETE FROM event_push_actions"
        " WHERE ? <= stream_ordering AND stream_ordering < ? AND highlight = 0",
        (old_rotate_stream_ordering, rotate_to_stream_ordering),
    )

    logger.info("Rotating notifications, deleted %s push actions", txn.rowcount)

    # Advance the rotation point for the next batch.
    txn.execute(
        "UPDATE event_push_summary_stream_ordering SET stream_ordering = ?",
        (rotate_to_stream_ordering,),
    )
|
|
|
|
|
2021-01-18 10:47:59 -05:00
|
|
|
def _remove_old_push_actions_before_txn(
    self, txn, room_id, user_id, stream_ordering
):
    """
    Purges old push actions for a user and room before a given
    stream_ordering.

    We however keep a month's worth of highlighted notifications, so that
    users can still get a list of recent highlights.

    Args:
        txn: The transaction
        room_id: Room ID to delete from
        user_id: user ID to delete for
        stream_ordering: The lowest stream ordering which will
            not be deleted.
    """
    # The cached per-room unread counts will be stale after the delete.
    txn.call_after(
        self.get_unread_event_push_actions_by_room_for_user.invalidate,
        (room_id, user_id),
    )

    # We need to join on the events table to get the received_ts for
    # event_push_actions and sqlite won't let us use a join in a delete so
    # we can't just delete where received_ts < x. Furthermore we can
    # only identify event_push_actions by a tuple of room_id, event_id
    # so we can't use a subquery.
    # Instead, we look up the stream ordering for the last event in that
    # room received before the threshold time and delete event_push_actions
    # in the room with a stream_ordering before that.
    txn.execute(
        "DELETE FROM event_push_actions "
        " WHERE user_id = ? AND room_id = ? AND "
        " stream_ordering <= ?"
        " AND ((stream_ordering < ? AND highlight = 1) or highlight = 0)",
        (user_id, room_id, stream_ordering, self.stream_ordering_month_ago),
    )

    # Also drop the corresponding rotated summary for this user/room.
    txn.execute(
        """
        DELETE FROM event_push_summary
        WHERE room_id = ? AND user_id = ? AND stream_ordering <= ?
        """,
        (room_id, user_id, stream_ordering),
    )
|
|
|
|
|
2016-02-03 05:50:49 -05:00
|
|
|
|
2020-10-09 07:37:51 -04:00
|
|
|
class EventPushActionsStore(EventPushActionsWorkerStore):
    EPA_HIGHLIGHT_INDEX = "epa_highlight_index"

    def __init__(self, database: DatabasePool, db_conn, hs):
        super().__init__(database, db_conn, hs)

        # Background index used to find a user's push actions by stream
        # ordering efficiently.
        self.db_pool.updates.register_background_index_update(
            self.EPA_HIGHLIGHT_INDEX,
            index_name="event_push_actions_u_highlight",
            table="event_push_actions",
            columns=["user_id", "stream_ordering"],
        )

        # Partial index covering only highlight actions, which we keep
        # around for longer than ordinary notifications.
        self.db_pool.updates.register_background_index_update(
            "event_push_actions_highlights_index",
            index_name="event_push_actions_highlights_index",
            table="event_push_actions",
            columns=["user_id", "room_id", "topological_ordering", "stream_ordering"],
            where_clause="highlight=1",
        )

    async def get_push_actions_for_user(
        self, user_id, before=None, limit=50, only_highlight=False
    ):
        """Fetch the most recent notifying push actions for a user.

        Args:
            user_id: the user to fetch actions for.
            before: if given, only return actions with a stream ordering
                strictly below this value.
            limit: maximum number of rows to return.
            only_highlight: if True, restrict to highlight actions.

        Returns:
            A list of dicts (one per push action), with "actions"
            deserialized into its full form.
        """

        def fetch_push_actions_txn(txn):
            # Assemble the optional WHERE fragments and the matching
            # positional arguments in lockstep.
            if before:
                filter_clause = "AND epa.stream_ordering < ?"
                query_args = [user_id, before, limit]
            else:
                filter_clause = ""
                query_args = [user_id, limit]

            if only_highlight:
                if len(filter_clause) > 0:
                    filter_clause += " "
                filter_clause += "AND epa.highlight = 1"

            # NB. This assumes event_ids are globally unique since
            # it makes the query easier to index
            sql = (
                "SELECT epa.event_id, epa.room_id,"
                " epa.stream_ordering, epa.topological_ordering,"
                " epa.actions, epa.highlight, epa.profile_tag, e.received_ts"
                " FROM event_push_actions epa, events e"
                " WHERE epa.event_id = e.event_id"
                " AND epa.user_id = ? %s"
                " AND epa.notif = 1"
                " ORDER BY epa.stream_ordering DESC"
                " LIMIT ?" % (filter_clause,)
            )
            txn.execute(sql, query_args)
            return self.db_pool.cursor_to_dict(txn)

        push_actions = await self.db_pool.runInteraction(
            "get_push_actions_for_user", fetch_push_actions_txn
        )
        for action in push_actions:
            # Expand the compressed on-disk form back into full actions.
            action["actions"] = _deserialize_action(
                action["actions"], action["highlight"]
            )
        return push_actions
|
|
|
|
|
|
|
|
|
2016-02-03 05:50:49 -05:00
|
|
|
def _action_has_highlight(actions):
|
|
|
|
for action in actions:
|
|
|
|
try:
|
|
|
|
if action.get("set_tweak", None) == "highlight":
|
|
|
|
return action.get("value", True)
|
|
|
|
except AttributeError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
return False
|
2020-09-02 12:19:37 -04:00
|
|
|
|
|
|
|
|
2020-09-14 12:50:06 -04:00
|
|
|
@attr.s(slots=True)
class _EventPushSummary:
    """Summary of pending event push actions for a given user in a given room.
    Used in _rotate_notifs_before_txn to manipulate results from event_push_actions.
    """

    # Number of unread messages being rolled up for this (user, room).
    unread_count = attr.ib(type=int)
    # Largest stream ordering covered by this summary row.
    stream_ordering = attr.ib(type=int)
    # user_id from a pre-existing event_push_summary row, or None if there
    # was no existing row (used to decide insert vs update).
    old_user_id = attr.ib(type=str)
    # Number of notifying actions; mutated after construction when merging
    # the notif-count query results into the unread-count ones.
    notif_count = attr.ib(type=int)
|