2015-12-22 10:19:34 -05:00
|
|
|
# Copyright 2015 OpenMarket Ltd
|
2017-10-10 06:47:10 -04:00
|
|
|
# Copyright 2017 New Vector Ltd
|
2015-12-22 10:19:34 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import logging
|
2022-08-16 07:22:17 -04:00
|
|
|
from typing import (
|
|
|
|
TYPE_CHECKING,
|
2022-09-29 11:12:09 -04:00
|
|
|
Any,
|
2022-08-16 07:22:17 -04:00
|
|
|
Collection,
|
|
|
|
Dict,
|
|
|
|
List,
|
|
|
|
Mapping,
|
|
|
|
Optional,
|
|
|
|
Tuple,
|
|
|
|
Union,
|
|
|
|
)
|
2015-12-22 10:19:34 -05:00
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
from prometheus_client import Counter
|
|
|
|
|
2022-10-04 09:47:04 -04:00
|
|
|
from synapse.api.constants import MAIN_TIMELINE, EventTypes, Membership, RelationTypes
|
2022-05-20 04:54:12 -04:00
|
|
|
from synapse.event_auth import auth_types_for_event, get_user_power_level
|
2022-05-16 08:42:45 -04:00
|
|
|
from synapse.events import EventBase, relation_from_event
|
2020-09-02 12:19:37 -04:00
|
|
|
from synapse.events.snapshot import EventContext
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.state import POWER_KEY
|
2022-03-25 10:58:56 -04:00
|
|
|
from synapse.storage.databases.main.roommember import EventIdMembership
|
2022-07-11 16:08:39 -04:00
|
|
|
from synapse.storage.state import StateFilter
|
2022-10-12 06:26:39 -04:00
|
|
|
from synapse.synapse_rust.push import FilteredPushRules, PushRuleEvaluator
|
2022-07-11 16:08:39 -04:00
|
|
|
from synapse.util.caches import register_cache
|
2022-05-11 07:15:21 -04:00
|
|
|
from synapse.util.metrics import measure_func
|
2022-07-11 16:08:39 -04:00
|
|
|
from synapse.visibility import filter_event_for_clients_with_state
|
2015-12-22 10:19:34 -05:00
|
|
|
|
2020-12-11 11:43:53 -05:00
|
|
|
if TYPE_CHECKING:
|
2021-03-23 07:12:48 -04:00
|
|
|
from synapse.server import HomeServer
|
2015-12-22 10:19:34 -05:00
|
|
|
|
2020-12-11 11:43:53 -05:00
|
|
|
logger = logging.getLogger(__name__)

# Prometheus counters tracking (a) invalidation of cached push rules and
# (b) the amount of state fetched while evaluating push rules.
push_rules_invalidation_counter = Counter(
    "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter", ""
)
push_rules_state_size_counter = Counter(
    "synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter", ""
)


# State event types which should mark a room as unread when received, used by
# _should_count_as_unread (only active when the msc2654 experimental config
# flag is enabled — see _action_for_event_by_user).
STATE_EVENT_TYPES_TO_MARK_UNREAD = {
    EventTypes.Topic,
    EventTypes.Name,
    EventTypes.RoomAvatar,
    EventTypes.Tombstone,
}
|
|
|
|
|
|
|
|
|
|
|
|
def _should_count_as_unread(event: EventBase, context: EventContext) -> bool:
    """Decide whether this event should increment the room's unread count.

    Applies a series of exclusion rules first (rejected/soft-failed events,
    notices, edits), then checks the inclusion rules (non-empty body, selected
    state event types, encrypted events).

    Args:
        event: the event being persisted.
        context: the event's persistence context (used for the rejection flag).

    Returns:
        True if the event should be counted as unread, False otherwise.
    """
    # Rejected and soft-failed events never count.
    if context.rejected or event.internal_metadata.is_soft_failed():
        return False

    # Notices (m.notice messages) never count.
    if (
        not event.is_state()
        and event.type == EventTypes.Message
        and event.content.get("msgtype") == "m.notice"
    ):
        return False

    # Edits (m.replace relations) never count.
    relation = relation_from_event(event)
    if relation is not None and relation.rel_type == RelationTypes.REPLACE:
        return False

    # Events carrying a non-empty string body count as unread.
    body = event.content.get("body")
    if isinstance(body, str) and body:
        return True

    # A selected subset of state events counts as unread.
    if event.is_state():
        return event.type in STATE_EVENT_TYPES_TO_MARK_UNREAD

    # Encrypted (non-state) events count as unread; everything else does not.
    return event.type == EventTypes.Encrypted
|
|
|
|
|
|
|
|
|
2020-09-04 06:54:56 -04:00
|
|
|
class BulkPushRuleEvaluator:
    """Calculates the outcome of push rules for an event for all users in the
    room at once.
    """

    def __init__(self, hs: "HomeServer"):
        """Set up handles on the stores/handlers needed to evaluate push rules."""
        self.hs = hs
        self.store = hs.get_datastores().main
        self.clock = hs.get_clock()
        self._event_auth_handler = hs.get_event_auth_handler()

        # Whether MSC3664 (pushrules for relations) is enabled; gates the
        # related-event fetching in _related_events.
        self._related_event_match_enabled = self.hs.config.experimental.msc3664_enabled

        self.room_push_rule_cache_metrics = register_cache(
            "cache",
            "room_push_rule_cache",
            cache=[],  # Meaningless size, as this isn't a cache that stores values,
            resizable=False,
        )

    async def _get_rules_for_event(
        self,
        event: EventBase,
    ) -> Dict[str, FilteredPushRules]:
        """Get the push rules for all users who may need to be notified about
        the event.

        Note: this does not check if the user is allowed to see the event.

        Returns:
            Mapping of user ID to their push rules.
        """
        # We get the users who may need to be notified by first fetching the
        # local users currently in the room, finding those that have push rules,
        # and *then* checking which users are actually allowed to see the event.
        #
        # The alternative is to first fetch all users that were joined at the
        # event, but that requires fetching the full state at the event, which
        # may be expensive for large rooms with few local users.

        local_users = await self.store.get_local_users_in_room(event.room_id)

        # Filter out appservice users.
        local_users = [
            u
            for u in local_users
            if not self.store.get_if_app_services_interested_in_user(u)
        ]

        # if this event is an invite event, we may need to run rules for the user
        # who's been invited, otherwise they won't get told they've been invited
        if event.type == EventTypes.Member and event.membership == Membership.INVITE:
            invited = event.state_key
            if invited and self.hs.is_mine_id(invited) and invited not in local_users:
                # Copy before appending so we don't mutate the store's result.
                local_users = list(local_users)
                local_users.append(invited)

        rules_by_user = await self.store.bulk_get_push_rules(local_users)

        logger.debug("Users in room: %s", local_users)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(
                "Returning push rules for %r %r",
                event.room_id,
                list(rules_by_user.keys()),
            )

        return rules_by_user

    async def _get_power_levels_and_sender_level(
        self,
        event: EventBase,
        context: EventContext,
        event_id_to_event: Mapping[str, EventBase],
    ) -> Tuple[dict, Optional[int]]:
        """
        Given an event and an event context, get the power level event relevant to the event
        and the power level of the sender of the event.
        Args:
            event: event to check
            context: context of event to check
            event_id_to_event: a mapping of event_id to event for a set of events being
            batch persisted. This is needed as the sought-after power level event may
            be in this batch rather than the DB

        Returns:
            A tuple of (power level event content, sender's power level); for an
            outlier this is ({}, None) since no state is available.
        """
        # There are no power levels and sender levels possible to get from outlier
        if event.internal_metadata.is_outlier():
            return {}, None

        # Only fetch the slice of prev state relevant to auth for this event.
        event_types = auth_types_for_event(event.room_version, event)
        prev_state_ids = await context.get_prev_state_ids(
            StateFilter.from_types(event_types)
        )
        pl_event_id = prev_state_ids.get(POWER_KEY)

        # fastpath: if there's a power level event, that's all we need, and
        # not having a power level event is an extreme edge case
        if pl_event_id:
            # Get the power level event from the batch, or fall back to the database.
            pl_event = event_id_to_event.get(pl_event_id)
            if pl_event:
                auth_events = {POWER_KEY: pl_event}
            else:
                auth_events = {POWER_KEY: await self.store.get_event(pl_event_id)}
        else:
            # No power level event in prev state: recompute the full set of
            # auth events for this event and pick the power levels out of it.
            auth_events_ids = self._event_auth_handler.compute_auth_events(
                event, prev_state_ids, for_verification=False
            )
            auth_events_dict = await self.store.get_events(auth_events_ids)
            # Some needed auth events might be in the batch, combine them with those
            # fetched from the database.
            for auth_event_id in auth_events_ids:
                auth_event = event_id_to_event.get(auth_event_id)
                if auth_event:
                    auth_events_dict[auth_event_id] = auth_event
            auth_events = {(e.type, e.state_key): e for e in auth_events_dict.values()}

        sender_level = get_user_power_level(event.sender, auth_events)

        pl_event = auth_events.get(POWER_KEY)

        return pl_event.content if pl_event else {}, sender_level

    async def _related_events(self, event: EventBase) -> Dict[str, Dict[str, str]]:
        """Fetches the related events for 'event'. Sets the im.vector.is_falling_back key if the event is from a fallback relation

        Returns:
            Mapping of relation type to flattened events.
        """
        related_events: Dict[str, Dict[str, str]] = {}
        # Only do the (DB-hitting) lookups when MSC3664 is enabled.
        if self._related_event_match_enabled:
            related_event_id = event.content.get("m.relates_to", {}).get("event_id")
            relation_type = event.content.get("m.relates_to", {}).get("rel_type")
            if related_event_id is not None and relation_type is not None:
                related_event = await self.store.get_event(
                    related_event_id, allow_none=True
                )
                if related_event is not None:
                    related_events[relation_type] = _flatten_dict(related_event)

            reply_event_id = (
                event.content.get("m.relates_to", {})
                .get("m.in_reply_to", {})
                .get("event_id")
            )

            # convert replies to pseudo relations
            if reply_event_id is not None:
                related_event = await self.store.get_event(
                    reply_event_id, allow_none=True
                )

                if related_event is not None:
                    related_events["m.in_reply_to"] = _flatten_dict(related_event)

                    # indicate that this is from a fallback relation.
                    if relation_type == "m.thread" and event.content.get(
                        "m.relates_to", {}
                    ).get("is_falling_back", False):
                        related_events["m.in_reply_to"][
                            "im.vector.is_falling_back"
                        ] = ""

        return related_events

    async def action_for_events_by_user(
        self, events_and_context: List[Tuple[EventBase, EventContext]]
    ) -> None:
        """Given a list of events and their associated contexts, evaluate the push rules
        for each event, check if the message should increment the unread count, and
        insert the results into the event_push_actions_staging table.
        """
        # For batched events the power level events may not have been persisted yet,
        # so we pass in the batched events. Thus if the event cannot be found in the
        # database we can check in the batch.
        event_id_to_event = {e.event_id: e for e, _ in events_and_context}
        for event, context in events_and_context:
            await self._action_for_event_by_user(event, context, event_id_to_event)

    @measure_func("action_for_event_by_user")
    async def _action_for_event_by_user(
        self,
        event: EventBase,
        context: EventContext,
        event_id_to_event: Mapping[str, EventBase],
    ) -> None:
        """Evaluate push rules for a single event and stage the resulting
        actions in the event_push_actions_staging table.

        Args:
            event: the event being persisted.
            context: the event's persistence context.
            event_id_to_event: other events from the same persistence batch,
                consulted when a needed (e.g. power level) event is not yet in
                the database.
        """
        if (
            not event.internal_metadata.is_notifiable()
            or event.internal_metadata.is_historical()
        ):
            # Push rules for events that aren't notifiable can't be processed by this and
            # we want to skip push notification actions for historical messages
            # because we don't want to notify people about old history back in time.
            # The historical messages also do not have the proper `context.current_state_ids`
            # and `state_groups` because they have `prev_events` that aren't persisted yet
            # (historical messages persisted in reverse-chronological order).
            return

        # Disable counting as unread unless the experimental configuration is
        # enabled, as it can cause additional (unwanted) rows to be added to the
        # event_push_actions table.
        count_as_unread = False
        if self.hs.config.experimental.msc2654_enabled:
            count_as_unread = _should_count_as_unread(event, context)

        rules_by_user = await self._get_rules_for_event(event)
        actions_by_user: Dict[str, Collection[Union[Mapping, str]]] = {}

        room_member_count = await self.store.get_number_joined_users_in_room(
            event.room_id
        )

        (
            power_levels,
            sender_power_level,
        ) = await self._get_power_levels_and_sender_level(
            event, context, event_id_to_event
        )

        # Find the event's thread ID.
        relation = relation_from_event(event)
        # If the event does not have a relation, then it cannot have a thread ID.
        thread_id = MAIN_TIMELINE
        if relation:
            # Recursively attempt to find the thread this event relates to.
            if relation.rel_type == RelationTypes.THREAD:
                thread_id = relation.parent_id
            else:
                # Since the event has not yet been persisted we check whether
                # the parent is part of a thread.
                thread_id = await self.store.get_thread_id(relation.parent_id)

        related_events = await self._related_events(event)

        # It's possible that old room versions have non-integer power levels (floats or
        # strings). Workaround this by explicitly converting to int.
        notification_levels = power_levels.get("notifications", {})
        if not event.room_version.msc3667_int_only_power_levels:
            for user_id, level in notification_levels.items():
                notification_levels[user_id] = int(level)

        # The Rust evaluator is built once and reused for every user's rules.
        evaluator = PushRuleEvaluator(
            _flatten_dict(event),
            room_member_count,
            sender_power_level,
            notification_levels,
            related_events,
            self._related_event_match_enabled,
        )

        users = rules_by_user.keys()
        profiles = await self.store.get_subset_users_in_room_with_profiles(
            event.room_id, users
        )

        for uid, rules in rules_by_user.items():
            # Users are never notified about their own events.
            if event.sender == uid:
                continue

            display_name = None
            profile = profiles.get(uid)
            if profile:
                display_name = profile.display_name

            if not display_name:
                # Handle the case where we are pushing a membership event to
                # that user, as they might not be already joined.
                if event.type == EventTypes.Member and event.state_key == uid:
                    display_name = event.content.get("displayname", None)
                    if not isinstance(display_name, str):
                        display_name = None

            if count_as_unread:
                # Add an element for the current user if the event needs to be marked as
                # unread, so that add_push_actions_to_staging iterates over it.
                # If the event shouldn't be marked as unread but should notify the
                # current user, it'll be added to the dict later.
                actions_by_user[uid] = []

            actions = evaluator.run(rules, uid, display_name)
            if "notify" in actions:
                # Push rules say we should notify the user of this event
                actions_by_user[uid] = actions

        # If there aren't any actions then we can skip the rest of the
        # processing.
        if not actions_by_user:
            return

        # This is a check for the case where user joins a room without being
        # allowed to see history, and then the server receives a delayed event
        # from before the user joined, which they should not be pushed for
        #
        # We do this *after* calculating the push actions as a) its unlikely
        # that we'll filter anyone out and b) for large rooms its likely that
        # most users will have push disabled and so the set of users to check is
        # much smaller.
        uids_with_visibility = await filter_event_for_clients_with_state(
            self.store, actions_by_user.keys(), event, context
        )

        for user_id in set(actions_by_user).difference(uids_with_visibility):
            actions_by_user.pop(user_id, None)

        # Mark in the DB staging area the push actions for users who should be
        # notified for this event. (This will then get handled when we persist
        # the event)
        await self.store.add_push_actions_to_staging(
            event.event_id,
            actions_by_user,
            count_as_unread,
            thread_id,
        )
|
2018-02-20 06:30:54 -05:00
|
|
|
|
2016-01-18 09:09:47 -05:00
|
|
|
|
2022-03-25 10:58:56 -04:00
|
|
|
# Type aliases for push-rule plumbing in this module.
# NOTE(review): none of these appear to be referenced in this file — presumably
# kept for importers elsewhere; confirm before removing.
MemberMap = Dict[str, Optional[EventIdMembership]]
Rule = Dict[str, dict]
RulesByUser = Dict[str, List[Rule]]
StateGroup = Union[object, int]
|
2022-09-29 11:12:09 -04:00
|
|
|
|
|
|
|
|
|
|
|
def _flatten_dict(
|
|
|
|
d: Union[EventBase, Mapping[str, Any]],
|
|
|
|
prefix: Optional[List[str]] = None,
|
|
|
|
result: Optional[Dict[str, str]] = None,
|
|
|
|
) -> Dict[str, str]:
|
|
|
|
if prefix is None:
|
|
|
|
prefix = []
|
|
|
|
if result is None:
|
|
|
|
result = {}
|
|
|
|
for key, value in d.items():
|
|
|
|
if isinstance(value, str):
|
|
|
|
result[".".join(prefix + [key])] = value.lower()
|
|
|
|
elif isinstance(value, Mapping):
|
|
|
|
_flatten_dict(value, prefix=(prefix + [key]), result=result)
|
|
|
|
|
|
|
|
return result
|