#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2016 OpenMarket Ltd
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
import logging
|
2023-07-24 11:23:19 -04:00
|
|
|
from http import HTTPStatus
|
|
|
|
from typing import TYPE_CHECKING, Any, Dict, Optional
|
2016-09-06 13:16:20 -04:00
|
|
|
|
2022-12-06 04:52:55 -05:00
|
|
|
from synapse.api.constants import EduTypes, EventContentFields, ToDeviceEventTypes
|
2023-07-24 11:23:19 -04:00
|
|
|
from synapse.api.errors import Codes, SynapseError
|
2021-02-19 13:20:34 -05:00
|
|
|
from synapse.api.ratelimiting import Ratelimiter
|
2020-01-30 12:06:38 -05:00
|
|
|
from synapse.logging.context import run_in_background
|
2019-08-22 13:21:10 -04:00
|
|
|
from synapse.logging.opentracing import (
|
2021-04-01 12:08:21 -04:00
|
|
|
SynapseTags,
|
2019-08-22 13:21:10 -04:00
|
|
|
get_active_span_text_map,
|
2019-09-03 05:21:30 -04:00
|
|
|
log_kv,
|
2019-08-22 13:21:10 -04:00
|
|
|
set_tag,
|
|
|
|
)
|
2023-04-21 07:06:39 -04:00
|
|
|
from synapse.replication.http.devices import (
|
|
|
|
ReplicationMultiUserDevicesResyncRestServlet,
|
|
|
|
)
|
2022-05-16 11:35:31 -04:00
|
|
|
from synapse.types import JsonDict, Requester, StreamKeyType, UserID, get_domain_from_id
|
2020-08-20 10:32:33 -04:00
|
|
|
from synapse.util import json_encoder
|
2016-09-06 13:16:20 -04:00
|
|
|
from synapse.util.stringutils import random_string
|
|
|
|
|
2020-10-09 07:20:51 -04:00
|
|
|
if TYPE_CHECKING:
|
2021-03-23 07:12:48 -04:00
|
|
|
from synapse.server import HomeServer
|
2020-10-09 07:20:51 -04:00
|
|
|
|
|
|
|
|
2016-09-06 13:16:20 -04:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2020-09-04 06:54:56 -04:00
|
|
|
class DeviceMessageHandler:
|
2020-10-09 07:20:51 -04:00
|
|
|
def __init__(self, hs: "HomeServer"):
|
2016-09-06 13:16:20 -04:00
|
|
|
"""
|
|
|
|
Args:
|
2020-10-09 07:20:51 -04:00
|
|
|
hs: server
|
2016-09-06 13:16:20 -04:00
|
|
|
"""
|
2022-02-23 06:04:02 -05:00
|
|
|
self.store = hs.get_datastores().main
|
2016-09-06 13:16:20 -04:00
|
|
|
self.notifier = hs.get_notifier()
|
2018-01-16 13:25:28 -05:00
|
|
|
self.is_mine = hs.is_mine
|
2023-07-24 11:23:19 -04:00
|
|
|
if hs.config.experimental.msc3814_enabled:
|
|
|
|
self.event_sources = hs.get_event_sources()
|
|
|
|
self.device_handler = hs.get_device_handler()
|
2016-09-06 13:16:20 -04:00
|
|
|
|
2021-01-07 15:19:26 -05:00
|
|
|
# We only need to poke the federation sender explicitly if its on the
|
|
|
|
# same instance. Other federation sender instances will get notified by
|
|
|
|
# `synapse.app.generic_worker.FederationSenderHandler` when it sees it
|
|
|
|
# in the to-device replication stream.
|
|
|
|
self.federation_sender = None
|
|
|
|
if hs.should_send_federation():
|
|
|
|
self.federation_sender = hs.get_federation_sender()
|
|
|
|
|
|
|
|
# If we can handle the to device EDUs we do so, otherwise we route them
|
|
|
|
# to the appropriate worker.
|
|
|
|
if hs.get_instance_name() in hs.config.worker.writers.to_device:
|
|
|
|
hs.get_federation_registry().register_edu_handler(
|
2022-05-27 07:14:36 -04:00
|
|
|
EduTypes.DIRECT_TO_DEVICE, self.on_direct_to_device_edu
|
2021-01-07 15:19:26 -05:00
|
|
|
)
|
|
|
|
else:
|
|
|
|
hs.get_federation_registry().register_instances_for_edu(
|
2022-05-27 07:14:36 -04:00
|
|
|
EduTypes.DIRECT_TO_DEVICE,
|
2021-01-07 15:19:26 -05:00
|
|
|
hs.config.worker.writers.to_device,
|
|
|
|
)
|
2016-09-06 13:16:20 -04:00
|
|
|
|
2021-01-07 13:06:52 -05:00
|
|
|
# The handler to call when we think a user's device list might be out of
|
|
|
|
# sync. We do all device list resyncing on the master instance, so if
|
|
|
|
# we're on a worker we hit the device resync replication API.
|
|
|
|
if hs.config.worker.worker_app is None:
|
2023-04-21 07:06:39 -04:00
|
|
|
self._multi_user_device_resync = (
|
|
|
|
hs.get_device_handler().device_list_updater.multi_user_device_resync
|
2021-01-07 13:06:52 -05:00
|
|
|
)
|
|
|
|
else:
|
2023-04-21 07:06:39 -04:00
|
|
|
self._multi_user_device_resync = (
|
|
|
|
ReplicationMultiUserDevicesResyncRestServlet.make_client(hs)
|
2021-01-07 13:06:52 -05:00
|
|
|
)
|
2020-01-30 12:06:38 -05:00
|
|
|
|
2021-05-11 06:02:56 -04:00
|
|
|
# a rate limiter for room key requests. The keys are
|
|
|
|
# (sending_user_id, sending_device_id).
|
2021-02-19 13:20:34 -05:00
|
|
|
self._ratelimiter = Ratelimiter(
|
2021-03-30 07:06:09 -04:00
|
|
|
store=self.store,
|
2021-02-19 13:20:34 -05:00
|
|
|
clock=hs.get_clock(),
|
2023-08-29 19:39:39 -04:00
|
|
|
cfg=hs.config.ratelimiting.rc_key_requests,
|
2021-02-19 13:20:34 -05:00
|
|
|
)
|
|
|
|
|
2020-10-09 07:20:51 -04:00
|
|
|
async def on_direct_to_device_edu(self, origin: str, content: JsonDict) -> None:
|
2021-11-09 09:31:15 -05:00
|
|
|
"""
|
|
|
|
Handle receiving to-device messages from remote homeservers.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
origin: The remote homeserver.
|
|
|
|
content: The JSON dictionary containing the to-device messages.
|
|
|
|
"""
|
2016-09-06 13:16:20 -04:00
|
|
|
local_messages = {}
|
|
|
|
sender_user_id = content["sender"]
|
|
|
|
if origin != get_domain_from_id(sender_user_id):
|
2019-10-31 06:23:24 -04:00
|
|
|
logger.warning(
|
2016-09-06 13:16:20 -04:00
|
|
|
"Dropping device message from %r with spoofed sender %r",
|
|
|
|
origin,
|
|
|
|
sender_user_id,
|
|
|
|
)
|
|
|
|
message_type = content["type"]
|
|
|
|
message_id = content["message_id"]
|
|
|
|
for user_id, by_device in content["messages"].items():
|
2018-01-16 13:25:28 -05:00
|
|
|
# we use UserID.from_string to catch invalid user ids
|
|
|
|
if not self.is_mine(UserID.from_string(user_id)):
|
2021-05-11 06:02:56 -04:00
|
|
|
logger.warning("To-device message to non-local user %s", user_id)
|
2018-01-16 13:25:28 -05:00
|
|
|
raise SynapseError(400, "Not a user here")
|
|
|
|
|
2020-01-28 09:43:21 -05:00
|
|
|
if not by_device:
|
|
|
|
continue
|
|
|
|
|
2021-05-11 06:02:56 -04:00
|
|
|
# Ratelimit key requests by the sending user.
|
|
|
|
if message_type == ToDeviceEventTypes.RoomKeyRequest:
|
|
|
|
allowed, _ = await self._ratelimiter.can_do_action(
|
|
|
|
None, (sender_user_id, None)
|
|
|
|
)
|
|
|
|
if not allowed:
|
|
|
|
logger.info(
|
|
|
|
"Dropping room_key_request from %s to %s due to rate limit",
|
|
|
|
sender_user_id,
|
|
|
|
user_id,
|
|
|
|
)
|
|
|
|
continue
|
|
|
|
|
2016-09-06 13:16:20 -04:00
|
|
|
messages_by_device = {
|
|
|
|
device_id: {
|
|
|
|
"content": message_content,
|
|
|
|
"type": message_type,
|
|
|
|
"sender": sender_user_id,
|
|
|
|
}
|
|
|
|
for device_id, message_content in by_device.items()
|
|
|
|
}
|
2020-01-28 09:43:21 -05:00
|
|
|
local_messages[user_id] = messages_by_device
|
|
|
|
|
2020-06-16 08:06:17 -04:00
|
|
|
await self._check_for_unknown_devices(
|
2020-01-28 09:43:21 -05:00
|
|
|
message_type, sender_user_id, by_device
|
|
|
|
)
|
2016-09-06 13:16:20 -04:00
|
|
|
|
2021-11-09 09:31:15 -05:00
|
|
|
# Add messages to the database.
|
|
|
|
# Retrieve the stream id of the last-processed to-device message.
|
|
|
|
last_stream_id = await self.store.add_messages_from_remote_to_device_inbox(
|
2016-09-06 13:16:20 -04:00
|
|
|
origin, message_id, local_messages
|
|
|
|
)
|
|
|
|
|
2021-11-09 09:31:15 -05:00
|
|
|
# Notify listeners that there are new to-device messages to process,
|
|
|
|
# handing them the latest stream id.
|
2016-09-06 13:16:20 -04:00
|
|
|
self.notifier.on_new_event(
|
2022-05-16 11:35:31 -04:00
|
|
|
StreamKeyType.TO_DEVICE, last_stream_id, users=local_messages.keys()
|
2016-09-06 13:16:20 -04:00
|
|
|
)
|
|
|
|
|
2020-06-16 08:06:17 -04:00
|
|
|
async def _check_for_unknown_devices(
|
2020-01-28 09:43:21 -05:00
|
|
|
self,
|
|
|
|
message_type: str,
|
|
|
|
sender_user_id: str,
|
|
|
|
by_device: Dict[str, Dict[str, Any]],
|
2020-10-09 07:20:51 -04:00
|
|
|
) -> None:
|
2020-06-16 08:06:17 -04:00
|
|
|
"""Checks inbound device messages for unknown remote devices, and if
|
2020-01-28 09:43:21 -05:00
|
|
|
found marks the remote cache for the user as stale.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if message_type != "m.room_key_request":
|
|
|
|
return
|
|
|
|
|
|
|
|
# Get the sending device IDs
|
|
|
|
requesting_device_ids = set()
|
|
|
|
for message_content in by_device.values():
|
|
|
|
device_id = message_content.get("requesting_device_id")
|
|
|
|
requesting_device_ids.add(device_id)
|
|
|
|
|
|
|
|
# Check if we are tracking the devices of the remote user.
|
2020-06-16 08:06:17 -04:00
|
|
|
room_ids = await self.store.get_rooms_for_user(sender_user_id)
|
2020-01-28 09:43:21 -05:00
|
|
|
if not room_ids:
|
|
|
|
logger.info(
|
|
|
|
"Received device message from remote device we don't"
|
|
|
|
" share a room with: %s %s",
|
|
|
|
sender_user_id,
|
|
|
|
requesting_device_ids,
|
|
|
|
)
|
|
|
|
return
|
|
|
|
|
|
|
|
# If we are tracking check that we know about the sending
|
|
|
|
# devices.
|
2020-06-16 08:06:17 -04:00
|
|
|
cached_devices = await self.store.get_cached_devices_for_user(sender_user_id)
|
2020-01-28 09:43:21 -05:00
|
|
|
|
|
|
|
unknown_devices = requesting_device_ids - set(cached_devices)
|
|
|
|
if unknown_devices:
|
|
|
|
logger.info(
|
|
|
|
"Received device message from remote device not in our cache: %s %s",
|
|
|
|
sender_user_id,
|
|
|
|
unknown_devices,
|
|
|
|
)
|
2023-01-10 06:17:59 -05:00
|
|
|
await self.store.mark_remote_users_device_caches_as_stale((sender_user_id,))
|
2020-01-30 12:06:38 -05:00
|
|
|
|
|
|
|
# Immediately attempt a resync in the background
|
2023-04-21 07:06:39 -04:00
|
|
|
run_in_background(self._multi_user_device_resync, user_ids=[sender_user_id])
|
2020-01-28 09:43:21 -05:00
|
|
|
|
2020-10-09 07:20:51 -04:00
|
|
|
async def send_device_message(
|
|
|
|
self,
|
2021-02-19 13:20:34 -05:00
|
|
|
requester: Requester,
|
2020-10-09 07:20:51 -04:00
|
|
|
message_type: str,
|
|
|
|
messages: Dict[str, Dict[str, JsonDict]],
|
|
|
|
) -> None:
|
2021-11-09 09:31:15 -05:00
|
|
|
"""
|
|
|
|
Handle a request from a user to send to-device message(s).
|
|
|
|
|
|
|
|
Args:
|
|
|
|
requester: The user that is sending the to-device messages.
|
|
|
|
message_type: The type of to-device messages that are being sent.
|
|
|
|
messages: A dictionary containing recipients mapped to messages intended for them.
|
|
|
|
"""
|
2021-02-19 13:20:34 -05:00
|
|
|
sender_user_id = requester.user.to_string()
|
|
|
|
|
2022-12-06 04:52:55 -05:00
|
|
|
set_tag(SynapseTags.TO_DEVICE_TYPE, message_type)
|
|
|
|
set_tag(SynapseTags.TO_DEVICE_SENDER, sender_user_id)
|
2016-09-06 13:16:20 -04:00
|
|
|
local_messages = {}
|
2021-07-16 13:22:36 -04:00
|
|
|
remote_messages: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
|
2016-09-06 13:16:20 -04:00
|
|
|
for user_id, by_device in messages.items():
|
2022-12-06 04:52:55 -05:00
|
|
|
# add an opentracing log entry for each message
|
|
|
|
for device_id, message_content in by_device.items():
|
|
|
|
log_kv(
|
|
|
|
{
|
|
|
|
"event": "send_to_device_message",
|
|
|
|
"user_id": user_id,
|
|
|
|
"device_id": device_id,
|
|
|
|
EventContentFields.TO_DEVICE_MSGID: message_content.get(
|
|
|
|
EventContentFields.TO_DEVICE_MSGID
|
|
|
|
),
|
|
|
|
}
|
|
|
|
)
|
|
|
|
|
2021-02-19 13:20:34 -05:00
|
|
|
# Ratelimit local cross-user key requests by the sending device.
|
|
|
|
if (
|
2021-05-11 06:02:56 -04:00
|
|
|
message_type == ToDeviceEventTypes.RoomKeyRequest
|
2021-02-19 13:20:34 -05:00
|
|
|
and user_id != sender_user_id
|
2021-05-11 06:02:56 -04:00
|
|
|
):
|
|
|
|
allowed, _ = await self._ratelimiter.can_do_action(
|
2021-03-30 07:06:09 -04:00
|
|
|
requester, (sender_user_id, requester.device_id)
|
2021-02-19 13:20:34 -05:00
|
|
|
)
|
2021-05-11 06:02:56 -04:00
|
|
|
if not allowed:
|
2022-12-06 04:52:55 -05:00
|
|
|
log_kv({"message": f"dropping key requests to {user_id}"})
|
2021-05-11 06:02:56 -04:00
|
|
|
logger.info(
|
|
|
|
"Dropping room_key_request from %s to %s due to rate limit",
|
|
|
|
sender_user_id,
|
|
|
|
user_id,
|
|
|
|
)
|
|
|
|
continue
|
2021-02-19 13:20:34 -05:00
|
|
|
|
2018-01-16 13:25:28 -05:00
|
|
|
# we use UserID.from_string to catch invalid user ids
|
|
|
|
if self.is_mine(UserID.from_string(user_id)):
|
2016-09-06 13:16:20 -04:00
|
|
|
messages_by_device = {
|
|
|
|
device_id: {
|
|
|
|
"content": message_content,
|
|
|
|
"type": message_type,
|
|
|
|
"sender": sender_user_id,
|
|
|
|
}
|
|
|
|
for device_id, message_content in by_device.items()
|
|
|
|
}
|
|
|
|
if messages_by_device:
|
|
|
|
local_messages[user_id] = messages_by_device
|
|
|
|
else:
|
|
|
|
destination = get_domain_from_id(user_id)
|
|
|
|
remote_messages.setdefault(destination, {})[user_id] = by_device
|
|
|
|
|
2019-08-22 13:21:10 -04:00
|
|
|
context = get_active_span_text_map()
|
|
|
|
|
2016-09-06 13:16:20 -04:00
|
|
|
remote_edu_contents = {}
|
|
|
|
for destination, messages in remote_messages.items():
|
2022-12-06 04:52:55 -05:00
|
|
|
# The EDU contains a "message_id" property which is used for
|
|
|
|
# idempotence. Make up a random one.
|
|
|
|
message_id = random_string(16)
|
|
|
|
log_kv({"destination": destination, "message_id": message_id})
|
|
|
|
|
2021-04-01 12:08:21 -04:00
|
|
|
remote_edu_contents[destination] = {
|
|
|
|
"messages": messages,
|
|
|
|
"sender": sender_user_id,
|
|
|
|
"type": message_type,
|
|
|
|
"message_id": message_id,
|
|
|
|
"org.matrix.opentracing_context": json_encoder.encode(context),
|
|
|
|
}
|
2016-09-06 13:16:20 -04:00
|
|
|
|
2021-11-09 09:31:15 -05:00
|
|
|
# Add messages to the database.
|
|
|
|
# Retrieve the stream id of the last-processed to-device message.
|
|
|
|
last_stream_id = await self.store.add_messages_to_device_inbox(
|
2016-09-06 13:16:20 -04:00
|
|
|
local_messages, remote_edu_contents
|
|
|
|
)
|
|
|
|
|
2021-11-09 09:31:15 -05:00
|
|
|
# Notify listeners that there are new to-device messages to process,
|
|
|
|
# handing them the latest stream id.
|
2016-09-06 13:16:20 -04:00
|
|
|
self.notifier.on_new_event(
|
2022-05-16 11:35:31 -04:00
|
|
|
StreamKeyType.TO_DEVICE, last_stream_id, users=local_messages.keys()
|
2016-09-06 13:16:20 -04:00
|
|
|
)
|
|
|
|
|
2021-01-07 15:19:26 -05:00
|
|
|
if self.federation_sender:
|
2023-09-04 12:14:09 -04:00
|
|
|
# Enqueue a new federation transaction to send the new
|
|
|
|
# device messages to each remote destination.
|
|
|
|
await self.federation_sender.send_device_messages(remote_messages.keys())
|
2023-07-24 11:23:19 -04:00
|
|
|
|
|
|
|
async def get_events_for_dehydrated_device(
|
|
|
|
self,
|
|
|
|
requester: Requester,
|
|
|
|
device_id: str,
|
|
|
|
since_token: Optional[str],
|
|
|
|
limit: int,
|
|
|
|
) -> JsonDict:
|
|
|
|
"""Fetches up to `limit` events sent to `device_id` starting from `since_token`
|
|
|
|
and returns the new since token. If there are no more messages, returns an empty
|
|
|
|
array.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
requester: the user requesting the messages
|
|
|
|
device_id: ID of the dehydrated device
|
|
|
|
since_token: stream id to start from when fetching messages
|
|
|
|
limit: the number of messages to fetch
|
|
|
|
Returns:
|
|
|
|
A dict containing the to-device messages, as well as a token that the client
|
|
|
|
can provide in the next call to fetch the next batch of messages
|
|
|
|
"""
|
|
|
|
|
|
|
|
user_id = requester.user.to_string()
|
|
|
|
|
|
|
|
# only allow fetching messages for the dehydrated device id currently associated
|
|
|
|
# with the user
|
|
|
|
dehydrated_device = await self.device_handler.get_dehydrated_device(user_id)
|
|
|
|
if dehydrated_device is None:
|
|
|
|
raise SynapseError(
|
|
|
|
HTTPStatus.FORBIDDEN,
|
|
|
|
"No dehydrated device exists",
|
|
|
|
Codes.FORBIDDEN,
|
|
|
|
)
|
|
|
|
|
|
|
|
dehydrated_device_id, _ = dehydrated_device
|
|
|
|
if device_id != dehydrated_device_id:
|
|
|
|
raise SynapseError(
|
|
|
|
HTTPStatus.FORBIDDEN,
|
|
|
|
"You may only fetch messages for your dehydrated device",
|
|
|
|
Codes.FORBIDDEN,
|
|
|
|
)
|
|
|
|
|
|
|
|
since_stream_id = 0
|
|
|
|
if since_token:
|
|
|
|
if not since_token.startswith("d"):
|
|
|
|
raise SynapseError(
|
|
|
|
HTTPStatus.BAD_REQUEST,
|
|
|
|
"from parameter %r has an invalid format" % (since_token,),
|
|
|
|
errcode=Codes.INVALID_PARAM,
|
|
|
|
)
|
|
|
|
|
|
|
|
try:
|
|
|
|
since_stream_id = int(since_token[1:])
|
|
|
|
except Exception:
|
|
|
|
raise SynapseError(
|
|
|
|
HTTPStatus.BAD_REQUEST,
|
|
|
|
"from parameter %r has an invalid format" % (since_token,),
|
|
|
|
errcode=Codes.INVALID_PARAM,
|
|
|
|
)
|
|
|
|
|
|
|
|
to_token = self.event_sources.get_current_token().to_device_key
|
|
|
|
|
|
|
|
messages, stream_id = await self.store.get_messages_for_device(
|
|
|
|
user_id, device_id, since_stream_id, to_token, limit
|
|
|
|
)
|
|
|
|
|
|
|
|
for message in messages:
|
|
|
|
# Remove the message id before sending to client
|
|
|
|
message_id = message.pop("message_id", None)
|
|
|
|
if message_id:
|
|
|
|
set_tag(SynapseTags.TO_DEVICE_EDU_ID, message_id)
|
|
|
|
|
|
|
|
logger.debug(
|
|
|
|
"Returning %d to-device messages between %d and %d (current token: %d) for "
|
|
|
|
"dehydrated device %s, user_id %s",
|
|
|
|
len(messages),
|
|
|
|
since_stream_id,
|
|
|
|
stream_id,
|
|
|
|
to_token,
|
|
|
|
device_id,
|
|
|
|
user_id,
|
|
|
|
)
|
|
|
|
|
|
|
|
return {
|
|
|
|
"events": messages,
|
|
|
|
"next_batch": f"d{stream_id}",
|
|
|
|
}
|