# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Any, Dict

from synapse.api.constants import ToDeviceEventTypes
from synapse.api.errors import SynapseError
from synapse.api.ratelimiting import Ratelimiter
from synapse.logging.context import run_in_background
from synapse.logging.opentracing import (
    SynapseTags,
    get_active_span_text_map,
    log_kv,
    set_tag,
)
from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
from synapse.types import JsonDict, Requester, UserID, get_domain_from_id
from synapse.util import json_encoder
from synapse.util.stringutils import random_string

if TYPE_CHECKING:
    from synapse.server import HomeServer


logger = logging.getLogger(__name__)


class DeviceMessageHandler:
    """Handles sending and receiving of Matrix to-device messages, both for
    local users and (via federation `m.direct_to_device` EDUs) remote users.
    """

    def __init__(self, hs: "HomeServer"):
        """
        Args:
            hs: The homeserver this handler belongs to.
        """
        self.store = hs.get_datastore()
        self.notifier = hs.get_notifier()
        self.is_mine = hs.is_mine

        # We only need to poke the federation sender explicitly if its on the
        # same instance. Other federation sender instances will get notified by
        # `synapse.app.generic_worker.FederationSenderHandler` when it sees it
        # in the to-device replication stream.
        self.federation_sender = None
        if hs.should_send_federation():
            self.federation_sender = hs.get_federation_sender()

        # If we can handle the to device EDUs we do so, otherwise we route them
        # to the appropriate worker.
        if hs.get_instance_name() in hs.config.worker.writers.to_device:
            hs.get_federation_registry().register_edu_handler(
                "m.direct_to_device", self.on_direct_to_device_edu
            )
        else:
            hs.get_federation_registry().register_instances_for_edu(
                "m.direct_to_device",
                hs.config.worker.writers.to_device,
            )

        # The handler to call when we think a user's device list might be out of
        # sync. We do all device list resyncing on the master instance, so if
        # we're on a worker we hit the device resync replication API.
        if hs.config.worker.worker_app is None:
            self._user_device_resync = (
                hs.get_device_handler().device_list_updater.user_device_resync
            )
        else:
            self._user_device_resync = (
                ReplicationUserDevicesResyncRestServlet.make_client(hs)
            )

        # a rate limiter for room key requests. The keys are
        # (sending_user_id, sending_device_id).
        self._ratelimiter = Ratelimiter(
            store=self.store,
            clock=hs.get_clock(),
            rate_hz=hs.config.ratelimiting.rc_key_requests.per_second,
            burst_count=hs.config.ratelimiting.rc_key_requests.burst_count,
        )

    async def on_direct_to_device_edu(self, origin: str, content: JsonDict) -> None:
        """
        Handle receiving to-device messages from remote homeservers.

        Messages with a sender that does not belong to the sending server are
        dropped; messages addressed to non-local users raise a SynapseError.

        Args:
            origin: The remote homeserver.
            content: The JSON dictionary containing the to-device messages.
        """
        local_messages = {}
        sender_user_id = content["sender"]
        if origin != get_domain_from_id(sender_user_id):
            logger.warning(
                "Dropping device message from %r with spoofed sender %r",
                origin,
                sender_user_id,
            )
            # BUGFIX: previously this fell through and processed the message
            # despite logging that it was being dropped. Actually drop EDUs
            # whose claimed sender does not belong to the origin server.
            return

        message_type = content["type"]
        message_id = content["message_id"]
        for user_id, by_device in content["messages"].items():
            # we use UserID.from_string to catch invalid user ids
            if not self.is_mine(UserID.from_string(user_id)):
                logger.warning("To-device message to non-local user %s", user_id)
                raise SynapseError(400, "Not a user here")

            if not by_device:
                continue

            # Ratelimit key requests by the sending user.
            if message_type == ToDeviceEventTypes.RoomKeyRequest:
                allowed, _ = await self._ratelimiter.can_do_action(
                    None, (sender_user_id, None)
                )
                if not allowed:
                    logger.info(
                        "Dropping room_key_request from %s to %s due to rate limit",
                        sender_user_id,
                        user_id,
                    )
                    continue

            messages_by_device = {
                device_id: {
                    "content": message_content,
                    "type": message_type,
                    "sender": sender_user_id,
                }
                for device_id, message_content in by_device.items()
            }
            local_messages[user_id] = messages_by_device

            await self._check_for_unknown_devices(
                message_type, sender_user_id, by_device
            )

        # Add messages to the database.
        # Retrieve the stream id of the last-processed to-device message.
        last_stream_id = await self.store.add_messages_from_remote_to_device_inbox(
            origin, message_id, local_messages
        )

        # Notify listeners that there are new to-device messages to process,
        # handing them the latest stream id.
        self.notifier.on_new_event(
            "to_device_key", last_stream_id, users=local_messages.keys()
        )

    async def _check_for_unknown_devices(
        self,
        message_type: str,
        sender_user_id: str,
        by_device: Dict[str, Dict[str, Any]],
    ) -> None:
        """Checks inbound device messages for unknown remote devices, and if
        found marks the remote cache for the user as stale.

        Only applies to room key requests; other message types are ignored.
        """

        # Use the shared constant rather than a raw string, for consistency
        # with the rest of this class (same value: "m.room_key_request").
        if message_type != ToDeviceEventTypes.RoomKeyRequest:
            return

        # Get the sending device IDs
        requesting_device_ids = set()
        for message_content in by_device.values():
            device_id = message_content.get("requesting_device_id")
            requesting_device_ids.add(device_id)

        # Check if we are tracking the devices of the remote user.
        room_ids = await self.store.get_rooms_for_user(sender_user_id)
        if not room_ids:
            logger.info(
                "Received device message from remote device we don't"
                " share a room with: %s %s",
                sender_user_id,
                requesting_device_ids,
            )
            return

        # If we are tracking check that we know about the sending
        # devices.
        cached_devices = await self.store.get_cached_devices_for_user(sender_user_id)

        unknown_devices = requesting_device_ids - set(cached_devices)
        if unknown_devices:
            logger.info(
                "Received device message from remote device not in our cache: %s %s",
                sender_user_id,
                unknown_devices,
            )
            await self.store.mark_remote_user_device_cache_as_stale(sender_user_id)

            # Immediately attempt a resync in the background
            run_in_background(self._user_device_resync, user_id=sender_user_id)

    async def send_device_message(
        self,
        requester: Requester,
        message_type: str,
        messages: Dict[str, Dict[str, JsonDict]],
    ) -> None:
        """
        Handle a request from a user to send to-device message(s).

        Local recipients have the messages written straight to their device
        inbox; remote recipients are batched per destination into
        `m.direct_to_device` EDUs and handed to the federation sender (if one
        runs on this instance).

        Args:
            requester: The user that is sending the to-device messages.
            message_type: The type of to-device messages that are being sent.
            messages: A dictionary containing recipients mapped to messages intended for them.
        """
        sender_user_id = requester.user.to_string()

        message_id = random_string(16)
        set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)

        log_kv({"number_of_to_device_messages": len(messages)})
        set_tag("sender", sender_user_id)
        local_messages = {}
        remote_messages: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
        for user_id, by_device in messages.items():
            # Ratelimit local cross-user key requests by the sending device.
            if (
                message_type == ToDeviceEventTypes.RoomKeyRequest
                and user_id != sender_user_id
            ):
                allowed, _ = await self._ratelimiter.can_do_action(
                    requester, (sender_user_id, requester.device_id)
                )
                if not allowed:
                    logger.info(
                        "Dropping room_key_request from %s to %s due to rate limit",
                        sender_user_id,
                        user_id,
                    )
                    continue

            # we use UserID.from_string to catch invalid user ids
            if self.is_mine(UserID.from_string(user_id)):
                messages_by_device = {
                    device_id: {
                        "content": message_content,
                        "type": message_type,
                        "sender": sender_user_id,
                        "message_id": message_id,
                    }
                    for device_id, message_content in by_device.items()
                }
                if messages_by_device:
                    local_messages[user_id] = messages_by_device
                    log_kv(
                        {
                            "user_id": user_id,
                            "device_id": list(messages_by_device),
                        }
                    )
            else:
                destination = get_domain_from_id(user_id)
                remote_messages.setdefault(destination, {})[user_id] = by_device

        context = get_active_span_text_map()

        remote_edu_contents = {}
        for destination, messages in remote_messages.items():
            log_kv({"destination": destination})
            remote_edu_contents[destination] = {
                "messages": messages,
                "sender": sender_user_id,
                "type": message_type,
                "message_id": message_id,
                "org.matrix.opentracing_context": json_encoder.encode(context),
            }

        # Add messages to the database.
        # Retrieve the stream id of the last-processed to-device message.
        last_stream_id = await self.store.add_messages_to_device_inbox(
            local_messages, remote_edu_contents
        )

        # Notify listeners that there are new to-device messages to process,
        # handing them the latest stream id.
        self.notifier.on_new_event(
            "to_device_key", last_stream_id, users=local_messages.keys()
        )

        if self.federation_sender:
            for destination in remote_messages.keys():
                # Enqueue a new federation transaction to send the new
                # device messages to each remote destination.
                self.federation_sender.send_device_messages(destination)