2015-01-16 08:21:14 -05:00
|
|
|
#
|
2023-11-21 15:29:58 -05:00
|
|
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
|
|
#
|
2024-01-23 06:26:48 -05:00
|
|
|
# Copyright 2020 Sorunome
|
|
|
|
# Copyright 2014-2022 The Matrix.org Foundation C.I.C.
|
2023-11-21 15:29:58 -05:00
|
|
|
# Copyright (C) 2023 New Vector, Ltd
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# See the GNU Affero General Public License for more details:
|
|
|
|
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
|
|
#
|
|
|
|
# Originally licensed under the Apache License, Version 2.0:
|
|
|
|
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
|
|
#
|
|
|
|
# [This file includes modifications made by New Vector Limited]
|
2015-01-16 08:21:14 -05:00
|
|
|
#
|
|
|
|
#
|
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
import logging
|
2020-06-16 08:51:47 -04:00
|
|
|
import urllib
|
2021-10-27 12:27:23 -04:00
|
|
|
from typing import (
|
2023-04-24 13:12:06 -04:00
|
|
|
TYPE_CHECKING,
|
2021-10-27 12:27:23 -04:00
|
|
|
Any,
|
2023-11-29 14:03:42 -05:00
|
|
|
BinaryIO,
|
2021-10-27 12:27:23 -04:00
|
|
|
Callable,
|
|
|
|
Collection,
|
|
|
|
Dict,
|
2021-12-02 11:18:10 -05:00
|
|
|
Generator,
|
2021-10-27 12:27:23 -04:00
|
|
|
Iterable,
|
|
|
|
List,
|
|
|
|
Mapping,
|
|
|
|
Optional,
|
|
|
|
Tuple,
|
|
|
|
Union,
|
|
|
|
)
|
2018-09-12 09:23:32 -04:00
|
|
|
|
2021-05-20 11:11:48 -04:00
|
|
|
import attr
|
|
|
|
import ijson
|
|
|
|
|
2023-02-01 16:35:24 -05:00
|
|
|
from synapse.api.constants import Direction, Membership
|
2020-05-01 10:15:08 -04:00
|
|
|
from synapse.api.errors import Codes, HttpResponseException, SynapseError
|
2024-06-05 08:43:36 -04:00
|
|
|
from synapse.api.ratelimiting import Ratelimiter
|
2021-05-20 11:11:48 -04:00
|
|
|
from synapse.api.room_versions import RoomVersion
|
2019-07-29 12:47:27 -04:00
|
|
|
from synapse.api.urls import (
|
|
|
|
FEDERATION_UNSTABLE_PREFIX,
|
|
|
|
FEDERATION_V1_PREFIX,
|
|
|
|
FEDERATION_V2_PREFIX,
|
|
|
|
)
|
2021-05-20 11:11:48 -04:00
|
|
|
from synapse.events import EventBase, make_event_from_dict
|
2021-07-26 11:53:09 -04:00
|
|
|
from synapse.federation.units import Transaction
|
2023-04-24 13:12:06 -04:00
|
|
|
from synapse.http.matrixfederationclient import ByteParser, LegacyJsonSendParser
|
2022-04-08 08:06:51 -04:00
|
|
|
from synapse.http.types import QueryParams
|
2023-05-24 16:23:26 -04:00
|
|
|
from synapse.types import JsonDict, UserID
|
2022-10-06 14:17:50 -04:00
|
|
|
from synapse.util import ExceptionBundle
|
2015-01-16 08:21:14 -05:00
|
|
|
|
2023-04-24 13:12:06 -04:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.app.homeserver import HomeServer
|
|
|
|
|
2015-01-16 08:21:14 -05:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2020-09-04 06:54:56 -04:00
|
|
|
class TransportLayerClient:
|
2015-01-16 08:21:14 -05:00
|
|
|
"""Sends federation HTTP requests to other servers"""
|
|
|
|
|
2023-04-24 13:12:06 -04:00
|
|
|
def __init__(self, hs: "HomeServer"):
    # The shared federation HTTP client used for every outbound request.
    self.client = hs.get_federation_http_client()
    # Predicate used to refuse sending federation traffic to ourselves.
    self._is_mine_server_name = hs.is_mine_server_name
|
2016-01-26 08:52:29 -05:00
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def get_room_state_ids(
    self, destination: str, room_id: str, event_id: str
) -> JsonDict:
    """Requests the IDs of all state for a given room at the given event.

    Args:
        destination: The host name of the remote homeserver we want
            to get the state from.
        room_id: the room we want the state of
        event_id: The event we want the context at.

    Returns:
        Results in a dict received from the remote homeserver.
    """
    logger.debug("get_room_state_ids dest=%s, room=%s", destination, room_id)

    path = _create_v1_path("/state_ids/%s", room_id)
    return await self.client.get_json(
        destination,
        path=path,
        args={"event_id": event_id},
        try_trailing_slash_on_400=True,
    )
|
|
|
|
|
2022-02-22 07:17:10 -05:00
|
|
|
async def get_room_state(
    self, room_version: RoomVersion, destination: str, room_id: str, event_id: str
) -> "StateRequestResponse":
    """Requests the full state for a given room at the given event.

    Args:
        room_version: the version of the room (required to build the event objects)
        destination: The host name of the remote homeserver we want
            to get the state from.
        room_id: the room we want the state of
        event_id: The event we want the context at.

    Returns:
        The state of the room at the event, as parsed from the remote
        homeserver's response by ``_StateParser``.
    """
    path = _create_v1_path("/state/%s", room_id)
    return await self.client.get_json(
        destination,
        path=path,
        args={"event_id": event_id},
        # This can take a looooooong time for large rooms. Give this a generous
        # timeout of 10 minutes to avoid the partial state resync timing out early
        # and trying a bunch of servers who haven't seen our join yet.
        timeout=600_000,
        parser=_StateParser(room_version),
    )
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def get_event(
    self, destination: str, event_id: str, timeout: Optional[int] = None
) -> JsonDict:
    """Requests the pdu with give id and origin from the given server.

    Args:
        destination: The host name of the remote homeserver we want
            to get the state from.
        event_id: The id of the event being requested.
        timeout: How long to try (in ms) the destination for before
            giving up. None indicates no timeout.

    Returns:
        Results in a dict received from the remote homeserver.
    """
    logger.debug("get_pdu dest=%s, event_id=%s", destination, event_id)

    path = _create_v1_path("/event/%s", event_id)
    return await self.client.get_json(
        destination, path=path, timeout=timeout, try_trailing_slash_on_400=True
    )
|
2015-01-16 08:21:14 -05:00
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def backfill(
    self, destination: str, room_id: str, event_tuples: Collection[str], limit: int
) -> Optional[Union[JsonDict, list]]:
    """Requests `limit` previous PDUs in a given context before list of
    PDUs.

    Args:
        destination
        room_id
        event_tuples:
            Must be a Collection that is falsy when empty.
            (Iterable is not enough here!)
        limit

    Returns:
        Results in a dict received from the remote homeserver, or None if
        no event tuples were supplied.
    """
    logger.debug(
        "backfill dest=%s, room_id=%s, event_tuples=%r, limit=%s",
        destination,
        room_id,
        event_tuples,
        str(limit),
    )

    if not event_tuples:
        # TODO: raise?
        return None

    path = _create_v1_path("/backfill/%s", room_id)

    args = {"v": event_tuples, "limit": [str(limit)]}

    return await self.client.get_json(
        destination, path=path, args=args, try_trailing_slash_on_400=True
    )
|
|
|
|
|
2021-12-02 02:02:20 -05:00
|
|
|
async def timestamp_to_event(
    self, destination: str, room_id: str, timestamp: int, direction: Direction
) -> Union[JsonDict, List]:
    """
    Ask the remote homeserver at `destination` for the event in `room_id`
    closest to `timestamp`, searching in the given direction.

    Args:
        destination: Domain name of the remote homeserver
        room_id: Room to fetch the event from
        timestamp: The point in time (inclusive) we should navigate from in
            the given direction to find the closest event.
        direction: indicates whether we should navigate forward
            or backward from the given timestamp to find the closest event.

    Returns:
        Response dict received from the remote homeserver.

    Raises:
        Various exceptions when the request fails
    """
    path = _create_v1_path(
        "/timestamp_to_event/%s",
        room_id,
    )

    query_args = {"ts": [str(timestamp)], "dir": [direction.value]}

    return await self.client.get_json(
        destination, path=path, args=query_args, try_trailing_slash_on_400=True
    )
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def send_transaction(
    self,
    transaction: Transaction,
    json_data_callback: Optional[Callable[[], JsonDict]] = None,
) -> JsonDict:
    """Sends the given Transaction to its destination

    Args:
        transaction: the transaction to send; its destination must not be
            this homeserver.
        json_data_callback: optional callback producing the JSON body to
            send (used instead of ``transaction.get_dict()`` by the HTTP
            client when provided).

    Returns:
        Succeeds when we get a 2xx HTTP response. The result
        will be the decoded JSON body.

        Fails with ``HTTPRequestException`` if we get an HTTP response
        code >= 300.

        Fails with ``NotRetryingDestination`` if we are not yet ready
        to retry this server.

        Fails with ``FederationDeniedError`` if this destination
        is not on our federation whitelist
    """
    logger.debug(
        "send_data dest=%s, txid=%s",
        transaction.destination,
        transaction.transaction_id,
    )

    if self._is_mine_server_name(transaction.destination):
        raise RuntimeError("Transport layer cannot send to itself!")

    # FIXME: This is only used by the tests. The actual json sent is
    # generated by the json_data_callback.
    json_data = transaction.get_dict()

    path = _create_v1_path("/send/%s", transaction.transaction_id)

    return await self.client.put_json(
        transaction.destination,
        path=path,
        data=json_data,
        json_data_callback=json_data_callback,
        long_retries=True,
        try_trailing_slash_on_400=True,
        # Sending a transaction should always succeed, if it doesn't
        # then something is wrong and we should backoff.
        backoff_on_all_error_codes=True,
    )
|
|
|
|
|
2020-07-30 08:01:33 -04:00
|
|
|
async def make_query(
    self,
    destination: str,
    query_type: str,
    args: QueryParams,
    retry_on_dns_fail: bool,
    ignore_backoff: bool = False,
    prefix: str = FEDERATION_V1_PREFIX,
) -> JsonDict:
    """Make a ``/query/<query_type>`` request to a remote homeserver.

    Args:
        destination: The remote server to query.
        query_type: The type of query; appended to the request path.
        args: Query parameters to send with the request.
        retry_on_dns_fail: Whether to retry the request on DNS failure.
        ignore_backoff: If True, ignore any backoff applied to the
            destination.
        prefix: The federation API prefix to use (defaults to the v1
            prefix).

    Returns:
        The decoded JSON response from the remote homeserver.
    """
    path = _create_path(prefix, "/query/%s", query_type)

    return await self.client.get_json(
        destination=destination,
        path=path,
        args=args,
        retry_on_dns_fail=retry_on_dns_fail,
        timeout=10000,
        ignore_backoff=ignore_backoff,
    )
|
|
|
|
|
2020-07-30 08:01:33 -04:00
|
|
|
async def make_membership_event(
    self,
    destination: str,
    room_id: str,
    user_id: str,
    membership: str,
    params: Optional[Mapping[str, Union[str, Iterable[str]]]],
) -> JsonDict:
    """Asks a remote server to build and sign us a membership event

    Note that this does not append any events to any graphs.

    Args:
        destination: address of remote homeserver
        room_id: room to join/leave
        user_id: user to be joined/left
        membership: one of join/leave/knock
        params: Query parameters to include in the request.

    Returns:
        Succeeds when we get a 2xx HTTP response. The result
        will be the decoded JSON body (ie, the new event).

        Fails with ``HTTPRequestException`` if we get an HTTP response
        code >= 300.

        Fails with ``NotRetryingDestination`` if we are not yet ready
        to retry this server.

        Fails with ``FederationDeniedError`` if the remote destination
        is not in our federation whitelist

    Raises:
        RuntimeError: if ``membership`` is not one of the allowed values.
    """
    valid_memberships = {Membership.JOIN, Membership.LEAVE, Membership.KNOCK}

    if membership not in valid_memberships:
        raise RuntimeError(
            "make_membership_event called with membership='%s', must be one of %s"
            % (membership, ",".join(valid_memberships))
        )
    path = _create_v1_path("/make_%s/%s/%s", membership, room_id, user_id)

    ignore_backoff = False
    retry_on_dns_fail = False

    if membership == Membership.LEAVE:
        # we particularly want to do our best to send leave events. The
        # problem is that if it fails, we won't retry it later, so if the
        # remote server was just having a momentary blip, the room will be
        # out of sync.
        ignore_backoff = True
        retry_on_dns_fail = True

    return await self.client.get_json(
        destination=destination,
        path=path,
        args=params,
        retry_on_dns_fail=retry_on_dns_fail,
        timeout=20000,
        ignore_backoff=ignore_backoff,
    )
|
|
|
|
|
2021-05-20 11:11:48 -04:00
|
|
|
async def send_join_v1(
    self,
    room_version: RoomVersion,
    destination: str,
    room_id: str,
    event_id: str,
    content: JsonDict,
) -> "SendJoinResponse":
    """Send a signed join event to a remote homeserver via the v1 API.

    Args:
        room_version: the version of the room (needed to parse the events
            in the response)
        destination: The remote homeserver.
        room_id: The room being joined.
        event_id: The ID of the join event.
        content: The signed join event, as a JSON dict.

    Returns:
        The response, parsed by ``SendJoinParser``.
    """
    path = _create_v1_path("/send_join/%s/%s", room_id, event_id)

    return await self.client.put_json(
        destination=destination,
        path=path,
        data=content,
        parser=SendJoinParser(room_version, v1_api=True),
    )
|
|
|
|
|
2021-05-20 11:11:48 -04:00
|
|
|
async def send_join_v2(
    self,
    room_version: RoomVersion,
    destination: str,
    room_id: str,
    event_id: str,
    content: JsonDict,
    omit_members: bool,
) -> "SendJoinResponse":
    """Send a signed join event to a remote homeserver via the v2 API.

    Args:
        room_version: the version of the room (needed to parse the events
            in the response)
        destination: The remote homeserver.
        room_id: The room being joined.
        event_id: The ID of the join event.
        content: The signed join event, as a JSON dict.
        omit_members: If True, ask the remote server to omit membership
            events from the response (lazy-loading state on join).

    Returns:
        The response, parsed by ``SendJoinParser``.
    """
    path = _create_v2_path("/send_join/%s/%s", room_id, event_id)
    query_params: Dict[str, str] = {}
    # lazy-load state on join
    query_params["omit_members"] = "true" if omit_members else "false"

    return await self.client.put_json(
        destination=destination,
        path=path,
        args=query_params,
        data=content,
        parser=SendJoinParser(room_version, v1_api=False),
    )
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def send_leave_v1(
    self, destination: str, room_id: str, event_id: str, content: JsonDict
) -> Tuple[int, JsonDict]:
    """Send a signed leave event to a remote homeserver via the v1 API.

    Args:
        destination: The remote homeserver.
        room_id: The room being left.
        event_id: The ID of the leave event.
        content: The signed leave event, as a JSON dict.

    Returns:
        The response, parsed by ``LegacyJsonSendParser``.
    """
    path = _create_v1_path("/send_leave/%s/%s", room_id, event_id)

    return await self.client.put_json(
        destination=destination,
        path=path,
        data=content,
        # we want to do our best to send this through. The problem is
        # that if it fails, we won't retry it later, so if the remote
        # server was just having a momentary blip, the room will be out of
        # sync.
        ignore_backoff=True,
        parser=LegacyJsonSendParser(),
    )
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def send_leave_v2(
    self, destination: str, room_id: str, event_id: str, content: JsonDict
) -> JsonDict:
    """Send a signed leave event to a remote homeserver via the v2 API.

    Args:
        destination: The remote homeserver.
        room_id: The room being left.
        event_id: The ID of the leave event.
        content: The signed leave event, as a JSON dict.

    Returns:
        The decoded JSON response from the remote homeserver.
    """
    path = _create_v2_path("/send_leave/%s/%s", room_id, event_id)

    return await self.client.put_json(
        destination=destination,
        path=path,
        data=content,
        # we want to do our best to send this through. The problem is
        # that if it fails, we won't retry it later, so if the remote
        # server was just having a momentary blip, the room will be out of
        # sync.
        ignore_backoff=True,
    )
|
|
|
|
|
2021-06-09 14:39:51 -04:00
|
|
|
async def send_knock_v1(
    self,
    destination: str,
    room_id: str,
    event_id: str,
    content: JsonDict,
) -> JsonDict:
    """
    Sends a signed knock membership event to a remote server. This is the second
    step for knocking after make_knock.

    Args:
        destination: The remote homeserver.
        room_id: The ID of the room to knock on.
        event_id: The ID of the knock membership event that we're sending.
        content: The knock membership event that we're sending. Note that this is not the
            `content` field of the membership event, but the entire signed membership event
            itself represented as a JSON dict.

    Returns:
        The remote homeserver can optionally return some state from the room. The response
        dictionary is in the form:

        {"knock_room_state": [<state event dict>, ...]}

        The list of state events may be empty.
    """
    path = _create_v1_path("/send_knock/%s/%s", room_id, event_id)

    return await self.client.put_json(
        destination=destination, path=path, data=content
    )
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def send_invite_v1(
    self, destination: str, room_id: str, event_id: str, content: JsonDict
) -> Tuple[int, JsonDict]:
    """Send a signed invite event to a remote homeserver via the v1 API.

    Args:
        destination: The remote homeserver.
        room_id: The room the invite is for.
        event_id: The ID of the invite event.
        content: The signed invite event, as a JSON dict.

    Returns:
        The response, parsed by ``LegacyJsonSendParser``.
    """
    path = _create_v1_path("/invite/%s/%s", room_id, event_id)

    return await self.client.put_json(
        destination=destination,
        path=path,
        data=content,
        ignore_backoff=True,
        parser=LegacyJsonSendParser(),
    )
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def send_invite_v2(
    self, destination: str, room_id: str, event_id: str, content: JsonDict
) -> JsonDict:
    """Send a signed invite event to a remote homeserver via the v2 API.

    Args:
        destination: The remote homeserver.
        room_id: The room the invite is for.
        event_id: The ID of the invite event.
        content: The signed invite event, as a JSON dict.

    Returns:
        The decoded JSON response from the remote homeserver.
    """
    invite_path = _create_v2_path("/invite/%s/%s", room_id, event_id)

    response = await self.client.put_json(
        destination=destination,
        path=invite_path,
        data=content,
        ignore_backoff=True,
    )
    return response
|
|
|
|
|
2020-07-30 08:01:33 -04:00
|
|
|
async def get_public_rooms(
    self,
    remote_server: str,
    limit: Optional[int] = None,
    since_token: Optional[str] = None,
    search_filter: Optional[Dict] = None,
    include_all_networks: bool = False,
    third_party_instance_id: Optional[str] = None,
) -> JsonDict:
    """Get the list of public rooms from a remote homeserver

    See synapse.federation.federation_client.FederationClient.get_public_rooms for
    more information.
    """
    path = _create_v1_path("/publicRooms")

    if search_filter:
        # this uses MSC2197 (Search Filtering over Federation): filtered
        # requests are POSTed with a JSON body rather than GET query params.
        data: Dict[str, Any] = {"include_all_networks": include_all_networks}
        if third_party_instance_id:
            data["third_party_instance_id"] = third_party_instance_id
        if limit:
            data["limit"] = limit
        if since_token:
            data["since"] = since_token

        data["filter"] = search_filter

        try:
            response = await self.client.post_json(
                destination=remote_server, path=path, data=data, ignore_backoff=True
            )
        except HttpResponseException as e:
            # Translate a remote 403 into a clearer client-facing error.
            if e.code == 403:
                raise SynapseError(
                    403,
                    "You are not allowed to view the public rooms list of %s"
                    % (remote_server,),
                    errcode=Codes.FORBIDDEN,
                )
            raise
    else:
        # Unfiltered requests use a plain GET with query parameters.
        args: Dict[str, Union[str, Iterable[str]]] = {
            "include_all_networks": "true" if include_all_networks else "false"
        }
        if third_party_instance_id:
            args["third_party_instance_id"] = third_party_instance_id
        if limit:
            args["limit"] = str(limit)
        if since_token:
            args["since"] = since_token

        try:
            response = await self.client.get_json(
                destination=remote_server, path=path, args=args, ignore_backoff=True
            )
        except HttpResponseException as e:
            # Translate a remote 403 into a clearer client-facing error.
            if e.code == 403:
                raise SynapseError(
                    403,
                    "You are not allowed to view the public rooms list of %s"
                    % (remote_server,),
                    errcode=Codes.FORBIDDEN,
                )
            raise

    return response
|
2016-05-31 12:20:07 -04:00
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def exchange_third_party_invite(
    self, destination: str, room_id: str, event_dict: JsonDict
) -> JsonDict:
    """Ask a remote homeserver to exchange a third-party invite in a room.

    Args:
        destination: The remote homeserver.
        room_id: The room the third-party invite belongs to.
        event_dict: The event to send, as a JSON dict.

    Returns:
        The decoded JSON response from the remote homeserver.
    """
    exchange_path = _create_v1_path("/exchange_third_party_invite/%s", room_id)

    response = await self.client.put_json(
        destination=destination, path=exchange_path, data=event_dict
    )
    return response
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def get_event_auth(
    self, destination: str, room_id: str, event_id: str
) -> JsonDict:
    """Fetch the auth chain for an event from a remote homeserver.

    Args:
        destination: The remote homeserver.
        room_id: The room containing the event.
        event_id: The event whose auth chain is requested.

    Returns:
        The decoded JSON response from the remote homeserver.
    """
    auth_path = _create_v1_path("/event_auth/%s/%s", room_id, event_id)
    result = await self.client.get_json(destination=destination, path=auth_path)
    return result
|
2015-01-29 11:50:23 -05:00
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def query_client_keys(
    self, destination: str, query_content: JsonDict, timeout: int
) -> JsonDict:
    """Query the device keys for a list of user ids hosted on a remote
    server.

    Request:
        {
            "device_keys": {
                "<user_id>": ["<device_id>"]
            }
        }

    Response:
        {
            "device_keys": {
                "<user_id>": {
                    "<device_id>": {...}
                }
            },
            "master_key": {
                "<user_id>": {...}
            }
        },
        "self_signing_key": {
            "<user_id>": {...}
        }
    }

    Args:
        destination: The server to query.
        query_content: The user ids to query.
        timeout: How long (in ms) to wait for the request before giving up.
    Returns:
        A dict containing device and cross-signing keys.
    """
    path = _create_v1_path("/user/keys/query")

    return await self.client.post_json(
        destination=destination, path=path, data=query_content, timeout=timeout
    )
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def query_user_devices(
    self, destination: str, user_id: str, timeout: int
) -> JsonDict:
    """Query the devices for a user id hosted on a remote server.

    Response:
        {
            "stream_id": "...",
            "devices": [ { ... } ],
            "master_key": {
                "user_id": "<user_id>",
                "usage": [...],
                "keys": {...},
                "signatures": {
                    "<user_id>": {...}
                }
            },
            "self_signing_key": {
                "user_id": "<user_id>",
                "usage": [...],
                "keys": {...},
                "signatures": {
                    "<user_id>": {...}
                }
            }
        }

    Args:
        destination: The server to query.
        user_id: The user whose devices to query.
        timeout: How long (in ms) to wait for the request before giving up.
    Returns:
        A dict containing device and cross-signing keys.
    """
    path = _create_v1_path("/user/devices/%s", user_id)

    return await self.client.get_json(
        destination=destination, path=path, timeout=timeout
    )
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def claim_client_keys(
    self,
    user: UserID,
    destination: str,
    query_content: JsonDict,
    timeout: Optional[int],
) -> JsonDict:
    """Claim one-time keys for a list of devices hosted on a remote server.

    Request:
        {
            "one_time_keys": {
                "<user_id>": {
                    "<device_id>": "<algorithm>"
                }
            }
        }

    Response:
        {
            "one_time_keys": {
                "<user_id>": {
                    "<device_id>": {
                        "<algorithm>:<key_id>": <OTK JSON>
                    }
                }
            }
        }

    Args:
        user: the user_id of the requesting user
        destination: The server to query.
        query_content: The user ids to query.
        timeout: How long (in ms) to wait for the request, or None for no
            timeout.
    Returns:
        A dict containing the one-time keys.
    """

    path = _create_v1_path("/user/keys/claim")

    return await self.client.post_json(
        destination=destination,
        path=path,
        data={"one_time_keys": query_content},
        timeout=timeout,
    )
|
|
|
|
|
|
|
|
async def claim_client_keys_unstable(
    self,
    user: UserID,
    destination: str,
    query_content: JsonDict,
    timeout: Optional[int],
) -> JsonDict:
    """Claim one-time keys for a list of devices hosted on a remote server.

    Unstable-prefix variant of ``claim_client_keys``: the request maps each
    device to a dict of algorithm -> count rather than a single algorithm.

    Request:
        {
            "one_time_keys": {
                "<user_id>": {
                    "<device_id>": {"<algorithm>": <count>}
                }
            }
        }

    Response:
        {
            "one_time_keys": {
                "<user_id>": {
                    "<device_id>": {
                        "<algorithm>:<key_id>": <OTK JSON>
                    }
                }
            }
        }

    Args:
        user: the user_id of the requesting user
        destination: The server to query.
        query_content: The user ids to query.
        timeout: How long (in ms) to wait for the request, or None for no
            timeout.
    Returns:
        A dict containing the one-time keys.
    """
    path = _create_path(FEDERATION_UNSTABLE_PREFIX, "/user/keys/claim")

    return await self.client.post_json(
        destination=destination,
        path=path,
        data={"one_time_keys": query_content},
        timeout=timeout,
    )
|
|
|
|
|
2020-07-30 08:01:33 -04:00
|
|
|
async def get_missing_events(
    self,
    destination: str,
    room_id: str,
    earliest_events: Iterable[str],
    latest_events: Iterable[str],
    limit: int,
    min_depth: int,
    timeout: int,
) -> JsonDict:
    """Ask a remote server for events we are missing between two sets of
    event IDs, via `POST /get_missing_events/<room_id>`.

    Args:
        destination: The server to query.
        room_id: The room the events belong to.
        earliest_events: Event IDs we already have (the lower bound).
        latest_events: Event IDs we are working backwards from.
        limit: Maximum number of events to return.
        min_depth: Minimum depth of events to return.
        timeout: Request timeout.
    """
    request_body = {
        "limit": int(limit),
        "min_depth": int(min_depth),
        "earliest_events": earliest_events,
        "latest_events": latest_events,
    }
    return await self.client.post_json(
        destination=destination,
        path=_create_v1_path("/get_missing_events/%s", room_id),
        data=request_body,
        timeout=timeout,
    )
|
2018-04-10 06:16:08 -04:00
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
async def get_room_complexity(self, destination: str, room_id: str) -> JsonDict:
    """Fetch the (unstable) complexity report for a room from a remote server.

    Args:
        destination: The remote server
        room_id: The room ID to ask about.
    """
    return await self.client.get_json(
        destination=destination,
        path=_create_path(
            FEDERATION_UNSTABLE_PREFIX, "/rooms/%s/complexity", room_id
        ),
    )
|
2019-07-29 12:47:27 -04:00
|
|
|
|
2021-08-16 08:06:17 -04:00
|
|
|
async def get_room_hierarchy(
    self, destination: str, room_id: str, suggested_only: bool
) -> JsonDict:
    """Fetch a room's hierarchy from a remote server (stable endpoint).

    Args:
        destination: The remote server
        room_id: The room ID to ask about.
        suggested_only: if True, only suggested rooms will be returned
    """
    query_args = {"suggested_only": "true" if suggested_only else "false"}
    return await self.client.get_json(
        destination=destination,
        path=_create_v1_path("/hierarchy/%s", room_id),
        args=query_args,
    )
|
|
|
|
|
|
|
|
async def get_room_hierarchy_unstable(
    self, destination: str, room_id: str, suggested_only: bool
) -> JsonDict:
    """Fetch a room's hierarchy from a remote server (unstable MSC2946
    endpoint).

    Args:
        destination: The remote server
        room_id: The room ID to ask about.
        suggested_only: if True, only suggested rooms will be returned
    """
    query_args = {"suggested_only": "true" if suggested_only else "false"}
    return await self.client.get_json(
        destination=destination,
        path=_create_path(
            FEDERATION_UNSTABLE_PREFIX, "/org.matrix.msc2946/hierarchy/%s", room_id
        ),
        args=query_args,
    )
|
|
|
|
|
2022-02-22 10:10:10 -05:00
|
|
|
async def get_account_status(
    self, destination: str, user_ids: List[str]
) -> JsonDict:
    """Ask a remote server for the account status of a set of its users
    (unstable MSC3720 endpoint).

    Args:
        destination: The remote server.
        user_ids: The user ID(s) for which to request account status(es).
    """
    return await self.client.post_json(
        destination=destination,
        path=_create_path(
            FEDERATION_UNSTABLE_PREFIX, "/org.matrix.msc3720/account_status"
        ),
        data={"user_ids": user_ids},
    )
|
|
|
|
|
2023-11-29 14:03:42 -05:00
|
|
|
async def download_media_r0(
    self,
    destination: str,
    media_id: str,
    output_stream: BinaryIO,
    max_size: int,
    max_timeout_ms: int,
    download_ratelimiter: Ratelimiter,
    ip_address: str,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
    """Download remote media via the deprecated r0 media API, streaming the
    content into `output_stream`.

    Returns the result of `self.client.get_file` (length and headers).
    """
    query_args = {
        # tell the remote server to 404 if it doesn't
        # recognise the server_name, to make sure we don't
        # end up with a routing loop.
        "allow_remote": "false",
        "timeout_ms": str(max_timeout_ms),
    }
    return await self.client.get_file(
        destination,
        f"/_matrix/media/r0/download/{destination}/{media_id}",
        output_stream=output_stream,
        max_size=max_size,
        args=query_args,
        download_ratelimiter=download_ratelimiter,
        ip_address=ip_address,
    )
|
|
|
|
|
|
|
|
async def download_media_v3(
    self,
    destination: str,
    media_id: str,
    output_stream: BinaryIO,
    max_size: int,
    max_timeout_ms: int,
    download_ratelimiter: Ratelimiter,
    ip_address: str,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
    """Download remote media via the v3 media API, streaming the content
    into `output_stream`.

    Returns the result of `self.client.get_file` (length and headers).
    """
    query_args = {
        # tell the remote server to 404 if it doesn't
        # recognise the server_name, to make sure we don't
        # end up with a routing loop.
        "allow_remote": "false",
        "timeout_ms": str(max_timeout_ms),
        # Matrix 1.7 allows for this to redirect to another URL, this should
        # just be ignored for an old homeserver, so always provide it.
        "allow_redirect": "true",
    }
    return await self.client.get_file(
        destination,
        f"/_matrix/media/v3/download/{destination}/{media_id}",
        output_stream=output_stream,
        max_size=max_size,
        args=query_args,
        follow_redirects=True,
        download_ratelimiter=download_ratelimiter,
        ip_address=ip_address,
    )
|
|
|
|
|
2024-07-02 09:07:04 -04:00
|
|
|
async def federation_download_media(
    self,
    destination: str,
    media_id: str,
    output_stream: BinaryIO,
    max_size: int,
    max_timeout_ms: int,
    download_ratelimiter: Ratelimiter,
    ip_address: str,
) -> Tuple[int, Dict[bytes, List[bytes]], bytes]:
    """Download remote media via the authenticated federation media API,
    streaming the content into `output_stream`.

    Returns the result of `self.client.federation_get_file`.
    """
    return await self.client.federation_get_file(
        destination,
        f"/_matrix/federation/v1/media/download/{media_id}",
        output_stream=output_stream,
        max_size=max_size,
        args={"timeout_ms": str(max_timeout_ms)},
        download_ratelimiter=download_ratelimiter,
        ip_address=ip_address,
    )
|
|
|
|
|
2019-07-29 12:47:27 -04:00
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
def _create_path(federation_prefix: str, path: str, *args: str) -> str:
|
2019-07-29 12:47:27 -04:00
|
|
|
"""
|
|
|
|
Ensures that all args are url encoded.
|
|
|
|
"""
|
|
|
|
return federation_prefix + path % tuple(urllib.parse.quote(arg, "") for arg in args)
|
|
|
|
|
2018-04-10 06:16:08 -04:00
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
def _create_v1_path(path: str, *args: str) -> str:
    """Build a path under the V1 federation API prefix.

    Each arg is url encoded and substituted into the `path` template, e.g.

        _create_v1_path("/event/%s", event_id)

    Args:
        path: String template for the path
        args: Args to insert into path. Each arg will be url encoded
    """
    return _create_path(FEDERATION_V1_PREFIX, path, *args)
|
2019-01-28 09:55:53 -05:00
|
|
|
|
|
|
|
|
2021-07-26 11:53:09 -04:00
|
|
|
def _create_v2_path(path: str, *args: str) -> str:
    """Build a path under the V2 federation API prefix.

    Each arg is url encoded and substituted into the `path` template, e.g.

        _create_v2_path("/event/%s", event_id)

    Args:
        path: String template for the path
        args: Args to insert into path. Each arg will be url encoded
    """
    return _create_path(FEDERATION_V2_PREFIX, path, *args)
|
2021-05-20 11:11:48 -04:00
|
|
|
|
|
|
|
|
|
|
|
@attr.s(slots=True, auto_attribs=True)
class SendJoinResponse:
    """The parsed response of a `/send_join` request."""

    # The list of auth events from the /send_join response.
    auth_events: List[EventBase]
    # The list of state from the /send_join response.
    state: List[EventBase]
    # The raw join event from the /send_join response.
    event_dict: JsonDict
    # The parsed join event from the /send_join response. This will be None if
    # "event" is not included in the response.
    event: Optional[EventBase] = None

    # The room state is incomplete: set from the response's "members_omitted"
    # field (see _members_omitted_parser).
    members_omitted: bool = False

    # List of servers in the room, set from the response's "servers_in_room"
    # field (see _servers_in_room_parser).
    servers_in_room: Optional[List[str]] = None
|
|
|
|
|
2021-07-26 12:17:00 -04:00
|
|
|
|
2022-02-22 07:17:10 -05:00
|
|
|
@attr.s(slots=True, auto_attribs=True)
class StateRequestResponse:
    """The parsed response of a `/state` request."""

    # Events parsed from the "auth_chain" field of the response.
    auth_events: List[EventBase]
    # Events parsed from the "pdus" field of the response.
    state: List[EventBase]
|
|
|
|
|
|
|
|
|
2021-07-26 12:17:00 -04:00
|
|
|
@ijson.coroutine
def _event_parser(event_dict: JsonDict) -> Generator[None, Tuple[str, Any], None]:
    """Coroutine for use with `ijson.kvitems_coro`: stores each key-value
    pair it is sent into the given dictionary.
    """

    while True:
        item = yield
        event_dict[item[0]] = item[1]
|
2021-05-20 11:11:48 -04:00
|
|
|
|
|
|
|
|
|
|
|
@ijson.coroutine
def _event_list_parser(
    room_version: RoomVersion, events: List[EventBase]
) -> Generator[None, JsonDict, None]:
    """Coroutine for use with `ijson.items_coro`: turns each JSON object it is
    sent into an event of the given room version and appends it to `events`.
    """

    while True:
        event_json = yield
        events.append(make_event_from_dict(event_json, room_version))
|
|
|
|
|
|
|
|
|
2022-02-17 11:11:59 -05:00
|
|
|
@ijson.coroutine
def _members_omitted_parser(response: SendJoinResponse) -> Generator[None, Any, None]:
    """Coroutine for use with `ijson.items_coro`.

    Parses the members_omitted field in send_join responses
    """

    while True:
        value = yield
        if isinstance(value, bool):
            response.members_omitted = value
        else:
            raise TypeError("members_omitted must be a boolean")
|
2022-02-17 11:11:59 -05:00
|
|
|
|
|
|
|
|
|
|
|
@ijson.coroutine
def _servers_in_room_parser(response: SendJoinResponse) -> Generator[None, Any, None]:
    """Coroutine for use with `ijson.items_coro`.

    Parses the servers_in_room field in send_join responses
    """

    while True:
        value = yield
        is_string_list = isinstance(value, list) and all(
            isinstance(item, str) for item in value
        )
        if not is_string_list:
            raise TypeError("servers_in_room must be a list of strings")
        response.servers_in_room = value
|
|
|
|
|
|
|
|
|
2021-05-20 11:11:48 -04:00
|
|
|
class SendJoinParser(ByteParser[SendJoinResponse]):
    """A parser for the response to `/send_join` requests.

    The body is parsed incrementally by a set of `ijson` coroutines, each of
    which extracts one top-level field into `self._response`.

    Args:
        room_version: The version of the room.
        v1_api: Whether the response is in the v1 format.
    """

    CONTENT_TYPE = "application/json"

    # /send_join responses can be huge, so we override the size limit here. The response
    # is parsed in a streaming manner, which helps alleviate the issue of memory
    # usage a bit.
    MAX_RESPONSE_SIZE = 500 * 1024 * 1024

    def __init__(self, room_version: RoomVersion, v1_api: bool):
        self._response = SendJoinResponse([], [], event_dict={})
        self._room_version = room_version

        # The V1 API has the shape of `[200, {...}]`, which we handle by
        # prefixing with `item.*`.
        prefix = "item." if v1_api else ""

        # NOTE: assigned directly instead of the previous dead-store
        # (`self._coros = []` immediately followed by a reassignment).
        self._coros: List[Generator[None, bytes, None]] = [
            ijson.items_coro(
                _event_list_parser(room_version, self._response.state),
                prefix + "state.item",
                use_float=True,
            ),
            ijson.items_coro(
                _event_list_parser(room_version, self._response.auth_events),
                prefix + "auth_chain.item",
                use_float=True,
            ),
            ijson.kvitems_coro(
                _event_parser(self._response.event_dict),
                prefix + "event",
                use_float=True,
            ),
        ]

        # The v2 format may additionally include partial-state metadata.
        if not v1_api:
            self._coros.append(
                ijson.items_coro(
                    _members_omitted_parser(self._response),
                    "members_omitted",
                    # Was `use_float="True"` (a truthy string); normalised to a
                    # real boolean for consistency with the coroutines above.
                    use_float=True,
                )
            )

            self._coros.append(
                ijson.items_coro(
                    _servers_in_room_parser(self._response),
                    "servers_in_room",
                    use_float=True,
                )
            )

    def write(self, data: bytes) -> int:
        """Feed a chunk of the response body to each parsing coroutine."""
        for c in self._coros:
            c.send(data)

        return len(data)

    def finish(self) -> SendJoinResponse:
        """Close the parsing coroutines and return the parsed response.

        If the response included an "event" field, its accumulated key/value
        pairs are turned into a parsed event here.
        """
        _close_coros(self._coros)

        if self._response.event_dict:
            self._response.event = make_event_from_dict(
                self._response.event_dict, self._room_version
            )
        return self._response
|
2022-02-22 07:17:10 -05:00
|
|
|
|
|
|
|
|
|
|
|
class _StateParser(ByteParser[StateRequestResponse]):
    """A parser for the response to `/state` requests.

    Args:
        room_version: The version of the room.
    """

    CONTENT_TYPE = "application/json"

    # As with /send_join, /state responses can be huge.
    MAX_RESPONSE_SIZE = 500 * 1024 * 1024

    def __init__(self, room_version: RoomVersion):
        self._response = StateRequestResponse([], [])
        self._room_version = room_version

        # One streaming coroutine per top-level field of interest.
        state_coro = ijson.items_coro(
            _event_list_parser(room_version, self._response.state),
            "pdus.item",
            use_float=True,
        )
        auth_coro = ijson.items_coro(
            _event_list_parser(room_version, self._response.auth_events),
            "auth_chain.item",
            use_float=True,
        )
        self._coros: List[Generator[None, bytes, None]] = [state_coro, auth_coro]

    def write(self, data: bytes) -> int:
        """Feed a chunk of the response body to each parsing coroutine."""
        for coro in self._coros:
            coro.send(data)
        return len(data)

    def finish(self) -> StateRequestResponse:
        """Close the parsing coroutines and return the parsed response."""
        _close_coros(self._coros)
        return self._response
|
2022-10-06 14:17:50 -04:00
|
|
|
|
|
|
|
|
|
|
|
def _close_coros(coros: Iterable[Generator[None, bytes, None]]) -> None:
|
|
|
|
"""Close each of the given coroutines.
|
|
|
|
|
|
|
|
Always calls .close() on each coroutine, even if doing so raises an exception.
|
|
|
|
Any exceptions raised are aggregated into an ExceptionBundle.
|
|
|
|
|
|
|
|
:raises ExceptionBundle: if at least one coroutine fails to close.
|
|
|
|
"""
|
|
|
|
exceptions = []
|
|
|
|
for c in coros:
|
|
|
|
try:
|
|
|
|
c.close()
|
|
|
|
except Exception as e:
|
|
|
|
exceptions.append(e)
|
|
|
|
|
|
|
|
if exceptions:
|
|
|
|
# raise from the first exception so that the traceback has slightly more context
|
|
|
|
raise ExceptionBundle(
|
|
|
|
f"There were {len(exceptions)} errors closing coroutines", exceptions
|
|
|
|
) from exceptions[0]
|