# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import re
from http import HTTPStatus
from typing import TYPE_CHECKING, Awaitable, Tuple

from twisted.web.server import Request

from synapse.api.constants import EventContentFields
from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import (
    RestServlet,
    assert_params_in_dict,
    parse_json_object_from_request,
    parse_string,
    parse_strings_from_args,
)
from synapse.http.site import SynapseRequest
from synapse.rest.client.transactions import HttpTransactionCache
from synapse.types import JsonDict

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class RoomBatchSendEventRestServlet(RestServlet):
    """
    API endpoint which can insert a batch of events historically back in time
    next to the given `prev_event`.

    `batch_id` comes from `next_batch_id` in the response of the batch send
    endpoint and is derived from the "insertion" events added to each batch.
    It's not required for the first batch send.

    `state_events_at_start` is used to define the historical state events
    needed to auth the events like join events. These events will float
    outside of the normal DAG as outliers and won't be visible in the chat
    history, which also allows us to insert multiple batches without having a bunch
    of `@mxid joined the room` noise between each batch.

    `events` is a chronological list of events you want to insert.
    There is a reverse-chronological constraint on batches, so once you insert
    some messages, you can only insert older ones after that.
    tldr; Insert batches from your most recent history -> oldest history.

    POST /_matrix/client/unstable/org.matrix.msc2716/rooms/<roomID>/batch_send?prev_event_id=<eventID>&batch_id=<batchID>
    {
        "events": [ ... ],
        "state_events_at_start": [ ... ]
    }
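
    The response looks roughly like the following (a sketch of the
    `response_dict` assembled in `on_POST` below; exact IDs will vary):
    {
        "state_event_ids": [ ... ],
        "event_ids": [ ... ],
        "next_batch_id": "<batchID>",
        "insertion_event_id": "<eventID>",
        "batch_event_id": "<eventID>",
        "base_insertion_event_id": "<eventID>"  (only when no ?batch_id was given)
    }

    Pass the returned `next_batch_id` back as `?batch_id` on the next call to
    connect the following (older) batch to this one.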
    """
    PATTERNS = (
        re.compile(
            "^/_matrix/client/unstable/org.matrix.msc2716"
            "/rooms/(?P<room_id>[^/]*)/batch_send$"
        ),
    )

    def __init__(self, hs: "HomeServer"):
        super().__init__()
        self.store = hs.get_datastores().main
        self.event_creation_handler = hs.get_event_creation_handler()
        self.auth = hs.get_auth()
        self.room_batch_handler = hs.get_room_batch_handler()
        self.txns = HttpTransactionCache(hs)

    async def on_POST(
        self, request: SynapseRequest, room_id: str
    ) -> Tuple[int, JsonDict]:
        requester = await self.auth.get_user_by_req(request, allow_guest=False)
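
        # Only application services may use this endpoint, since importing
        # history means creating events on behalf of arbitrary users, which
        # the `create_requester_for_user_id_from_app_service` calls below
        # rely on.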
        if not requester.app_service:
            raise AuthError(
                HTTPStatus.FORBIDDEN,
                "Only application services can use the /batch_send endpoint",
            )

        body = parse_json_object_from_request(request)
        assert_params_in_dict(body, ["state_events_at_start", "events"])

        assert request.args is not None
        prev_event_ids_from_query = parse_strings_from_args(
            request.args, "prev_event_id"
        )
        batch_id_from_query = parse_string(request, "batch_id")

        if prev_event_ids_from_query is None:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "prev_event_id query parameter is required when inserting historical messages back in time",
                errcode=Codes.MISSING_PARAM,
            )

        # Verify the batch_id_from_query corresponds to an actual insertion event
        # so that this batch can be connected to it.
        if batch_id_from_query:
            corresponding_insertion_event_id = (
                await self.store.get_insertion_event_id_by_batch_id(
                    room_id, batch_id_from_query
                )
            )
            if corresponding_insertion_event_id is None:
                raise SynapseError(
                    HTTPStatus.BAD_REQUEST,
                    "No insertion event corresponds to the given ?batch_id",
                    errcode=Codes.INVALID_PARAM,
                )

        # For the event we are inserting next to (`prev_event_ids_from_query`),
        # find the most recent auth events (derived from state events) that
        # allowed that message to be sent. We will use that as a base
        # to auth our historical messages against.
        auth_event_ids = await self.room_batch_handler.get_most_recent_auth_event_ids_from_event_id_list(
            prev_event_ids_from_query
        )

        if not auth_event_ids:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "No auth events found for given prev_event_id query parameter. The prev_event_id=%s probably does not exist."
                % prev_event_ids_from_query,
                errcode=Codes.INVALID_PARAM,
            )

        state_event_ids_at_start = []
        # Create and persist all of the state events that float off on their own
        # before the batch. These will most likely be all of the invite/member
        # state events used to auth the upcoming historical messages.
        if body["state_events_at_start"]:
            state_event_ids_at_start = (
                await self.room_batch_handler.persist_state_events_at_start(
                    state_events_at_start=body["state_events_at_start"],
                    room_id=room_id,
                    initial_auth_event_ids=auth_event_ids,
                    app_service_requester=requester,
                )
            )
            # Update our ongoing auth event ID list with all of the new state we
            # just created
            auth_event_ids.extend(state_event_ids_at_start)
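
        # Every event in the batch is created at the same depth, inherited
        # from the events we're inserting next to, so the batch lands at the
        # right point in the room's topological ordering.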
        inherited_depth = await self.room_batch_handler.inherit_depth_from_prev_ids(
            prev_event_ids_from_query
        )

        events_to_create = body["events"]

        # Figure out which batch to connect to. If they passed in
        # batch_id_from_query let's use it. The batch ID passed in comes
        # from the batch_id in the "insertion" event from the previous batch.
        last_event_in_batch = events_to_create[-1]
        base_insertion_event = None
        if batch_id_from_query:
            batch_id_to_connect_to = batch_id_from_query
        # Otherwise, create an insertion event to act as a starting point.
        #
        # We don't always have an insertion event to start hanging more history
        # off of (ideally there would be one in the main DAG, but that's not the
        # case if we're wanting to add history to e.g. existing rooms without
        # an insertion event), in which case we just create a new insertion event
        # that can then get pointed to by a "marker" event later.
        else:
            base_insertion_event_dict = (
                self.room_batch_handler.create_insertion_event_dict(
                    sender=requester.user.to_string(),
                    room_id=room_id,
                    origin_server_ts=last_event_in_batch["origin_server_ts"],
                )
            )
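            # Use a copy so that later mutation of `prev_event_ids_from_query`
            # can't leak into the event we're about to build.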
            base_insertion_event_dict["prev_events"] = prev_event_ids_from_query.copy()

            (
                base_insertion_event,
                _,
            ) = await self.event_creation_handler.create_and_send_nonmember_event(
                await self.room_batch_handler.create_requester_for_user_id_from_app_service(
                    base_insertion_event_dict["sender"],
                    requester.app_service,
                ),
                base_insertion_event_dict,
                prev_event_ids=base_insertion_event_dict.get("prev_events"),
                auth_event_ids=auth_event_ids,
                historical=True,
                depth=inherited_depth,
            )

            batch_id_to_connect_to = base_insertion_event.content[
                EventContentFields.MSC2716_NEXT_BATCH_ID
            ]

        # Create and persist all of the historical events as well as insertion
        # and batch meta events to make the batch navigable in the DAG.
        event_ids, next_batch_id = await self.room_batch_handler.handle_batch_of_events(
            events_to_create=events_to_create,
            room_id=room_id,
            batch_id_to_connect_to=batch_id_to_connect_to,
            inherited_depth=inherited_depth,
            auth_event_ids=auth_event_ids,
            app_service_requester=requester,
        )
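
        # `handle_batch_of_events` returns all persisted event IDs: the
        # "insertion" event first, the "batch" event last, and the historical
        # events in between, hence the slicing below.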
        insertion_event_id = event_ids[0]
        batch_event_id = event_ids[-1]
        historical_event_ids = event_ids[1:-1]

        response_dict = {
            "state_event_ids": state_event_ids_at_start,
            "event_ids": historical_event_ids,
            "next_batch_id": next_batch_id,
            "insertion_event_id": insertion_event_id,
            "batch_event_id": batch_event_id,
        }
        if base_insertion_event is not None:
            response_dict["base_insertion_event_id"] = base_insertion_event.event_id

        return HTTPStatus.OK, response_dict

    def on_GET(self, request: Request, room_id: str) -> Tuple[int, str]:
        return HTTPStatus.NOT_IMPLEMENTED, "Not implemented"

    def on_PUT(
        self, request: SynapseRequest, room_id: str
    ) -> Awaitable[Tuple[int, JsonDict]]:
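        # `HttpTransactionCache` makes PUT retries idempotent: a repeat of the
        # same transaction replays the cached response instead of re-running
        # `on_POST` and importing the batch twice.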
        return self.txns.fetch_or_execute_request(
            request, self.on_POST, request, room_id
        )


def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
    msc2716_enabled = hs.config.experimental.msc2716_enabled

    if msc2716_enabled:
        RoomBatchSendEventRestServlet(hs).register(http_server)