Merge remote-tracking branch 'upstream/release-v1.40'

Tulir Asokan 2021-08-03 14:38:58 +03:00
commit 7359964d9f
124 changed files with 3989 additions and 904 deletions


@@ -416,7 +416,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
assert_params_in_dict(body, ["state_events_at_start", "events"])
prev_events_from_query = parse_strings_from_args(request.args, "prev_event")
-chunk_id_from_query = parse_string(request, "chunk_id", default=None)
+chunk_id_from_query = parse_string(request, "chunk_id")
if prev_events_from_query is None:
raise SynapseError(
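The change in this hunk drops a redundant argument: parse_string already returns None when the query parameter is absent, so spelling out default=None adds nothing. A minimal sketch of the equivalence, assuming the usual synapse.http.servlet helper signatures (request is the servlet request passed to the handler):

from typing import Optional

from synapse.http.servlet import parse_string

# Both forms return None when the request has no ?chunk_id=... query parameter.
chunk_id_old_style: Optional[str] = parse_string(request, "chunk_id", default=None)
chunk_id_new_style: Optional[str] = parse_string(request, "chunk_id")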
@@ -507,7 +507,6 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
events_to_create = body["events"]
-prev_event_ids = prev_events_from_query
inherited_depth = await self._inherit_depth_from_prev_ids(
prev_events_from_query
)
@@ -519,6 +518,10 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
chunk_id_to_connect_to = chunk_id_from_query
base_insertion_event = None
if chunk_id_from_query:
+# All but the first base insertion event should point at a fake
+# event, which causes the HS to ask for the state at the start of
+# the chunk later.
+prev_event_ids = [fake_prev_event_id]
# TODO: Verify the chunk_id_from_query corresponds to an insertion event
pass
# Otherwise, create an insertion event to act as a starting point.
@@ -529,6 +532,8 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
# an insertion event), in which case we just create a new insertion event
# that can then get pointed to by a "marker" event later.
else:
+prev_event_ids = prev_events_from_query
base_insertion_event_dict = self._create_insertion_event_dict(
sender=requester.user.to_string(),
room_id=room_id,
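Taken together with the removal in the earlier hunk, prev_event_ids is now chosen per branch: chunks that pass ?chunk_id point at a fake previous event, while the first chunk falls back to the prev_event query values and gets a base insertion event that a later "marker" event can reference. A rough sketch of the shape such an insertion event takes, assuming the unstable MSC2716 field names and purely illustrative values (the exact output of _create_insertion_event_dict is not shown in this diff):

# Hypothetical result of _create_insertion_event_dict(...) for the base
# insertion event; field names use the unstable MSC2716 prefixes and all
# values here are made up.
base_insertion_event_dict = {
    "type": "org.matrix.msc2716.insertion",
    "sender": "@appservice:example.org",
    "room_id": "!historical:example.org",
    "content": {
        # Freshly generated ID; the next chunk's chunk event refers back to it.
        "org.matrix.msc2716.next_chunk_id": "chunk-abc123",
        "org.matrix.msc2716.historical": True,
    },
    "origin_server_ts": 1628000000000,  # illustrative timestamp
}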
@@ -556,9 +561,18 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
]
# Connect this current chunk to the insertion event from the previous chunk
-last_event_in_chunk["content"][
-EventContentFields.MSC2716_CHUNK_ID
-] = chunk_id_to_connect_to
+chunk_event = {
+"type": EventTypes.MSC2716_CHUNK,
+"sender": requester.user.to_string(),
+"room_id": room_id,
+"content": {EventContentFields.MSC2716_CHUNK_ID: chunk_id_to_connect_to},
+# Since the chunk event is put at the end of the chunk,
+# where the newest-in-time event is, copy the origin_server_ts from
+# the last event we're inserting
+"origin_server_ts": last_event_in_chunk["origin_server_ts"],
+}
+# Add the chunk event to the end of the chunk (newest-in-time)
+events_to_create.append(chunk_event)
# Add an "insertion" event to the start of each chunk (next to the oldest-in-time
# event in the chunk) so the next chunk can be connected to this one.
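This hunk replaces stamping the chunk ID onto the last historical event with a dedicated chunk event appended at the newest-in-time end, so each chunk links to the insertion event of the chunk sent before it. A small illustration of that linkage, using the unstable MSC2716 names and made-up IDs:

# Insertion event created for the previous chunk (or the base insertion event):
previous_insertion_event = {
    "type": "org.matrix.msc2716.insertion",
    "content": {"org.matrix.msc2716.next_chunk_id": "chunk-abc123"},
}

# The current request was made with ?chunk_id=chunk-abc123, so the chunk event
# built above points back at that insertion event:
chunk_event = {
    "type": "org.matrix.msc2716.chunk",
    "content": {"org.matrix.msc2716.chunk_id": "chunk-abc123"},
}

# Resulting order of events_to_create, oldest-in-time first:
#   [new_insertion_event, *historical_events, chunk_event]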
@@ -570,7 +584,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
# the first event we're inserting
origin_server_ts=events_to_create[0]["origin_server_ts"],
)
-# Prepend the insertion event to the start of the chunk
+# Prepend the insertion event to the start of the chunk (oldest-in-time)
events_to_create = [insertion_event] + events_to_create
event_ids = []
@@ -729,7 +743,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
self.auth = hs.get_auth()
async def on_GET(self, request):
-server = parse_string(request, "server", default=None)
+server = parse_string(request, "server")
try:
await self.auth.get_user_by_req(request, allow_guest=True)
@@ -748,8 +762,8 @@ class PublicRoomListRestServlet(TransactionRestServlet):
if server:
raise e
-limit = parse_integer(request, "limit", 0)
-since_token = parse_string(request, "since", None)
+limit: Optional[int] = parse_integer(request, "limit", 0)
+since_token = parse_string(request, "since")
if limit == 0:
# zero is a special value which corresponds to no limit.
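The public-rooms hunks follow the same typed-parsing cleanup: parse_integer keeps its explicit default of 0, parse_string drops the redundant default=None, and the Optional[int] annotation reflects that limit is presumably cleared to None further down when it is zero. A short sketch, again assuming the usual synapse.http.servlet signatures:

from typing import Optional

from synapse.http.servlet import parse_integer, parse_string

# Inside on_GET, where `request` is the incoming servlet request.
limit: Optional[int] = parse_integer(request, "limit", 0)  # absent -> 0
since_token = parse_string(request, "since")               # absent -> None

if limit == 0:
    # Zero is a special value meaning "no limit"; presumably cleared here.
    limit = None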
@@ -783,7 +797,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
async def on_POST(self, request):
await self.auth.get_user_by_req(request, allow_guest=True)
-server = parse_string(request, "server", default=None)
+server = parse_string(request, "server")
content = parse_json_object_from_request(request)
limit: Optional[int] = int(content.get("limit", 100))