2015-01-23 13:31:29 -05:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-06 23:26:29 -05:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2015-01-23 13:31:29 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
import itertools
|
|
|
|
import logging
|
2021-03-24 06:48:46 -04:00
|
|
|
from typing import TYPE_CHECKING, Tuple
|
2018-07-09 02:09:20 -04:00
|
|
|
|
|
|
|
from synapse.api.constants import PresenceState
|
2019-10-10 07:59:55 -04:00
|
|
|
from synapse.api.errors import Codes, StoreError, SynapseError
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.api.filtering import DEFAULT_FILTER_COLLECTION, FilterCollection
|
|
|
|
from synapse.events.utils import (
|
|
|
|
format_event_for_client_v2_without_room_id,
|
2018-09-04 10:18:25 -04:00
|
|
|
format_event_raw,
|
2015-04-21 11:35:53 -04:00
|
|
|
)
|
2017-03-15 10:27:34 -04:00
|
|
|
from synapse.handlers.presence import format_user_presence_state
|
2015-01-26 13:53:31 -05:00
|
|
|
from synapse.handlers.sync import SyncConfig
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string
|
2021-03-24 06:48:46 -04:00
|
|
|
from synapse.http.site import SynapseRequest
|
|
|
|
from synapse.types import JsonDict, StreamToken
|
2020-08-19 07:26:03 -04:00
|
|
|
from synapse.util import json_decoder
|
2015-01-23 13:31:29 -05:00
|
|
|
|
2019-06-03 07:28:59 -04:00
|
|
|
from ._base import client_patterns, set_timeline_upper_limit
|
2015-12-09 07:56:50 -05:00
|
|
|
|
2021-03-24 06:48:46 -04:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
2015-01-23 13:31:29 -05:00
|
|
|
# Module-level logger, named after this module per the standard convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class SyncRestServlet(RestServlet):
    """
    GET parameters::
        timeout(int): How long to wait for new events in milliseconds.
        since(batch_token): Batch token when asking for incremental deltas.
        set_presence(str): What state the device presence should be set to.
            default is "online".
        filter(filter_id): A filter to apply to the events returned.

    Response JSON::
        {
          "next_batch": // batch token for the next /sync
          "presence": // presence data for the user.
          "rooms": {
            "join": { // Joined rooms being updated.
              "${room_id}": { // Id of the room being updated
                "event_map": // Map of EventID -> event JSON.
                "timeline": { // The recent events in the room if gap is "true"
                  "limited": // Was the per-room event limit exceeded?
                            // otherwise the next events in the room.
                  "events": [] // list of EventIDs in the "event_map".
                  "prev_batch": // back token for getting previous events.
                }
                "state": {"events": []} // list of EventIDs updating the
                // current state to be what it should
                // be at the end of the batch.
                "ephemeral": {"events": []} // list of event objects
              }
            },
            "invite": {}, // Invited rooms being updated.
            "leave": {} // Archived rooms being updated.
          }
        }
    """

    PATTERNS = client_patterns("/sync$")
    # Presence states a client may request via the ?set_presence= parameter.
    ALLOWED_PRESENCE = {"online", "offline", "unavailable"}
|
2015-01-23 13:31:29 -05:00
|
|
|
|
2021-03-24 06:48:46 -04:00
|
|
|
    def __init__(self, hs: "HomeServer"):
        """Pull the handlers/stores this servlet needs off the homeserver."""
        super().__init__()
        self.hs = hs
        self.auth = hs.get_auth()
        self.store = hs.get_datastore()
        self.sync_handler = hs.get_sync_handler()
        self.clock = hs.get_clock()
        self.filtering = hs.get_filtering()
        self.presence_handler = hs.get_presence_handler()
        self._server_notices_sender = hs.get_server_notices_sender()
        self._event_serializer = hs.get_event_client_serializer()
|
2015-01-23 13:31:29 -05:00
|
|
|
|
2021-03-24 06:48:46 -04:00
|
|
|
    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
        """Handle a /sync request.

        Parses and validates the query parameters, resolves the filter to
        apply, updates the requester's presence if asked to, waits (up to
        ``timeout`` ms) for the sync result and serializes it into the
        client-facing response format.

        Args:
            request: the incoming HTTP request.

        Returns:
            A (200, response JSON dict) tuple.

        Raises:
            SynapseError: on invalid query parameters or an unknown filter.
        """
        # This will always be set by the time Twisted calls us.
        assert request.args is not None

        if b"from" in request.args:
            # /events used to use 'from', but /sync uses 'since'.
            # Lets be helpful and whine if we see a 'from'.
            raise SynapseError(
                400, "'from' is not a valid query parameter. Did you mean 'since'?"
            )

        requester = await self.auth.get_user_by_req(request, allow_guest=True)
        user = requester.user
        device_id = requester.device_id

        timeout = parse_integer(request, "timeout", default=0)
        since = parse_string(request, "since")
        set_presence = parse_string(
            request,
            "set_presence",
            default="online",
            allowed_values=self.ALLOWED_PRESENCE,
        )
        filter_id = parse_string(request, "filter", default=None)
        full_state = parse_boolean(request, "full_state", default=False)

        logger.debug(
            "/sync: user=%r, timeout=%r, since=%r, "
            "set_presence=%r, filter_id=%r, device_id=%r",
            user,
            timeout,
            since,
            set_presence,
            filter_id,
            device_id,
        )

        # Key used to dedupe/cache identical concurrent sync requests.
        request_key = (user, timeout, since, filter_id, full_state, device_id)

        # The "filter" parameter may be absent, an inline JSON object, or a
        # stored filter ID.
        if filter_id is None:
            filter_collection = DEFAULT_FILTER_COLLECTION
        elif filter_id.startswith("{"):
            try:
                filter_object = json_decoder.decode(filter_id)
                set_timeline_upper_limit(
                    filter_object, self.hs.config.filter_timeline_limit
                )
            except Exception:
                raise SynapseError(400, "Invalid filter JSON")
            self.filtering.check_valid_filter(filter_object)
            filter_collection = FilterCollection(filter_object)
        else:
            try:
                filter_collection = await self.filtering.get_user_filter(
                    user.localpart, filter_id
                )
            except StoreError as err:
                if err.code != 404:
                    raise
                # fix up the description and errcode to be more useful
                raise SynapseError(400, "No such filter", errcode=Codes.INVALID_PARAM)

        sync_config = SyncConfig(
            user=user,
            filter_collection=filter_collection,
            is_guest=requester.is_guest,
            request_key=request_key,
            device_id=device_id,
        )

        since_token = None
        if since is not None:
            since_token = await StreamToken.from_string(self.store, since)

        # send any outstanding server notices to the user.
        await self._server_notices_sender.on_user_syncing(user.to_string())

        affect_presence = set_presence != PresenceState.OFFLINE

        if affect_presence:
            await self.presence_handler.set_state(
                user, {"presence": set_presence}, True
            )

        # Mark the user as syncing for the duration of the wait below.
        context = await self.presence_handler.user_syncing(
            user.to_string(), affect_presence=affect_presence
        )
        with context:
            sync_result = await self.sync_handler.wait_for_sync_for_user(
                requester,
                sync_config,
                since_token=since_token,
                timeout=timeout,
                full_state=full_state,
            )

        # the client may have disconnected by now; don't bother to serialize the
        # response if so.
        if request._disconnected:
            logger.info("Client has disconnected; not serializing response.")
            return 200, {}

        time_now = self.clock.time_msec()
        response_content = await self.encode_response(
            time_now, sync_result, requester.access_token_id, filter_collection
        )

        logger.debug("Event formatting complete")
        return 200, response_content
|
2015-10-19 12:26:18 -04:00
|
|
|
|
2019-12-05 11:46:37 -05:00
|
|
|
    # NOTE(review): the "filter" parameter shadows the builtin; renaming it
    # would change the keyword interface, so it is left as-is.
    async def encode_response(self, time_now, sync_result, access_token_id, filter):
        """Serialize a sync result into our JSON response format.

        Args:
            time_now (int): current time - used as a baseline for age
                calculations
            sync_result: the result of the sync - assumed to be a
                synapse.handlers.sync.SyncResult; TODO confirm
            access_token_id (int): ID of the requester's access token - used
                for namespacing of transaction IDs
            filter (FilterCollection): selects the event format ("client" or
                "federation") and the event fields to include

        Returns:
            dict[str, object]: the response body, in our response format

        Raises:
            Exception: if the filter requests an unknown event format.
        """
        logger.debug("Formatting events in sync response")
        if filter.event_format == "client":
            event_formatter = format_event_for_client_v2_without_room_id
        elif filter.event_format == "federation":
            event_formatter = format_event_raw
        else:
            raise Exception("Unknown event format %s" % (filter.event_format,))

        joined = await self.encode_joined(
            sync_result.joined,
            time_now,
            access_token_id,
            filter.event_fields,
            event_formatter,
        )

        invited = await self.encode_invited(
            sync_result.invited, time_now, access_token_id, event_formatter
        )

        archived = await self.encode_archived(
            sync_result.archived,
            time_now,
            access_token_id,
            filter.event_fields,
            event_formatter,
        )

        logger.debug("building sync response dict")
        return {
            "account_data": {"events": sync_result.account_data},
            "to_device": {"events": sync_result.to_device},
            "device_lists": {
                "changed": list(sync_result.device_lists.changed),
                "left": list(sync_result.device_lists.left),
            },
            "presence": SyncRestServlet.encode_presence(sync_result.presence, time_now),
            "rooms": {"join": joined, "invite": invited, "leave": archived},
            "groups": {
                "join": sync_result.groups.join,
                "invite": sync_result.groups.invite,
                "leave": sync_result.groups.leave,
            },
            "device_one_time_keys_count": sync_result.device_one_time_keys_count,
            "org.matrix.msc2732.device_unused_fallback_key_types": sync_result.device_unused_fallback_key_types,
            "next_batch": await sync_result.next_batch.to_string(self.store),
        }
|
2017-07-10 10:42:17 -04:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def encode_presence(events, time_now):
|
2017-03-15 10:27:34 -04:00
|
|
|
return {
|
|
|
|
"events": [
|
|
|
|
{
|
|
|
|
"type": "m.presence",
|
|
|
|
"sender": event.user_id,
|
|
|
|
"content": format_user_presence_state(
|
|
|
|
event, time_now, include_user_id=False
|
|
|
|
),
|
|
|
|
}
|
|
|
|
for event in events
|
|
|
|
]
|
|
|
|
}
|
2015-12-01 13:41:32 -05:00
|
|
|
|
2019-12-05 11:46:37 -05:00
|
|
|
async def encode_joined(
|
|
|
|
self, rooms, time_now, token_id, event_fields, event_formatter
|
|
|
|
):
|
2015-11-13 05:31:15 -05:00
|
|
|
"""
|
|
|
|
Encode the joined rooms in a sync result
|
|
|
|
|
2016-04-01 11:08:59 -04:00
|
|
|
Args:
|
|
|
|
rooms(list[synapse.handlers.sync.JoinedSyncResult]): list of sync
|
|
|
|
results for rooms this user is joined to
|
|
|
|
time_now(int): current time - used as a baseline for age
|
|
|
|
calculations
|
|
|
|
token_id(int): ID of the user's auth token - used for namespacing
|
|
|
|
of transaction IDs
|
2016-11-22 05:14:05 -05:00
|
|
|
event_fields(list<str>): List of event fields to include. If empty,
|
2018-09-04 10:18:25 -04:00
|
|
|
all fields will be returned.
|
|
|
|
event_formatter (func[dict]): function to convert from federation format
|
|
|
|
to client format
|
2016-04-01 11:08:59 -04:00
|
|
|
Returns:
|
|
|
|
dict[str, dict[str, object]]: the joined rooms list, in our
|
|
|
|
response format
|
2015-11-13 05:31:15 -05:00
|
|
|
"""
|
2015-10-07 10:55:20 -04:00
|
|
|
joined = {}
|
2015-10-05 11:39:22 -04:00
|
|
|
for room in rooms:
|
2019-12-05 11:46:37 -05:00
|
|
|
joined[room.room_id] = await self.encode_room(
|
2018-09-04 10:18:25 -04:00
|
|
|
room,
|
|
|
|
time_now,
|
|
|
|
token_id,
|
|
|
|
joined=True,
|
|
|
|
only_fields=event_fields,
|
|
|
|
event_formatter=event_formatter,
|
2015-10-05 11:39:22 -04:00
|
|
|
)
|
|
|
|
|
2019-07-23 09:00:55 -04:00
|
|
|
return joined
|
2015-01-26 13:53:31 -05:00
|
|
|
|
2019-12-05 11:46:37 -05:00
|
|
|
async def encode_invited(self, rooms, time_now, token_id, event_formatter):
|
2015-11-13 05:31:15 -05:00
|
|
|
"""
|
|
|
|
Encode the invited rooms in a sync result
|
|
|
|
|
2016-04-01 11:08:59 -04:00
|
|
|
Args:
|
|
|
|
rooms(list[synapse.handlers.sync.InvitedSyncResult]): list of
|
|
|
|
sync results for rooms this user is joined to
|
|
|
|
time_now(int): current time - used as a baseline for age
|
|
|
|
calculations
|
|
|
|
token_id(int): ID of the user's auth token - used for namespacing
|
2018-09-04 10:18:25 -04:00
|
|
|
of transaction IDs
|
|
|
|
event_formatter (func[dict]): function to convert from federation format
|
|
|
|
to client format
|
2015-11-13 05:31:15 -05:00
|
|
|
|
2016-04-01 11:08:59 -04:00
|
|
|
Returns:
|
|
|
|
dict[str, dict[str, object]]: the invited rooms list, in our
|
|
|
|
response format
|
2015-11-13 05:31:15 -05:00
|
|
|
"""
|
2015-10-13 06:03:48 -04:00
|
|
|
invited = {}
|
|
|
|
for room in rooms:
|
2019-12-05 11:46:37 -05:00
|
|
|
invite = await self._event_serializer.serialize_event(
|
2015-10-13 06:03:48 -04:00
|
|
|
room.invite,
|
|
|
|
time_now,
|
|
|
|
token_id=token_id,
|
2018-09-04 10:18:25 -04:00
|
|
|
event_format=event_formatter,
|
2017-04-26 11:18:08 -04:00
|
|
|
is_invite=True,
|
2015-10-13 06:03:48 -04:00
|
|
|
)
|
2016-01-25 05:10:44 -05:00
|
|
|
unsigned = dict(invite.get("unsigned", {}))
|
|
|
|
invite["unsigned"] = unsigned
|
|
|
|
invited_state = list(unsigned.pop("invite_room_state", []))
|
2015-10-13 06:03:48 -04:00
|
|
|
invited_state.append(invite)
|
2015-10-13 06:43:12 -04:00
|
|
|
invited[room.room_id] = {"invite_state": {"events": invited_state}}
|
2015-10-13 06:03:48 -04:00
|
|
|
|
2019-07-23 09:00:55 -04:00
|
|
|
return invited
|
2015-10-13 06:03:48 -04:00
|
|
|
|
2019-12-05 11:46:37 -05:00
|
|
|
async def encode_archived(
|
|
|
|
self, rooms, time_now, token_id, event_fields, event_formatter
|
|
|
|
):
|
2015-11-13 05:31:15 -05:00
|
|
|
"""
|
|
|
|
Encode the archived rooms in a sync result
|
|
|
|
|
2016-04-01 11:08:59 -04:00
|
|
|
Args:
|
|
|
|
rooms (list[synapse.handlers.sync.ArchivedSyncResult]): list of
|
|
|
|
sync results for rooms this user is joined to
|
|
|
|
time_now(int): current time - used as a baseline for age
|
|
|
|
calculations
|
|
|
|
token_id(int): ID of the user's auth token - used for namespacing
|
|
|
|
of transaction IDs
|
2016-11-22 05:14:05 -05:00
|
|
|
event_fields(list<str>): List of event fields to include. If empty,
|
2018-09-04 10:18:25 -04:00
|
|
|
all fields will be returned.
|
|
|
|
event_formatter (func[dict]): function to convert from federation format
|
|
|
|
to client format
|
2016-04-01 11:08:59 -04:00
|
|
|
Returns:
|
|
|
|
dict[str, dict[str, object]]: The invited rooms list, in our
|
|
|
|
response format
|
2015-11-13 05:31:15 -05:00
|
|
|
"""
|
2015-10-19 12:26:18 -04:00
|
|
|
joined = {}
|
|
|
|
for room in rooms:
|
2019-12-05 11:46:37 -05:00
|
|
|
joined[room.room_id] = await self.encode_room(
|
2018-09-04 10:18:25 -04:00
|
|
|
room,
|
|
|
|
time_now,
|
|
|
|
token_id,
|
|
|
|
joined=False,
|
|
|
|
only_fields=event_fields,
|
|
|
|
event_formatter=event_formatter,
|
2015-10-19 12:26:18 -04:00
|
|
|
)
|
|
|
|
|
2019-07-23 09:00:55 -04:00
|
|
|
return joined
|
2015-10-19 12:26:18 -04:00
|
|
|
|
2019-12-05 11:46:37 -05:00
|
|
|
    async def encode_room(
        self, room, time_now, token_id, joined, only_fields, event_formatter
    ):
        """Encode a single room's sync result in our response format.

        Args:
            room (JoinedSyncResult|ArchivedSyncResult): sync result for a
                single room
            time_now (int): current time - used as a baseline for age
                calculations
            token_id (int): ID of the user's auth token - used for namespacing
                of transaction IDs
            joined (bool): True if the user is joined to this room - will mean
                we handle ephemeral events
            only_fields(list<str>): Optional. The list of event fields to include.
            event_formatter (func[dict]): function to convert from federation format
                to client format

        Returns:
            dict[str, object]: the room, encoded in our response format
        """

        def serialize(events):
            # Serialize a batch of events with the parameters captured from
            # the enclosing call; result is awaited by the caller.
            return self._event_serializer.serialize_events(
                events,
                time_now=time_now,
                # We don't bundle "live" events, as otherwise clients
                # will end up double counting annotations.
                bundle_aggregations=False,
                token_id=token_id,
                event_format=event_formatter,
                only_event_fields=only_fields,
            )

        state_dict = room.state
        timeline_events = room.timeline.events

        state_events = state_dict.values()

        for event in itertools.chain(state_events, timeline_events):
            # We've had bug reports that events were coming down under the
            # wrong room.
            if event.room_id != room.room_id:
                logger.warning(
                    "Event %r is under room %r instead of %r",
                    event.event_id,
                    room.room_id,
                    event.room_id,
                )

        serialized_state = await serialize(state_events)
        serialized_timeline = await serialize(timeline_events)

        account_data = room.account_data

        result = {
            "timeline": {
                "events": serialized_timeline,
                "prev_batch": await room.timeline.prev_batch.to_string(self.store),
                "limited": room.timeline.limited,
            },
            "state": {"events": serialized_state},
            "account_data": {"events": account_data},
        }

        # Sections that only make sense while the user is in the room.
        if joined:
            ephemeral_events = room.ephemeral
            result["ephemeral"] = {"events": ephemeral_events}
            result["unread_notifications"] = room.unread_notifications
            result["summary"] = room.summary
            result["org.matrix.msc2654.unread_count"] = room.unread_count

        return result
|
2015-01-26 13:53:31 -05:00
|
|
|
|
2015-01-23 13:31:29 -05:00
|
|
|
|
|
|
|
def register_servlets(hs, http_server):
    """Register the /sync servlet with the given HTTP server."""
    SyncRestServlet(hs).register(http_server)
|