# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018, 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import itertools
import logging
from typing import TYPE_CHECKING, Any, Dict, FrozenSet, List, Optional, Set, Tuple

import attr
from prometheus_client import Counter

from synapse.api.constants import AccountDataTypes, EventTypes, Membership
from synapse.api.filtering import FilterCollection
from synapse.events import EventBase
from synapse.logging.context import current_context
from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span
from synapse.push.clientformat import format_push_rules_for_user
from synapse.storage.roommember import MemberSummary
from synapse.storage.state import StateFilter
from synapse.types import (
    Collection,
    JsonDict,
    MutableStateMap,
    Requester,
    RoomStreamToken,
    StateMap,
    StreamToken,
    UserID,
)
from synapse.util.async_helpers import concurrently_execute
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.caches.lrucache import LruCache
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.metrics import Measure, measure_func
from synapse.visibility import filter_events_for_client

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)

# Debug logger for https://github.com/matrix-org/synapse/issues/4422
issue4422_logger = logging.getLogger("synapse.handler.sync.4422_debug")


# Counts the number of times we returned a non-empty sync. `type` is one of
# "initial_sync", "full_state_sync" or "incremental_sync", `lazy_loaded` is
# "true" or "false" depending on whether the request asked for lazy-loaded
# members or not.
non_empty_sync_counter = Counter(
    "synapse_handlers_sync_nonempty_total",
    "Count of non empty sync responses. type is initial_sync/full_state_sync"
    "/incremental_sync. lazy_loaded indicates if lazy loaded members were "
    "enabled for that request.",
    ["type", "lazy_loaded"],
)

# Store the cache that tracks which lazy-loaded members have been sent to a given
# client for no more than 30 minutes.
LAZY_LOADED_MEMBERS_CACHE_MAX_AGE = 30 * 60 * 1000

# Remember the last 100 members we sent to a client for the purposes of
# avoiding redundantly sending the same lazy-loaded members to the client
LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE = 100


@attr.s(slots=True, frozen=True)
class SyncConfig:
    user = attr.ib(type=UserID)
    filter_collection = attr.ib(type=FilterCollection)
    is_guest = attr.ib(type=bool)
    request_key = attr.ib(type=Tuple[Any, ...])
    device_id = attr.ib(type=Optional[str])


@attr.s(slots=True, frozen=True)
class TimelineBatch:
    prev_batch = attr.ib(type=StreamToken)
    events = attr.ib(type=List[EventBase])
    limited = attr.ib(type=bool)

    def __bool__(self) -> bool:
        """Make the result appear empty if there are no updates. This is used
        to tell if the room needs to be part of the sync result.
        """
        return bool(self.events)
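
# The truthiness convention above lets callers treat sync sub-results
# uniformly: an empty result is simply falsey and can be skipped. A minimal
# sketch (`token` here stands in for a real StreamToken):
#
#     batch = TimelineBatch(prev_batch=token, events=[], limited=False)
#     if not batch:
#         pass  # nothing new; omit the room from the response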


# We can't freeze this class, because we need to update it after it's instantiated to
# update its unread count. This is because we calculate the unread count for a room only
# if there are updates for it, which we check after the instance has been created.
# This should not be a big deal because we update the notification counts afterwards as
# well anyway.
@attr.s(slots=True)
class JoinedSyncResult:
    room_id = attr.ib(type=str)
    timeline = attr.ib(type=TimelineBatch)
    state = attr.ib(type=StateMap[EventBase])
    ephemeral = attr.ib(type=List[JsonDict])
    account_data = attr.ib(type=List[JsonDict])
    unread_notifications = attr.ib(type=JsonDict)
    summary = attr.ib(type=Optional[JsonDict])
    unread_count = attr.ib(type=int)

    def __bool__(self) -> bool:
        """Make the result appear empty if there are no updates. This is used
        to tell if the room needs to be part of the sync result.
        """
        return bool(
            self.timeline
            or self.state
            or self.ephemeral
            or self.account_data
            # nb the notification count does not, er, count: if there's nothing
            # else in the result, we don't need to send it.
        )


@attr.s(slots=True, frozen=True)
class ArchivedSyncResult:
    room_id = attr.ib(type=str)
    timeline = attr.ib(type=TimelineBatch)
    state = attr.ib(type=StateMap[EventBase])
    account_data = attr.ib(type=List[JsonDict])

    def __bool__(self) -> bool:
        """Make the result appear empty if there are no updates. This is used
        to tell if the room needs to be part of the sync result.
        """
        return bool(self.timeline or self.state or self.account_data)


@attr.s(slots=True, frozen=True)
class InvitedSyncResult:
    room_id = attr.ib(type=str)
    invite = attr.ib(type=EventBase)

    def __bool__(self) -> bool:
        """Invited rooms should always be reported to the client"""
        return True


@attr.s(slots=True, frozen=True)
class GroupsSyncResult:
    join = attr.ib(type=JsonDict)
    invite = attr.ib(type=JsonDict)
    leave = attr.ib(type=JsonDict)

    def __bool__(self) -> bool:
        return bool(self.join or self.invite or self.leave)


@attr.s(slots=True, frozen=True)
class DeviceLists:
    """
    Attributes:
        changed: List of user_ids whose devices may have changed
        left: List of user_ids whose devices we no longer track
    """

    changed = attr.ib(type=Collection[str])
    left = attr.ib(type=Collection[str])

    def __bool__(self) -> bool:
        return bool(self.changed or self.left)


@attr.s(slots=True)
class _RoomChanges:
    """The set of room entries to include in the sync, plus the set of joined
    and left room IDs since last sync.
    """

    room_entries = attr.ib(type=List["RoomSyncResultBuilder"])
    invited = attr.ib(type=List[InvitedSyncResult])
    newly_joined_rooms = attr.ib(type=List[str])
    newly_left_rooms = attr.ib(type=List[str])


@attr.s(slots=True, frozen=True)
class SyncResult:
    """
    Attributes:
        next_batch: Token for the next sync
        presence: List of presence events for the user.
        account_data: List of account_data events for the user.
        joined: JoinedSyncResult for each joined room.
        invited: InvitedSyncResult for each invited room.
        archived: ArchivedSyncResult for each archived room.
        to_device: List of direct messages for the device.
        device_lists: List of user_ids whose devices have changed
        device_one_time_keys_count: Dict of algorithm to count for one time keys
            for this device
        device_unused_fallback_key_types: List of key types that have an unused fallback
            key
        groups: Group updates, if any
    """

    next_batch = attr.ib(type=StreamToken)
    presence = attr.ib(type=List[JsonDict])
    account_data = attr.ib(type=List[JsonDict])
    joined = attr.ib(type=List[JoinedSyncResult])
    invited = attr.ib(type=List[InvitedSyncResult])
    archived = attr.ib(type=List[ArchivedSyncResult])
    to_device = attr.ib(type=List[JsonDict])
    device_lists = attr.ib(type=DeviceLists)
    device_one_time_keys_count = attr.ib(type=JsonDict)
    device_unused_fallback_key_types = attr.ib(type=List[str])
    groups = attr.ib(type=Optional[GroupsSyncResult])

    def __bool__(self) -> bool:
        """Make the result appear empty if there are no updates. This is used
        to tell if the notifier needs to wait for more events when polling for
        events.
        """
        return bool(
            self.presence
            or self.joined
            or self.invited
            or self.archived
            or self.account_data
            or self.to_device
            or self.device_lists
            or self.groups
        )


class SyncHandler:
    def __init__(self, hs: "HomeServer"):
        self.hs_config = hs.config
        self.store = hs.get_datastore()
        self.notifier = hs.get_notifier()
        self.presence_handler = hs.get_presence_handler()
        self.event_sources = hs.get_event_sources()
        self.clock = hs.get_clock()
        self.response_cache = ResponseCache(
            hs.get_clock(), "sync"
        )  # type: ResponseCache[Tuple[Any, ...]]
        self.state = hs.get_state_handler()
        self.auth = hs.get_auth()
        self.storage = hs.get_storage()
        self.state_store = self.storage.state

        # ExpiringCache((User, Device)) -> LruCache(user_id => event_id)
        self.lazy_loaded_members_cache = ExpiringCache(
            "lazy_loaded_members_cache",
            self.clock,
            max_len=0,
            expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE,
        )  # type: ExpiringCache[Tuple[str, Optional[str]], LruCache[str, str]]
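
        # This is a two-level cache: the outer ExpiringCache drops a
        # (user, device) entry after LAZY_LOADED_MEMBERS_CACHE_MAX_AGE (30
        # minutes) of inactivity, while each inner LruCache remembers the last
        # LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE (100) member event IDs sent to
        # that client, so we can avoid re-sending lazy-loaded members.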

    async def wait_for_sync_for_user(
        self,
        requester: Requester,
        sync_config: SyncConfig,
        since_token: Optional[StreamToken] = None,
        timeout: int = 0,
        full_state: bool = False,
    ) -> SyncResult:
        """Get the sync for a client if we have new data for it now. Otherwise
        wait for new data to arrive on the server. If the timeout expires, then
        return an empty sync result.
        """
        # If the user is not part of the mau group, then check that limits have
        # not been exceeded (if the user is not part of the group by this point,
        # auth blocking is almost certain to occur)
        user_id = sync_config.user.to_string()
        await self.auth.check_auth_blocking(requester=requester)
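
        # `response_cache.wrap` de-duplicates concurrent requests that share a
        # request key: if an identical /sync request is already in flight, the
        # retry shares that in-flight result instead of recomputing it.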
        res = await self.response_cache.wrap(
            sync_config.request_key,
            self._wait_for_sync_for_user,
            sync_config,
            since_token,
            timeout,
            full_state,
        )
        logger.debug("Returning sync response for %s", user_id)
        return res

    async def _wait_for_sync_for_user(
        self,
        sync_config: SyncConfig,
        since_token: Optional[StreamToken] = None,
        timeout: int = 0,
        full_state: bool = False,
    ) -> SyncResult:
        if since_token is None:
            sync_type = "initial_sync"
        elif full_state:
            sync_type = "full_state_sync"
        else:
            sync_type = "incremental_sync"

        context = current_context()
        if context:
            context.tag = sync_type
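            # Tagging the log context records the sync type against this
            # request, so it is visible when the request is reported in
            # metrics and logs.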

        if timeout == 0 or since_token is None or full_state:
            # we are going to return immediately, so don't bother calling
            # notifier.wait_for_events.
            result = await self.current_sync_for_user(
                sync_config, since_token, full_state=full_state
            )
        else:

            def current_sync_callback(before_token, after_token):
                return self.current_sync_for_user(sync_config, since_token)

            result = await self.notifier.wait_for_events(
                sync_config.user.to_string(),
                timeout,
                current_sync_callback,
                from_token=since_token,
            )

        if result:
            if sync_config.filter_collection.lazy_load_members():
                lazy_loaded = "true"
            else:
                lazy_loaded = "false"
            non_empty_sync_counter.labels(sync_type, lazy_loaded).inc()

        return result

    async def current_sync_for_user(
        self,
        sync_config: SyncConfig,
        since_token: Optional[StreamToken] = None,
        full_state: bool = False,
    ) -> SyncResult:
        """Get the sync for client needed to match what the server has now."""
        with start_active_span("current_sync_for_user"):
            log_kv({"since_token": since_token})
            sync_result = await self.generate_sync_result(
                sync_config, since_token, full_state
            )

            set_tag(SynapseTags.SYNC_RESULT, bool(sync_result))
            return sync_result

    async def push_rules_for_user(self, user: UserID) -> JsonDict:
        user_id = user.to_string()
        rules = await self.store.get_push_rules_for_user(user_id)
        rules = format_push_rules_for_user(user, rules)
        return rules

    async def ephemeral_by_room(
        self,
        sync_result_builder: "SyncResultBuilder",
        now_token: StreamToken,
        since_token: Optional[StreamToken] = None,
    ) -> Tuple[StreamToken, Dict[str, List[JsonDict]]]:
        """Get the ephemeral events for each room the user is in

        Args:
            sync_result_builder
            now_token: Where the server is currently up to.
            since_token: Where the server was when the client
                last synced.
        Returns:
            A tuple of the now StreamToken, updated to reflect which typing
            events are included, and a dict mapping from room_id to a list of
            typing events for that room.
        """

        sync_config = sync_result_builder.sync_config

        with Measure(self.clock, "ephemeral_by_room"):
            typing_key = since_token.typing_key if since_token else 0

            room_ids = sync_result_builder.joined_room_ids

            typing_source = self.event_sources.sources["typing"]
            typing, typing_key = await typing_source.get_new_events(
                user=sync_config.user,
                from_key=typing_key,
                limit=sync_config.filter_collection.ephemeral_limit(),
                room_ids=room_ids,
                is_guest=sync_config.is_guest,
            )
            now_token = now_token.copy_and_replace("typing_key", typing_key)

            ephemeral_by_room = {}  # type: JsonDict

            for event in typing:
                # we want to exclude the room_id from the event, but modifying the
                # result returned by the event source is poor form (it might cache
                # the object)
                room_id = event["room_id"]
                event_copy = {k: v for (k, v) in event.items() if k != "room_id"}
                ephemeral_by_room.setdefault(room_id, []).append(event_copy)

            receipt_key = since_token.receipt_key if since_token else 0

            receipt_source = self.event_sources.sources["receipt"]
            receipts, receipt_key = await receipt_source.get_new_events(
                user=sync_config.user,
                from_key=receipt_key,
                limit=sync_config.filter_collection.ephemeral_limit(),
                room_ids=room_ids,
                is_guest=sync_config.is_guest,
            )
            now_token = now_token.copy_and_replace("receipt_key", receipt_key)

            for event in receipts:
                room_id = event["room_id"]
                # exclude room id, as above
                event_copy = {k: v for (k, v) in event.items() if k != "room_id"}
                ephemeral_by_room.setdefault(room_id, []).append(event_copy)

        return now_token, ephemeral_by_room

    async def _load_filtered_recents(
        self,
        room_id: str,
        sync_config: SyncConfig,
        now_token: StreamToken,
        since_token: Optional[StreamToken] = None,
        potential_recents: Optional[List[EventBase]] = None,
        newly_joined_room: bool = False,
    ) -> TimelineBatch:
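        """Work out the recent timeline events to send to a client for a room,
        applying the client's timeline filter and limit, and filtering the
        events for visibility.
        """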
        with Measure(self.clock, "load_filtered_recents"):
            timeline_limit = sync_config.filter_collection.timeline_limit()
            block_all_timeline = (
                sync_config.filter_collection.blocks_all_room_timeline()
            )

            if (
                potential_recents is None
                or newly_joined_room
                or timeline_limit < len(potential_recents)
            ):
                limited = True
            else:
                limited = False

            if potential_recents:
                recents = sync_config.filter_collection.filter_room_timeline(
                    potential_recents
                )

                # We check if there are any state events, if there are then we pass
                # all current state events to the filter_events function. This is to
                # ensure that we always include current state in the timeline
                current_state_ids = frozenset()  # type: FrozenSet[str]
                if any(e.is_state() for e in recents):
                    current_state_ids_map = await self.state.get_current_state_ids(
                        room_id
                    )
                    current_state_ids = frozenset(current_state_ids_map.values())

                recents = await filter_events_for_client(
                    self.storage,
                    sync_config.user.to_string(),
                    recents,
                    always_include_ids=current_state_ids,
                )
            else:
                recents = []

            if not limited or block_all_timeline:
                prev_batch_token = now_token
                if recents:
                    room_key = recents[0].internal_metadata.before
                    prev_batch_token = now_token.copy_and_replace("room_key", room_key)

                return TimelineBatch(
                    events=recents, prev_batch=prev_batch_token, limited=False
                )

            filtering_factor = 2
            load_limit = max(timeline_limit * filtering_factor, 10)
            max_repeat = 5  # Only try a few times per room, otherwise give up
            room_key = now_token.room_key
            end_key = room_key

            since_key = None
            if since_token and not newly_joined_room:
                since_key = since_token.room_key
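
            # Each pass of the loop below fetches up to `load_limit + 1`
            # events and filters them for visibility; asking for one extra
            # event tells us whether the room has more history than we
            # returned (i.e. whether it remains `limited`). The window is
            # widened backwards on each pass, up to `max_repeat` attempts.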

            while limited and len(recents) < timeline_limit and max_repeat:
                # If we have a since_key then we are trying to get any events
                # that have happened since `since_key` up to `end_key`, so we
                # can just use `get_room_events_stream_for_room`.
                # Otherwise, we want to return the last N events in the room
                # in topological ordering.
                if since_key:
                    events, end_key = await self.store.get_room_events_stream_for_room(
                        room_id,
                        limit=load_limit + 1,
                        from_key=since_key,
                        to_key=end_key,
                    )
                else:
                    events, end_key = await self.store.get_recent_events_for_room(
                        room_id, limit=load_limit + 1, end_token=end_key
                    )
                loaded_recents = sync_config.filter_collection.filter_room_timeline(
                    events
                )

                # We check if there are any state events, if there are then we pass
                # all current state events to the filter_events function. This is to
                # ensure that we always include current state in the timeline
                current_state_ids = frozenset()
                if any(e.is_state() for e in loaded_recents):
                    current_state_ids_map = await self.state.get_current_state_ids(
                        room_id
                    )
                    current_state_ids = frozenset(current_state_ids_map.values())

                loaded_recents = await filter_events_for_client(
                    self.storage,
                    sync_config.user.to_string(),
                    loaded_recents,
                    always_include_ids=current_state_ids,
                )
                loaded_recents.extend(recents)
                recents = loaded_recents

                if len(events) <= load_limit:
                    limited = False
                    break
                max_repeat -= 1

            if len(recents) > timeline_limit:
                limited = True
                recents = recents[-timeline_limit:]
                room_key = recents[0].internal_metadata.before

            prev_batch_token = now_token.copy_and_replace("room_key", room_key)

        return TimelineBatch(
            events=recents,
            prev_batch=prev_batch_token,
            limited=limited or newly_joined_room,
        )

    async def get_state_after_event(
        self, event: EventBase, state_filter: Optional[StateFilter] = None
    ) -> StateMap[str]:
        """
        Get the room state after the given event

        Args:
            event: event of interest
            state_filter: The state filter used to fetch state from the database.
        """
        state_ids = await self.state_store.get_state_ids_for_event(
            event.event_id, state_filter=state_filter or StateFilter.all()
        )
        if event.is_state():
            state_ids = dict(state_ids)
            state_ids[(event.type, event.state_key)] = event.event_id
        return state_ids

    async def get_state_at(
        self,
        room_id: str,
        stream_position: StreamToken,
        state_filter: Optional[StateFilter] = None,
    ) -> StateMap[str]:
        """Get the room state at a particular stream position

        Args:
            room_id: room for which to get state
            stream_position: point at which to get state
            state_filter: The state filter used to fetch state from the database.
        """
        # FIXME this claims to get the state at a stream position, but
        # get_recent_events_for_room operates by topo ordering. This therefore
        # does not reliably give you the state at the given stream position.
        # (https://github.com/matrix-org/synapse/issues/3305)
        last_events, _ = await self.store.get_recent_events_for_room(
            room_id, end_token=stream_position.room_key, limit=1
        )

        if last_events:
            last_event = last_events[-1]
            state = await self.get_state_after_event(
                last_event, state_filter=state_filter or StateFilter.all()
            )

        else:
            # no events in this room - so presumably no state
            state = {}
        return state

    async def compute_summary(
        self,
        room_id: str,
        sync_config: SyncConfig,
        batch: TimelineBatch,
        state: MutableStateMap[EventBase],
        now_token: StreamToken,
    ) -> Optional[JsonDict]:
        """Works out a room summary block for this room, summarising the number
        of joined members in the room, and providing the 'hero' members if the
        room has no name so clients can consistently name rooms. Also adds
        state events to 'state' if needed to describe the heroes.

        Args:
            room_id
            sync_config
            batch: The timeline batch for the room that will be sent to the user.
            state: State as returned by compute_state_delta
            now_token: Token of the end of the current batch.
        """

        # FIXME: we could/should get this from room_stats when matthew/stats lands

        # FIXME: this promulgates https://github.com/matrix-org/synapse/issues/3305
        last_events, _ = await self.store.get_recent_event_ids_for_room(
            room_id, end_token=now_token.room_key, limit=1
        )

        if not last_events:
            return None

        last_event = last_events[-1]
        state_ids = await self.state_store.get_state_ids_for_event(
            last_event.event_id,
            state_filter=StateFilter.from_types(
                [(EventTypes.Name, ""), (EventTypes.CanonicalAlias, "")]
            ),
        )

        # this is heavily cached, thus: fast.
        details = await self.store.get_room_summary(room_id)

        name_id = state_ids.get((EventTypes.Name, ""))
        canonical_alias_id = state_ids.get((EventTypes.CanonicalAlias, ""))

        summary = {}
        empty_ms = MemberSummary([], 0)

        # TODO: only send these when they change.
        summary["m.joined_member_count"] = details.get(Membership.JOIN, empty_ms).count
        summary["m.invited_member_count"] = details.get(
            Membership.INVITE, empty_ms
        ).count

        # if the room has a name or canonical_alias set, we can skip
        # calculating heroes. Empty strings are falsey, so we check
        # for the "name" value and default to an empty string.
        if name_id:
            name = await self.store.get_event(name_id, allow_none=True)
            if name and name.content.get("name"):
                return summary

        if canonical_alias_id:
            canonical_alias = await self.store.get_event(
                canonical_alias_id, allow_none=True
            )
            if canonical_alias and canonical_alias.content.get("alias"):
                return summary

        me = sync_config.user.to_string()

        joined_user_ids = [
            r[0] for r in details.get(Membership.JOIN, empty_ms).members if r[0] != me
        ]
        invited_user_ids = [
            r[0] for r in details.get(Membership.INVITE, empty_ms).members if r[0] != me
        ]
        gone_user_ids = [
            r[0] for r in details.get(Membership.LEAVE, empty_ms).members if r[0] != me
        ] + [r[0] for r in details.get(Membership.BAN, empty_ms).members if r[0] != me]

        # FIXME: only build up a member_ids list for our heroes
        member_ids = {}
        for membership in (
            Membership.JOIN,
            Membership.INVITE,
            Membership.LEAVE,
            Membership.BAN,
        ):
            for user_id, event_id in details.get(membership, empty_ms).members:
                member_ids[user_id] = event_id
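
        # Heroes are (up to) the first five other users, in sorted order:
        # joined/invited members if there are any, otherwise members who have
        # left or been banned.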
        # FIXME: order by stream ordering rather than as returned by SQL
        if joined_user_ids or invited_user_ids:
            summary["m.heroes"] = sorted(joined_user_ids + invited_user_ids)[0:5]
        else:
            summary["m.heroes"] = sorted(gone_user_ids)[0:5]

        if not sync_config.filter_collection.lazy_load_members():
            return summary

        # ensure we send membership events for heroes if needed
        cache_key = (sync_config.user.to_string(), sync_config.device_id)
        cache = self.get_lazy_loaded_members_cache(cache_key)

        # track which members the client should already know about via LL:
        # Ones which are already in state...
        existing_members = {
            user_id for (typ, user_id) in state.keys() if typ == EventTypes.Member
        }

        # ...or ones which are in the timeline...
        for ev in batch.events:
            if ev.type == EventTypes.Member:
                existing_members.add(ev.state_key)

        # ...and then ensure any missing ones get included in state.
        missing_hero_event_ids = [
            member_ids[hero_id]
            for hero_id in summary["m.heroes"]
            if (
                cache.get(hero_id) != member_ids[hero_id]
                and hero_id not in existing_members
            )
        ]

        missing_hero_state = await self.store.get_events(missing_hero_event_ids)

        for s in missing_hero_state.values():
            cache.set(s.state_key, s.event_id)
            state[(EventTypes.Member, s.state_key)] = s

        return summary

    def get_lazy_loaded_members_cache(
        self, cache_key: Tuple[str, Optional[str]]
    ) -> LruCache[str, str]:
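        """Fetch, or create, the per-(user, device) LruCache tracking which
        lazy-loaded member event IDs have already been sent to that client.
        """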
        cache = self.lazy_loaded_members_cache.get(
            cache_key
        )  # type: Optional[LruCache[str, str]]
        if cache is None:
            logger.debug("creating LruCache for %r", cache_key)
            cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE)
            self.lazy_loaded_members_cache[cache_key] = cache
        else:
            logger.debug("found LruCache for %r", cache_key)
        return cache

    async def compute_state_delta(
        self,
        room_id: str,
        batch: TimelineBatch,
        sync_config: SyncConfig,
        since_token: Optional[StreamToken],
        now_token: StreamToken,
        full_state: bool,
    ) -> MutableStateMap[EventBase]:
        """Works out the difference in state between the start of the timeline
        and the previous sync.

        Args:
            room_id:
            batch: The timeline batch for the room that will be sent to the user.
            sync_config:
            since_token: Token of the end of the previous batch. May be None.
            now_token: Token of the end of the current batch.
            full_state: Whether to force returning the full state.
        """
        # TODO(mjark) Check if the state events were received by the server
        # after the previous sync, since we need to include those state
        # updates even if they occurred logically before the previous event.
        # TODO(mjark) Check for new redactions in the state events.

        with Measure(self.clock, "compute_state_delta"):

            members_to_fetch = None

            lazy_load_members = sync_config.filter_collection.lazy_load_members()
            include_redundant_members = (
                sync_config.filter_collection.include_redundant_members()
            )

            if lazy_load_members:
                # We only request state for the members needed to display the
                # timeline:

                members_to_fetch = {
                    event.sender  # FIXME: we also care about invite targets etc.
                    for event in batch.events
                }

                if full_state:
                    # always make sure we LL ourselves so we know we're in the room
                    # (if we are) to fix https://github.com/vector-im/riot-web/issues/7209
                    # We only need to apply this on full state syncs given we disabled
                    # LL for incr syncs in #3840.
                    members_to_fetch.add(sync_config.user.to_string())

                state_filter = StateFilter.from_lazy_load_member_list(members_to_fetch)
            else:
                state_filter = StateFilter.all()

            timeline_state = {
                (event.type, event.state_key): event.event_id
                for event in batch.events
                if event.is_state()
            }
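
            # timeline_state maps (event type, state key) -> event ID for
            # every state event in the timeline batch; _calculate_state uses
            # it to avoid duplicating, in the state block, state the client
            # already sees in the timeline.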

            if full_state:
                if batch:
                    current_state_ids = await self.state_store.get_state_ids_for_event(
                        batch.events[-1].event_id, state_filter=state_filter
                    )

                    state_ids = await self.state_store.get_state_ids_for_event(
                        batch.events[0].event_id, state_filter=state_filter
                    )

                else:
                    current_state_ids = await self.get_state_at(
                        room_id, stream_position=now_token, state_filter=state_filter
                    )

                    state_ids = current_state_ids

                state_ids = _calculate_state(
                    timeline_contains=timeline_state,
                    timeline_start=state_ids,
                    previous={},
                    current=current_state_ids,
                    lazy_load_members=lazy_load_members,
                )
            elif batch.limited:
                if batch:
                    state_at_timeline_start = (
                        await self.state_store.get_state_ids_for_event(
                            batch.events[0].event_id, state_filter=state_filter
                        )
                    )
                else:
                    # We can get here if the user has ignored the senders of all
                    # the recent events.
                    state_at_timeline_start = await self.get_state_at(
                        room_id, stream_position=now_token, state_filter=state_filter
                    )

                # for now, we disable LL for gappy syncs - see
                # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346
                # N.B. this slows down incr syncs as we are now processing way
                # more state in the server than if we were LLing.
                #
                # We still have to filter timeline_start to LL entries (above) in order
                # for _calculate_state's LL logic to work, as we have to include LL
                # members for timeline senders in case they weren't loaded in the initial
                # sync. We do this (counterintuitively) by filtering timeline_start
                # members to just be ones which were timeline senders, which then ensures
                # all of the rest get included in the state block (if we need to know
                # about them).
                state_filter = StateFilter.all()

                # If this is an initial sync then full_state should be set, and
                # that case is handled above. We assert here to ensure that this
                # is indeed the case.
                assert since_token is not None
                state_at_previous_sync = await self.get_state_at(
                    room_id, stream_position=since_token, state_filter=state_filter
                )

                if batch:
                    current_state_ids = await self.state_store.get_state_ids_for_event(
                        batch.events[-1].event_id, state_filter=state_filter
                    )
                else:
                    # It's not clear how we get here, but empirically we do
                    # (#5407). Logging has been added elsewhere to try and
                    # figure out where this state comes from.
                    current_state_ids = await self.get_state_at(
                        room_id, stream_position=now_token, state_filter=state_filter
                    )

                state_ids = _calculate_state(
                    timeline_contains=timeline_state,
                    timeline_start=state_at_timeline_start,
                    previous=state_at_previous_sync,
                    current=current_state_ids,
                    # we have to include LL members in case LL initial sync missed them
                    lazy_load_members=lazy_load_members,
                )
            else:
                state_ids = {}
                if lazy_load_members:
                    if members_to_fetch and batch.events:
                        # We're returning an incremental sync, with no
                        # "gap" since the previous sync, so normally there would be
                        # no state to return.
                        # But we're lazy-loading, so the client might need some more
                        # member events to understand the events in this timeline.
                        # So we fish out all the member events corresponding to the
                        # timeline here, and then dedupe any redundant ones below.

                        state_ids = await self.state_store.get_state_ids_for_event(
                            batch.events[0].event_id,
                            # we only want members!
                            state_filter=StateFilter.from_types(
                                (EventTypes.Member, member)
                                for member in members_to_fetch
                            ),
                        )

            if lazy_load_members and not include_redundant_members:
                cache_key = (sync_config.user.to_string(), sync_config.device_id)
                cache = self.get_lazy_loaded_members_cache(cache_key)

                # if it's a new sync sequence, then assume the client has had
                # amnesia and doesn't want any recent lazy-loaded members
                # de-duplicated.
                if since_token is None:
                    logger.debug("clearing LruCache for %r", cache_key)
                    cache.clear()
                else:
                    # only send members which aren't in our LruCache (either
                    # because they're new to this client or have been pushed out
                    # of the cache)
                    logger.debug("filtering state from %r...", state_ids)
                    state_ids = {
                        t: event_id
                        for t, event_id in state_ids.items()
                        if cache.get(t[1]) != event_id
                    }
                    logger.debug("...to %r", state_ids)

                # add any member IDs we are about to send into our LruCache
                for t, event_id in itertools.chain(
                    state_ids.items(), timeline_state.items()
                ):
                    if t[0] == EventTypes.Member:
                        cache.set(t[1], event_id)

        state = {}  # type: Dict[str, EventBase]
        if state_ids:
            state = await self.store.get_events(list(state_ids.values()))

        return {
            (e.type, e.state_key): e
            for e in sync_config.filter_collection.filter_room_state(
                list(state.values())
            )
            if e.type != EventTypes.Aliases  # until MSC2261 or alternative solution
        }

    async def unread_notifs_for_room_id(
        self, room_id: str, sync_config: SyncConfig
    ) -> Dict[str, int]:
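        """Count the unread notifications for a room: the push actions for
        this user since their last `m.read` receipt in the room.
        """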
        with Measure(self.clock, "unread_notifs_for_room_id"):
            last_unread_event_id = await self.store.get_last_receipt_event_id_for_user(
                user_id=sync_config.user.to_string(),
                room_id=room_id,
                receipt_type="m.read",
            )

            notifs = await self.store.get_unread_event_push_actions_by_room_for_user(
                room_id, sync_config.user.to_string(), last_unread_event_id
            )
            return notifs

    async def generate_sync_result(
        self,
        sync_config: SyncConfig,
        since_token: Optional[StreamToken] = None,
        full_state: bool = False,
    ) -> SyncResult:
        """Generates a sync result."""
        # NB: The now_token gets changed by some of the generate_sync_* methods,
        # this is due to some of the underlying streams not supporting the ability
        # to query up to a given point.
        # Always use the `now_token` in `SyncResultBuilder`
        now_token = self.event_sources.get_current_token()
        log_kv({"now_token": now_token})

        logger.debug(
            "Calculating sync response for %r between %s and %s",
            sync_config.user,
            since_token,
            now_token,
        )

        user_id = sync_config.user.to_string()
        app_service = self.store.get_app_service_by_user_id(user_id)
        if app_service:
            # We no longer support AS users using /sync directly.
            # See https://github.com/matrix-org/matrix-doc/issues/1144
            raise NotImplementedError()
        else:
            joined_room_ids = await self.get_rooms_for_user_at(
                user_id, now_token.room_key
            )
        sync_result_builder = SyncResultBuilder(
            sync_config,
            full_state,
            since_token=since_token,
            now_token=now_token,
            joined_room_ids=joined_room_ids,
        )

        logger.debug("Fetching account data")

        account_data_by_room = await self._generate_sync_entry_for_account_data(
            sync_result_builder
        )

        logger.debug("Fetching room data")

        res = await self._generate_sync_entry_for_rooms(
            sync_result_builder, account_data_by_room
        )
        (
            newly_joined_rooms,
            newly_joined_or_invited_users,
            newly_left_rooms,
            newly_left_users,
        ) = res

        block_all_presence_data = (
            since_token is None and sync_config.filter_collection.blocks_all_presence()
        )
        if self.hs_config.use_presence and not block_all_presence_data:
            logger.debug("Fetching presence data")
            await self._generate_sync_entry_for_presence(
                sync_result_builder, newly_joined_rooms, newly_joined_or_invited_users
            )

        logger.debug("Fetching to-device data")
        await self._generate_sync_entry_for_to_device(sync_result_builder)

        device_lists = await self._generate_sync_entry_for_device_list(
            sync_result_builder,
            newly_joined_rooms=newly_joined_rooms,
            newly_joined_or_invited_users=newly_joined_or_invited_users,
            newly_left_rooms=newly_left_rooms,
            newly_left_users=newly_left_users,
        )

        logger.debug("Fetching OTK data")
        device_id = sync_config.device_id
        one_time_key_counts = {}  # type: JsonDict
        unused_fallback_key_types = []  # type: List[str]
        if device_id:
            one_time_key_counts = await self.store.count_e2e_one_time_keys(
                user_id, device_id
            )
            unused_fallback_key_types = (
                await self.store.get_e2e_unused_fallback_key_types(user_id, device_id)
            )

        logger.debug("Fetching group data")
        await self._generate_sync_entry_for_groups(sync_result_builder)

        # debug for https://github.com/matrix-org/synapse/issues/4422
        for joined_room in sync_result_builder.joined:
            room_id = joined_room.room_id
            if room_id in newly_joined_rooms:
                issue4422_logger.debug(
                    "Sync result for newly joined room %s: %r", room_id, joined_room
                )

        logger.debug("Sync response calculation complete")
        return SyncResult(
            presence=sync_result_builder.presence,
            account_data=sync_result_builder.account_data,
            joined=sync_result_builder.joined,
            invited=sync_result_builder.invited,
            archived=sync_result_builder.archived,
            to_device=sync_result_builder.to_device,
            device_lists=device_lists,
            groups=sync_result_builder.groups,
            device_one_time_keys_count=one_time_key_counts,
            device_unused_fallback_key_types=unused_fallback_key_types,
            next_batch=sync_result_builder.now_token,
        )

    @measure_func("_generate_sync_entry_for_groups")
    async def _generate_sync_entry_for_groups(
        self, sync_result_builder: "SyncResultBuilder"
    ) -> None:
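        """Add the group (community) updates, if any, to the sync result
        builder.
        """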
|
2017-07-10 09:53:19 -04:00
|
|
|
user_id = sync_result_builder.sync_config.user.to_string()
|
|
|
|
since_token = sync_result_builder.since_token
|
|
|
|
now_token = sync_result_builder.now_token
|
|
|
|
|
|
|
|
if since_token and since_token.groups_key:
|
2019-12-06 05:14:59 -05:00
|
|
|
results = await self.store.get_groups_changes_for_user(
|
2019-06-20 05:32:02 -04:00
|
|
|
user_id, since_token.groups_key, now_token.groups_key
|
2017-07-10 09:53:19 -04:00
|
|
|
)
|
|
|
|
else:
|
2019-12-05 12:58:25 -05:00
|
|
|
results = await self.store.get_all_groups_for_user(
|
2019-06-20 05:32:02 -04:00
|
|
|
user_id, now_token.groups_key
|
2017-07-10 09:53:19 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
invited = {}
|
|
|
|
joined = {}
|
|
|
|
left = {}
|
|
|
|
for result in results:
|
|
|
|
membership = result["membership"]
|
|
|
|
group_id = result["group_id"]
|
|
|
|
gtype = result["type"]
|
|
|
|
content = result["content"]
|
|
|
|
|
|
|
|
if membership == "join":
|
|
|
|
if gtype == "membership":
|
2017-09-19 05:35:35 -04:00
|
|
|
# TODO: Add profile
|
2017-07-10 09:53:19 -04:00
|
|
|
content.pop("membership", None)
|
2017-07-24 08:31:26 -04:00
|
|
|
joined[group_id] = content["content"]
|
2017-07-10 09:53:19 -04:00
|
|
|
else:
|
|
|
|
joined.setdefault(group_id, {})[gtype] = content
|
|
|
|
elif membership == "invite":
|
|
|
|
if gtype == "membership":
|
|
|
|
content.pop("membership", None)
|
|
|
|
invited[group_id] = content["content"]
|
|
|
|
else:
|
|
|
|
if gtype == "membership":
|
|
|
|
left[group_id] = content["content"]
|
|
|
|
|
|
|
|
sync_result_builder.groups = GroupsSyncResult(
|
2019-06-20 05:32:02 -04:00
|
|
|
join=joined, invite=invited, leave=left
|
2017-07-10 09:53:19 -04:00
|
|
|
)
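
    # Hypothetical sketch of the structure built above (group IDs and profile
    # contents invented): for a user who has joined one group and been invited
    # to another, `sync_result_builder.groups` ends up roughly as
    #
    #     GroupsSyncResult(
    #         join={"+team:example.com": {"profile": {"name": "Team"}}},
    #         invite={"+other:example.com": {"profile": {"name": "Other"}}},
    #         leave={},
    #     )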

    @measure_func("_generate_sync_entry_for_device_list")
    async def _generate_sync_entry_for_device_list(
        self,
        sync_result_builder: "SyncResultBuilder",
        newly_joined_rooms: Set[str],
        newly_joined_or_invited_users: Set[str],
        newly_left_rooms: Set[str],
        newly_left_users: Set[str],
    ) -> DeviceLists:
        """Generate the DeviceLists section of sync

        Args:
            sync_result_builder
            newly_joined_rooms: Set of rooms user has joined since previous sync
            newly_joined_or_invited_users: Set of users that have joined or
                been invited to a room since previous sync.
            newly_left_rooms: Set of rooms user has left since previous sync
            newly_left_users: Set of users that have left a room we're in since
                previous sync
        """

        user_id = sync_result_builder.sync_config.user.to_string()
        since_token = sync_result_builder.since_token

        # We're going to mutate these fields, so let's copy them rather than
        # assume they won't get used later.
        newly_joined_or_invited_users = set(newly_joined_or_invited_users)
        newly_left_users = set(newly_left_users)

        if since_token and since_token.device_list_key:
            # We want to figure out what user IDs the client should refetch
            # device keys for, and which users we aren't going to track changes
            # for anymore.
            #
            # For the first step we check:
            #   a. if any users we share a room with have updated their devices,
            #      and
            #   b. we also check if we've joined any new rooms, or if a user has
            #      joined a room we're in.
            #
            # For the second step we just find any users we no longer share a
            # room with by looking at all users that have left a room plus users
            # that were in a room we've left.

            users_who_share_room = await self.store.get_users_who_share_room_with_user(
                user_id
            )

            # Always tell the user about their own devices. We check as the user
            # ID is almost certainly already included (unless they're not in any
            # rooms) and taking a copy of the set is relatively expensive.
            if user_id not in users_who_share_room:
                users_who_share_room = set(users_who_share_room)
                users_who_share_room.add(user_id)

            tracked_users = users_who_share_room

            # Step 1a, check for changes in devices of users we share a room with
            users_that_have_changed = await self.store.get_users_whose_devices_changed(
                since_token.device_list_key, tracked_users
            )

            # Step 1b, check for newly joined rooms
            for room_id in newly_joined_rooms:
                joined_users = await self.state.get_current_users_in_room(room_id)
                newly_joined_or_invited_users.update(joined_users)

            # TODO: Check that these users are actually new, i.e. either they
            # weren't in the previous sync *or* they left and rejoined.
            users_that_have_changed.update(newly_joined_or_invited_users)

            user_signatures_changed = (
                await self.store.get_users_whose_signatures_changed(
                    user_id, since_token.device_list_key
                )
            )
            users_that_have_changed.update(user_signatures_changed)

            # Now find users that we no longer track
            for room_id in newly_left_rooms:
                left_users = await self.state.get_current_users_in_room(room_id)
                newly_left_users.update(left_users)

            # Remove any users that we still share a room with.
            newly_left_users -= users_who_share_room

            return DeviceLists(changed=users_that_have_changed, left=newly_left_users)
        else:
            return DeviceLists(changed=[], left=[])
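
    # Worked example of the final subtraction above (user IDs invented): a user
    # only lands in `left` if we no longer share any room with them.
    #
    #     >>> newly_left_users = {"@bob:example.com"}
    #     >>> users_who_share_room = {"@alice:example.com", "@me:example.com"}
    #     >>> newly_left_users - users_who_share_room
    #     {'@bob:example.com'}
    #
    # Had "@bob:example.com" still shared another room with us, he would have
    # been in `users_who_share_room` and the subtraction would drop him.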

    async def _generate_sync_entry_for_to_device(
        self, sync_result_builder: "SyncResultBuilder"
    ) -> None:
        """Generates the to-device portion of the sync response. Populates
        `sync_result_builder` with the result.
        """
        user_id = sync_result_builder.sync_config.user.to_string()
        device_id = sync_result_builder.sync_config.device_id
        now_token = sync_result_builder.now_token
        since_stream_id = 0
        if sync_result_builder.since_token is not None:
            since_stream_id = int(sync_result_builder.since_token.to_device_key)

        if since_stream_id != int(now_token.to_device_key):
            # We only delete messages when a new message comes in, but that's
            # fine so long as we delete them at some point.

            deleted = await self.store.delete_messages_for_device(
                user_id, device_id, since_stream_id
            )
            logger.debug(
                "Deleted %d to-device messages up to %d", deleted, since_stream_id
            )

            messages, stream_id = await self.store.get_new_messages_for_device(
                user_id, device_id, since_stream_id, now_token.to_device_key
            )

            for message in messages:
                # We pop here as we shouldn't be sending the message ID down
                # `/sync`
                message_id = message.pop("message_id", None)
                if message_id:
                    set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)

            logger.debug(
                "Returning %d to-device messages between %d and %d (current token: %d)",
                len(messages),
                since_stream_id,
                stream_id,
                now_token.to_device_key,
            )
            sync_result_builder.now_token = now_token.copy_and_replace(
                "to_device_key", stream_id
            )
            sync_result_builder.to_device = messages
        else:
            sync_result_builder.to_device = []
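
    # For illustration: each to-device message is a plain dict, so the
    # `message.pop("message_id", None)` above strips the internal tracing ID
    # before the message goes down `/sync` (example message invented):
    #
    #     >>> message = {"type": "m.room_key_request", "content": {}, "message_id": "abc"}
    #     >>> message.pop("message_id", None)
    #     'abc'
    #     >>> message
    #     {'type': 'm.room_key_request', 'content': {}}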

    async def _generate_sync_entry_for_account_data(
        self, sync_result_builder: "SyncResultBuilder"
    ) -> Dict[str, Dict[str, JsonDict]]:
        """Generates the account data portion of the sync response. Populates
        `sync_result_builder` with the result.

        Args:
            sync_result_builder

        Returns:
            A dictionary containing the per room account data.
        """
        sync_config = sync_result_builder.sync_config
        user_id = sync_result_builder.sync_config.user.to_string()
        since_token = sync_result_builder.since_token

        if since_token and not sync_result_builder.full_state:
            (
                account_data,
                account_data_by_room,
            ) = await self.store.get_updated_account_data_for_user(
                user_id, since_token.account_data_key
            )

            push_rules_changed = await self.store.have_push_rules_changed_for_user(
                user_id, int(since_token.push_rules_key)
            )

            if push_rules_changed:
                account_data["m.push_rules"] = await self.push_rules_for_user(
                    sync_config.user
                )
        else:
            (
                account_data,
                account_data_by_room,
            ) = await self.store.get_account_data_for_user(sync_config.user.to_string())

            account_data["m.push_rules"] = await self.push_rules_for_user(
                sync_config.user
            )

        account_data_for_user = sync_config.filter_collection.filter_account_data(
            [
                {"type": account_data_type, "content": content}
                for account_data_type, content in account_data.items()
            ]
        )

        sync_result_builder.account_data = account_data_for_user

        return account_data_by_room
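
    # For illustration: the list comprehension above reshapes the account data
    # mapping into the event-shaped dicts that the filter expects:
    #
    #     >>> account_data = {"m.push_rules": {"global": {}}}
    #     >>> [{"type": t, "content": c} for t, c in account_data.items()]
    #     [{'type': 'm.push_rules', 'content': {'global': {}}}]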

    async def _generate_sync_entry_for_presence(
        self,
        sync_result_builder: "SyncResultBuilder",
        newly_joined_rooms: Set[str],
        newly_joined_or_invited_users: Set[str],
    ) -> None:
        """Generates the presence portion of the sync response. Populates the
        `sync_result_builder` with the result.

        Args:
            sync_result_builder
            newly_joined_rooms: Set of rooms that the user has joined since
                the last sync (or empty if an initial sync)
            newly_joined_or_invited_users: Set of users that have joined or
                been invited to rooms since the last sync (or empty if an
                initial sync)
        """
        now_token = sync_result_builder.now_token
        sync_config = sync_result_builder.sync_config
        user = sync_result_builder.sync_config.user

        presence_source = self.event_sources.sources["presence"]

        since_token = sync_result_builder.since_token
        presence_key = None
        include_offline = False
        if since_token and not sync_result_builder.full_state:
            presence_key = since_token.presence_key
            include_offline = True

        presence, presence_key = await presence_source.get_new_events(
            user=user,
            from_key=presence_key,
            is_guest=sync_config.is_guest,
            include_offline=include_offline,
        )
        assert presence_key
        sync_result_builder.now_token = now_token.copy_and_replace(
            "presence_key", presence_key
        )

        extra_users_ids = set(newly_joined_or_invited_users)
        for room_id in newly_joined_rooms:
            users = await self.state.get_current_users_in_room(room_id)
            extra_users_ids.update(users)
        extra_users_ids.discard(user.to_string())

        if extra_users_ids:
            states = await self.presence_handler.get_states(extra_users_ids)
            presence.extend(states)

            # Deduplicate the presence entries so that there's at most one per user
            presence = list({p.user_id: p for p in presence}.values())

        presence = sync_config.filter_collection.filter_presence(presence)

        sync_result_builder.presence = presence
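
    # For illustration: the deduplication above relies on later dict insertions
    # winning, so only the most recent presence state per user survives:
    #
    #     >>> from collections import namedtuple
    #     >>> P = namedtuple("P", ["user_id", "state"])
    #     >>> presence = [P("@a:hs", "online"), P("@a:hs", "unavailable")]
    #     >>> [p.state for p in {p.user_id: p for p in presence}.values()]
    #     ['unavailable']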

    async def _generate_sync_entry_for_rooms(
        self,
        sync_result_builder: "SyncResultBuilder",
        account_data_by_room: Dict[str, Dict[str, JsonDict]],
    ) -> Tuple[Set[str], Set[str], Set[str], Set[str]]:
        """Generates the rooms portion of the sync response. Populates the
        `sync_result_builder` with the result.

        Args:
            sync_result_builder
            account_data_by_room: Dictionary of per room account data

        Returns:
            A 4-tuple of
            `(newly_joined_rooms, newly_joined_or_invited_users,
            newly_left_rooms, newly_left_users)`
        """
        user_id = sync_result_builder.sync_config.user.to_string()
        block_all_room_ephemeral = (
            sync_result_builder.since_token is None
            and sync_result_builder.sync_config.filter_collection.blocks_all_room_ephemeral()
        )

        if block_all_room_ephemeral:
            ephemeral_by_room = {}  # type: Dict[str, List[JsonDict]]
        else:
            now_token, ephemeral_by_room = await self.ephemeral_by_room(
                sync_result_builder,
                now_token=sync_result_builder.now_token,
                since_token=sync_result_builder.since_token,
            )
            sync_result_builder.now_token = now_token

        # We check up front if anything has changed, if it hasn't then there is
        # no point in going further.
        since_token = sync_result_builder.since_token
        if not sync_result_builder.full_state:
            if since_token and not ephemeral_by_room and not account_data_by_room:
                have_changed = await self._have_rooms_changed(sync_result_builder)
                if not have_changed:
                    tags_by_room = await self.store.get_updated_tags(
                        user_id, since_token.account_data_key
                    )
                    if not tags_by_room:
                        logger.debug("no-oping sync")
                        return set(), set(), set(), set()

        ignored_account_data = (
            await self.store.get_global_account_data_by_type_for_user(
                AccountDataTypes.IGNORED_USER_LIST, user_id=user_id
            )
        )

        # If there is ignored users account data and it matches the proper type,
        # then use it.
        ignored_users = frozenset()  # type: FrozenSet[str]
        if ignored_account_data:
            ignored_users_data = ignored_account_data.get("ignored_users", {})
            if isinstance(ignored_users_data, dict):
                ignored_users = frozenset(ignored_users_data.keys())

        if since_token:
            room_changes = await self._get_rooms_changed(
                sync_result_builder, ignored_users
            )
            tags_by_room = await self.store.get_updated_tags(
                user_id, since_token.account_data_key
            )
        else:
            room_changes = await self._get_all_rooms(sync_result_builder, ignored_users)

            tags_by_room = await self.store.get_tags_for_user(user_id)

        room_entries = room_changes.room_entries
        invited = room_changes.invited
        newly_joined_rooms = room_changes.newly_joined_rooms
        newly_left_rooms = room_changes.newly_left_rooms

        async def handle_room_entries(room_entry):
            logger.debug("Generating room entry for %s", room_entry.room_id)
            res = await self._generate_room_entry(
                sync_result_builder,
                ignored_users,
                room_entry,
                ephemeral=ephemeral_by_room.get(room_entry.room_id, []),
                tags=tags_by_room.get(room_entry.room_id),
                account_data=account_data_by_room.get(room_entry.room_id, {}),
                always_include=sync_result_builder.full_state,
            )
            logger.debug("Generated room entry for %s", room_entry.room_id)
            return res

        await concurrently_execute(handle_room_entries, room_entries, 10)

        sync_result_builder.invited.extend(invited)

        # Now we want to get any newly joined or invited users
        newly_joined_or_invited_users = set()
        newly_left_users = set()
        if since_token:
            for joined_sync in sync_result_builder.joined:
                it = itertools.chain(
                    joined_sync.timeline.events, joined_sync.state.values()
                )
                for event in it:
                    if event.type == EventTypes.Member:
                        if (
                            event.membership == Membership.JOIN
                            or event.membership == Membership.INVITE
                        ):
                            newly_joined_or_invited_users.add(event.state_key)
                        else:
                            prev_content = event.unsigned.get("prev_content", {})
                            prev_membership = prev_content.get("membership", None)
                            if prev_membership == Membership.JOIN:
                                newly_left_users.add(event.state_key)

            newly_left_users -= newly_joined_or_invited_users

        return (
            set(newly_joined_rooms),
            newly_joined_or_invited_users,
            set(newly_left_rooms),
            newly_left_users,
        )
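
    # Worked example of the subtraction above (user IDs invented): someone who
    # left but was re-invited within the sync window is not reported as left.
    #
    #     >>> newly_left_users = {"@a:hs", "@b:hs"}
    #     >>> newly_joined_or_invited_users = {"@b:hs"}
    #     >>> newly_left_users - newly_joined_or_invited_users
    #     {'@a:hs'}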

    async def _have_rooms_changed(
        self, sync_result_builder: "SyncResultBuilder"
    ) -> bool:
        """Returns whether there may be any new events that should be sent down
        the sync. Returns True if there are.
        """
        user_id = sync_result_builder.sync_config.user.to_string()
        since_token = sync_result_builder.since_token
        now_token = sync_result_builder.now_token

        assert since_token

        # Get a list of membership change events that have happened.
        rooms_changed = await self.store.get_membership_changes_for_user(
            user_id, since_token.room_key, now_token.room_key
        )

        if rooms_changed:
            return True

        stream_id = since_token.room_key.stream
        for room_id in sync_result_builder.joined_room_ids:
            if self.store.has_room_changed_since(room_id, stream_id):
                return True
        return False

    async def _get_rooms_changed(
        self, sync_result_builder: "SyncResultBuilder", ignored_users: FrozenSet[str]
    ) -> _RoomChanges:
        """Gets the changes that have happened since the last sync."""
        user_id = sync_result_builder.sync_config.user.to_string()
        since_token = sync_result_builder.since_token
        now_token = sync_result_builder.now_token
        sync_config = sync_result_builder.sync_config

        assert since_token

        # Get a list of membership change events that have happened.
        rooms_changed = await self.store.get_membership_changes_for_user(
            user_id, since_token.room_key, now_token.room_key
        )

        mem_change_events_by_room_id = {}  # type: Dict[str, List[EventBase]]
        for event in rooms_changed:
            mem_change_events_by_room_id.setdefault(event.room_id, []).append(event)

        newly_joined_rooms = []
        newly_left_rooms = []
        room_entries = []
        invited = []
        for room_id, events in mem_change_events_by_room_id.items():
            logger.debug(
                "Membership changes in %s: [%s]",
                room_id,
                ", ".join(("%s (%s)" % (e.event_id, e.membership) for e in events)),
            )

            non_joins = [e for e in events if e.membership != Membership.JOIN]
            has_join = len(non_joins) != len(events)

            # We want to figure out if we joined the room at some point since
            # the last sync (even if we have since left). This is to make sure
            # we do send down the room, and with full state, where necessary

            old_state_ids = None
            if room_id in sync_result_builder.joined_room_ids and non_joins:
                # Always include if the user (re)joined the room, especially
                # important so that device list changes are calculated correctly.
                # If there are non-join member events, but we are still in the room,
                # then the user must have left and joined
                newly_joined_rooms.append(room_id)

                # User is in the room so we don't need to do the invite/leave checks
                continue

            if room_id in sync_result_builder.joined_room_ids or has_join:
                old_state_ids = await self.get_state_at(room_id, since_token)
                old_mem_ev_id = old_state_ids.get((EventTypes.Member, user_id), None)
                old_mem_ev = None
                if old_mem_ev_id:
                    old_mem_ev = await self.store.get_event(
                        old_mem_ev_id, allow_none=True
                    )

                # debug for #4422
                if has_join:
                    prev_membership = None
                    if old_mem_ev:
                        prev_membership = old_mem_ev.membership
                    issue4422_logger.debug(
                        "Previous membership for room %s with join: %s (event %s)",
                        room_id,
                        prev_membership,
                        old_mem_ev_id,
                    )

                if not old_mem_ev or old_mem_ev.membership != Membership.JOIN:
                    newly_joined_rooms.append(room_id)

            # If user is in the room then we don't need to do the invite/leave checks
            if room_id in sync_result_builder.joined_room_ids:
                continue

            if not non_joins:
                continue

            # Check if we have left the room. This can either be because we were
            # joined before *or* that we since joined and then left.
            if events[-1].membership != Membership.JOIN:
                if has_join:
                    newly_left_rooms.append(room_id)
                else:
                    if not old_state_ids:
                        old_state_ids = await self.get_state_at(room_id, since_token)
                        old_mem_ev_id = old_state_ids.get(
                            (EventTypes.Member, user_id), None
                        )
                        old_mem_ev = None
                        if old_mem_ev_id:
                            old_mem_ev = await self.store.get_event(
                                old_mem_ev_id, allow_none=True
                            )
                    if old_mem_ev and old_mem_ev.membership == Membership.JOIN:
                        newly_left_rooms.append(room_id)

            # Only bother if we're still currently invited
            should_invite = non_joins[-1].membership == Membership.INVITE
            if should_invite:
                if non_joins[-1].sender not in ignored_users:
                    room_sync = InvitedSyncResult(room_id, invite=non_joins[-1])
                    if room_sync:
                        invited.append(room_sync)

            # Always include leave/ban events. Just take the last one.
            # TODO: How do we handle ban -> leave in same batch?
            leave_events = [
                e
                for e in non_joins
                if e.membership in (Membership.LEAVE, Membership.BAN)
            ]

            if leave_events:
                leave_event = leave_events[-1]
                leave_position = await self.store.get_position_for_event(
                    leave_event.event_id
                )

                # If the leave event happened before the since token then we
                # bail.
                if since_token and not leave_position.persisted_after(
                    since_token.room_key
                ):
                    continue

                # We can safely convert the position of the leave event into a
                # stream token as it'll only be used in the context of this
                # room. (c.f. the docstring of `to_room_stream_token`).
                leave_token = since_token.copy_and_replace(
                    "room_key", leave_position.to_room_stream_token()
                )

                # If this is an out of band message, like a remote invite
                # rejection, we include it in the recents batch. Otherwise, we
                # let _load_filtered_recents handle fetching the correct
                # batches.
                #
                # This is all screaming out for a refactor, as the logic here is
                # subtle and the moving parts numerous.
                if leave_event.internal_metadata.is_out_of_band_membership():
                    batch_events = [leave_event]  # type: Optional[List[EventBase]]
                else:
                    batch_events = None

                room_entries.append(
                    RoomSyncResultBuilder(
                        room_id=room_id,
                        rtype="archived",
                        events=batch_events,
                        newly_joined=room_id in newly_joined_rooms,
                        full_state=False,
                        since_token=since_token,
                        upto_token=leave_token,
                    )
                )

        timeline_limit = sync_config.filter_collection.timeline_limit()

        # Get all events for rooms we're currently joined to.
        room_to_events = await self.store.get_room_events_stream_for_rooms(
            room_ids=sync_result_builder.joined_room_ids,
            from_key=since_token.room_key,
            to_key=now_token.room_key,
            limit=timeline_limit + 1,
        )

        # We loop through all room ids, even if there are no new events, in case
        # there are non room events that we need to notify about.
        for room_id in sync_result_builder.joined_room_ids:
            room_entry = room_to_events.get(room_id, None)

            newly_joined = room_id in newly_joined_rooms
            if room_entry:
                events, start_key = room_entry

                prev_batch_token = now_token.copy_and_replace("room_key", start_key)

                entry = RoomSyncResultBuilder(
                    room_id=room_id,
                    rtype="joined",
                    events=events,
                    newly_joined=newly_joined,
                    full_state=False,
                    since_token=None if newly_joined else since_token,
                    upto_token=prev_batch_token,
                )
            else:
                entry = RoomSyncResultBuilder(
                    room_id=room_id,
                    rtype="joined",
                    events=[],
                    newly_joined=newly_joined,
                    full_state=False,
                    since_token=since_token,
                    upto_token=since_token,
                )

            if newly_joined:
                # debugging for https://github.com/matrix-org/synapse/issues/4422
                issue4422_logger.debug(
                    "RoomSyncResultBuilder events for newly joined room %s: %r",
                    room_id,
                    entry.events,
                )
            room_entries.append(entry)

        return _RoomChanges(room_entries, invited, newly_joined_rooms, newly_left_rooms)
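
    # For illustration: `has_join` above falls out of pure counting, with no
    # event inspection beyond the membership field (memberships simplified to
    # strings here):
    #
    #     >>> events = ["join", "leave"]
    #     >>> non_joins = [e for e in events if e != "join"]
    #     >>> len(non_joins) != len(events)  # has_join
    #     True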

    async def _get_all_rooms(
        self, sync_result_builder: "SyncResultBuilder", ignored_users: FrozenSet[str]
    ) -> _RoomChanges:
        """Returns entries for all rooms for the user.

        Args:
            sync_result_builder
            ignored_users: Set of users ignored by user.
        """

        user_id = sync_result_builder.sync_config.user.to_string()
        since_token = sync_result_builder.since_token
        now_token = sync_result_builder.now_token
        sync_config = sync_result_builder.sync_config

        membership_list = (
            Membership.INVITE,
            Membership.JOIN,
            Membership.LEAVE,
            Membership.BAN,
        )

        room_list = await self.store.get_rooms_for_local_user_where_membership_is(
            user_id=user_id, membership_list=membership_list
        )

        room_entries = []
        invited = []

        for event in room_list:
            if event.membership == Membership.JOIN:
                room_entries.append(
                    RoomSyncResultBuilder(
                        room_id=event.room_id,
                        rtype="joined",
                        events=None,
                        newly_joined=False,
                        full_state=True,
                        since_token=since_token,
                        upto_token=now_token,
                    )
                )
            elif event.membership == Membership.INVITE:
                if event.sender in ignored_users:
                    continue
                invite = await self.store.get_event(event.event_id)
                invited.append(InvitedSyncResult(room_id=event.room_id, invite=invite))
            elif event.membership in (Membership.LEAVE, Membership.BAN):
                # Always send down rooms we were banned or kicked from.
                if not sync_config.filter_collection.include_leave:
                    if event.membership == Membership.LEAVE:
                        if user_id == event.sender:
                            continue

                leave_token = now_token.copy_and_replace(
                    "room_key", RoomStreamToken(None, event.stream_ordering)
                )
                room_entries.append(
                    RoomSyncResultBuilder(
                        room_id=event.room_id,
                        rtype="archived",
                        events=None,
                        newly_joined=False,
                        full_state=True,
                        since_token=since_token,
                        upto_token=leave_token,
                    )
                )

        return _RoomChanges(room_entries, invited, [], [])

    async def _generate_room_entry(
        self,
        sync_result_builder: "SyncResultBuilder",
        ignored_users: FrozenSet[str],
        room_builder: "RoomSyncResultBuilder",
        ephemeral: List[JsonDict],
        tags: Optional[Dict[str, Dict[str, Any]]],
        account_data: Dict[str, JsonDict],
        always_include: bool = False,
    ):
        """Populates the `joined` and `archived` section of `sync_result_builder`
        based on the `room_builder`.

        Args:
            sync_result_builder
            ignored_users: Set of users ignored by user.
            room_builder
            ephemeral: List of new ephemeral events for room
            tags: Dictionary of *all* tags for room, or None if there has been
                no change.
            account_data: Dictionary of new account data for room
            always_include: Always include this room in the sync response,
                even if empty.
        """
        newly_joined = room_builder.newly_joined
        full_state = (
            room_builder.full_state or newly_joined or sync_result_builder.full_state
        )
        events = room_builder.events

        # We want to shortcut out as early as possible.
        if not (always_include or account_data or ephemeral or full_state):
            if events == [] and tags is None:
                return

        now_token = sync_result_builder.now_token
        sync_config = sync_result_builder.sync_config

        room_id = room_builder.room_id
        since_token = room_builder.since_token
        upto_token = room_builder.upto_token

        batch = await self._load_filtered_recents(
            room_id,
            sync_config,
            now_token=upto_token,
            since_token=since_token,
            potential_recents=events,
            newly_joined_room=newly_joined,
        )

        # Note: `batch` can be both empty and limited here in the case where
        # `_load_filtered_recents` can't find any events the user should see
        # (e.g. due to having ignored the sender of the last 50 events).

        if newly_joined:
            # debug for https://github.com/matrix-org/synapse/issues/4422
            issue4422_logger.debug(
                "Timeline events after filtering in newly-joined room %s: %r",
                room_id,
                batch,
            )

        # When we join the room (or the client requests full_state), we should
        # send down any existing tags. Usually the user won't have tags in a
        # newly joined room, unless either a) they've joined before or b) the
        # tag was added by synapse e.g. for server notice rooms.
        if full_state:
            user_id = sync_result_builder.sync_config.user.to_string()
            tags = await self.store.get_tags_for_room(user_id, room_id)

            # If there aren't any tags, don't send the empty tags list down
            # sync
            if not tags:
                tags = None

        account_data_events = []
        if tags is not None:
            account_data_events.append({"type": "m.tag", "content": {"tags": tags}})

        for account_data_type, content in account_data.items():
            account_data_events.append({"type": account_data_type, "content": content})

        account_data_events = sync_config.filter_collection.filter_room_account_data(
            account_data_events
        )

        ephemeral = sync_config.filter_collection.filter_room_ephemeral(ephemeral)

        if not (
            always_include or batch or account_data_events or ephemeral or full_state
        ):
            return

        state = await self.compute_state_delta(
            room_id, batch, sync_config, since_token, now_token, full_state=full_state
        )

        summary = {}  # type: Optional[JsonDict]

        # we include a summary in room responses when we're lazy loading
        # members (as the client otherwise doesn't have enough info to form
        # the name itself).
        if sync_config.filter_collection.lazy_load_members() and (
            # we recalculate the summary:
            #   if there are membership changes in the timeline, or
            #   if membership has changed during a gappy sync, or
            #   if this is an initial sync.
            any(ev.type == EventTypes.Member for ev in batch.events)
            or (
                # XXX: this may include false positives in the form of LL
                # members which have snuck into state
                batch.limited
                and any(t == EventTypes.Member for (t, k) in state)
            )
            or since_token is None
        ):
            summary = await self.compute_summary(
                room_id, sync_config, batch, state, now_token
            )

        if room_builder.rtype == "joined":
            unread_notifications = {}  # type: Dict[str, int]
            room_sync = JoinedSyncResult(
                room_id=room_id,
                timeline=batch,
                state=state,
                ephemeral=ephemeral,
                account_data=account_data_events,
                unread_notifications=unread_notifications,
                summary=summary,
                unread_count=0,
            )

            if room_sync or always_include:
                notifs = await self.unread_notifs_for_room_id(room_id, sync_config)

                unread_notifications["notification_count"] = notifs["notify_count"]
                unread_notifications["highlight_count"] = notifs["highlight_count"]

                room_sync.unread_count = notifs["unread_count"]

                sync_result_builder.joined.append(room_sync)

            if batch.limited and since_token:
                user_id = sync_result_builder.sync_config.user.to_string()
                logger.debug(
                    "Incremental gappy sync of %s for user %s with %d state events",
                    room_id,
                    user_id,
                    len(state),
                )
        elif room_builder.rtype == "archived":
            archived_room_sync = ArchivedSyncResult(
                room_id=room_id,
                timeline=batch,
                state=state,
                account_data=account_data_events,
            )
            if archived_room_sync or always_include:
                sync_result_builder.archived.append(archived_room_sync)
        else:
            raise Exception("Unrecognized rtype: %r", room_builder.rtype)
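
    # For illustration: a room tag reaches the client inside the room's account
    # data section as an event-shaped dict (tag name and order field from the
    # Matrix spec, values invented):
    #
    #     {"type": "m.tag", "content": {"tags": {"m.favourite": {"order": 0.5}}}}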

    async def get_rooms_for_user_at(
        self, user_id: str, room_key: RoomStreamToken
    ) -> FrozenSet[str]:
        """Get set of joined rooms for a user at the given stream ordering.

        The stream ordering *must* be recent, otherwise this may throw an
        exception if older than a month. (This function is called with the
        current token, which should be perfectly fine).

        Args:
            user_id
            room_key

        Returns:
            Set of room_ids the user is in at given stream_ordering.
        """
        joined_rooms = await self.store.get_rooms_for_user_with_stream_ordering(user_id)

        joined_room_ids = set()

        # We need to check that the stream ordering of the join for each room
        # is before the stream_ordering asked for. This might not be the case
        # if the user joins a room between us getting the current token and
        # calling `get_rooms_for_user_with_stream_ordering`.
        # If the membership's stream ordering is after the given stream
        # ordering, we need to go and work out if the user was in the room
        # before.
        for room_id, event_pos in joined_rooms:
            if not event_pos.persisted_after(room_key):
                joined_room_ids.add(room_id)
                continue

            logger.info("User joined room after current token: %s", room_id)

            extrems = (
                await self.store.get_forward_extremities_for_room_at_stream_ordering(
                    room_id, event_pos.stream
                )
            )
            users_in_room = await self.state.get_current_users_in_room(room_id, extrems)
            if user_id in users_in_room:
                joined_room_ids.add(room_id)

        return frozenset(joined_room_ids)


def _action_has_highlight(actions: List[JsonDict]) -> bool:
    for action in actions:
        try:
            if action.get("set_tweak", None) == "highlight":
                return action.get("value", True)
        except AttributeError:
            pass

    return False
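
# For illustration (hypothetical push rule actions): string actions such as
# "notify" have no `.get` and are skipped via the AttributeError handler, while
# a highlight tweak returns its value, defaulting to True:
#
#     >>> _action_has_highlight(["notify", {"set_tweak": "highlight"}])
#     True
#     >>> _action_has_highlight(["notify", {"set_tweak": "sound", "value": "default"}])
#     False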


def _calculate_state(
    timeline_contains: StateMap[str],
    timeline_start: StateMap[str],
    previous: StateMap[str],
    current: StateMap[str],
    lazy_load_members: bool,
) -> StateMap[str]:
    """Works out what state to include in a sync response.

    Args:
        timeline_contains: state in the timeline
        timeline_start: state at the start of the timeline
        previous: state at the end of the previous sync (or empty dict
            if this is an initial sync)
        current: state at the end of the timeline
        lazy_load_members: whether to return members from timeline_start
            or not. assumes that timeline_start has already been filtered to
            include only the members the client needs to know about.
    """
    event_id_to_key = {
        e: key
        for key, e in itertools.chain(
            timeline_contains.items(),
            previous.items(),
            timeline_start.items(),
            current.items(),
        )
    }

    c_ids = set(current.values())
    ts_ids = set(timeline_start.values())
    p_ids = set(previous.values())
    tc_ids = set(timeline_contains.values())

    # If we are lazy-loading room members, we explicitly add the membership events
    # for the senders in the timeline into the state block returned by /sync,
    # as we may not have sent them to the client before. We find these membership
    # events by filtering them out of timeline_start, which has already been filtered
    # to only include membership events for the senders in the timeline.
    # In practice, we can do this by removing them from the p_ids list,
    # which is the list of relevant state we know we have already sent to the client.
    # see https://github.com/matrix-org/synapse/pull/2970
    # /files/efcdacad7d1b7f52f879179701c7e0d9b763511f#r204732809

    if lazy_load_members:
        p_ids.difference_update(
            e for t, e in timeline_start.items() if t[0] == EventTypes.Member
        )

    state_ids = ((c_ids | ts_ids) - p_ids) - tc_ids

    return {event_id_to_key[e]: e for e in state_ids}
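
# Worked example of the set arithmetic above (event IDs invented): state that
# the client already has (previous sync) or will receive in the timeline is
# excluded, so only the genuinely missing entry remains:
#
#     >>> current = {("m.room.name", ""): "$n2", ("m.room.topic", ""): "$t1"}
#     >>> timeline_start = {("m.room.name", ""): "$n1"}
#     >>> previous = {("m.room.name", ""): "$n1"}
#     >>> timeline_contains = {("m.room.name", ""): "$n2"}
#     >>> c_ids, ts_ids = set(current.values()), set(timeline_start.values())
#     >>> p_ids, tc_ids = set(previous.values()), set(timeline_contains.values())
#     >>> ((c_ids | ts_ids) - p_ids) - tc_ids
#     {'$t1'}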


@attr.s(slots=True)
class SyncResultBuilder:
    """Used to help build up a new SyncResult for a user

    Attributes:
        sync_config
        full_state: The full_state flag as specified by user
        since_token: The token supplied by user, or None.
        now_token: The token to sync up to.
        joined_room_ids: List of rooms the user is joined to

        # The following mirror the fields in a sync response
        presence (list)
        account_data (list)
        joined (list[JoinedSyncResult])
        invited (list[InvitedSyncResult])
        archived (list[ArchivedSyncResult])
        groups (GroupsSyncResult|None)
        to_device (list)
    """

    sync_config = attr.ib(type=SyncConfig)
    full_state = attr.ib(type=bool)
    since_token = attr.ib(type=Optional[StreamToken])
    now_token = attr.ib(type=StreamToken)
    joined_room_ids = attr.ib(type=FrozenSet[str])

    presence = attr.ib(type=List[JsonDict], default=attr.Factory(list))
    account_data = attr.ib(type=List[JsonDict], default=attr.Factory(list))
    joined = attr.ib(type=List[JoinedSyncResult], default=attr.Factory(list))
    invited = attr.ib(type=List[InvitedSyncResult], default=attr.Factory(list))
    archived = attr.ib(type=List[ArchivedSyncResult], default=attr.Factory(list))
    groups = attr.ib(type=Optional[GroupsSyncResult], default=None)
    to_device = attr.ib(type=List[JsonDict], default=attr.Factory(list))
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
|
2020-09-14 12:50:06 -04:00
|
|
|
@attr.s(slots=True)
|
2020-09-04 06:54:56 -04:00
|
|
|
class RoomSyncResultBuilder:
|
2016-05-24 05:14:53 -04:00
|
|
|
"""Stores information needed to create either a `JoinedSyncResult` or
|
|
|
|
`ArchivedSyncResult`.
|
2020-02-03 13:05:44 -05:00
|
|
|
|
|
|
|
Attributes:
|
|
|
|
room_id
|
|
|
|
rtype: One of `"joined"` or `"archived"`
|
|
|
|
events: List of events to include in the room (more events may be added
|
|
|
|
when generating result).
|
|
|
|
newly_joined: If the user has newly joined the room
|
|
|
|
full_state: Whether the full state should be sent in result
|
|
|
|
since_token: Earliest point to return events from, or None
|
|
|
|
upto_token: Latest point to return events from.
|
2016-05-24 05:14:53 -04:00
|
|
|
"""
|
2019-06-20 05:32:02 -04:00
|
|
|
|
2020-02-03 13:05:44 -05:00
|
|
|
room_id = attr.ib(type=str)
|
|
|
|
rtype = attr.ib(type=str)
|
|
|
|
events = attr.ib(type=Optional[List[EventBase]])
|
|
|
|
newly_joined = attr.ib(type=bool)
|
|
|
|
full_state = attr.ib(type=bool)
|
|
|
|
since_token = attr.ib(type=Optional[StreamToken])
|
|
|
|
upto_token = attr.ib(type=StreamToken)
|