2015-01-26 13:53:31 -05:00
|
|
|
# -*- coding: utf-8 -*-
|
2018-03-13 20:02:20 -04:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
|
|
|
# Copyright 2018 New Vector Ltd
|
2015-01-26 13:53:31 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
import collections
|
|
|
|
import itertools
|
|
|
|
import logging
|
|
|
|
|
|
|
|
from six import iteritems, itervalues
|
|
|
|
|
|
|
|
from twisted.internet import defer
|
|
|
|
|
|
|
|
from synapse.api.constants import EventTypes, Membership
|
|
|
|
from synapse.push.clientformat import format_push_rules_for_user
|
|
|
|
from synapse.types import RoomStreamToken
|
2018-08-10 09:50:21 -04:00
|
|
|
from synapse.util.async_helpers import concurrently_execute
|
2018-07-26 17:51:30 -04:00
|
|
|
from synapse.util.caches.expiringcache import ExpiringCache
|
|
|
|
from synapse.util.caches.lrucache import LruCache
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.util.caches.response_cache import ResponseCache
|
2018-04-12 07:08:59 -04:00
|
|
|
from synapse.util.logcontext import LoggingContext
|
2017-02-02 13:36:17 -05:00
|
|
|
from synapse.util.metrics import Measure, measure_func
|
2016-05-11 08:42:37 -04:00
|
|
|
from synapse.visibility import filter_events_for_client
|
2018-04-28 07:19:12 -04:00
|
|
|
|
2015-01-26 13:53:31 -05:00
|
|
|
logger = logging.getLogger(__name__)
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2018-07-26 17:51:30 -04:00
|
|
|
# Store the cache that tracks which lazy-loaded members have been sent to a given
# client for no more than 30 minutes.
LAZY_LOADED_MEMBERS_CACHE_MAX_AGE = 30 * 60 * 1000  # expiry in milliseconds

# Remember the last 100 members we sent to a client for the purposes of
# avoiding redundantly sending the same lazy-loaded members to the client
LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE = 100
|
|
|
|
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
# Per-request configuration for a /sync: who is syncing, which filter they
# supplied, whether they are a guest, the response-cache key for the request,
# and which device is syncing.
SyncConfig = collections.namedtuple(
    "SyncConfig",
    (
        "user",
        "filter_collection",
        "is_guest",
        "request_key",
        "device_id",
    ),
)
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
|
2015-10-01 12:53:07 -04:00
|
|
|
class TimelineBatch(collections.namedtuple(
    "TimelineBatch",
    ["prev_batch", "events", "limited"],
)):
    """A batch of timeline events for one room, plus the pagination token
    pointing at the events immediately before the batch."""

    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        # Only the events matter: prev_batch/limited carry no updates.
        return bool(self.events)

    __bool__ = __nonzero__  # python3
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2015-10-05 11:39:22 -04:00
|
|
|
|
2015-10-13 05:24:51 -04:00
|
|
|
class JoinedSyncResult(collections.namedtuple(
    "JoinedSyncResult",
    [
        "room_id",              # str
        "timeline",             # TimelineBatch
        "state",                # dict[(str, str), FrozenEvent]
        "ephemeral",
        "account_data",
        "unread_notifications",
        "summary",
    ],
)):
    """Sync payload for a single room the user is joined to."""

    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        # nb the notification count does not, er, count: if there's nothing
        # else in the result, we don't need to send it.
        return any((
            self.timeline,
            self.state,
            self.ephemeral,
            self.account_data,
        ))

    __bool__ = __nonzero__  # python3
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
|
2016-01-27 12:06:52 -05:00
|
|
|
class ArchivedSyncResult(collections.namedtuple(
    "ArchivedSyncResult",
    [
        "room_id",              # str
        "timeline",             # TimelineBatch
        "state",                # dict[(str, str), FrozenEvent]
        "account_data",
    ],
)):
    """Sync payload for a room the user has left."""

    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        return any((
            self.timeline,
            self.state,
            self.account_data,
        ))

    __bool__ = __nonzero__  # python3
|
2015-10-19 12:26:18 -04:00
|
|
|
|
|
|
|
|
2015-10-13 05:24:51 -04:00
|
|
|
class InvitedSyncResult(collections.namedtuple(
    "InvitedSyncResult",
    [
        "room_id",   # str
        "invite",    # FrozenEvent: the invite event
    ],
)):
    """Sync payload for a room the user has been invited to."""

    __slots__ = []

    def __nonzero__(self):
        """Invited rooms should always be reported to the client"""
        return True

    __bool__ = __nonzero__  # python3
|
2015-10-19 12:26:18 -04:00
|
|
|
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2017-07-10 09:53:19 -04:00
|
|
|
class GroupsSyncResult(collections.namedtuple(
    "GroupsSyncResult",
    ["join", "invite", "leave"],
)):
    """Group (community) membership changes to report in a sync."""

    __slots__ = []

    def __nonzero__(self):
        # Empty when there are no changes in any of the three categories.
        return any((self.join, self.invite, self.leave))

    __bool__ = __nonzero__  # python3
|
2017-07-10 09:53:19 -04:00
|
|
|
|
|
|
|
|
2017-09-07 10:08:39 -04:00
|
|
|
class DeviceLists(collections.namedtuple(
    "DeviceLists",
    [
        "changed",  # list of user_ids whose devices may have changed
        "left",     # list of user_ids whose devices we no longer track
    ],
)):
    """Device-list deltas to report in a sync."""

    __slots__ = []

    def __nonzero__(self):
        return any((self.changed, self.left))

    __bool__ = __nonzero__  # python3
|
2017-09-07 10:08:39 -04:00
|
|
|
|
|
|
|
|
2015-01-26 10:46:31 -05:00
|
|
|
class SyncResult(collections.namedtuple(
    "SyncResult",
    [
        "next_batch",                    # Token for the next sync
        "presence",                      # List of presence events for the user.
        "account_data",                  # List of account_data events for the user.
        "joined",                        # JoinedSyncResult for each joined room.
        "invited",                       # InvitedSyncResult for each invited room.
        "archived",                      # ArchivedSyncResult for each archived room.
        "to_device",                     # List of direct messages for the device.
        "device_lists",                  # List of user_ids whose devices have changed
        "device_one_time_keys_count",    # Dict of algorithm to count for one time
                                         # keys for this device
        "groups",
    ],
)):
    """The complete result of a single /sync request."""

    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if the notifier needs to wait for more events when polling for
        events.
        """
        # Note: next_batch and device_one_time_keys_count deliberately do not
        # contribute - they are always present and carry no "new data" signal.
        return any((
            self.presence,
            self.joined,
            self.invited,
            self.archived,
            self.account_data,
            self.to_device,
            self.device_lists,
            self.groups,
        ))

    __bool__ = __nonzero__  # python3
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
|
2016-05-16 15:19:26 -04:00
|
|
|
class SyncHandler(object):
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
def __init__(self, hs):
    """Wire up the sync handler's dependencies from the homeserver object."""
    self.clock = hs.get_clock()
    self.store = hs.get_datastore()
    self.notifier = hs.get_notifier()
    self.presence_handler = hs.get_presence_handler()
    self.event_sources = hs.get_event_sources()
    self.state = hs.get_state_handler()
    self.auth = hs.get_auth()

    # Deduplicates concurrent identical /sync requests (keyed by request_key).
    self.response_cache = ResponseCache(hs, "sync")

    # ExpiringCache((User, Device)) -> LruCache(state_key => event_id)
    self.lazy_loaded_members_cache = ExpiringCache(
        "lazy_loaded_members_cache", self.clock,
        max_len=0, expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE,
    )
|
|
|
|
|
2018-08-08 12:54:49 -04:00
|
|
|
@defer.inlineCallbacks
def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0,
                           full_state=False):
    """Get the sync for a client if we have new data for it now. Otherwise
    wait for new data to arrive on the server. If the timeout expires, then
    return an empty sync result.

    Returns:
        Deferred[SyncResult]
    """
    # If the user is not part of the mau group, then check that limits have
    # not been exceeded (if not part of the group by this point, almost certain
    # auth_blocking will occur)
    yield self.auth.check_auth_blocking(sync_config.user.to_string())

    # Collapse concurrent identical requests onto a single computation.
    result = yield self.response_cache.wrap(
        sync_config.request_key,
        self._wait_for_sync_for_user,
        sync_config, since_token, timeout, full_state,
    )
    defer.returnValue(result)
|
2015-10-26 14:47:18 -04:00
|
|
|
|
2016-03-24 13:47:31 -04:00
|
|
|
@defer.inlineCallbacks
def _wait_for_sync_for_user(self, sync_config, since_token, timeout,
                            full_state):
    """Compute the sync, optionally waiting via the notifier for new events.

    Tags the current logging context so the request type shows up in logs
    and metrics.
    """
    context = LoggingContext.current_context()
    if context:
        if since_token is None:
            context.tag = "initial_sync"
        else:
            context.tag = "full_state_sync" if full_state else "incremental_sync"

    immediate = timeout == 0 or since_token is None or full_state
    if immediate:
        # we are going to return immediately, so don't bother calling
        # notifier.wait_for_events.
        result = yield self.current_sync_for_user(
            sync_config, since_token, full_state=full_state,
        )
        defer.returnValue(result)

    def current_sync_callback(before_token, after_token):
        return self.current_sync_for_user(sync_config, since_token)

    # Block until there is new data (or the timeout fires), then sync.
    result = yield self.notifier.wait_for_events(
        sync_config.user.to_string(), timeout, current_sync_callback,
        from_token=since_token,
    )
    defer.returnValue(result)
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2015-10-26 14:47:18 -04:00
|
|
|
def current_sync_for_user(self, sync_config, since_token=None,
                          full_state=False):
    """Get the sync for client needed to match what the server has now.

    Returns:
        A Deferred SyncResult.
    """
    # Thin delegate: all the work happens in generate_sync_result.
    result = self.generate_sync_result(sync_config, since_token, full_state)
    return result
|
2015-01-27 11:24:22 -05:00
|
|
|
|
2016-03-04 09:44:01 -05:00
|
|
|
@defer.inlineCallbacks
def push_rules_for_user(self, user):
    """Fetch the user's push rules and convert them to the client-facing
    format.

    Returns:
        Deferred of the formatted push rules for `user`.
    """
    raw_rules = yield self.store.get_push_rules_for_user(user.to_string())
    defer.returnValue(format_push_rules_for_user(user, raw_rules))
|
|
|
|
|
2015-10-20 11:36:20 -04:00
|
|
|
@defer.inlineCallbacks
def ephemeral_by_room(self, sync_result_builder, now_token, since_token=None):
    """Get the ephemeral events for each room the user is in

    Collects typing and receipt events (from the corresponding event
    sources) for the rooms in sync_result_builder.joined_room_ids, and
    advances the typing/receipt positions in the returned token.

    Args:
        sync_result_builder(SyncResultBuilder)
        now_token (StreamToken): Where the server is currently up to.
        since_token (StreamToken): Where the server was when the client
            last synced.
    Returns:
        A tuple of the now StreamToken, updated to reflect which typing
        events are included, and a dict mapping from room_id to a list of
        typing events for that room.
    """

    sync_config = sync_result_builder.sync_config

    with Measure(self.clock, "ephemeral_by_room"):
        # "0" is the stream origin when there is no previous sync position.
        typing_key = since_token.typing_key if since_token else "0"

        room_ids = sync_result_builder.joined_room_ids

        typing_source = self.event_sources.sources["typing"]
        typing, typing_key = yield typing_source.get_new_events(
            user=sync_config.user,
            from_key=typing_key,
            limit=sync_config.filter_collection.ephemeral_limit(),
            room_ids=room_ids,
            is_guest=sync_config.is_guest,
        )
        # Record how far through the typing stream this response covers.
        now_token = now_token.copy_and_replace("typing_key", typing_key)

        ephemeral_by_room = {}

        for event in typing:
            # we want to exclude the room_id from the event, but modifying the
            # result returned by the event source is poor form (it might cache
            # the object)
            room_id = event["room_id"]
            event_copy = {k: v for (k, v) in iteritems(event)
                          if k != "room_id"}
            ephemeral_by_room.setdefault(room_id, []).append(event_copy)

        receipt_key = since_token.receipt_key if since_token else "0"

        receipt_source = self.event_sources.sources["receipt"]
        receipts, receipt_key = yield receipt_source.get_new_events(
            user=sync_config.user,
            from_key=receipt_key,
            limit=sync_config.filter_collection.ephemeral_limit(),
            room_ids=room_ids,
            is_guest=sync_config.is_guest,
        )
        # Likewise advance the receipt stream position.
        now_token = now_token.copy_and_replace("receipt_key", receipt_key)

        for event in receipts:
            room_id = event["room_id"]
            # exclude room id, as above
            event_copy = {k: v for (k, v) in iteritems(event)
                          if k != "room_id"}
            ephemeral_by_room.setdefault(room_id, []).append(event_copy)

    defer.returnValue((now_token, ephemeral_by_room))
|
2015-10-20 11:36:20 -04:00
|
|
|
|
2015-01-30 06:32:35 -05:00
|
|
|
@defer.inlineCallbacks
def _load_filtered_recents(self, room_id, sync_config, now_token,
                           since_token=None, recents=None, newly_joined_room=False):
    """Load and filter the recent timeline events for a room, paginating
    backwards until the filter's timeline limit is satisfied (or we give up).

    Args:
        room_id (str)
        sync_config (SyncConfig)
        now_token (StreamToken): upper bound of the timeline window
        since_token (StreamToken|None): lower bound; None for initial sync
        recents (list|None): candidate events already fetched by the caller,
            if any — presumably pre-fetched from the event cache (TODO confirm)
        newly_joined_room (bool): if True, the cached `recents` cannot be
            trusted and the batch is always marked limited

    Returns:
        a Deferred TimelineBatch
    """
    with Measure(self.clock, "load_filtered_recents"):
        timeline_limit = sync_config.filter_collection.timeline_limit()
        block_all_timeline = sync_config.filter_collection.blocks_all_room_timeline()

        # We may have been handed too few events, or events we can't use;
        # in either case more must be loaded from the store below.
        if recents is None or newly_joined_room or timeline_limit < len(recents):
            limited = True
        else:
            limited = False

        if recents:
            recents = sync_config.filter_collection.filter_room_timeline(recents)

            # We check if there are any state events, if there are then we pass
            # all current state events to the filter_events function. This is to
            # ensure that we always include current state in the timeline
            current_state_ids = frozenset()
            if any(e.is_state() for e in recents):
                current_state_ids = yield self.state.get_current_state_ids(room_id)
                current_state_ids = frozenset(itervalues(current_state_ids))

            recents = yield filter_events_for_client(
                self.store,
                sync_config.user.to_string(),
                recents,
                always_include_ids=current_state_ids,
            )
        else:
            recents = []

        # Fast path: the supplied events already cover the window (or the
        # filter suppresses timelines entirely) - no store access needed.
        if not limited or block_all_timeline:
            defer.returnValue(TimelineBatch(
                events=recents,
                prev_batch=now_token,
                limited=False
            ))

        # Over-fetch so that events removed by filtering/visibility checks
        # don't leave us short of timeline_limit.
        filtering_factor = 2
        load_limit = max(timeline_limit * filtering_factor, 10)
        max_repeat = 5  # Only try a few times per room, otherwise
        room_key = now_token.room_key
        end_key = room_key

        since_key = None
        if since_token and not newly_joined_room:
            since_key = since_token.room_key

        while limited and len(recents) < timeline_limit and max_repeat:
            # If we have a since_key then we are trying to get any events
            # that have happened since `since_key` up to `end_key`, so we
            # can just use `get_room_events_stream_for_room`.
            # Otherwise, we want to return the last N events in the room
            # in toplogical ordering.
            if since_key:
                events, end_key = yield self.store.get_room_events_stream_for_room(
                    room_id,
                    limit=load_limit + 1,
                    from_key=since_key,
                    to_key=end_key,
                )
            else:
                events, end_key = yield self.store.get_recent_events_for_room(
                    room_id,
                    limit=load_limit + 1,
                    end_token=end_key,
                )
            loaded_recents = sync_config.filter_collection.filter_room_timeline(
                events
            )

            # We check if there are any state events, if there are then we pass
            # all current state events to the filter_events function. This is to
            # ensure that we always include current state in the timeline
            current_state_ids = frozenset()
            if any(e.is_state() for e in loaded_recents):
                current_state_ids = yield self.state.get_current_state_ids(room_id)
                current_state_ids = frozenset(itervalues(current_state_ids))

            loaded_recents = yield filter_events_for_client(
                self.store,
                sync_config.user.to_string(),
                loaded_recents,
                always_include_ids=current_state_ids,
            )
            # Older events are prepended: keep chronological order.
            loaded_recents.extend(recents)
            recents = loaded_recents

            # Fewer than load_limit events came back, so we have reached the
            # start of the available history: stop and mark as not limited.
            if len(events) <= load_limit:
                limited = False
                break
            max_repeat -= 1

        if len(recents) > timeline_limit:
            limited = True
            recents = recents[-timeline_limit:]
            # Point prev_batch at the position just before the oldest event
            # we are returning, so /messages can paginate further back.
            room_key = recents[0].internal_metadata.before

        prev_batch_token = now_token.copy_and_replace(
            "room_key", room_key
        )

    defer.returnValue(TimelineBatch(
        events=recents,
        prev_batch=prev_batch_token,
        limited=limited or newly_joined_room
    ))
|
2015-01-30 06:32:35 -05:00
|
|
|
|
2015-01-26 10:46:31 -05:00
|
|
|
@defer.inlineCallbacks
def get_state_after_event(self, event, types=None, filtered_types=None):
    """
    Get the room state after the given event

    Args:
        event(synapse.events.EventBase): event of interest
        types(list[(str, str|None)]|None): List of (type, state_key) tuples
            which are used to filter the state fetched. If `state_key` is None,
            all events are returned of the given type.
            May be None, which matches any key.
        filtered_types(list[str]|None): Only apply filtering via `types` to this
            list of event types. Other types of events are returned unfiltered.
            If None, `types` filtering is applied to all events.

    Returns:
        A Deferred map from ((type, state_key)->Event)
    """
    state_map = yield self.store.get_state_ids_for_event(
        event.event_id, types, filtered_types=filtered_types,
    )

    if event.is_state():
        # The fetched state is the state *before* the event; overlay the
        # event itself so the map reflects the state just after it.
        # Copy first so we don't mutate a possibly-shared/cached dict.
        state_map = state_map.copy()
        state_map[(event.type, event.state_key)] = event.event_id

    defer.returnValue(state_map)
|
2015-11-10 13:27:23 -05:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
def get_state_at(self, room_id, stream_position, types=None, filtered_types=None):
    """ Get the room state at a particular stream position

    Args:
        room_id(str): room for which to get state
        stream_position(StreamToken): point at which to get state
        types(list[(str, str|None)]|None): List of (type, state_key) tuples
            which are used to filter the state fetched. If `state_key` is None,
            all events are returned of the given type.
        filtered_types(list[str]|None): Only apply filtering via `types` to this
            list of event types. Other types of events are returned unfiltered.
            If None, `types` filtering is applied to all events.

    Returns:
        A Deferred map from ((type, state_key)->Event)
    """
    # FIXME this claims to get the state at a stream position, but
    # get_recent_events_for_room operates by topo ordering. This therefore
    # does not reliably give you the state at the given stream position.
    # (https://github.com/matrix-org/synapse/issues/3305)
    last_events, _ = yield self.store.get_recent_events_for_room(
        room_id, end_token=stream_position.room_key, limit=1,
    )

    if not last_events:
        # no events in this room - so presumably no state
        defer.returnValue({})

    # State after the most recent event is the state "at" the position.
    state = yield self.get_state_after_event(
        last_events[-1], types, filtered_types=filtered_types,
    )
    defer.returnValue(state)
|
|
|
|
|
2018-08-16 04:46:50 -04:00
|
|
|
@defer.inlineCallbacks
def compute_summary(self, room_id, sync_config, batch, state, now_token):
    """ Works out a room summary block for this room, summarising the number
    of joined members in the room, and providing the 'hero' members if the
    room has no name so clients can consistently name rooms. Also adds
    state events to 'state' if needed to describe the heroes.

    Args:
        room_id(str):
        sync_config(synapse.handlers.sync.SyncConfig):
        batch(synapse.handlers.sync.TimelineBatch): The timeline batch for
            the room that will be sent to the user.
        state(dict): dict of (type, state_key) -> Event as returned by
            compute_state_delta
        now_token(str): Token of the end of the current batch.

    Returns:
        A deferred dict describing the room summary
    """

    # FIXME: this promulgates https://github.com/matrix-org/synapse/issues/3305
    last_events, _ = yield self.store.get_recent_event_ids_for_room(
        room_id, end_token=now_token.room_key, limit=1,
    )

    if not last_events:
        # No events at all: nothing to summarise.
        defer.returnValue(None)

    last_event = last_events[-1]
    state_ids = yield self.store.get_state_ids_for_event(
        last_event.event_id, [
            (EventTypes.Member, None),
            (EventTypes.Name, ''),
            (EventTypes.CanonicalAlias, ''),
        ]
    )

    # NOTE: was `state_ids.iteritems()`, which is Python-2 only and would
    # raise AttributeError on py3; use six.iteritems for py2+py3 support,
    # consistent with the rest of this module.
    member_ids = {
        state_key: event_id
        for (t, state_key), event_id in iteritems(state_ids)
        if t == EventTypes.Member
    }
    name_id = state_ids.get((EventTypes.Name, ''))
    canonical_alias_id = state_ids.get((EventTypes.CanonicalAlias, ''))

    summary = {}

    # FIXME: it feels very heavy to load up every single membership event
    # just to calculate the counts.
    member_events = yield self.store.get_events(member_ids.values())

    joined_user_ids = []
    invited_user_ids = []

    for ev in member_events.values():
        if ev.content.get("membership") == Membership.JOIN:
            joined_user_ids.append(ev.state_key)
        elif ev.content.get("membership") == Membership.INVITE:
            invited_user_ids.append(ev.state_key)

    # TODO: only send these when they change.
    summary["m.joined_member_count"] = len(joined_user_ids)
    summary["m.invited_member_count"] = len(invited_user_ids)

    # Heroes are only needed when the room has no name/alias to display.
    if name_id or canonical_alias_id:
        defer.returnValue(summary)

    # FIXME: order by stream ordering, not alphabetic

    me = sync_config.user.to_string()
    if joined_user_ids or invited_user_ids:
        summary['m.heroes'] = sorted(
            [
                user_id
                for user_id in (joined_user_ids + invited_user_ids)
                if user_id != me
            ]
        )[0:5]
    else:
        # Everyone has left/been banned; fall back to all known members.
        summary['m.heroes'] = sorted(
            [user_id for user_id in member_ids.keys() if user_id != me]
        )[0:5]

    if not sync_config.filter_collection.lazy_load_members():
        defer.returnValue(summary)

    # ensure we send membership events for heroes if needed
    cache_key = (sync_config.user.to_string(), sync_config.device_id)
    cache = self.get_lazy_loaded_members_cache(cache_key)

    # track which members the client should already know about via LL:
    # Ones which are already in state...
    existing_members = set(
        user_id for (typ, user_id) in state.keys()
        if typ == EventTypes.Member
    )

    # ...or ones which are in the timeline...
    for ev in batch.events:
        if ev.type == EventTypes.Member:
            existing_members.add(ev.state_key)

    # ...and then ensure any missing ones get included in state.
    missing_hero_event_ids = [
        member_ids[hero_id]
        for hero_id in summary['m.heroes']
        if (
            cache.get(hero_id) != member_ids[hero_id] and
            hero_id not in existing_members
        )
    ]

    missing_hero_state = yield self.store.get_events(missing_hero_event_ids)
    missing_hero_state = missing_hero_state.values()

    for s in missing_hero_state:
        # Remember we sent it, so we can skip it on the next sync.
        cache.set(s.state_key, s.event_id)
        state[(EventTypes.Member, s.state_key)] = s

    defer.returnValue(summary)
|
|
|
|
|
|
|
|
def get_lazy_loaded_members_cache(self, cache_key):
    """Fetch, or lazily create, the per-(user, device) LruCache that tracks
    which membership events have already been sent to that client."""
    existing = self.lazy_loaded_members_cache.get(cache_key)
    if existing is not None:
        logger.debug("found LruCache for %r", cache_key)
        return existing

    logger.debug("creating LruCache for %r", cache_key)
    new_cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE)
    self.lazy_loaded_members_cache[cache_key] = new_cache
    return new_cache
|
|
|
|
|
2016-02-01 10:59:40 -05:00
|
|
|
@defer.inlineCallbacks
def compute_state_delta(self, room_id, batch, sync_config, since_token, now_token,
                        full_state):
    """ Works out the difference in state between the start of the timeline
    and the previous sync.

    Args:
        room_id(str):
        batch(synapse.handlers.sync.TimelineBatch): The timeline batch for
            the room that will be sent to the user.
        sync_config(synapse.handlers.sync.SyncConfig):
        since_token(str|None): Token of the end of the previous batch. May
            be None.
        now_token(str): Token of the end of the current batch.
        full_state(bool): Whether to force returning the full state.

    Returns:
        A deferred dict of (type, state_key) -> Event
    """
    # TODO(mjark) Check if the state events were received by the server
    # after the previous sync, since we need to include those state
    # updates even if they occured logically before the previous event.
    # TODO(mjark) Check for new redactions in the state events.

    with Measure(self.clock, "compute_state_delta"):

        types = None
        filtered_types = None

        lazy_load_members = sync_config.filter_collection.lazy_load_members()
        include_redundant_members = (
            sync_config.filter_collection.include_redundant_members()
        )

        if lazy_load_members:
            # We only request state for the members needed to display the
            # timeline:

            types = [
                (EventTypes.Member, state_key)
                for state_key in set(
                    event.sender  # FIXME: we also care about invite targets etc.
                    for event in batch.events
                )
            ]

            # only apply the filtering to room members
            filtered_types = [EventTypes.Member]

        timeline_state = {
            (event.type, event.state_key): event.event_id
            for event in batch.events if event.is_state()
        }

        if full_state:
            if batch:
                current_state_ids = yield self.store.get_state_ids_for_event(
                    batch.events[-1].event_id, types=types,
                    filtered_types=filtered_types,
                )

                state_ids = yield self.store.get_state_ids_for_event(
                    batch.events[0].event_id, types=types,
                    filtered_types=filtered_types,
                )

            else:
                # The timeline is empty; the state at its (notional) start is
                # simply the current state.
                current_state_ids = yield self.get_state_at(
                    room_id, stream_position=now_token, types=types,
                    filtered_types=filtered_types,
                )

                state_ids = current_state_ids

            state_ids = _calculate_state(
                timeline_contains=timeline_state,
                timeline_start=state_ids,
                previous={},
                current=current_state_ids,
                lazy_load_members=lazy_load_members,
            )
        elif batch.limited:
            state_at_previous_sync = yield self.get_state_at(
                room_id, stream_position=since_token, types=types,
                filtered_types=filtered_types,
            )

            current_state_ids = yield self.store.get_state_ids_for_event(
                batch.events[-1].event_id, types=types,
                filtered_types=filtered_types,
            )

            state_at_timeline_start = yield self.store.get_state_ids_for_event(
                batch.events[0].event_id, types=types,
                filtered_types=filtered_types,
            )

            state_ids = _calculate_state(
                timeline_contains=timeline_state,
                timeline_start=state_at_timeline_start,
                previous=state_at_previous_sync,
                current=current_state_ids,
                lazy_load_members=lazy_load_members,
            )
        else:
            # An incremental, non-gappy sync: normally no state is needed,
            # but under lazy-loading we may still have to send the members
            # referenced by the timeline.
            state_ids = {}
            if lazy_load_members:
                if types:
                    state_ids = yield self.store.get_state_ids_for_event(
                        batch.events[0].event_id, types=types,
                        filtered_types=filtered_types,
                    )

        if lazy_load_members and not include_redundant_members:
            cache_key = (sync_config.user.to_string(), sync_config.device_id)
            cache = self.get_lazy_loaded_members_cache(cache_key)

            # if it's a new sync sequence, then assume the client has had
            # amnesia and doesn't want any recent lazy-loaded members
            # de-duplicated.
            if since_token is None:
                logger.debug("clearing LruCache for %r", cache_key)
                cache.clear()
            else:
                # only send members which aren't in our LruCache (either
                # because they're new to this client or have been pushed out
                # of the cache)
                logger.debug("filtering state from %r...", state_ids)
                state_ids = {
                    t: event_id
                    # FIX: was `state_ids.iteritems()`, which only exists on
                    # Python 2 dicts; use six.iteritems (already imported)
                    # for py2/py3 compatibility, matching the rest of the
                    # file.
                    for t, event_id in iteritems(state_ids)
                    if cache.get(t[1]) != event_id
                }
                logger.debug("...to %r", state_ids)

            # add any member IDs we are about to send into our LruCache
            for t, event_id in itertools.chain(
                state_ids.items(),
                timeline_state.items(),
            ):
                if t[0] == EventTypes.Member:
                    cache.set(t[1], event_id)

        state = {}
        if state_ids:
            state = yield self.store.get_events(list(state_ids.values()))

    defer.returnValue({
        (e.type, e.state_key): e
        for e in sync_config.filter_collection.filter_room_state(list(state.values()))
    })
|
2015-01-30 08:33:41 -05:00
|
|
|
|
2015-12-18 12:47:00 -05:00
|
|
|
@defer.inlineCallbacks
def unread_notifs_for_room_id(self, room_id, sync_config):
    """Fetch unread push actions for the user in the given room.

    Looks up the user's last `m.read` receipt in the room; if there is
    one, returns the push actions since that event, otherwise returns
    None (meaning: no new information, keep the previous count).

    Args:
        room_id(str): the room to look in.
        sync_config(SyncConfig): identifies the syncing user.

    Returns:
        Deferred: the unread push actions, or None if the user has no
            read receipt in the room.
    """
    with Measure(self.clock, "unread_notifs_for_room_id"):
        user_id = sync_config.user.to_string()
        last_unread_event_id = yield self.store.get_last_receipt_event_id_for_user(
            user_id=user_id,
            room_id=room_id,
            receipt_type="m.read"
        )

        if last_unread_event_id:
            notifs = yield self.store.get_unread_event_push_actions_by_room_for_user(
                room_id, user_id, last_unread_event_id
            )
            defer.returnValue(notifs)

        # There is no new information in this period, so your notification
        # count is whatever it was last time.
        defer.returnValue(None)
|
2016-01-19 06:35:50 -05:00
|
|
|
|
2016-05-23 12:37:01 -04:00
|
|
|
@defer.inlineCallbacks
def generate_sync_result(self, sync_config, since_token=None, full_state=False):
    """Generates a sync result.

    Args:
        sync_config (SyncConfig)
        since_token (StreamToken)
        full_state (bool)

    Returns:
        Deferred(SyncResult)
    """
    logger.info("Calculating sync response for %r", sync_config.user)

    # NB: The now_token gets changed by some of the generate_sync_* methods,
    # this is due to some of the underlying streams not supporting the ability
    # to query up to a given point. Always read the token back off the
    # SyncResultBuilder when building the final result.
    now_token = yield self.event_sources.get_current_token()

    user_id = sync_config.user.to_string()
    app_service = self.store.get_app_service_by_user_id(user_id)
    if app_service:
        # We no longer support AS users using /sync directly.
        # See https://github.com/matrix-org/matrix-doc/issues/1144
        raise NotImplementedError()

    joined_room_ids = yield self.get_rooms_for_user_at(
        user_id, now_token.room_stream_id,
    )

    sync_result_builder = SyncResultBuilder(
        sync_config, full_state,
        since_token=since_token,
        now_token=now_token,
        joined_room_ids=joined_room_ids,
    )

    account_data_by_room = yield self._generate_sync_entry_for_account_data(
        sync_result_builder
    )

    room_changes = yield self._generate_sync_entry_for_rooms(
        sync_result_builder, account_data_by_room
    )
    (
        newly_joined_rooms,
        newly_joined_users,
        newly_left_rooms,
        newly_left_users,
    ) = room_changes

    # Presence is skipped entirely on an initial sync whose filter blocks
    # all presence data.
    block_all_presence_data = (
        since_token is None and
        sync_config.filter_collection.blocks_all_presence()
    )
    if not block_all_presence_data:
        yield self._generate_sync_entry_for_presence(
            sync_result_builder, newly_joined_rooms, newly_joined_users
        )

    yield self._generate_sync_entry_for_to_device(sync_result_builder)

    device_lists = yield self._generate_sync_entry_for_device_list(
        sync_result_builder,
        newly_joined_rooms=newly_joined_rooms,
        newly_joined_users=newly_joined_users,
        newly_left_rooms=newly_left_rooms,
        newly_left_users=newly_left_users,
    )

    # E2E one-time-key counts only make sense for a device-scoped sync.
    one_time_key_counts = {}
    device_id = sync_config.device_id
    if device_id:
        one_time_key_counts = yield self.store.count_e2e_one_time_keys(
            user_id, device_id
        )

    yield self._generate_sync_entry_for_groups(sync_result_builder)

    defer.returnValue(SyncResult(
        presence=sync_result_builder.presence,
        account_data=sync_result_builder.account_data,
        joined=sync_result_builder.joined,
        invited=sync_result_builder.invited,
        archived=sync_result_builder.archived,
        to_device=sync_result_builder.to_device,
        device_lists=device_lists,
        groups=sync_result_builder.groups,
        device_one_time_keys_count=one_time_key_counts,
        next_batch=sync_result_builder.now_token,
    ))
|
|
|
|
|
2017-07-10 09:53:19 -04:00
|
|
|
@measure_func("_generate_sync_entry_for_groups")
@defer.inlineCallbacks
def _generate_sync_entry_for_groups(self, sync_result_builder):
    """Populate `sync_result_builder.groups` with the user's group changes.

    Fetches either the delta since the last sync (when a groups token is
    available) or every group for the user, then buckets the rows into
    join/invite/leave maps keyed by group id.

    Args:
        sync_result_builder(SyncResultBuilder)
    """
    user_id = sync_result_builder.sync_config.user.to_string()
    since_token = sync_result_builder.since_token
    now_token = sync_result_builder.now_token

    if since_token and since_token.groups_key:
        # Incremental sync: only changes in the token window.
        results = yield self.store.get_groups_changes_for_user(
            user_id, since_token.groups_key, now_token.groups_key,
        )
    else:
        results = yield self.store.get_all_groups_for_user(
            user_id, now_token.groups_key,
        )

    invited = {}
    joined = {}
    left = {}
    for row in results:
        group_id = row["group_id"]
        gtype = row["type"]
        content = row["content"]
        membership = row["membership"]

        if gtype == "membership":
            if membership == "join":
                # TODO: Add profile
                content.pop("membership", None)
                joined[group_id] = content["content"]
            elif membership == "invite":
                content.pop("membership", None)
                invited[group_id] = content["content"]
            else:
                left[group_id] = content["content"]
        elif membership == "join":
            # Non-membership updates only matter for groups we're in.
            joined.setdefault(group_id, {})[gtype] = content

    sync_result_builder.groups = GroupsSyncResult(
        join=joined,
        invite=invited,
        leave=left,
    )
|
|
|
|
|
2017-02-02 13:36:17 -05:00
|
|
|
@measure_func("_generate_sync_entry_for_device_list")
@defer.inlineCallbacks
def _generate_sync_entry_for_device_list(self, sync_result_builder,
                                         newly_joined_rooms, newly_joined_users,
                                         newly_left_rooms, newly_left_users):
    """Generate the `device_lists` portion of the sync response.

    NOTE(review): this mutates the `newly_joined_users` and
    `newly_left_users` sets passed in by the caller (via .update()) —
    confirm callers do not rely on them being unchanged.

    Args:
        sync_result_builder(SyncResultBuilder)
        newly_joined_rooms(list): rooms joined since the last sync.
        newly_joined_users(set): users who joined rooms since the last sync.
        newly_left_rooms(list): rooms left since the last sync.
        newly_left_users(set): users who left rooms since the last sync.

    Returns:
        Deferred(DeviceLists): users whose device lists changed / left.
    """
    user_id = sync_result_builder.sync_config.user.to_string()
    since_token = sync_result_builder.since_token

    # Device-list deltas only make sense on an incremental sync with a
    # device_list token; otherwise return an empty result (else branch).
    if since_token and since_token.device_list_key:
        changed = yield self.store.get_user_whose_devices_changed(
            since_token.device_list_key
        )

        # TODO: Be more clever than this, i.e. remove users who we already
        # share a room with?
        for room_id in newly_joined_rooms:
            joined_users = yield self.state.get_current_user_in_room(room_id)
            newly_joined_users.update(joined_users)

        for room_id in newly_left_rooms:
            left_users = yield self.state.get_current_user_in_room(room_id)
            newly_left_users.update(left_users)

        # TODO: Check that these users are actually new, i.e. either they
        # weren't in the previous sync *or* they left and rejoined.
        changed.update(newly_joined_users)

        # Nothing changed at all: short-circuit without the (expensive)
        # shared-rooms lookup.
        if not changed and not newly_left_users:
            defer.returnValue(DeviceLists(
                changed=[],
                left=newly_left_users,
            ))

        # Only report changes for users we actually share a room with;
        # `changed` is intersected (set &) against that group below, so it
        # is presumably a set — TODO confirm against the store method.
        users_who_share_room = yield self.store.get_users_who_share_room_with_user(
            user_id
        )

        defer.returnValue(DeviceLists(
            changed=users_who_share_room & changed,
            left=set(newly_left_users) - users_who_share_room,
        ))
    else:
        defer.returnValue(DeviceLists(
            changed=[],
            left=[],
        ))
|
2017-01-27 08:36:39 -05:00
|
|
|
|
2016-08-25 12:35:37 -04:00
|
|
|
@defer.inlineCallbacks
def _generate_sync_entry_for_to_device(self, sync_result_builder):
    """Generates the to-device portion of the sync response. Populates
    `sync_result_builder.to_device` (and advances its now_token) with any
    to-device messages for this user/device; has no meaningful return
    value.

    Args:
        sync_result_builder(SyncResultBuilder)
    """
    user_id = sync_result_builder.sync_config.user.to_string()
    device_id = sync_result_builder.sync_config.device_id
    now_token = sync_result_builder.now_token
    # Initial sync starts from stream id 0.
    since_stream_id = 0
    if sync_result_builder.since_token is not None:
        since_stream_id = int(sync_result_builder.since_token.to_device_key)

    # Only do any work if the to-device stream has advanced.
    if since_stream_id != int(now_token.to_device_key):
        # We only delete messages when a new message comes in, but that's
        # fine so long as we delete them at some point.

        deleted = yield self.store.delete_messages_for_device(
            user_id, device_id, since_stream_id
        )
        logger.debug("Deleted %d to-device messages up to %d",
                     deleted, since_stream_id)

        messages, stream_id = yield self.store.get_new_messages_for_device(
            user_id, device_id, since_stream_id, now_token.to_device_key
        )

        logger.debug(
            "Returning %d to-device messages between %d and %d (current token: %d)",
            len(messages), since_stream_id, stream_id, now_token.to_device_key
        )
        # Advance the to-device token only as far as the messages we are
        # actually returning, so nothing is skipped on the next sync.
        sync_result_builder.now_token = now_token.copy_and_replace(
            "to_device_key", stream_id
        )
        sync_result_builder.to_device = messages
    else:
        sync_result_builder.to_device = []
|
2016-08-25 12:35:37 -04:00
|
|
|
|
2016-05-23 12:37:01 -04:00
|
|
|
@defer.inlineCallbacks
def _generate_sync_entry_for_account_data(self, sync_result_builder):
    """Generates the account data portion of the sync response. Populates
    `sync_result_builder` with the result.

    Args:
        sync_result_builder(SyncResultBuilder)

    Returns:
        Deferred(dict): A dictionary containing the per room account data.
    """
    sync_config = sync_result_builder.sync_config
    user_id = sync_result_builder.sync_config.user.to_string()
    since_token = sync_result_builder.since_token

    is_incremental = bool(since_token) and not sync_result_builder.full_state

    if is_incremental:
        # Incremental sync: only account data changed since the last token.
        (global_account_data, account_data_by_room) = (
            yield self.store.get_updated_account_data_for_user(
                user_id,
                since_token.account_data_key,
            )
        )

        push_rules_changed = yield self.store.have_push_rules_changed_for_user(
            user_id, int(since_token.push_rules_key)
        )
        if push_rules_changed:
            global_account_data["m.push_rules"] = yield self.push_rules_for_user(
                sync_config.user
            )
    else:
        # Initial or full-state sync: everything, push rules included.
        (global_account_data, account_data_by_room) = (
            yield self.store.get_account_data_for_user(
                sync_config.user.to_string()
            )
        )

        global_account_data['m.push_rules'] = yield self.push_rules_for_user(
            sync_config.user
        )

    filtered_global = sync_config.filter_collection.filter_account_data([
        {"type": account_data_type, "content": content}
        for account_data_type, content in global_account_data.items()
    ])

    sync_result_builder.account_data = filtered_global

    defer.returnValue(account_data_by_room)
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
def _generate_sync_entry_for_presence(self, sync_result_builder, newly_joined_rooms,
                                      newly_joined_users):
    """Generates the presence portion of the sync response. Populates the
    `sync_result_builder` with the result.

    Args:
        sync_result_builder(SyncResultBuilder)
        newly_joined_rooms(list): List of rooms that the user has joined
            since the last sync (or empty if an initial sync)
        newly_joined_users(list): List of users that have joined rooms
            since the last sync (or empty if an initial sync)
    """
    now_token = sync_result_builder.now_token
    sync_config = sync_result_builder.sync_config
    user = sync_result_builder.sync_config.user

    presence_source = self.event_sources.sources["presence"]

    since_token = sync_result_builder.since_token
    incremental = bool(since_token) and not sync_result_builder.full_state
    # On an incremental sync we page from the previous presence token and
    # include offline transitions; an initial sync starts from scratch.
    presence_key = since_token.presence_key if incremental else None
    include_offline = incremental

    presence, presence_key = yield presence_source.get_new_events(
        user=user,
        from_key=presence_key,
        is_guest=sync_config.is_guest,
        include_offline=include_offline,
    )
    sync_result_builder.now_token = now_token.copy_and_replace(
        "presence_key", presence_key
    )

    # Also fetch presence for newly-joined users and everyone in
    # newly-joined rooms (minus ourselves).
    interesting_user_ids = set(newly_joined_users)
    for room_id in newly_joined_rooms:
        room_users = yield self.state.get_current_user_in_room(room_id)
        interesting_user_ids.update(room_users)
    interesting_user_ids.discard(user.to_string())

    if interesting_user_ids:
        extra_states = yield self.presence_handler.get_states(
            interesting_user_ids,
        )
        presence.extend(extra_states)

    # Deduplicate the presence entries so that there's at most one per user
    presence = list({p.user_id: p for p in presence}.values())

    presence = sync_config.filter_collection.filter_presence(
        presence
    )

    sync_result_builder.presence = presence
|
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
def _generate_sync_entry_for_rooms(self, sync_result_builder, account_data_by_room):
    """Generates the rooms portion of the sync response. Populates the
    `sync_result_builder` with the result.

    Args:
        sync_result_builder(SyncResultBuilder)
        account_data_by_room(dict): Dictionary of per room account data

    Returns:
        Deferred(tuple): Returns a 4-tuple of
        `(newly_joined_rooms, newly_joined_users, newly_left_rooms, newly_left_users)`
    """
    user_id = sync_result_builder.sync_config.user.to_string()
    # Skip fetching ephemeral events entirely when the initial-sync filter
    # blocks them all.
    block_all_room_ephemeral = (
        sync_result_builder.since_token is None and
        sync_result_builder.sync_config.filter_collection.blocks_all_room_ephemeral()
    )

    if block_all_room_ephemeral:
        ephemeral_by_room = {}
    else:
        now_token, ephemeral_by_room = yield self.ephemeral_by_room(
            sync_result_builder,
            now_token=sync_result_builder.now_token,
            since_token=sync_result_builder.since_token,
        )
        sync_result_builder.now_token = now_token

    # We check up front if anything has changed, if it hasn't then there is
    # no point in going futher.
    since_token = sync_result_builder.since_token
    if not sync_result_builder.full_state:
        if since_token and not ephemeral_by_room and not account_data_by_room:
            have_changed = yield self._have_rooms_changed(sync_result_builder)
            if not have_changed:
                # Tags are the last thing that could force a response;
                # if none changed either, this sync is a no-op.
                tags_by_room = yield self.store.get_updated_tags(
                    user_id,
                    since_token.account_data_key,
                )
                if not tags_by_room:
                    logger.debug("no-oping sync")
                    defer.returnValue(([], [], [], []))

    ignored_account_data = yield self.store.get_global_account_data_by_type_for_user(
        "m.ignored_user_list", user_id=user_id,
    )

    if ignored_account_data:
        ignored_users = ignored_account_data.get("ignored_users", {}).keys()
    else:
        ignored_users = frozenset()

    # Incremental syncs work off membership deltas; initial syncs fetch
    # every room (and so have no "newly left" concept).
    if since_token:
        res = yield self._get_rooms_changed(sync_result_builder, ignored_users)
        room_entries, invited, newly_joined_rooms, newly_left_rooms = res

        tags_by_room = yield self.store.get_updated_tags(
            user_id, since_token.account_data_key,
        )
    else:
        res = yield self._get_all_rooms(sync_result_builder, ignored_users)
        room_entries, invited, newly_joined_rooms = res
        newly_left_rooms = []

        tags_by_room = yield self.store.get_tags_for_user(user_id)

    def handle_room_entries(room_entry):
        # Builds the joined/archived entry for one room; results are
        # written into sync_result_builder by _generate_room_entry.
        return self._generate_room_entry(
            sync_result_builder,
            ignored_users,
            room_entry,
            ephemeral=ephemeral_by_room.get(room_entry.room_id, []),
            tags=tags_by_room.get(room_entry.room_id),
            account_data=account_data_by_room.get(room_entry.room_id, {}),
            always_include=sync_result_builder.full_state,
        )

    # Process up to 10 rooms concurrently.
    yield concurrently_execute(handle_room_entries, room_entries, 10)

    sync_result_builder.invited.extend(invited)

    # Now we want to get any newly joined users
    newly_joined_users = set()
    newly_left_users = set()
    if since_token:
        for joined_sync in sync_result_builder.joined:
            # Membership changes may appear in either the timeline or the
            # state section of each joined room's result.
            it = itertools.chain(
                joined_sync.timeline.events, itervalues(joined_sync.state)
            )
            for event in it:
                if event.type == EventTypes.Member:
                    if event.membership == Membership.JOIN:
                        newly_joined_users.add(event.state_key)
                    else:
                        # A non-join following a join counts as a leave.
                        prev_content = event.unsigned.get("prev_content", {})
                        prev_membership = prev_content.get("membership", None)
                        if prev_membership == Membership.JOIN:
                            newly_left_users.add(event.state_key)

    # Anyone who re-joined is not "left".
    newly_left_users -= newly_joined_users

    defer.returnValue((
        newly_joined_rooms,
        newly_joined_users,
        newly_left_rooms,
        newly_left_users,
    ))
|
2016-05-23 12:37:01 -04:00
|
|
|
|
2017-03-15 14:13:59 -04:00
|
|
|
@defer.inlineCallbacks
def _have_rooms_changed(self, sync_result_builder):
    """Returns whether there may be any new events that should be sent down
    the sync. Returns True if there are.
    """
    user_id = sync_result_builder.sync_config.user.to_string()
    since_token = sync_result_builder.since_token
    now_token = sync_result_builder.now_token

    assert since_token

    # Any membership change in the window means something to send.
    rooms_changed = yield self.store.get_membership_changes_for_user(
        user_id, since_token.room_key, now_token.room_key
    )
    if rooms_changed:
        defer.returnValue(True)

    # Otherwise, check each joined room's stream for activity since the
    # stream ordering embedded in the since token.
    stream_id = RoomStreamToken.parse_stream_token(since_token.room_key).stream
    changed = any(
        self.store.has_room_changed_since(room_id, stream_id)
        for room_id in sync_result_builder.joined_room_ids
    )
    defer.returnValue(changed)
|
|
|
|
|
2016-05-23 12:37:01 -04:00
|
|
|
@defer.inlineCallbacks
|
2016-05-24 05:53:03 -04:00
|
|
|
def _get_rooms_changed(self, sync_result_builder, ignored_users):
|
2016-05-24 05:14:53 -04:00
|
|
|
"""Gets the the changes that have happened since the last sync.
|
|
|
|
|
|
|
|
Args:
|
2016-05-24 05:53:03 -04:00
|
|
|
sync_result_builder(SyncResultBuilder)
|
2016-05-24 05:14:53 -04:00
|
|
|
ignored_users(set(str)): Set of users ignored by user.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred(tuple): Returns a tuple of the form:
|
2018-05-29 17:31:18 -04:00
|
|
|
`(room_entries, invited_rooms, newly_joined_rooms, newly_left_rooms)`
|
|
|
|
|
|
|
|
where:
|
|
|
|
room_entries is a list [RoomSyncResultBuilder]
|
|
|
|
invited_rooms is a list [InvitedSyncResult]
|
|
|
|
newly_joined rooms is a list[str] of room ids
|
|
|
|
newly_left_rooms is a list[str] of room ids
|
2016-05-24 05:14:53 -04:00
|
|
|
"""
|
2016-05-24 05:53:03 -04:00
|
|
|
user_id = sync_result_builder.sync_config.user.to_string()
|
|
|
|
since_token = sync_result_builder.since_token
|
|
|
|
now_token = sync_result_builder.now_token
|
|
|
|
sync_config = sync_result_builder.sync_config
|
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
assert since_token
|
|
|
|
|
|
|
|
# Get a list of membership change events that have happened.
|
|
|
|
rooms_changed = yield self.store.get_membership_changes_for_user(
|
|
|
|
user_id, since_token.room_key, now_token.room_key
|
|
|
|
)
|
|
|
|
|
|
|
|
mem_change_events_by_room_id = {}
|
|
|
|
for event in rooms_changed:
|
|
|
|
mem_change_events_by_room_id.setdefault(event.room_id, []).append(event)
|
|
|
|
|
|
|
|
newly_joined_rooms = []
|
2017-09-13 10:13:41 -04:00
|
|
|
newly_left_rooms = []
|
2016-05-24 04:43:35 -04:00
|
|
|
room_entries = []
|
2016-05-23 12:37:01 -04:00
|
|
|
invited = []
|
2018-04-28 07:19:12 -04:00
|
|
|
for room_id, events in iteritems(mem_change_events_by_room_id):
|
2016-05-23 12:37:01 -04:00
|
|
|
non_joins = [e for e in events if e.membership != Membership.JOIN]
|
|
|
|
has_join = len(non_joins) != len(events)
|
|
|
|
|
|
|
|
# We want to figure out if we joined the room at some point since
|
|
|
|
# the last sync (even if we have since left). This is to make sure
|
|
|
|
# we do send down the room, and with full state, where necessary
|
2017-09-22 09:44:17 -04:00
|
|
|
|
2017-09-13 10:13:41 -04:00
|
|
|
old_state_ids = None
|
2018-03-05 07:06:19 -05:00
|
|
|
if room_id in sync_result_builder.joined_room_ids and non_joins:
|
2017-09-22 09:44:17 -04:00
|
|
|
# Always include if the user (re)joined the room, especially
|
|
|
|
# important so that device list changes are calculated correctly.
|
|
|
|
# If there are non join member events, but we are still in the room,
|
|
|
|
# then the user must have left and joined
|
|
|
|
newly_joined_rooms.append(room_id)
|
|
|
|
|
|
|
|
# User is in the room so we don't need to do the invite/leave checks
|
|
|
|
continue
|
|
|
|
|
2018-03-05 07:06:19 -05:00
|
|
|
if room_id in sync_result_builder.joined_room_ids or has_join:
|
2016-08-25 13:59:44 -04:00
|
|
|
old_state_ids = yield self.get_state_at(room_id, since_token)
|
|
|
|
old_mem_ev_id = old_state_ids.get((EventTypes.Member, user_id), None)
|
|
|
|
old_mem_ev = None
|
|
|
|
if old_mem_ev_id:
|
|
|
|
old_mem_ev = yield self.store.get_event(
|
|
|
|
old_mem_ev_id, allow_none=True
|
|
|
|
)
|
2016-05-23 12:37:01 -04:00
|
|
|
if not old_mem_ev or old_mem_ev.membership != Membership.JOIN:
|
|
|
|
newly_joined_rooms.append(room_id)
|
|
|
|
|
2017-09-22 09:44:17 -04:00
|
|
|
# If user is in the room then we don't need to do the invite/leave checks
|
2018-03-05 07:06:19 -05:00
|
|
|
if room_id in sync_result_builder.joined_room_ids:
|
2017-09-22 09:44:17 -04:00
|
|
|
continue
|
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
if not non_joins:
|
|
|
|
continue
|
|
|
|
|
2017-09-13 10:13:41 -04:00
|
|
|
# Check if we have left the room. This can either be because we were
|
|
|
|
# joined before *or* that we since joined and then left.
|
|
|
|
if events[-1].membership != Membership.JOIN:
|
|
|
|
if has_join:
|
|
|
|
newly_left_rooms.append(room_id)
|
|
|
|
else:
|
|
|
|
if not old_state_ids:
|
|
|
|
old_state_ids = yield self.get_state_at(room_id, since_token)
|
|
|
|
old_mem_ev_id = old_state_ids.get(
|
|
|
|
(EventTypes.Member, user_id),
|
|
|
|
None,
|
|
|
|
)
|
|
|
|
old_mem_ev = None
|
|
|
|
if old_mem_ev_id:
|
|
|
|
old_mem_ev = yield self.store.get_event(
|
|
|
|
old_mem_ev_id, allow_none=True
|
|
|
|
)
|
|
|
|
if old_mem_ev and old_mem_ev.membership == Membership.JOIN:
|
|
|
|
newly_left_rooms.append(room_id)
|
|
|
|
|
2016-05-23 12:37:01 -04:00
|
|
|
# Only bother if we're still currently invited
|
|
|
|
should_invite = non_joins[-1].membership == Membership.INVITE
|
|
|
|
if should_invite:
|
|
|
|
if event.sender not in ignored_users:
|
|
|
|
room_sync = InvitedSyncResult(room_id, invite=non_joins[-1])
|
|
|
|
if room_sync:
|
|
|
|
invited.append(room_sync)
|
|
|
|
|
|
|
|
# Always include leave/ban events. Just take the last one.
|
|
|
|
# TODO: How do we handle ban -> leave in same batch?
|
|
|
|
leave_events = [
|
|
|
|
e for e in non_joins
|
|
|
|
if e.membership in (Membership.LEAVE, Membership.BAN)
|
|
|
|
]
|
|
|
|
|
|
|
|
if leave_events:
|
|
|
|
leave_event = leave_events[-1]
|
|
|
|
leave_stream_token = yield self.store.get_stream_token_for_event(
|
|
|
|
leave_event.event_id
|
|
|
|
)
|
|
|
|
leave_token = since_token.copy_and_replace(
|
|
|
|
"room_key", leave_stream_token
|
|
|
|
)
|
|
|
|
|
|
|
|
if since_token and since_token.is_after(leave_token):
|
|
|
|
continue
|
|
|
|
|
2016-05-24 04:43:35 -04:00
|
|
|
room_entries.append(RoomSyncResultBuilder(
|
2016-05-23 12:37:01 -04:00
|
|
|
room_id=room_id,
|
2016-05-24 04:43:35 -04:00
|
|
|
rtype="archived",
|
2016-05-23 12:37:01 -04:00
|
|
|
events=None,
|
|
|
|
newly_joined=room_id in newly_joined_rooms,
|
|
|
|
full_state=False,
|
|
|
|
since_token=since_token,
|
|
|
|
upto_token=leave_token,
|
|
|
|
))
|
|
|
|
|
|
|
|
timeline_limit = sync_config.filter_collection.timeline_limit()
|
|
|
|
|
|
|
|
# Get all events for rooms we're currently joined to.
|
|
|
|
room_to_events = yield self.store.get_room_events_stream_for_rooms(
|
2018-03-05 07:06:19 -05:00
|
|
|
room_ids=sync_result_builder.joined_room_ids,
|
2016-05-23 12:37:01 -04:00
|
|
|
from_key=since_token.room_key,
|
|
|
|
to_key=now_token.room_key,
|
|
|
|
limit=timeline_limit + 1,
|
|
|
|
)
|
|
|
|
|
|
|
|
# We loop through all room ids, even if there are no new events, in case
|
|
|
|
# there are non room events taht we need to notify about.
|
2018-03-05 07:06:19 -05:00
|
|
|
for room_id in sync_result_builder.joined_room_ids:
|
2016-05-23 12:37:01 -04:00
|
|
|
room_entry = room_to_events.get(room_id, None)
|
|
|
|
|
|
|
|
if room_entry:
|
|
|
|
events, start_key = room_entry
|
|
|
|
|
|
|
|
prev_batch_token = now_token.copy_and_replace("room_key", start_key)
|
|
|
|
|
2016-05-24 04:43:35 -04:00
|
|
|
room_entries.append(RoomSyncResultBuilder(
|
2016-05-23 12:37:01 -04:00
|
|
|
room_id=room_id,
|
2016-05-24 04:43:35 -04:00
|
|
|
rtype="joined",
|
2016-05-23 12:37:01 -04:00
|
|
|
events=events,
|
|
|
|
newly_joined=room_id in newly_joined_rooms,
|
|
|
|
full_state=False,
|
|
|
|
since_token=None if room_id in newly_joined_rooms else since_token,
|
|
|
|
upto_token=prev_batch_token,
|
|
|
|
))
|
|
|
|
else:
|
2016-05-24 04:43:35 -04:00
|
|
|
room_entries.append(RoomSyncResultBuilder(
|
2016-05-23 12:37:01 -04:00
|
|
|
room_id=room_id,
|
2016-05-24 04:43:35 -04:00
|
|
|
rtype="joined",
|
2016-05-23 12:37:01 -04:00
|
|
|
events=[],
|
|
|
|
newly_joined=room_id in newly_joined_rooms,
|
|
|
|
full_state=False,
|
|
|
|
since_token=since_token,
|
|
|
|
upto_token=since_token,
|
|
|
|
))
|
|
|
|
|
2017-09-13 10:13:41 -04:00
|
|
|
defer.returnValue((room_entries, invited, newly_joined_rooms, newly_left_rooms))
|
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
2016-05-24 05:53:03 -04:00
|
|
|
def _get_all_rooms(self, sync_result_builder, ignored_users):
|
2016-05-24 05:14:53 -04:00
|
|
|
"""Returns entries for all rooms for the user.
|
|
|
|
|
|
|
|
Args:
|
2016-05-24 05:53:03 -04:00
|
|
|
sync_result_builder(SyncResultBuilder)
|
2016-05-24 05:14:53 -04:00
|
|
|
ignored_users(set(str)): Set of users ignored by user.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred(tuple): Returns a tuple of the form:
|
|
|
|
`([RoomSyncResultBuilder], [InvitedSyncResult], [])`
|
|
|
|
"""
|
|
|
|
|
2016-05-24 05:53:03 -04:00
|
|
|
user_id = sync_result_builder.sync_config.user.to_string()
|
|
|
|
since_token = sync_result_builder.since_token
|
|
|
|
now_token = sync_result_builder.now_token
|
|
|
|
sync_config = sync_result_builder.sync_config
|
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
membership_list = (
|
|
|
|
Membership.INVITE, Membership.JOIN, Membership.LEAVE, Membership.BAN
|
|
|
|
)
|
|
|
|
|
|
|
|
room_list = yield self.store.get_rooms_for_user_where_membership_is(
|
|
|
|
user_id=user_id,
|
|
|
|
membership_list=membership_list
|
|
|
|
)
|
|
|
|
|
2016-05-24 04:43:35 -04:00
|
|
|
room_entries = []
|
2016-05-23 12:37:01 -04:00
|
|
|
invited = []
|
|
|
|
|
|
|
|
for event in room_list:
|
|
|
|
if event.membership == Membership.JOIN:
|
2016-05-24 04:43:35 -04:00
|
|
|
room_entries.append(RoomSyncResultBuilder(
|
2016-05-23 12:37:01 -04:00
|
|
|
room_id=event.room_id,
|
2016-05-24 04:43:35 -04:00
|
|
|
rtype="joined",
|
2016-05-23 12:37:01 -04:00
|
|
|
events=None,
|
|
|
|
newly_joined=False,
|
|
|
|
full_state=True,
|
|
|
|
since_token=since_token,
|
|
|
|
upto_token=now_token,
|
|
|
|
))
|
|
|
|
elif event.membership == Membership.INVITE:
|
|
|
|
if event.sender in ignored_users:
|
|
|
|
continue
|
|
|
|
invite = yield self.store.get_event(event.event_id)
|
|
|
|
invited.append(InvitedSyncResult(
|
|
|
|
room_id=event.room_id,
|
|
|
|
invite=invite,
|
|
|
|
))
|
|
|
|
elif event.membership in (Membership.LEAVE, Membership.BAN):
|
|
|
|
# Always send down rooms we were banned or kicked from.
|
|
|
|
if not sync_config.filter_collection.include_leave:
|
|
|
|
if event.membership == Membership.LEAVE:
|
|
|
|
if user_id == event.sender:
|
|
|
|
continue
|
|
|
|
|
|
|
|
leave_token = now_token.copy_and_replace(
|
|
|
|
"room_key", "s%d" % (event.stream_ordering,)
|
|
|
|
)
|
2016-05-24 04:43:35 -04:00
|
|
|
room_entries.append(RoomSyncResultBuilder(
|
2016-05-23 12:37:01 -04:00
|
|
|
room_id=event.room_id,
|
2016-05-24 04:43:35 -04:00
|
|
|
rtype="archived",
|
2016-05-23 12:37:01 -04:00
|
|
|
events=None,
|
|
|
|
newly_joined=False,
|
|
|
|
full_state=True,
|
|
|
|
since_token=since_token,
|
|
|
|
upto_token=leave_token,
|
|
|
|
))
|
|
|
|
|
2016-05-24 04:43:35 -04:00
|
|
|
defer.returnValue((room_entries, invited, []))
|
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
2016-05-24 05:53:03 -04:00
|
|
|
def _generate_room_entry(self, sync_result_builder, ignored_users,
|
2016-05-23 12:37:01 -04:00
|
|
|
room_builder, ephemeral, tags, account_data,
|
|
|
|
always_include=False):
|
2016-05-24 05:53:03 -04:00
|
|
|
"""Populates the `joined` and `archived` section of `sync_result_builder`
|
2016-05-24 05:14:53 -04:00
|
|
|
based on the `room_builder`.
|
|
|
|
|
|
|
|
Args:
|
2016-05-24 05:53:03 -04:00
|
|
|
sync_result_builder(SyncResultBuilder)
|
2016-05-24 05:14:53 -04:00
|
|
|
ignored_users(set(str)): Set of users ignored by user.
|
|
|
|
room_builder(RoomSyncResultBuilder)
|
|
|
|
ephemeral(list): List of new ephemeral events for room
|
|
|
|
tags(list): List of *all* tags for room, or None if there has been
|
|
|
|
no change.
|
|
|
|
account_data(list): List of new account data for room
|
|
|
|
always_include(bool): Always include this room in the sync response,
|
|
|
|
even if empty.
|
|
|
|
"""
|
2016-05-23 12:37:01 -04:00
|
|
|
newly_joined = room_builder.newly_joined
|
|
|
|
full_state = (
|
|
|
|
room_builder.full_state
|
|
|
|
or newly_joined
|
2016-05-24 05:53:03 -04:00
|
|
|
or sync_result_builder.full_state
|
2016-05-23 12:37:01 -04:00
|
|
|
)
|
2016-05-24 09:00:43 -04:00
|
|
|
events = room_builder.events
|
|
|
|
|
|
|
|
# We want to shortcut out as early as possible.
|
|
|
|
if not (always_include or account_data or ephemeral or full_state):
|
|
|
|
if events == [] and tags is None:
|
|
|
|
return
|
|
|
|
|
|
|
|
now_token = sync_result_builder.now_token
|
|
|
|
sync_config = sync_result_builder.sync_config
|
|
|
|
|
|
|
|
room_id = room_builder.room_id
|
2016-05-23 12:37:01 -04:00
|
|
|
since_token = room_builder.since_token
|
|
|
|
upto_token = room_builder.upto_token
|
|
|
|
|
2016-05-24 05:22:24 -04:00
|
|
|
batch = yield self._load_filtered_recents(
|
2016-05-23 12:37:01 -04:00
|
|
|
room_id, sync_config,
|
|
|
|
now_token=upto_token,
|
|
|
|
since_token=since_token,
|
|
|
|
recents=events,
|
2016-05-24 04:43:35 -04:00
|
|
|
newly_joined_room=newly_joined,
|
2016-05-23 12:37:01 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
account_data_events = []
|
|
|
|
if tags is not None:
|
|
|
|
account_data_events.append({
|
|
|
|
"type": "m.tag",
|
|
|
|
"content": {"tags": tags},
|
|
|
|
})
|
|
|
|
|
|
|
|
for account_data_type, content in account_data.items():
|
|
|
|
account_data_events.append({
|
|
|
|
"type": account_data_type,
|
|
|
|
"content": content,
|
|
|
|
})
|
|
|
|
|
|
|
|
account_data = sync_config.filter_collection.filter_room_account_data(
|
|
|
|
account_data_events
|
|
|
|
)
|
|
|
|
|
|
|
|
ephemeral = sync_config.filter_collection.filter_room_ephemeral(ephemeral)
|
|
|
|
|
|
|
|
if not (always_include or batch or account_data or ephemeral or full_state):
|
|
|
|
return
|
|
|
|
|
|
|
|
state = yield self.compute_state_delta(
|
2018-05-28 20:09:55 -04:00
|
|
|
room_id, batch, sync_config, since_token, now_token,
|
|
|
|
full_state=full_state
|
2016-05-23 12:37:01 -04:00
|
|
|
)
|
|
|
|
|
2018-08-16 04:46:50 -04:00
|
|
|
summary = {}
|
|
|
|
if (
|
|
|
|
sync_config.filter_collection.lazy_load_members() and
|
|
|
|
(
|
|
|
|
any(ev.type == EventTypes.Member for ev in batch.events) or
|
|
|
|
since_token is None
|
|
|
|
)
|
|
|
|
):
|
|
|
|
summary = yield self.compute_summary(
|
|
|
|
room_id, sync_config, batch, state, now_token
|
|
|
|
)
|
|
|
|
|
2016-05-24 04:43:35 -04:00
|
|
|
if room_builder.rtype == "joined":
|
2016-05-23 12:37:01 -04:00
|
|
|
unread_notifications = {}
|
|
|
|
room_sync = JoinedSyncResult(
|
|
|
|
room_id=room_id,
|
|
|
|
timeline=batch,
|
|
|
|
state=state,
|
|
|
|
ephemeral=ephemeral,
|
|
|
|
account_data=account_data_events,
|
|
|
|
unread_notifications=unread_notifications,
|
2018-08-16 04:46:50 -04:00
|
|
|
summary=summary,
|
2016-05-23 12:37:01 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
if room_sync or always_include:
|
|
|
|
notifs = yield self.unread_notifs_for_room_id(
|
|
|
|
room_id, sync_config
|
|
|
|
)
|
|
|
|
|
|
|
|
if notifs is not None:
|
|
|
|
unread_notifications["notification_count"] = notifs["notify_count"]
|
|
|
|
unread_notifications["highlight_count"] = notifs["highlight_count"]
|
|
|
|
|
2016-05-24 05:53:03 -04:00
|
|
|
sync_result_builder.joined.append(room_sync)
|
2016-05-24 04:43:35 -04:00
|
|
|
elif room_builder.rtype == "archived":
|
2016-05-23 12:37:01 -04:00
|
|
|
room_sync = ArchivedSyncResult(
|
|
|
|
room_id=room_id,
|
|
|
|
timeline=batch,
|
|
|
|
state=state,
|
|
|
|
account_data=account_data,
|
|
|
|
)
|
|
|
|
if room_sync or always_include:
|
2016-05-24 05:53:03 -04:00
|
|
|
sync_result_builder.archived.append(room_sync)
|
2016-05-24 04:43:35 -04:00
|
|
|
else:
|
|
|
|
raise Exception("Unrecognized rtype: %r", room_builder.rtype)
|
2016-05-23 12:37:01 -04:00
|
|
|
|
2018-03-05 07:06:19 -05:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_rooms_for_user_at(self, user_id, stream_ordering):
|
|
|
|
"""Get set of joined rooms for a user at the given stream ordering.
|
|
|
|
|
|
|
|
The stream ordering *must* be recent, otherwise this may throw an
|
|
|
|
exception if older than a month. (This function is called with the
|
|
|
|
current token, which should be perfectly fine).
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str)
|
|
|
|
stream_ordering (int)
|
|
|
|
|
|
|
|
ReturnValue:
|
|
|
|
Deferred[frozenset[str]]: Set of room_ids the user is in at given
|
|
|
|
stream_ordering.
|
|
|
|
"""
|
|
|
|
joined_rooms = yield self.store.get_rooms_for_user_with_stream_ordering(
|
|
|
|
user_id,
|
|
|
|
)
|
|
|
|
|
|
|
|
joined_room_ids = set()
|
|
|
|
|
|
|
|
# We need to check that the stream ordering of the join for each room
|
|
|
|
# is before the stream_ordering asked for. This might not be the case
|
|
|
|
# if the user joins a room between us getting the current token and
|
|
|
|
# calling `get_rooms_for_user_with_stream_ordering`.
|
|
|
|
# If the membership's stream ordering is after the given stream
|
|
|
|
# ordering, we need to go and work out if the user was in the room
|
|
|
|
# before.
|
2018-03-05 08:12:08 -05:00
|
|
|
for room_id, membership_stream_ordering in joined_rooms:
|
|
|
|
if membership_stream_ordering <= stream_ordering:
|
2018-03-05 07:06:19 -05:00
|
|
|
joined_room_ids.add(room_id)
|
|
|
|
continue
|
|
|
|
|
2018-03-05 08:29:49 -05:00
|
|
|
logger.info("User joined room after current token: %s", room_id)
|
2018-03-05 07:06:19 -05:00
|
|
|
|
|
|
|
extrems = yield self.store.get_forward_extremeties_for_room(
|
|
|
|
room_id, stream_ordering,
|
|
|
|
)
|
|
|
|
users_in_room = yield self.state.get_current_user_in_room(
|
|
|
|
room_id, extrems,
|
|
|
|
)
|
|
|
|
if user_id in users_in_room:
|
|
|
|
joined_room_ids.add(room_id)
|
|
|
|
|
|
|
|
joined_room_ids = frozenset(joined_room_ids)
|
|
|
|
defer.returnValue(joined_room_ids)
|
|
|
|
|
2016-01-19 06:35:50 -05:00
|
|
|
|
|
|
|
def _action_has_highlight(actions):
|
|
|
|
for action in actions:
|
|
|
|
try:
|
|
|
|
if action.get("set_tweak", None) == "highlight":
|
|
|
|
return action.get("value", True)
|
|
|
|
except AttributeError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
return False
|
2016-02-01 10:59:40 -05:00
|
|
|
|
|
|
|
|
2018-07-23 14:21:20 -04:00
|
|
|
def _calculate_state(
|
|
|
|
timeline_contains, timeline_start, previous, current, lazy_load_members,
|
|
|
|
):
|
2016-02-01 10:59:40 -05:00
|
|
|
"""Works out what state to include in a sync response.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
timeline_contains (dict): state in the timeline
|
|
|
|
timeline_start (dict): state at the start of the timeline
|
|
|
|
previous (dict): state at the end of the previous sync (or empty dict
|
2016-02-01 11:52:27 -05:00
|
|
|
if this is an initial sync)
|
2016-02-12 06:13:06 -05:00
|
|
|
current (dict): state at the end of the timeline
|
2018-07-24 07:39:40 -04:00
|
|
|
lazy_load_members (bool): whether to return members from timeline_start
|
|
|
|
or not. assumes that timeline_start has already been filtered to
|
|
|
|
include only the members the client needs to know about.
|
2016-02-01 10:59:40 -05:00
|
|
|
|
|
|
|
Returns:
|
|
|
|
dict
|
|
|
|
"""
|
2016-08-25 13:59:44 -04:00
|
|
|
event_id_to_key = {
|
|
|
|
e: key
|
|
|
|
for key, e in itertools.chain(
|
|
|
|
timeline_contains.items(),
|
|
|
|
previous.items(),
|
|
|
|
timeline_start.items(),
|
|
|
|
current.items(),
|
2016-02-01 10:59:40 -05:00
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2016-08-25 13:59:44 -04:00
|
|
|
c_ids = set(e for e in current.values())
|
|
|
|
ts_ids = set(e for e in timeline_start.values())
|
2018-03-13 18:03:42 -04:00
|
|
|
p_ids = set(e for e in previous.values())
|
|
|
|
tc_ids = set(e for e in timeline_contains.values())
|
2016-02-01 10:59:40 -05:00
|
|
|
|
2018-07-24 07:39:40 -04:00
|
|
|
# If we are lazyloading room members, we explicitly add the membership events
|
|
|
|
# for the senders in the timeline into the state block returned by /sync,
|
|
|
|
# as we may not have sent them to the client before. We find these membership
|
|
|
|
# events by filtering them out of timeline_start, which has already been filtered
|
|
|
|
# to only include membership events for the senders in the timeline.
|
2018-07-24 09:03:15 -04:00
|
|
|
# In practice, we can do this by removing them from the p_ids list,
|
|
|
|
# which is the list of relevant state we know we have already sent to the client.
|
2018-07-24 08:40:49 -04:00
|
|
|
# see https://github.com/matrix-org/synapse/pull/2970
|
|
|
|
# /files/efcdacad7d1b7f52f879179701c7e0d9b763511f#r204732809
|
2018-07-24 07:39:40 -04:00
|
|
|
|
2018-07-23 14:21:20 -04:00
|
|
|
if lazy_load_members:
|
2018-07-24 08:40:49 -04:00
|
|
|
p_ids.difference_update(
|
2018-07-23 14:21:20 -04:00
|
|
|
e for t, e in timeline_start.iteritems()
|
2018-07-24 08:40:49 -04:00
|
|
|
if t[0] == EventTypes.Member
|
2018-07-23 14:21:20 -04:00
|
|
|
)
|
|
|
|
|
2018-07-24 08:40:49 -04:00
|
|
|
state_ids = ((c_ids | ts_ids) - p_ids) - tc_ids
|
2016-02-01 10:59:40 -05:00
|
|
|
|
|
|
|
return {
|
2016-08-25 13:59:44 -04:00
|
|
|
event_id_to_key[e]: e for e in state_ids
|
2016-02-01 10:59:40 -05:00
|
|
|
}
|
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
|
|
|
|
class SyncResultBuilder(object):
    "Used to help build up a new SyncResult for a user"
    def __init__(self, sync_config, full_state, since_token, now_token,
                 joined_room_ids):
        """
        Args:
            sync_config(SyncConfig)
            full_state(bool): The full_state flag as specified by user
            since_token(StreamToken): The token supplied by user, or None.
            now_token(StreamToken): The token to sync up to.
            joined_room_ids: Room ids the user is joined to at the time of the
                sync — presumably the frozenset from `get_rooms_for_user_at`;
                confirm at the call site.
        """
        self.sync_config = sync_config
        self.full_state = full_state
        self.since_token = since_token
        self.now_token = now_token
        self.joined_room_ids = joined_room_ids

        # Per-section accumulators, filled in as the sync response is built up.
        self.presence = []
        self.account_data = []
        self.joined = []
        self.invited = []
        self.archived = []
        self.device = []
        # Groups section; None until populated (distinguishes "no data" from
        # "empty").
        self.groups = None
        self.to_device = []
|
2016-05-23 12:37:01 -04:00
|
|
|
|
|
|
|
|
|
|
|
class RoomSyncResultBuilder(object):
    """Stores information needed to create either a `JoinedSyncResult` or
    `ArchivedSyncResult`.
    """
    def __init__(self, room_id, rtype, events, newly_joined, full_state,
                 since_token, upto_token):
        """
        Args:
            room_id(str)
            rtype(str): One of `"joined"` or `"archived"`
            events(list): List of events to include in the room, (more events
                may be added when generating result).
            newly_joined(bool): If the user has newly joined the room
            full_state(bool): Whether the full state should be sent in result
            since_token(StreamToken): Earliest point to return events from, or None
            upto_token(StreamToken): Latest point to return events from.
        """
        # All attributes are plain copies of the constructor arguments; this
        # class is a passive carrier consumed by `_generate_room_entry`.
        self.room_id = room_id
        self.rtype = rtype
        self.events = events
        self.newly_joined = newly_joined
        self.full_state = full_state
        self.since_token = since_token
        self.upto_token = upto_token
|