2015-01-26 13:53:31 -05:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-05 13:01:18 -05:00
|
|
|
# Copyright 2015 - 2016 OpenMarket Ltd
|
2015-01-26 13:53:31 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
from ._base import BaseHandler
|
|
|
|
|
2016-01-06 11:44:13 -05:00
|
|
|
from synapse.streams.config import PaginationConfig
|
2015-01-30 08:33:41 -05:00
|
|
|
from synapse.api.constants import Membership, EventTypes
|
2015-12-11 11:48:20 -05:00
|
|
|
from synapse.util import unwrapFirstError
|
2015-01-26 13:53:31 -05:00
|
|
|
|
|
|
|
from twisted.internet import defer
|
|
|
|
|
2015-01-26 10:46:31 -05:00
|
|
|
import collections
|
2015-01-26 13:53:31 -05:00
|
|
|
import logging
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
|
|
|
|
# Per-request sync parameters, parsed from the client's /sync request.
SyncConfig = collections.namedtuple("SyncConfig", [
    "user",      # UserID of the syncing user (has .to_string())
    "filter",    # filter object providing *_limit() / filter_* methods and include_leave
    "is_guest",  # bool: whether the user is a guest
])
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
|
2015-10-01 12:53:07 -04:00
|
|
|
class TimelineBatch(collections.namedtuple("TimelineBatch", [
    "prev_batch",
    "events",
    "limited",
])):
    __slots__ = []

    def __nonzero__(self):
        """Report emptiness when the batch carries no events.

        Used to decide whether the room needs to appear in the sync result
        at all.
        """
        return len(self.events) > 0
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2015-10-05 11:39:22 -04:00
|
|
|
|
2015-10-13 05:24:51 -04:00
|
|
|
class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
    "room_id",  # str
    "timeline",  # TimelineBatch
    "state",  # dict[(str, str), FrozenEvent]
    "ephemeral",
    "account_data",
    "unread_notifications",
])):
    __slots__ = []

    def __nonzero__(self):
        """True when the room has something worth sending to the client.

        The unread notification counts deliberately do not count here: if
        nothing else changed, the room is omitted from the sync result.
        """
        return any(
            (self.timeline, self.state, self.ephemeral, self.account_data)
        )
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
|
2015-10-19 12:26:18 -04:00
|
|
|
class ArchivedSyncResult(collections.namedtuple("ArchivedSyncResult", [
    # NOTE: the namedtuple typename previously read "JoinedSyncResult" —
    # a copy-paste typo; fixed so reprs/introspection name the right type.
    "room_id",  # str
    "timeline",  # TimelineBatch
    "state",  # dict[(str, str), FrozenEvent]
    "account_data",
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        return bool(
            self.timeline
            or self.state
            or self.account_data
        )
|
2015-10-19 12:26:18 -04:00
|
|
|
|
|
|
|
|
2015-10-13 05:24:51 -04:00
|
|
|
class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [
    "room_id",  # str
    "invite",  # FrozenEvent: the invite event
])):
    __slots__ = []

    def __nonzero__(self):
        """An invite is always worth reporting, so never appear empty."""
        return True
|
|
|
|
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
class SyncResult(collections.namedtuple("SyncResult", [
    "next_batch",  # Token for the next sync
    "presence",  # List of presence events for the user.
    "account_data",  # List of account_data events for the user.
    "joined",  # JoinedSyncResult for each joined room.
    "invited",  # InvitedSyncResult for each invited room.
    "archived",  # ArchivedSyncResult for each archived room.
])):
    __slots__ = []

    def __nonzero__(self):
        """Report emptiness when there is nothing to send, so the notifier
        knows to keep waiting for more events while long-polling.
        """
        # Note: account_data alone does not make the result non-empty.
        return any((self.presence, self.joined, self.invited, self.archived))
|
2015-01-26 10:46:31 -05:00
|
|
|
|
|
|
|
|
|
|
|
class SyncHandler(BaseHandler):
|
|
|
|
|
|
|
|
def __init__(self, hs):
|
|
|
|
super(SyncHandler, self).__init__(hs)
|
|
|
|
self.event_sources = hs.get_event_sources()
|
2015-01-26 13:53:31 -05:00
|
|
|
self.clock = hs.get_clock()
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2015-01-27 15:09:52 -05:00
|
|
|
@defer.inlineCallbacks
|
2015-10-26 14:47:18 -04:00
|
|
|
def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0,
|
|
|
|
full_state=False):
|
2015-01-27 11:24:22 -05:00
|
|
|
"""Get the sync for a client if we have new data for it now. Otherwise
|
|
|
|
wait for new data to arrive on the server. If the timeout expires, then
|
|
|
|
return an empty sync result.
|
|
|
|
Returns:
|
|
|
|
A Deferred SyncResult.
|
|
|
|
"""
|
2015-10-26 14:47:18 -04:00
|
|
|
|
|
|
|
if timeout == 0 or since_token is None or full_state:
|
|
|
|
# we are going to return immediately, so don't bother calling
|
|
|
|
# notifier.wait_for_events.
|
|
|
|
result = yield self.current_sync_for_user(sync_config, since_token,
|
|
|
|
full_state=full_state)
|
2015-01-27 15:09:52 -05:00
|
|
|
defer.returnValue(result)
|
2015-01-26 10:46:31 -05:00
|
|
|
else:
|
2015-05-14 06:25:30 -04:00
|
|
|
def current_sync_callback(before_token, after_token):
|
2015-01-27 15:09:52 -05:00
|
|
|
return self.current_sync_for_user(sync_config, since_token)
|
|
|
|
|
|
|
|
result = yield self.notifier.wait_for_events(
|
2016-01-20 10:34:07 -05:00
|
|
|
sync_config.user.to_string(), timeout, current_sync_callback,
|
2015-10-14 08:16:53 -04:00
|
|
|
from_token=since_token
|
2015-01-26 10:46:31 -05:00
|
|
|
)
|
2015-01-27 15:09:52 -05:00
|
|
|
defer.returnValue(result)
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2015-10-26 14:47:18 -04:00
|
|
|
def current_sync_for_user(self, sync_config, since_token=None,
|
|
|
|
full_state=False):
|
2015-01-27 11:24:22 -05:00
|
|
|
"""Get the sync for client needed to match what the server has now.
|
|
|
|
Returns:
|
|
|
|
A Deferred SyncResult.
|
|
|
|
"""
|
2015-10-26 14:47:18 -04:00
|
|
|
if since_token is None or full_state:
|
|
|
|
return self.full_state_sync(sync_config, since_token)
|
2015-01-26 10:46:31 -05:00
|
|
|
else:
|
2015-10-05 11:39:22 -04:00
|
|
|
return self.incremental_sync_with_gap(sync_config, since_token)
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2015-12-16 13:42:09 -05:00
|
|
|
def last_read_event_id_for_room_and_user(self, room_id, user_id, ephemeral_by_room):
|
|
|
|
if room_id not in ephemeral_by_room:
|
|
|
|
return None
|
|
|
|
for e in ephemeral_by_room[room_id]:
|
|
|
|
if e['type'] != 'm.receipt':
|
|
|
|
continue
|
2015-12-21 05:14:57 -05:00
|
|
|
for receipt_event_id, val in e['content'].items():
|
2015-12-16 13:42:09 -05:00
|
|
|
if 'm.read' in val:
|
|
|
|
if user_id in val['m.read']:
|
|
|
|
return receipt_event_id
|
|
|
|
return None
|
|
|
|
|
2015-01-26 10:46:31 -05:00
|
|
|
    @defer.inlineCallbacks
    def full_state_sync(self, sync_config, timeline_since_token):
        """Get a sync for a client which is starting without any state.

        If a 'timeline_since_token' is given, only timeline events which have
        happened since that token will be returned.

        Returns:
            A Deferred SyncResult.
        """
        now_token = yield self.event_sources.get_current_token()

        # Gather typing/receipt events; this also advances now_token past the
        # ephemeral streams we consumed.
        now_token, ephemeral_by_room = yield self.ephemeral_by_room(
            sync_config, now_token
        )

        presence_stream = self.event_sources.sources["presence"]
        # TODO (mjark): This looks wrong, shouldn't we be getting the presence
        # UP to the present rather than after the present?
        pagination_config = PaginationConfig(from_token=now_token)
        presence, _ = yield presence_stream.get_pagination_rows(
            user=sync_config.user,
            pagination_config=pagination_config.get_source_config("presence"),
            key=None
        )

        # Left/banned rooms are only included when the client's filter asks
        # for them.
        membership_list = (Membership.INVITE, Membership.JOIN)
        if sync_config.filter.include_leave:
            membership_list += (Membership.LEAVE, Membership.BAN)

        room_list = yield self.store.get_rooms_for_user_where_membership_is(
            user_id=sync_config.user.to_string(),
            membership_list=membership_list
        )

        account_data, account_data_by_room = (
            yield self.store.get_account_data_for_user(
                sync_config.user.to_string()
            )
        )

        tags_by_room = yield self.store.get_tags_for_user(
            sync_config.user.to_string()
        )

        joined = []
        invited = []
        archived = []
        deferreds = []
        # Kick off the per-room syncs concurrently; joined/archived results
        # are appended via callbacks and awaited together below.
        for event in room_list:
            if event.membership == Membership.JOIN:
                room_sync_deferred = self.full_state_sync_for_joined_room(
                    room_id=event.room_id,
                    sync_config=sync_config,
                    now_token=now_token,
                    timeline_since_token=timeline_since_token,
                    ephemeral_by_room=ephemeral_by_room,
                    tags_by_room=tags_by_room,
                    account_data_by_room=account_data_by_room,
                )
                room_sync_deferred.addCallback(joined.append)
                deferreds.append(room_sync_deferred)
            elif event.membership == Membership.INVITE:
                invite = yield self.store.get_event(event.event_id)
                invited.append(InvitedSyncResult(
                    room_id=event.room_id,
                    invite=invite,
                ))
            elif event.membership in (Membership.LEAVE, Membership.BAN):
                # Cap the archived room's timeline at the point the user left.
                leave_token = now_token.copy_and_replace(
                    "room_key", "s%d" % (event.stream_ordering,)
                )
                room_sync_deferred = self.full_state_sync_for_archived_room(
                    sync_config=sync_config,
                    room_id=event.room_id,
                    leave_event_id=event.event_id,
                    leave_token=leave_token,
                    timeline_since_token=timeline_since_token,
                    tags_by_room=tags_by_room,
                    account_data_by_room=account_data_by_room,
                )
                room_sync_deferred.addCallback(archived.append)
                deferreds.append(room_sync_deferred)

        # Wait for every per-room sync; surface the first failure, if any.
        yield defer.gatherResults(
            deferreds, consumeErrors=True
        ).addErrback(unwrapFirstError)

        defer.returnValue(SyncResult(
            presence=presence,
            account_data=self.account_data_for_user(account_data),
            joined=joined,
            invited=invited,
            archived=archived,
            next_batch=now_token,
        ))
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def full_state_sync_for_joined_room(self, room_id, sync_config,
                                        now_token, timeline_since_token,
                                        ephemeral_by_room, tags_by_room,
                                        account_data_by_room):
        """Sync a single joined room for a client starting without any state.

        Returns:
            A Deferred JoinedSyncResult.
        """
        batch = yield self.load_filtered_recents(
            room_id, sync_config, now_token, since_token=timeline_since_token
        )

        # notifs is None when we couldn't determine the unread state (e.g. no
        # read receipt found) — in that case we omit the counts entirely.
        notifs = yield self.unread_notifs_for_room_id(
            room_id, sync_config, ephemeral_by_room
        )

        unread_notifications = {}
        if notifs is not None:
            unread_notifications["notification_count"] = len(notifs)
            unread_notifications["highlight_count"] = len([
                1 for notif in notifs if _action_has_highlight(notif["actions"])
            ])

        current_state = yield self.get_state_at(room_id, now_token)

        defer.returnValue(JoinedSyncResult(
            room_id=room_id,
            timeline=batch,
            state=current_state,
            ephemeral=ephemeral_by_room.get(room_id, []),
            account_data=self.account_data_for_room(
                room_id, tags_by_room, account_data_by_room
            ),
            unread_notifications=unread_notifications,
        ))
|
|
|
|
|
2015-12-01 13:41:32 -05:00
|
|
|
def account_data_for_user(self, account_data):
|
|
|
|
account_data_events = []
|
|
|
|
|
|
|
|
for account_data_type, content in account_data.items():
|
|
|
|
account_data_events.append({
|
|
|
|
"type": account_data_type,
|
|
|
|
"content": content,
|
|
|
|
})
|
|
|
|
|
|
|
|
return account_data_events
|
|
|
|
|
|
|
|
def account_data_for_room(self, room_id, tags_by_room, account_data_by_room):
|
|
|
|
account_data_events = []
|
2015-11-02 11:23:15 -05:00
|
|
|
tags = tags_by_room.get(room_id)
|
2015-11-09 09:52:18 -05:00
|
|
|
if tags is not None:
|
2015-12-01 13:41:32 -05:00
|
|
|
account_data_events.append({
|
2015-11-02 11:23:15 -05:00
|
|
|
"type": "m.tag",
|
|
|
|
"content": {"tags": tags},
|
|
|
|
})
|
2015-12-01 13:41:32 -05:00
|
|
|
|
|
|
|
account_data = account_data_by_room.get(room_id, {})
|
|
|
|
for account_data_type, content in account_data.items():
|
|
|
|
account_data_events.append({
|
|
|
|
"type": account_data_type,
|
|
|
|
"content": content,
|
|
|
|
})
|
|
|
|
|
|
|
|
return account_data_events
|
2015-11-02 11:23:15 -05:00
|
|
|
|
2015-10-20 11:36:20 -04:00
|
|
|
    @defer.inlineCallbacks
    def ephemeral_by_room(self, sync_config, now_token, since_token=None):
        """Get the ephemeral (typing and receipt) events for each room the
        user is in.

        Args:
            sync_config (SyncConfig): The flags, filters and user for the sync.
            now_token (StreamToken): Where the server is currently up to.
            since_token (StreamToken): Where the server was when the client
                last synced.
        Returns:
            A tuple of the now StreamToken, updated to reflect which typing
            and receipt events are included, and a dict mapping from room_id
            to a list of ephemeral events for that room.
        """
        typing_key = since_token.typing_key if since_token else "0"

        rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
        room_ids = [room.room_id for room in rooms]

        typing_source = self.event_sources.sources["typing"]
        typing, typing_key = yield typing_source.get_new_events(
            user=sync_config.user,
            from_key=typing_key,
            limit=sync_config.filter.ephemeral_limit(),
            room_ids=room_ids,
            is_guest=sync_config.is_guest,
        )
        now_token = now_token.copy_and_replace("typing_key", typing_key)

        ephemeral_by_room = {}

        for event in typing:
            # we want to exclude the room_id from the event, but modifying the
            # result returned by the event source is poor form (it might cache
            # the object)
            room_id = event["room_id"]
            event_copy = {k: v for (k, v) in event.iteritems()
                          if k != "room_id"}
            ephemeral_by_room.setdefault(room_id, []).append(event_copy)

        receipt_key = since_token.receipt_key if since_token else "0"

        receipt_source = self.event_sources.sources["receipt"]
        receipts, receipt_key = yield receipt_source.get_new_events(
            user=sync_config.user,
            from_key=receipt_key,
            limit=sync_config.filter.ephemeral_limit(),
            room_ids=room_ids,
            is_guest=sync_config.is_guest,
        )
        now_token = now_token.copy_and_replace("receipt_key", receipt_key)

        for event in receipts:
            room_id = event["room_id"]
            # exclude room id, as above
            event_copy = {k: v for (k, v) in event.iteritems()
                          if k != "room_id"}
            ephemeral_by_room.setdefault(room_id, []).append(event_copy)

        defer.returnValue((now_token, ephemeral_by_room))
|
2015-10-20 11:36:20 -04:00
|
|
|
|
2015-10-19 12:26:18 -04:00
|
|
|
    @defer.inlineCallbacks
    def full_state_sync_for_archived_room(self, room_id, sync_config,
                                          leave_event_id, leave_token,
                                          timeline_since_token, tags_by_room,
                                          account_data_by_room):
        """Sync a single archived (left/banned) room for a client starting
        without any state.

        Returns:
            A Deferred ArchivedSyncResult.
        """
        # leave_token caps the timeline at the point the user left the room.
        batch = yield self.load_filtered_recents(
            room_id, sync_config, leave_token, since_token=timeline_since_token
        )

        # The state of the room as it was at the leave event.
        leave_state = yield self.store.get_state_for_event(leave_event_id)

        defer.returnValue(ArchivedSyncResult(
            room_id=room_id,
            timeline=batch,
            state=leave_state,
            account_data=self.account_data_for_room(
                room_id, tags_by_room, account_data_by_room
            ),
        ))
|
|
|
|
|
2015-01-27 11:24:22 -05:00
|
|
|
    @defer.inlineCallbacks
    def incremental_sync_with_gap(self, sync_config, since_token):
        """ Get the incremental delta needed to bring the client up to
        date with the server.
        Returns:
            A Deferred SyncResult.
        """
        now_token = yield self.event_sources.get_current_token()

        rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
        room_ids = [room.room_id for room in rooms]

        presence_source = self.event_sources.sources["presence"]
        presence, presence_key = yield presence_source.get_new_events(
            user=sync_config.user,
            from_key=since_token.presence_key,
            limit=sync_config.filter.presence_limit(),
            room_ids=room_ids,
            is_guest=sync_config.is_guest,
        )
        now_token = now_token.copy_and_replace("presence_key", presence_key)

        # We now fetch all ephemeral events for this room in order to get
        # this users current read receipt. This could almost certainly be
        # optimised.
        _, all_ephemeral_by_room = yield self.ephemeral_by_room(
            sync_config, now_token
        )

        # Second call with since_token gives just the new ephemeral events
        # and advances now_token past the consumed streams.
        now_token, ephemeral_by_room = yield self.ephemeral_by_room(
            sync_config, now_token, since_token
        )

        rm_handler = self.hs.get_handlers().room_member_handler
        app_service = yield self.store.get_app_service_by_user_id(
            sync_config.user.to_string()
        )
        if app_service:
            # Application services see the rooms their namespaces cover, not
            # just the rooms the AS user has joined.
            rooms = yield self.store.get_app_service_rooms(app_service)
            joined_room_ids = set(r.room_id for r in rooms)
        else:
            joined_room_ids = yield rm_handler.get_joined_rooms_for_user(
                sync_config.user
            )

        timeline_limit = sync_config.filter.timeline_limit()

        # Fetch one more than the limit so we can tell whether the stream
        # was truncated (i.e. whether there is a gap).
        room_events, _ = yield self.store.get_room_events_stream(
            sync_config.user.to_string(),
            from_key=since_token.room_key,
            to_key=now_token.room_key,
            limit=timeline_limit + 1,
        )

        tags_by_room = yield self.store.get_updated_tags(
            sync_config.user.to_string(),
            since_token.account_data_key,
        )

        account_data, account_data_by_room = (
            yield self.store.get_updated_account_data_for_user(
                sync_config.user.to_string(),
                since_token.account_data_key,
            )
        )

        joined = []
        archived = []
        if len(room_events) <= timeline_limit:
            # There is no gap in any of the rooms. Therefore we can just
            # partition the new events by room and return them.
            logger.debug("Got %i events for incremental sync - not limited",
                         len(room_events))

            invite_events = []
            leave_events = []
            events_by_room_id = {}
            for event in room_events:
                events_by_room_id.setdefault(event.room_id, []).append(event)
                # Membership events for rooms we aren't currently joined to
                # tell us about new invites / leaves / bans for this user.
                if event.room_id not in joined_room_ids:
                    if (event.type == EventTypes.Member
                            and event.state_key == sync_config.user.to_string()):
                        if event.membership == Membership.INVITE:
                            invite_events.append(event)
                        elif event.membership in (Membership.LEAVE, Membership.BAN):
                            leave_events.append(event)

            for room_id in joined_room_ids:
                recents = events_by_room_id.get(room_id, [])
                logger.debug("Events for room %s: %r", room_id, recents)
                state = {
                    (event.type, event.state_key): event
                    for event in recents if event.is_state()}
                limited = False

                if recents:
                    prev_batch = now_token.copy_and_replace(
                        "room_key", recents[0].internal_metadata.before
                    )
                else:
                    prev_batch = now_token

                just_joined = yield self.check_joined_room(sync_config, state)
                if just_joined:
                    logger.debug("User has just joined %s: needs full state",
                                 room_id)
                    state = yield self.get_state_at(room_id, now_token)
                    # the timeline is inherently limited if we've just joined
                    limited = True

                room_sync = JoinedSyncResult(
                    room_id=room_id,
                    timeline=TimelineBatch(
                        events=recents,
                        prev_batch=prev_batch,
                        limited=limited,
                    ),
                    state=state,
                    ephemeral=ephemeral_by_room.get(room_id, []),
                    account_data=self.account_data_for_room(
                        room_id, tags_by_room, account_data_by_room
                    ),
                    unread_notifications={},
                )
                logger.debug("Result for room %s: %r", room_id, room_sync)

                if room_sync:
                    notifs = yield self.unread_notifs_for_room_id(
                        room_id, sync_config, all_ephemeral_by_room
                    )

                    if notifs is not None:
                        notif_dict = room_sync.unread_notifications
                        notif_dict["notification_count"] = len(notifs)
                        notif_dict["highlight_count"] = len([
                            1 for notif in notifs
                            if _action_has_highlight(notif["actions"])
                        ])

                    joined.append(room_sync)

        else:
            # The stream was truncated: there may be gaps, so each joined
            # room has to be synced individually below.
            logger.debug("Got %i events for incremental sync - hit limit",
                         len(room_events))

            invite_events = yield self.store.get_invites_for_user(
                sync_config.user.to_string()
            )

            leave_events = yield self.store.get_leave_and_ban_events_for_user(
                sync_config.user.to_string()
            )

            for room_id in joined_room_ids:
                room_sync = yield self.incremental_sync_with_gap_for_room(
                    room_id, sync_config, since_token, now_token,
                    ephemeral_by_room, tags_by_room, account_data_by_room,
                    all_ephemeral_by_room=all_ephemeral_by_room,
                )
                if room_sync:
                    joined.append(room_sync)

        for leave_event in leave_events:
            room_sync = yield self.incremental_sync_for_archived_room(
                sync_config, leave_event, since_token, tags_by_room,
                account_data_by_room
            )
            if room_sync:
                archived.append(room_sync)

        invited = [
            InvitedSyncResult(room_id=event.room_id, invite=event)
            for event in invite_events
        ]

        defer.returnValue(SyncResult(
            presence=presence,
            account_data=self.account_data_for_user(account_data),
            joined=joined,
            invited=invited,
            archived=archived,
            next_batch=now_token,
        ))
|
2015-01-26 10:46:31 -05:00
|
|
|
|
2015-01-30 06:32:35 -05:00
|
|
|
    @defer.inlineCallbacks
    def load_filtered_recents(self, room_id, sync_config, now_token,
                              since_token=None):
        """Load up to the filter's timeline_limit recent events for the room,
        applying the client's timeline filter and event visibility rules.

        Because filtering discards events, we over-fetch (load_limit) and
        retry a bounded number of times until the limit is filled or the
        stream is exhausted.

        :returns a Deferred TimelineBatch
        """
        limited = True
        recents = []
        filtering_factor = 2
        timeline_limit = sync_config.filter.timeline_limit()
        load_limit = max(timeline_limit * filtering_factor, 100)
        max_repeat = 3  # Only try a few times per room, otherwise
        room_key = now_token.room_key
        end_key = room_key

        while limited and len(recents) < timeline_limit and max_repeat:
            events, keys = yield self.store.get_recent_events_for_room(
                room_id,
                limit=load_limit + 1,
                from_token=since_token.room_key if since_token else None,
                end_token=end_key,
            )
            (room_key, _) = keys
            # Next iteration pages further back from where this one stopped.
            end_key = "s" + room_key.split('-')[-1]
            loaded_recents = sync_config.filter.filter_room_timeline(events)
            loaded_recents = yield self._filter_events_for_client(
                sync_config.user.to_string(),
                loaded_recents,
                is_peeking=sync_config.is_guest,
            )
            # Older events are prepended so recents stays in stream order.
            loaded_recents.extend(recents)
            recents = loaded_recents
            if len(events) <= load_limit:
                # Fewer events than we asked for: we've hit the start of the
                # available stream, so the batch is not limited.
                limited = False
            max_repeat -= 1

        if len(recents) > timeline_limit:
            limited = True
            recents = recents[-timeline_limit:]
            room_key = recents[0].internal_metadata.before

        prev_batch_token = now_token.copy_and_replace(
            "room_key", room_key
        )

        defer.returnValue(TimelineBatch(
            events=recents, prev_batch=prev_batch_token, limited=limited
        ))
|
2015-01-30 06:32:35 -05:00
|
|
|
|
2015-01-26 10:46:31 -05:00
|
|
|
    @defer.inlineCallbacks
    def incremental_sync_with_gap_for_room(self, room_id, sync_config,
                                           since_token, now_token,
                                           ephemeral_by_room, tags_by_room,
                                           account_data_by_room,
                                           all_ephemeral_by_room):
        """ Get the incremental delta needed to bring the client up to date for
        the room. Gives the client the most recent events and the changes to
        state.

        :param str room_id: room to sync
        :param SyncConfig sync_config: per-request configuration for the
            syncing user
        :param StreamToken since_token: point the client last synced up to
        :param StreamToken now_token: point to sync up to
        :param dict ephemeral_by_room: ephemeral events keyed by room id;
            only this room's entry is used
        :param dict tags_by_room: room tags keyed by room id
        :param dict account_data_by_room: account data keyed by room id
        :param dict all_ephemeral_by_room: unfiltered ephemeral events keyed
            by room id, used for working out unread notification counts

        Returns:
            A Deferred JoinedSyncResult
        """
        logger.debug("Doing incremental sync for room %s between %s and %s",
                     room_id, since_token, now_token)

        # TODO(mjark): Check for redactions we might have missed.

        # Load the timeline events in the gap (may come back flagged as
        # limited if there were more than the filter's timeline limit).
        batch = yield self.load_filtered_recents(
            room_id, sync_config, now_token, since_token,
        )

        logger.debug("Recents %r", batch)

        # Work out the change in state between the two sync points.
        current_state = yield self.get_state_at(room_id, now_token)

        state_at_previous_sync = yield self.get_state_at(
            room_id, stream_position=since_token
        )

        # NOTE: compute_state_delta is synchronous; under inlineCallbacks,
        # yielding a plain (non-Deferred) value just passes it through.
        state = yield self.compute_state_delta(
            since_token=since_token,
            previous_state=state_at_previous_sync,
            current_state=current_state,
        )

        # If the user has just joined the room, give them the full current
        # state instead of just the delta since the last sync.
        just_joined = yield self.check_joined_room(sync_config, state)
        if just_joined:
            state = yield self.get_state_at(room_id, now_token)

        notifs = yield self.unread_notifs_for_room_id(
            room_id, sync_config, all_ephemeral_by_room
        )

        # notifs is None when there is no new read-receipt information for
        # this period; in that case we omit the counts entirely so the
        # client keeps whatever counts it had last time.
        unread_notifications = {}
        if notifs is not None:
            unread_notifications["notification_count"] = len(notifs)
            unread_notifications["highlight_count"] = len([
                1 for notif in notifs if _action_has_highlight(notif["actions"])
            ])

        room_sync = JoinedSyncResult(
            room_id=room_id,
            timeline=batch,
            state=state,
            ephemeral=ephemeral_by_room.get(room_id, []),
            account_data=self.account_data_for_room(
                room_id, tags_by_room, account_data_by_room
            ),
            unread_notifications=unread_notifications,
        )

        logger.debug("Room sync: %r", room_sync)

        defer.returnValue(room_sync)
|
|
|
|
|
2015-10-19 12:26:18 -04:00
|
|
|
    @defer.inlineCallbacks
    def incremental_sync_for_archived_room(self, sync_config, leave_event,
                                           since_token, tags_by_room,
                                           account_data_by_room):
        """ Get the incremental delta needed to bring the client up to date for
        the archived room.

        :param SyncConfig sync_config: per-request configuration for the
            syncing user
        :param leave_event: the membership event at which the user left the
            room
        :param StreamToken since_token: point the client last synced up to
        :param dict tags_by_room: room tags keyed by room id
        :param dict account_data_by_room: account data keyed by room id

        Returns:
            A Deferred ArchivedSyncResult, or None if the user left before
            the since token (nothing new to report).
        """

        stream_token = yield self.store.get_stream_token_for_event(
            leave_event.event_id
        )

        # Token marking the point at which the user left: we only sync the
        # room up to here.
        leave_token = since_token.copy_and_replace("room_key", stream_token)

        # If the leave happened before the previous sync there is nothing
        # new to tell the client about this room.
        if since_token.is_after(leave_token):
            defer.returnValue(None)

        batch = yield self.load_filtered_recents(
            leave_event.room_id, sync_config, leave_token, since_token,
        )

        logger.debug("Recents %r", batch)

        # Work out the state delta between the previous sync and the point
        # at which the user left.
        state_events_at_leave = yield self.store.get_state_for_event(
            leave_event.event_id
        )

        state_at_previous_sync = yield self.get_state_at(
            leave_event.room_id, stream_position=since_token
        )

        # NOTE: compute_state_delta is synchronous; under inlineCallbacks,
        # yielding a plain (non-Deferred) value just passes it through.
        state_events_delta = yield self.compute_state_delta(
            since_token=since_token,
            previous_state=state_at_previous_sync,
            current_state=state_events_at_leave,
        )

        room_sync = ArchivedSyncResult(
            room_id=leave_event.room_id,
            timeline=batch,
            state=state_events_delta,
            account_data=self.account_data_for_room(
                leave_event.room_id, tags_by_room, account_data_by_room
            ),
        )

        logger.debug("Room sync: %r", room_sync)

        defer.returnValue(room_sync)
|
|
|
|
|
2015-01-27 11:24:22 -05:00
|
|
|
@defer.inlineCallbacks
|
2015-11-10 13:27:23 -05:00
|
|
|
def get_state_after_event(self, event):
|
|
|
|
"""
|
|
|
|
Get the room state after the given event
|
|
|
|
|
|
|
|
:param synapse.events.EventBase event: event of interest
|
|
|
|
:return: A Deferred map from ((type, state_key)->Event)
|
|
|
|
"""
|
|
|
|
state = yield self.store.get_state_for_event(event.event_id)
|
|
|
|
if event.is_state():
|
|
|
|
state = state.copy()
|
|
|
|
state[(event.type, event.state_key)] = event
|
|
|
|
defer.returnValue(state)
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_state_at(self, room_id, stream_position):
|
|
|
|
""" Get the room state at a particular stream position
|
|
|
|
:param str room_id: room for which to get state
|
|
|
|
:param StreamToken stream_position: point at which to get state
|
|
|
|
:returns: A Deferred map from ((type, state_key)->Event)
|
2015-01-27 11:24:22 -05:00
|
|
|
"""
|
|
|
|
last_events, token = yield self.store.get_recent_events_for_room(
|
2015-11-10 13:27:23 -05:00
|
|
|
room_id, end_token=stream_position.room_key, limit=1,
|
2015-01-27 11:24:22 -05:00
|
|
|
)
|
|
|
|
|
|
|
|
if last_events:
|
2015-11-10 13:27:23 -05:00
|
|
|
last_event = last_events[-1]
|
|
|
|
state = yield self.get_state_after_event(last_event)
|
|
|
|
|
2015-01-27 11:24:22 -05:00
|
|
|
else:
|
2015-11-10 13:27:23 -05:00
|
|
|
# no events in this room - so presumably no state
|
2015-11-12 11:34:42 -05:00
|
|
|
state = {}
|
2015-01-27 11:24:22 -05:00
|
|
|
defer.returnValue(state)
|
|
|
|
|
|
|
|
def compute_state_delta(self, since_token, previous_state, current_state):
|
|
|
|
""" Works out the differnce in state between the current state and the
|
|
|
|
state the client got when it last performed a sync.
|
2015-11-13 05:31:15 -05:00
|
|
|
|
|
|
|
:param str since_token: the point we are comparing against
|
2015-11-12 11:34:42 -05:00
|
|
|
:param dict[(str,str), synapse.events.FrozenEvent] previous_state: the
|
|
|
|
state to compare to
|
|
|
|
:param dict[(str,str), synapse.events.FrozenEvent] current_state: the
|
|
|
|
new state
|
2015-11-13 05:31:15 -05:00
|
|
|
|
2015-11-12 11:34:42 -05:00
|
|
|
:returns A new event dictionary
|
2015-01-27 11:24:22 -05:00
|
|
|
"""
|
|
|
|
# TODO(mjark) Check if the state events were received by the server
|
|
|
|
# after the previous sync, since we need to include those state
|
|
|
|
# updates even if they occured logically before the previous event.
|
|
|
|
# TODO(mjark) Check for new redactions in the state events.
|
2015-11-12 11:34:42 -05:00
|
|
|
|
|
|
|
state_delta = {}
|
|
|
|
for key, event in current_state.iteritems():
|
|
|
|
if (key not in previous_state or
|
|
|
|
previous_state[key].event_id != event.event_id):
|
|
|
|
state_delta[key] = event
|
2015-01-27 11:24:22 -05:00
|
|
|
return state_delta
|
2015-01-30 08:33:41 -05:00
|
|
|
|
2015-11-10 13:27:23 -05:00
|
|
|
def check_joined_room(self, sync_config, state_delta):
|
2015-11-13 05:31:15 -05:00
|
|
|
"""
|
2015-11-10 13:27:23 -05:00
|
|
|
Check if the user has just joined the given room (so should
|
|
|
|
be given the full state)
|
2015-11-13 05:31:15 -05:00
|
|
|
|
2015-11-12 11:34:42 -05:00
|
|
|
:param sync_config:
|
|
|
|
:param dict[(str,str), synapse.events.FrozenEvent] state_delta: the
|
|
|
|
difference in state since the last sync
|
|
|
|
|
2015-11-13 05:31:15 -05:00
|
|
|
:returns A deferred Tuple (state_delta, limited)
|
|
|
|
"""
|
2015-11-12 11:34:42 -05:00
|
|
|
join_event = state_delta.get((
|
|
|
|
EventTypes.Member, sync_config.user.to_string()), None)
|
|
|
|
if join_event is not None:
|
|
|
|
if join_event.content["membership"] == Membership.JOIN:
|
2015-11-10 13:27:23 -05:00
|
|
|
return True
|
|
|
|
return False
|
2015-12-18 12:47:00 -05:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
2016-01-13 12:43:39 -05:00
|
|
|
def unread_notifs_for_room_id(self, room_id, sync_config, ephemeral_by_room):
|
2015-12-18 12:47:00 -05:00
|
|
|
last_unread_event_id = self.last_read_event_id_for_room_and_user(
|
|
|
|
room_id, sync_config.user.to_string(), ephemeral_by_room
|
|
|
|
)
|
|
|
|
|
|
|
|
notifs = []
|
|
|
|
if last_unread_event_id:
|
2016-01-04 09:05:37 -05:00
|
|
|
notifs = yield self.store.get_unread_event_push_actions_by_room_for_user(
|
2015-12-18 12:47:00 -05:00
|
|
|
room_id, sync_config.user.to_string(), last_unread_event_id
|
|
|
|
)
|
2016-01-19 06:35:50 -05:00
|
|
|
defer.returnValue(notifs)
|
|
|
|
|
|
|
|
# There is no new information in this period, so your notification
|
|
|
|
# count is whatever it was last time.
|
|
|
|
defer.returnValue(None)
|
|
|
|
|
|
|
|
|
|
|
|
def _action_has_highlight(actions):
|
|
|
|
for action in actions:
|
|
|
|
try:
|
|
|
|
if action.get("set_tweak", None) == "highlight":
|
|
|
|
return action.get("value", True)
|
|
|
|
except AttributeError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
return False
|