2015-01-23 13:31:29 -05:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-26 13:53:31 -05:00
|
|
|
# Copyright 2015 OpenMarket Ltd
|
2015-01-23 13:31:29 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
from twisted.internet import defer
|
|
|
|
|
2015-04-21 11:35:53 -04:00
|
|
|
from synapse.http.servlet import (
|
2015-10-26 14:47:18 -04:00
|
|
|
RestServlet, parse_string, parse_integer, parse_boolean
|
2015-04-21 11:35:53 -04:00
|
|
|
)
|
2015-01-26 13:53:31 -05:00
|
|
|
from synapse.handlers.sync import SyncConfig
|
|
|
|
from synapse.types import StreamToken
|
2015-11-10 13:29:25 -05:00
|
|
|
from synapse.events import FrozenEvent
|
2015-01-28 21:45:33 -05:00
|
|
|
from synapse.events.utils import (
|
2015-11-12 05:33:19 -05:00
|
|
|
serialize_event, format_event_for_client_v2_without_room_id,
|
2015-01-28 21:45:33 -05:00
|
|
|
)
|
2015-10-20 10:33:25 -04:00
|
|
|
from synapse.api.filtering import FilterCollection
|
2015-12-01 12:34:32 -05:00
|
|
|
from ._base import client_v2_patterns
|
2015-01-23 13:31:29 -05:00
|
|
|
|
2015-10-09 13:50:15 -04:00
|
|
|
import copy
|
2015-01-23 13:31:29 -05:00
|
|
|
import logging
|
|
|
|
|
|
|
|
# Module-level logger namespaced to this module, per stdlib convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class SyncRestServlet(RestServlet):
    """
    GET parameters::
        timeout(int): How long to wait for new events in milliseconds.
        since(batch_token): Batch token when asking for incremental deltas.
        set_presence(str): What state the device presence should be set to.
            default is "online".
        filter(filter_id): A filter to apply to the events returned.

    Response JSON::
        {
            "next_batch": // batch token for the next /sync
            "presence": // presence data for the user.
            "rooms": {
                "joined": { // Joined rooms being updated.
                    "${room_id}": { // Id of the room being updated
                        "event_map": // Map of EventID -> event JSON.
                        "timeline": { // The recent events in the room if gap is "true"
                            "limited": // Was the per-room event limit exceeded?
                                       // otherwise the next events in the room.
                            "events": [] // list of EventIDs in the "event_map".
                            "prev_batch": // back token for getting previous events.
                        }
                        "state": {"events": []} // list of EventIDs updating the
                                                // current state to be what it should
                                                // be at the end of the batch.
                        "ephemeral": {"events": []} // list of event objects
                    }
                },
                "invited": {}, // Invited rooms being updated.
                "archived": {} // Archived rooms being updated.
            }
        }
    """

    PATTERNS = client_v2_patterns("/sync$")
    # Presence states a client may ask to be put into via ?set_presence=.
    ALLOWED_PRESENCE = set(["online", "offline"])

    def __init__(self, hs):
        super(SyncRestServlet, self).__init__()
        self.auth = hs.get_auth()
        self.event_stream_handler = hs.get_handlers().event_stream_handler
        self.sync_handler = hs.get_handlers().sync_handler
        self.clock = hs.get_clock()
        self.filtering = hs.get_filtering()

    @defer.inlineCallbacks
    def on_GET(self, request):
        """Handle a GET /sync request.

        Parses the query parameters, waits for the sync result from the
        sync handler, and encodes it into the v2 response format.

        :param request: the twisted HTTP request
        :return: Deferred resolving to ``(200, response_content)``
        """
        user, token_id, _ = yield self.auth.get_user_by_req(request)

        timeout = parse_integer(request, "timeout", default=0)
        since = parse_string(request, "since")
        set_presence = parse_string(
            request, "set_presence", default="online",
            allowed_values=self.ALLOWED_PRESENCE
        )
        filter_id = parse_string(request, "filter", default=None)
        full_state = parse_boolean(request, "full_state", default=False)

        # Lazy %-args: the formatting cost is only paid if the message is
        # actually emitted (was an eager "%" interpolation before).
        logger.info(
            "/sync: user=%r, timeout=%r, since=%r,"
            " set_presence=%r, filter_id=%r",
            user, timeout, since, set_presence, filter_id
        )

        try:
            filter = yield self.filtering.get_user_filter(
                user.localpart, filter_id
            )
        except Exception:
            # Unknown/unparseable filter_id: degrade to an empty filter
            # rather than failing the whole sync. (Was a bare "except:",
            # which would also have swallowed KeyboardInterrupt/SystemExit.)
            filter = FilterCollection({})

        sync_config = SyncConfig(
            user=user,
            filter=filter,
        )

        if since is not None:
            since_token = StreamToken.from_string(since)
        else:
            since_token = None

        if set_presence == "online":
            yield self.event_stream_handler.started_stream(user)

        try:
            sync_result = yield self.sync_handler.wait_for_sync_for_user(
                sync_config, since_token=since_token, timeout=timeout,
                full_state=full_state
            )
        finally:
            # Always balance started_stream(), even if the sync raised.
            if set_presence == "online":
                self.event_stream_handler.stopped_stream(user)

        time_now = self.clock.time_msec()

        joined = self.encode_joined(
            sync_result.joined, filter, time_now, token_id
        )

        invited = self.encode_invited(
            sync_result.invited, filter, time_now, token_id
        )

        archived = self.encode_archived(
            sync_result.archived, filter, time_now, token_id
        )

        response_content = {
            "account_data": self.encode_account_data(
                sync_result.account_data, filter, time_now
            ),
            "presence": self.encode_presence(
                sync_result.presence, filter, time_now
            ),
            "rooms": {
                "join": joined,
                "invite": invited,
                "leave": archived,
            },
            "next_batch": sync_result.next_batch.to_string(),
        }

        defer.returnValue((200, response_content))

    def encode_presence(self, events, filter, time_now):
        """
        Encode presence events for the response, moving the content's
        "user_id" up to a top-level "sender" key.

        :param list[dict] events: presence events as dicts
        :param FilterCollection filter: filters to apply to the results
        :param int time_now: current time (unused here; kept for a
            signature consistent with the other encode_* methods)
        :return: dict with an "events" list, in our response format
        """
        formatted = []
        for event in events:
            # deepcopy so we never mutate the (possibly shared) originals.
            event = copy.deepcopy(event)
            event['sender'] = event['content'].pop('user_id')
            formatted.append(event)
        return {"events": filter.filter_presence(formatted)}

    def encode_account_data(self, events, filter, time_now):
        """
        Encode the user's account_data events, applying the filter.

        :param list events: account_data events
        :param FilterCollection filter: filters to apply to the results
        :param int time_now: current time (unused here; kept for a
            signature consistent with the other encode_* methods)
        :return: dict with an "events" list, in our response format
        """
        return {"events": filter.filter_account_data(events)}

    def encode_joined(self, rooms, filter, time_now, token_id):
        """
        Encode the joined rooms in a sync result

        :param list[synapse.handlers.sync.JoinedSyncResult] rooms: list of sync
            results for rooms this user is joined to
        :param FilterCollection filter: filters to apply to the results
        :param int time_now: current time - used as a baseline for age
            calculations
        :param int token_id: ID of the user's auth token - used for namespacing
            of transaction IDs

        :return: the joined rooms list, in our response format
        :rtype: dict[str, dict[str, object]]
        """
        joined = {}
        for room in rooms:
            joined[room.room_id] = self.encode_room(
                room, filter, time_now, token_id
            )

        return joined

    def encode_invited(self, rooms, filter, time_now, token_id):
        """
        Encode the invited rooms in a sync result

        :param list[synapse.handlers.sync.InvitedSyncResult] rooms: list of
            sync results for rooms this user has been invited to
        :param FilterCollection filter: filters to apply to the results
        :param int time_now: current time - used as a baseline for age
            calculations
        :param int token_id: ID of the user's auth token - used for namespacing
            of transaction IDs

        :return: the invited rooms list, in our response format
        :rtype: dict[str, dict[str, object]]
        """
        invited = {}
        for room in rooms:
            invite = serialize_event(
                room.invite, time_now, token_id=token_id,
                event_format=format_event_for_client_v2_without_room_id,
            )
            # The stripped state the inviter bundled with the invite; the
            # invite event itself is appended so clients see it last.
            invited_state = invite.get("unsigned", {}).pop("invite_room_state", [])
            invited_state.append(invite)
            invited[room.room_id] = {
                "invite_state": {"events": invited_state}
            }

        return invited

    def encode_archived(self, rooms, filter, time_now, token_id):
        """
        Encode the archived rooms in a sync result

        :param list[synapse.handlers.sync.ArchivedSyncResult] rooms: list of
            sync results for rooms this user has left
        :param FilterCollection filter: filters to apply to the results
        :param int time_now: current time - used as a baseline for age
            calculations
        :param int token_id: ID of the user's auth token - used for namespacing
            of transaction IDs

        :return: the archived rooms list, in our response format
        :rtype: dict[str, dict[str, object]]
        """
        # (The accumulator here used to be misleadingly named "joined", and
        # the docstring claimed it returned the *invited* rooms list.)
        archived = {}
        for room in rooms:
            archived[room.room_id] = self.encode_room(
                room, filter, time_now, token_id, joined=False
            )

        return archived

    @staticmethod
    def encode_room(room, filter, time_now, token_id, joined=True):
        """
        :param JoinedSyncResult|ArchivedSyncResult room: sync result for a
            single room
        :param FilterCollection filter: filters to apply to the results
        :param int time_now: current time - used as a baseline for age
            calculations
        :param int token_id: ID of the user's auth token - used for namespacing
            of transaction IDs
        :param joined: True if the user is joined to this room - will mean
            we handle ephemeral events

        :return: the room, encoded in our response format
        :rtype: dict[str, object]
        """
        def serialize(event):
            # TODO(mjark): Respect formatting requirements in the filter.
            return serialize_event(
                event, time_now, token_id=token_id,
                event_format=format_event_for_client_v2_without_room_id,
            )

        state_dict = room.state
        timeline_events = filter.filter_room_timeline(room.timeline.events)

        # The handler gives us the state at the *end* of the timeline; the
        # response wants the state at the *start*, so wind it backwards.
        state_dict = SyncRestServlet._rollback_state_for_timeline(
            state_dict, timeline_events)

        state_events = filter.filter_room_state(state_dict.values())

        serialized_state = [serialize(e) for e in state_events]
        serialized_timeline = [serialize(e) for e in timeline_events]

        account_data = filter.filter_room_account_data(
            room.account_data
        )

        result = {
            "timeline": {
                "events": serialized_timeline,
                "prev_batch": room.timeline.prev_batch.to_string(),
                "limited": room.timeline.limited,
            },
            "state": {"events": serialized_state},
            "account_data": {"events": account_data},
        }

        if joined:
            # Ephemeral events (e.g. typing) only make sense for rooms the
            # user is currently joined to.
            ephemeral_events = filter.filter_room_ephemeral(room.ephemeral)
            result["ephemeral"] = {"events": ephemeral_events}

        return result

    @staticmethod
    def _rollback_state_for_timeline(state, timeline):
        """
        Wind the state dictionary backwards, so that it represents the
        state at the start of the timeline, rather than at the end.

        :param dict[(str, str), synapse.events.EventBase] state: the
            state dictionary. Will be updated to the state before the timeline.
        :param list[synapse.events.EventBase] timeline: the event timeline
        :return: updated state dictionary
        """
        logger.debug("Processing state dict %r; timeline %r", state,
                     [e.get_dict() for e in timeline])

        result = state.copy()

        for timeline_event in reversed(timeline):
            if not timeline_event.is_state():
                continue

            event_key = (timeline_event.type, timeline_event.state_key)

            logger.debug("Considering %s for removal", event_key)

            state_event = result.get(event_key)
            if (state_event is None or
                    state_event.event_id != timeline_event.event_id):
                # the event in the timeline isn't present in the state
                # dictionary.
                #
                # the most likely cause for this is that there was a fork in
                # the event graph, and the state is no longer valid. Really,
                # the event shouldn't be in the timeline. We're going to ignore
                # it for now, however.
                logger.warning("Found state event %r in timeline which doesn't "
                               "match state dictionary", timeline_event)
                continue

            prev_event_id = timeline_event.unsigned.get("replaces_state", None)
            logger.debug("Replacing %s with %s in state dict",
                         timeline_event.event_id, prev_event_id)

            if prev_event_id is None:
                del result[event_key]
            else:
                result[event_key] = FrozenEvent({
                    "type": timeline_event.type,
                    "state_key": timeline_event.state_key,
                    "content": timeline_event.unsigned['prev_content'],
                    "sender": timeline_event.unsigned['prev_sender'],
                    "event_id": prev_event_id,
                    "room_id": timeline_event.room_id,
                })
            logger.debug("New value: %r", result.get(event_key))

        return result
|
|
|
|
|
2015-01-23 13:31:29 -05:00
|
|
|
|
|
|
|
def register_servlets(hs, http_server):
    """Instantiate the sync servlet and attach it to *http_server*."""
    servlet = SyncRestServlet(hs)
    servlet.register(http_server)
|