synapse-product/synapse/handlers/message.py

# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.events.utils import serialize_event
from synapse.events.validator import EventValidator
from synapse.push.action_generator import ActionGenerator
from synapse.streams.config import PaginationConfig
from synapse.types import (
UserID, RoomAlias, RoomStreamToken, StreamToken, get_domain_from_id
)
from synapse.util import unwrapFirstError
from synapse.util.async import concurrently_execute, run_on_reactor
from synapse.util.caches.snapshot_cache import SnapshotCache
from synapse.util.logcontext import preserve_fn
from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
from canonicaljson import encode_canonical_json
import logging
logger = logging.getLogger(__name__)
class MessageHandler(BaseHandler):
def __init__(self, hs):
super(MessageHandler, self).__init__(hs)
self.hs = hs
self.state = hs.get_state_handler()
self.clock = hs.get_clock()
self.validator = EventValidator()
self.snapshot_cache = SnapshotCache()
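        # Cache of recently computed initial-sync snapshots, keyed by the
        # request parameters (see snapshot_all_rooms below).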
@defer.inlineCallbacks
def get_messages(self, requester, room_id=None, pagin_config=None,
as_client_event=True):
"""Get messages in a room.
Args:
requester (Requester): The user requesting messages.
room_id (str): The room they want messages from.
pagin_config (synapse.api.streams.PaginationConfig): The pagination
config rules to apply, if any.
as_client_event (bool): True to get events in client-server format.
Returns:
dict: Pagination API results
"""
user_id = requester.user.to_string()
data_source = self.hs.get_event_sources().sources["room"]
if pagin_config.from_token:
room_token = pagin_config.from_token.room_key
else:
pagin_config.from_token = (
yield self.hs.get_event_sources().get_current_token(
direction='b'
)
)
room_token = pagin_config.from_token.room_key
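        # Parse into a RoomStreamToken so we can tell whether the client gave
        # us a topological position to paginate from.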
room_token = RoomStreamToken.parse(room_token)
pagin_config.from_token = pagin_config.from_token.copy_and_replace(
"room_key", str(room_token)
)
source_config = pagin_config.get_source_config("room")
membership, member_event_id = yield self._check_in_room_or_world_readable(
room_id, user_id
)
if source_config.direction == 'b':
# if we're going backwards, we might need to backfill. This
# requires that we have a topo token.
if room_token.topological:
max_topo = room_token.topological
else:
max_topo = yield self.store.get_max_topological_token_for_stream_and_room(
room_id, room_token.stream
)
if membership == Membership.LEAVE:
# If they have left the room then clamp the token to be before
# they left the room, to save the effort of loading from the
# database.
leave_token = yield self.store.get_topological_token_for_event(
member_event_id
)
leave_token = RoomStreamToken.parse(leave_token)
if leave_token.topological < max_topo:
source_config.from_key = str(leave_token)
yield self.hs.get_handlers().federation_handler.maybe_backfill(
room_id, max_topo
)
events, next_key = yield data_source.get_pagination_rows(
requester.user, source_config, room_id
)
next_token = pagin_config.from_token.copy_and_replace(
"room_key", next_key
)
if not events:
defer.returnValue({
"chunk": [],
"start": pagin_config.from_token.to_string(),
"end": next_token.to_string(),
})
events = yield filter_events_for_client(
self.store,
user_id,
events,
is_peeking=(member_event_id is None),
)
time_now = self.clock.time_msec()
chunk = {
"chunk": [
serialize_event(e, time_now, as_client_event)
for e in events
],
"start": pagin_config.from_token.to_string(),
"end": next_token.to_string(),
}
defer.returnValue(chunk)
@defer.inlineCallbacks
def create_event(self, event_dict, token_id=None, txn_id=None, prev_event_ids=None):
"""
Given a dict from a client, create a new event.
        Creates a FrozenEvent object, filling out auth_events, prev_events,
        etc.
        Adds display names and avatar URLs to join and invite membership events.
Args:
event_dict (dict): An entire event
            token_id (str): The access token ID to associate with the event.
            txn_id (str): The client transaction ID, if any.
prev_event_ids (list): The prev event ids to use when creating the event
Returns:
Tuple of created event (FrozenEvent), Context
"""
builder = self.event_builder_factory.new(event_dict)
self.validator.validate_new(builder)
if builder.type == EventTypes.Member:
membership = builder.content.get("membership", None)
target = UserID.from_string(builder.state_key)
if membership in {Membership.JOIN, Membership.INVITE}:
# If event doesn't include a display name, add one.
profile = self.hs.get_handlers().profile_handler
content = builder.content
try:
content["displayname"] = yield profile.get_displayname(target)
content["avatar_url"] = yield profile.get_avatar_url(target)
except Exception as e:
logger.info(
"Failed to get profile information for %r: %s",
target, e
)
if token_id is not None:
builder.internal_metadata.token_id = token_id
if txn_id is not None:
builder.internal_metadata.txn_id = txn_id
event, context = yield self._create_new_client_event(
builder=builder,
prev_event_ids=prev_event_ids,
)
defer.returnValue((event, context))
@defer.inlineCallbacks
def send_nonmember_event(self, requester, event, context, ratelimit=True):
"""
Persists and notifies local clients and federation of an event.
Args:
            requester (Requester): The user sending the event.
            event (FrozenEvent): the event to send.
            context (Context): the context of the event.
            ratelimit (bool): Whether to rate limit this send.
"""
if event.type == EventTypes.Member:
raise SynapseError(
500,
"Tried to send member event through non-member codepath"
)
user = UserID.from_string(event.sender)
assert self.hs.is_mine(user), "User must be our own: %s" % (user,)
if event.is_state():
prev_state = self.deduplicate_state_event(event, context)
if prev_state is not None:
defer.returnValue(prev_state)
yield self.handle_new_client_event(
requester=requester,
event=event,
context=context,
ratelimit=ratelimit,
)
if event.type == EventTypes.Message:
presence = self.hs.get_presence_handler()
yield presence.bump_presence_active_time(user)
def deduplicate_state_event(self, event, context):
"""
Checks whether event is in the latest resolved state in context.
If so, returns the version of the event in context.
Otherwise, returns None.
"""
prev_event = context.current_state.get((event.type, event.state_key))
if prev_event and event.user_id == prev_event.user_id:
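            # Compare canonical JSON so that semantically identical content
            # matches even if the dicts would serialise differently.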
prev_content = encode_canonical_json(prev_event.content)
next_content = encode_canonical_json(event.content)
if prev_content == next_content:
return prev_event
return None
@defer.inlineCallbacks
def create_and_send_nonmember_event(
self,
requester,
event_dict,
ratelimit=True,
txn_id=None
):
"""
Creates an event, then sends it.
See self.create_event and self.send_nonmember_event.
"""
event, context = yield self.create_event(
event_dict,
token_id=requester.access_token_id,
txn_id=txn_id
)
yield self.send_nonmember_event(
requester,
event,
context,
ratelimit=ratelimit,
)
defer.returnValue(event)
@defer.inlineCallbacks
def get_room_data(self, user_id=None, room_id=None,
event_type=None, state_key="", is_guest=False):
""" Get data from a room.
Args:
event : The room path event
Returns:
The path data content.
Raises:
SynapseError if something went wrong.
"""
membership, membership_event_id = yield self._check_in_room_or_world_readable(
room_id, user_id
)
if membership == Membership.JOIN:
data = yield self.state_handler.get_current_state(
room_id, event_type, state_key
)
elif membership == Membership.LEAVE:
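            # The user has left the room: return the state as it was at the
            # point they left.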
key = (event_type, state_key)
room_state = yield self.store.get_state_for_events(
[membership_event_id], [key]
)
data = room_state[membership_event_id].get(key)
defer.returnValue(data)
@defer.inlineCallbacks
def _check_in_room_or_world_readable(self, room_id, user_id):
try:
# check_user_was_in_room will return the most recent membership
# event for the user if:
# * The user is a non-guest user, and was ever in the room
# * The user is a guest user, and has joined the room
# else it will throw.
member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
defer.returnValue((member_event.membership, member_event.event_id))
return
except AuthError:
visibility = yield self.state_handler.get_current_state(
room_id, EventTypes.RoomHistoryVisibility, ""
)
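            # The user has no usable membership in the room; they may still
            # peek if the room's history visibility is world_readable.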
if (
visibility and
visibility.content["history_visibility"] == "world_readable"
):
defer.returnValue((Membership.JOIN, None))
return
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
)
@defer.inlineCallbacks
def get_state_events(self, user_id, room_id, is_guest=False):
"""Retrieve all state events for a given room. If the user is
joined to the room then return the current state. If the user has
left the room return the state events from when they left.
Args:
user_id(str): The user requesting state events.
room_id(str): The room ID to get all state events from.
Returns:
A list of dicts representing state events. [{}, {}, {}]
"""
membership, membership_event_id = yield self._check_in_room_or_world_readable(
room_id, user_id
)
if membership == Membership.JOIN:
room_state = yield self.state_handler.get_current_state(room_id)
elif membership == Membership.LEAVE:
room_state = yield self.store.get_state_for_events(
[membership_event_id], None
)
room_state = room_state[membership_event_id]
now = self.clock.time_msec()
defer.returnValue(
[serialize_event(c, now) for c in room_state.values()]
)
def snapshot_all_rooms(self, user_id=None, pagin_config=None,
as_client_event=True, include_archived=False):
"""Retrieve a snapshot of all rooms the user is invited or has joined.
This snapshot may include messages for all rooms where the user is
joined, depending on the pagination config.
Args:
user_id (str): The ID of the user making the request.
pagin_config (synapse.api.streams.PaginationConfig): The pagination
config used to determine how many messages *PER ROOM* to return.
as_client_event (bool): True to get events in client-server format.
include_archived (bool): True to get rooms that the user has left
Returns:
            A dict whose "rooms" key is a list of dicts with "room_id" and
            "membership" keys for all rooms the user is currently invited to or
            joined in on. Rooms the user is joined to may also include a
            "messages" key with recent messages, depending on the specified
            PaginationConfig.
"""
key = (
user_id,
pagin_config.from_token,
pagin_config.to_token,
pagin_config.direction,
pagin_config.limit,
as_client_event,
include_archived,
)
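        # Serve a recently computed snapshot for the same parameters from the
        # cache rather than rebuilding it.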
now_ms = self.clock.time_msec()
result = self.snapshot_cache.get(now_ms, key)
if result is not None:
return result
return self.snapshot_cache.set(now_ms, key, self._snapshot_all_rooms(
user_id, pagin_config, as_client_event, include_archived
))
@defer.inlineCallbacks
def _snapshot_all_rooms(self, user_id=None, pagin_config=None,
as_client_event=True, include_archived=False):
memberships = [Membership.INVITE, Membership.JOIN]
if include_archived:
memberships.append(Membership.LEAVE)
room_list = yield self.store.get_rooms_for_user_where_membership_is(
user_id=user_id, membership_list=memberships
)
user = UserID.from_string(user_id)
rooms_ret = []
now_token = yield self.hs.get_event_sources().get_current_token()
presence_stream = self.hs.get_event_sources().sources["presence"]
pagination_config = PaginationConfig(from_token=now_token)
presence, _ = yield presence_stream.get_pagination_rows(
user, pagination_config.get_source_config("presence"), None
)
receipt_stream = self.hs.get_event_sources().sources["receipt"]
receipt, _ = yield receipt_stream.get_pagination_rows(
user, pagination_config.get_source_config("receipt"), None
)
tags_by_room = yield self.store.get_tags_for_user(user_id)
account_data, account_data_by_room = (
yield self.store.get_account_data_for_user(user_id)
)
public_room_ids = yield self.store.get_public_room_ids()
limit = pagin_config.limit
if limit is None:
limit = 10
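        # handle_room builds the response entry for a single membership event
        # and appends it to rooms_ret.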
@defer.inlineCallbacks
def handle_room(event):
d = {
"room_id": event.room_id,
"membership": event.membership,
"visibility": (
"public" if event.room_id in public_room_ids
else "private"
),
}
if event.membership == Membership.INVITE:
time_now = self.clock.time_msec()
d["inviter"] = event.sender
invite_event = yield self.store.get_event(event.event_id)
d["invite"] = serialize_event(invite_event, time_now, as_client_event)
rooms_ret.append(d)
if event.membership not in (Membership.JOIN, Membership.LEAVE):
return
try:
if event.membership == Membership.JOIN:
room_end_token = now_token.room_key
deferred_room_state = self.state_handler.get_current_state(
event.room_id
)
elif event.membership == Membership.LEAVE:
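                    # The user has left: pin both the messages and the state to
                    # the point of their leave event.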
room_end_token = "s%d" % (event.stream_ordering,)
deferred_room_state = self.store.get_state_for_events(
[event.event_id], None
)
deferred_room_state.addCallback(
lambda states: states[event.event_id]
)
(messages, token), current_state = yield defer.gatherResults(
[
self.store.get_recent_events_for_room(
event.room_id,
limit=limit,
end_token=room_end_token,
),
deferred_room_state,
]
).addErrback(unwrapFirstError)
messages = yield filter_events_for_client(
self.store, user_id, messages
)
start_token = now_token.copy_and_replace("room_key", token[0])
end_token = now_token.copy_and_replace("room_key", token[1])
time_now = self.clock.time_msec()
d["messages"] = {
"chunk": [
serialize_event(m, time_now, as_client_event)
for m in messages
],
"start": start_token.to_string(),
"end": end_token.to_string(),
}
d["state"] = [
serialize_event(c, time_now, as_client_event)
for c in current_state.values()
]
account_data_events = []
tags = tags_by_room.get(event.room_id)
if tags:
account_data_events.append({
"type": "m.tag",
"content": {"tags": tags},
})
account_data = account_data_by_room.get(event.room_id, {})
for account_data_type, content in account_data.items():
account_data_events.append({
"type": account_data_type,
"content": content,
})
d["account_data"] = account_data_events
except:
logger.exception("Failed to get snapshot")
yield concurrently_execute(handle_room, room_list, 10)
account_data_events = []
for account_data_type, content in account_data.items():
account_data_events.append({
"type": account_data_type,
"content": content,
})
ret = {
"rooms": rooms_ret,
"presence": presence,
"account_data": account_data_events,
"receipts": receipt,
"end": now_token.to_string(),
}
defer.returnValue(ret)
@defer.inlineCallbacks
def room_initial_sync(self, requester, room_id, pagin_config=None):
"""Capture the a snapshot of a room. If user is currently a member of
the room this will be what is currently in the room. If the user left
the room this will be what was in the room when they left.
Args:
requester(Requester): The user to get a snapshot for.
room_id(str): The room to get a snapshot of.
pagin_config(synapse.streams.config.PaginationConfig):
The pagination config used to determine how many messages to
return.
Raises:
AuthError if the user wasn't in the room.
Returns:
A JSON serialisable dict with the snapshot of the room.
"""
user_id = requester.user.to_string()
membership, member_event_id = yield self._check_in_room_or_world_readable(
room_id, user_id,
)
is_peeking = member_event_id is None
if membership == Membership.JOIN:
result = yield self._room_initial_sync_joined(
user_id, room_id, pagin_config, membership, is_peeking
)
elif membership == Membership.LEAVE:
result = yield self._room_initial_sync_parted(
user_id, room_id, pagin_config, membership, member_event_id, is_peeking
)
account_data_events = []
tags = yield self.store.get_tags_for_room(user_id, room_id)
if tags:
account_data_events.append({
"type": "m.tag",
"content": {"tags": tags},
})
account_data = yield self.store.get_account_data_for_room(user_id, room_id)
for account_data_type, content in account_data.items():
account_data_events.append({
"type": account_data_type,
"content": content,
})
result["account_data"] = account_data_events
defer.returnValue(result)
@defer.inlineCallbacks
def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
membership, member_event_id, is_peeking):
room_state = yield self.store.get_state_for_events(
[member_event_id], None
)
room_state = room_state[member_event_id]
limit = pagin_config.limit if pagin_config else None
if limit is None:
limit = 10
stream_token = yield self.store.get_stream_token_for_event(
member_event_id
)
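        # Fetch the messages leading up to the point at which the user left.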
messages, token = yield self.store.get_recent_events_for_room(
room_id,
limit=limit,
end_token=stream_token
)
messages = yield filter_events_for_client(
self.store, user_id, messages, is_peeking=is_peeking
)
start_token = StreamToken.START.copy_and_replace("room_key", token[0])
end_token = StreamToken.START.copy_and_replace("room_key", token[1])
time_now = self.clock.time_msec()
defer.returnValue({
"membership": membership,
"room_id": room_id,
"messages": {
"chunk": [serialize_event(m, time_now) for m in messages],
"start": start_token.to_string(),
"end": end_token.to_string(),
},
"state": [serialize_event(s, time_now) for s in room_state.values()],
"presence": [],
"receipts": [],
})
@defer.inlineCallbacks
def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
membership, is_peeking):
current_state = yield self.state.get_current_state(
room_id=room_id,
)
# TODO: These concurrently
time_now = self.clock.time_msec()
state = [
serialize_event(x, time_now)
for x in current_state.values()
]
now_token = yield self.hs.get_event_sources().get_current_token()
limit = pagin_config.limit if pagin_config else None
if limit is None:
limit = 10
room_members = [
m for m in current_state.values()
if m.type == EventTypes.Member
and m.content["membership"] == Membership.JOIN
]
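        # Fetch presence for the joined members, the room's read receipts and
        # the most recent messages in parallel.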
presence_handler = self.hs.get_presence_handler()
@defer.inlineCallbacks
def get_presence():
states = yield presence_handler.get_states(
[m.user_id for m in room_members],
as_event=True,
)
defer.returnValue(states)
@defer.inlineCallbacks
def get_receipts():
receipts_handler = self.hs.get_handlers().receipts_handler
receipts = yield receipts_handler.get_receipts_for_room(
room_id,
now_token.receipt_key
)
defer.returnValue(receipts)
presence, receipts, (messages, token) = yield defer.gatherResults(
[
get_presence(),
get_receipts(),
self.store.get_recent_events_for_room(
room_id,
limit=limit,
end_token=now_token.room_key,
)
],
consumeErrors=True,
).addErrback(unwrapFirstError)
messages = yield filter_events_for_client(
self.store, user_id, messages, is_peeking=is_peeking,
)
start_token = now_token.copy_and_replace("room_key", token[0])
end_token = now_token.copy_and_replace("room_key", token[1])
time_now = self.clock.time_msec()
ret = {
"room_id": room_id,
"messages": {
"chunk": [serialize_event(m, time_now) for m in messages],
"start": start_token.to_string(),
"end": end_token.to_string(),
},
"state": state,
"presence": presence,
"receipts": receipts,
}
if not is_peeking:
ret["membership"] = membership
defer.returnValue(ret)
@defer.inlineCallbacks
def _create_new_client_event(self, builder, prev_event_ids=None):
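        # Hang the new event off either the caller-supplied prev events or the
        # room's current forward extremities.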
if prev_event_ids:
prev_events = yield self.store.add_event_hashes(prev_event_ids)
prev_max_depth = yield self.store.get_max_depth_of_events(prev_event_ids)
depth = prev_max_depth + 1
else:
latest_ret = yield self.store.get_latest_event_ids_and_hashes_in_room(
builder.room_id,
)
if latest_ret:
depth = max([d for _, _, d in latest_ret]) + 1
else:
depth = 1
prev_events = [
(event_id, prev_hashes)
for event_id, prev_hashes, _ in latest_ret
]
builder.prev_events = prev_events
builder.depth = depth
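        # Compute the room state at this event; the resulting context is used
        # later for auth checks and persistence.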
state_handler = self.state_handler
context = yield state_handler.compute_event_context(builder)
if builder.is_state():
builder.prev_state = yield self.store.add_event_hashes(
context.prev_state_events
)
yield self.auth.add_auth_events(builder, context)
signing_key = self.hs.config.signing_key[0]
add_hashes_and_signatures(
builder, self.server_name, signing_key
)
event = builder.build()
logger.debug(
"Created event %s with current state: %s",
event.event_id, context.current_state,
)
defer.returnValue(
(event, context,)
)
@defer.inlineCallbacks
def handle_new_client_event(
self,
requester,
event,
context,
ratelimit=True,
extra_users=[]
):
# We now need to go and hit out to wherever we need to hit out to.
if ratelimit:
self.ratelimit(requester)
try:
self.auth.check(event, auth_events=context.current_state)
except AuthError as err:
logger.warn("Denying new event %r because %s", event, err)
raise err
yield self.maybe_kick_guest_users(event, context.current_state.values())
if event.type == EventTypes.CanonicalAlias:
            # Check the alias is actually valid (at this time at least)
room_alias_str = event.content.get("alias", None)
if room_alias_str:
room_alias = RoomAlias.from_string(room_alias_str)
directory_handler = self.hs.get_handlers().directory_handler
mapping = yield directory_handler.get_association(room_alias)
if mapping["room_id"] != event.room_id:
raise SynapseError(
400,
"Room alias %s does not point to the room" % (
room_alias_str,
)
)
federation_handler = self.hs.get_handlers().federation_handler
if event.type == EventTypes.Member:
if event.content["membership"] == Membership.INVITE:
def is_inviter_member_event(e):
return (
e.type == EventTypes.Member and
e.sender == event.sender
)
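                # Bundle a subset of the current room state into the invite so
                # that clients can show a preview of the room to the invited
                # user.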
event.unsigned["invite_room_state"] = [
{
"type": e.type,
"state_key": e.state_key,
"content": e.content,
"sender": e.sender,
}
for k, e in context.current_state.items()
if e.type in self.hs.config.room_invite_state_types
or is_inviter_member_event(e)
]
invitee = UserID.from_string(event.state_key)
if not self.hs.is_mine(invitee):
# TODO: Can we add signature from remote server in a nicer
# way? If we have been invited by a remote server, we need
# to get them to sign the event.
returned_invite = yield federation_handler.send_invite(
invitee.domain,
event,
)
event.unsigned.pop("room_state", None)
# TODO: Make sure the signatures actually are correct.
event.signatures.update(
returned_invite.signatures
)
if event.type == EventTypes.Redaction:
if self.auth.check_redaction(event, auth_events=context.current_state):
original_event = yield self.store.get_event(
event.redacts,
check_redacted=False,
get_prev_content=False,
allow_rejected=False,
allow_none=False
)
if event.user_id != original_event.user_id:
raise AuthError(
403,
"You don't have permission to redact events"
)
if event.type == EventTypes.Create and context.current_state:
raise AuthError(
403,
"Changing the room create event is forbidden",
)
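        # Calculate the push actions for this event before it is persisted.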
action_generator = ActionGenerator(self.hs)
yield action_generator.handle_push_actions_for_event(
event, context
)
(event_stream_id, max_stream_id) = yield self.store.persist_event(
event, context=context
)
# this intentionally does not yield: we don't care about the result
# and don't need to wait for it.
preserve_fn(self.hs.get_pusherpool().on_new_notifications)(
event_stream_id, max_stream_id
)
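        # Work out which remote homeservers currently have joined users, so
        # that the event can be sent out over federation.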
destinations = set()
for k, s in context.current_state.items():
try:
if k[0] == EventTypes.Member:
if s.content["membership"] == Membership.JOIN:
destinations.add(get_domain_from_id(s.state_key))
except SynapseError:
logger.warn(
"Failed to get destination from event %s", s.event_id
)
@defer.inlineCallbacks
def _notify():
yield run_on_reactor()
self.notifier.on_new_room_event(
event, event_stream_id, max_stream_id,
extra_users=extra_users
)
preserve_fn(_notify)()
# If invite, remove room_state from unsigned before sending.
event.unsigned.pop("invite_room_state", None)
federation_handler.handle_new_event(
event, destinations=destinations,
)