# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
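"""Helpers for validating events received over federation: signature and
content-hash checking, and construction of events from federation JSON."""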
import logging
from collections import namedtuple

import six

from twisted.internet import defer
from twisted.internet.defer import DeferredList

from synapse.api.constants import MAX_DEPTH, EventTypes, Membership
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, EventFormatVersions
from synapse.crypto.event_signing import check_event_content_hash
from synapse.events import event_type_from_format_version
from synapse.events.utils import prune_event
from synapse.http.servlet import assert_params_in_dict
from synapse.types import get_domain_from_id
from synapse.util import logcontext, unwrapFirstError

logger = logging.getLogger(__name__)


class FederationBase(object):
    def __init__(self, hs):
        self.hs = hs

        self.server_name = hs.hostname
        self.keyring = hs.get_keyring()
        self.spam_checker = hs.get_spam_checker()
        self.store = hs.get_datastore()
        self._clock = hs.get_clock()

    @defer.inlineCallbacks
    def _check_sigs_and_hash_and_fetch(
        self, origin, pdus, room_version, outlier=False, include_none=False
    ):
        """Takes a list of PDUs and checks the signatures and hashes of each
        one. If a PDU fails its signature check then we check if we have it in
        the database, and if not then request it from the originating server of
        that PDU.

        If a PDU fails its content hash check then it is redacted.

        The given list of PDUs is not modified; instead the function returns
        a new list.

        Args:
            origin (str)
            pdus (list)
            room_version (str)
            outlier (bool): Whether the events are outliers or not
            include_none (bool): Whether to include None in the returned list
                for events that have failed their checks

        Returns:
            Deferred: A list of PDUs that have valid signatures and hashes.
        """
        deferreds = self._check_sigs_and_hashes(room_version, pdus)

        @defer.inlineCallbacks
        def handle_check_result(pdu, deferred):
            try:
                res = yield logcontext.make_deferred_yieldable(deferred)
            except SynapseError:
                res = None

            if not res:
                # Check local db.
                res = yield self.store.get_event(
                    pdu.event_id, allow_rejected=True, allow_none=True
                )

            if not res and pdu.origin != origin:
                try:
                    res = yield self.get_pdu(
                        destinations=[pdu.origin],
                        event_id=pdu.event_id,
                        room_version=room_version,
                        outlier=outlier,
                        timeout=10000,
                    )
                except SynapseError:
                    pass

            if not res:
                logger.warn(
                    "Failed to find copy of %s with valid signature", pdu.event_id
                )

            defer.returnValue(res)

        handle = logcontext.preserve_fn(handle_check_result)
        deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)]

        valid_pdus = yield logcontext.make_deferred_yieldable(
            defer.gatherResults(deferreds2, consumeErrors=True)
        ).addErrback(unwrapFirstError)

        if include_none:
            defer.returnValue(valid_pdus)
        else:
            defer.returnValue([p for p in valid_pdus if p])

    def _check_sigs_and_hash(self, room_version, pdu):
        return logcontext.make_deferred_yieldable(
            self._check_sigs_and_hashes(room_version, [pdu])[0]
        )

    def _check_sigs_and_hashes(self, room_version, pdus):
        """Checks that each of the received events is correctly signed by the
        sending server.

        Args:
            room_version (str): The room version of the PDUs
            pdus (list[FrozenEvent]): the events to be checked

        Returns:
            list[Deferred]: for each input event, a deferred which:
              * returns the original event if the checks pass
              * returns a redacted version of the event (if the signature
                matched but the hash did not)
              * throws a SynapseError if the signature check failed.
            The deferreds run their callbacks in the sentinel logcontext.
        """
        deferreds = _check_sigs_on_pdus(self.keyring, room_version, pdus)

        ctx = logcontext.LoggingContext.current_context()

        def callback(_, pdu):
            with logcontext.PreserveLoggingContext(ctx):
                if not check_event_content_hash(pdu):
                    # let's try to distinguish between failures because the event was
                    # redacted (which are somewhat expected) vs actual ball-tampering
                    # incidents.
                    #
                    # This is just a heuristic, so we just assume that if the keys are
                    # about the same between the redacted and received events, then the
                    # received event was probably a redacted copy (but we then use our
                    # *actual* redacted copy to be on the safe side.)
                    redacted_event = prune_event(pdu)
                    if set(redacted_event.keys()) == set(pdu.keys()) and set(
                        six.iterkeys(redacted_event.content)
                    ) == set(six.iterkeys(pdu.content)):
                        logger.info(
                            "Event %s seems to have been redacted; using our redacted "
                            "copy",
                            pdu.event_id,
                        )
                    else:
                        logger.warning(
                            "Event %s content has been tampered, redacting: %s",
                            pdu.event_id,
                            pdu.get_pdu_json(),
                        )
                    return redacted_event

                if self.spam_checker.check_event_for_spam(pdu):
                    logger.warn(
                        "Event contains spam, redacting %s: %s",
                        pdu.event_id,
                        pdu.get_pdu_json(),
                    )
                    return prune_event(pdu)

                return pdu

        def errback(failure, pdu):
            failure.trap(SynapseError)
            with logcontext.PreserveLoggingContext(ctx):
                logger.warn(
                    "Signature check failed for %s: %s",
                    pdu.event_id,
                    failure.getErrorMessage(),
                )
            return failure

        for deferred, pdu in zip(deferreds, pdus):
            deferred.addCallbacks(
                callback, errback, callbackArgs=[pdu], errbackArgs=[pdu]
            )

        return deferreds


class PduToCheckSig(
    namedtuple(
        "PduToCheckSig", ["pdu", "redacted_pdu_json", "sender_domain", "deferreds"]
    )
):
    pass
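
# Illustrative note (not from the original source): each PduToCheckSig bundles a
# PDU with the pre-computed data the signature checks need, roughly
#
#     PduToCheckSig(
#         pdu=event,
#         redacted_pdu_json=prune_event(event).get_pdu_json(),
#         sender_domain=get_domain_from_id(event.sender),
#         deferreds=[],
#     )
#
# with `deferreds` accumulating one verification Deferred per check added below.
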
def _check_sigs_on_pdus(keyring, room_version, pdus):
    """Check that the given events are correctly signed

    Args:
        keyring (synapse.crypto.Keyring): keyring object to do the checks
        room_version (str): the room version of the PDUs
        pdus (Collection[EventBase]): the events to be checked

    Returns:
        List[Deferred]: a Deferred for each event in pdus, which will either succeed if
            the signatures are valid, or fail (with a SynapseError) if not.
    """

    # we want to check that the event is signed by:
    #
    # (a) the sender's server
    #
    #     - except in the case of invites created from a 3pid invite, which are exempt
    #       from this check, because the sender has to match that of the original 3pid
    #       invite, but the event may come from a different HS, for reasons that I don't
    #       entirely grok (why do the senders have to match? and if they do, why doesn't
    #       the joining server ask the inviting server to do the switcheroo with
    #       exchange_third_party_invite?).
    #
    #       That's pretty awful, since redacting such an invite will render it invalid
    #       (because it will then look like a regular invite without a valid signature),
    #       and signatures are *supposed* to be valid whether or not an event has been
    #       redacted. But this isn't the worst of the ways that 3pid invites are broken.
    #
    # (b) for V1 and V2 rooms, the server which created the event_id
    #
    # let's start by getting the domain for each pdu, and flattening the event back
    # to JSON.

    pdus_to_check = [
        PduToCheckSig(
            pdu=p,
            redacted_pdu_json=prune_event(p).get_pdu_json(),
            sender_domain=get_domain_from_id(p.sender),
            deferreds=[],
        )
        for p in pdus
    ]

    v = KNOWN_ROOM_VERSIONS.get(room_version)
    if not v:
        raise RuntimeError("Unrecognized room version %s" % (room_version,))

    # First we check that the event is signed by the sender's domain
    # (except if it's a 3pid invite, in which case it may be sent by any server)
    pdus_to_check_sender = [p for p in pdus_to_check if not _is_invite_via_3pid(p.pdu)]

    more_deferreds = keyring.verify_json_objects_for_server(
        [
            (
                p.sender_domain,
                p.redacted_pdu_json,
                p.pdu.origin_server_ts if v.enforce_key_validity else 0,
                p.pdu.event_id,
            )
            for p in pdus_to_check_sender
        ]
    )

    def sender_err(e, pdu_to_check):
        errmsg = "event id %s: unable to verify signature for sender %s: %s" % (
            pdu_to_check.pdu.event_id,
            pdu_to_check.sender_domain,
            e.getErrorMessage(),
        )
        # XX not really sure if these are the right codes, but they are what
        # we've done for ages
        raise SynapseError(400, errmsg, Codes.UNAUTHORIZED)

    for p, d in zip(pdus_to_check_sender, more_deferreds):
        d.addErrback(sender_err, p)
        p.deferreds.append(d)

    # now let's look for events where the sender's domain is different to the
    # event id's domain (normally only the case for joins/leaves), and add additional
    # checks. Only do this if the room version has a concept of event ID domain
    # (ie, the room version uses old-style non-hash event IDs).
    if v.event_format == EventFormatVersions.V1:
        pdus_to_check_event_id = [
            p
            for p in pdus_to_check
            if p.sender_domain != get_domain_from_id(p.pdu.event_id)
        ]

        more_deferreds = keyring.verify_json_objects_for_server(
            [
                (
                    get_domain_from_id(p.pdu.event_id),
                    p.redacted_pdu_json,
                    p.pdu.origin_server_ts if v.enforce_key_validity else 0,
                    p.pdu.event_id,
                )
                for p in pdus_to_check_event_id
            ]
        )

        def event_err(e, pdu_to_check):
            errmsg = (
                "event id %s: unable to verify signature for event id domain: %s"
                % (pdu_to_check.pdu.event_id, e.getErrorMessage())
            )
            # XX as above: not really sure if these are the right codes
            raise SynapseError(400, errmsg, Codes.UNAUTHORIZED)

        for p, d in zip(pdus_to_check_event_id, more_deferreds):
            d.addErrback(event_err, p)
            p.deferreds.append(d)

    # replace lists of deferreds with single Deferreds
    return [_flatten_deferred_list(p.deferreds) for p in pdus_to_check]


def _flatten_deferred_list(deferreds):
    """Given a list of deferreds: if there are several, combine them into a
    DeferredList; if there is exactly one, return it; if there are none,
    return an already-resolved deferred.
    """
    if len(deferreds) > 1:
        return DeferredList(deferreds, fireOnOneErrback=True, consumeErrors=True)
    elif len(deferreds) == 1:
        return deferreds[0]
    else:
        return defer.succeed(None)


def _is_invite_via_3pid(event):
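    """Check whether this is a room-membership invite created from a
    third-party (3pid) invite, i.e. an m.room.member invite whose content
    carries a third_party_invite block.
    """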
    return (
        event.type == EventTypes.Member
        and event.membership == Membership.INVITE
        and "third_party_invite" in event.content
    )


def event_from_pdu_json(pdu_json, event_format_version, outlier=False):
    """Construct a FrozenEvent from an event json received over federation

    Args:
        pdu_json (object): pdu as received over federation
        event_format_version (int): The event format version
        outlier (bool): True to mark this event as an outlier

    Returns:
        FrozenEvent

    Raises:
        SynapseError: if the pdu is missing required fields or is otherwise
            not a valid matrix event
    """
    # we could probably enforce a bunch of other fields here (room_id, sender,
    # origin, etc etc)
    assert_params_in_dict(pdu_json, ("type", "depth"))

    depth = pdu_json["depth"]
    if not isinstance(depth, six.integer_types):
        raise SynapseError(400, "Depth %r not an integer" % (depth,), Codes.BAD_JSON)

    if depth < 0:
        raise SynapseError(400, "Depth too small", Codes.BAD_JSON)
    elif depth > MAX_DEPTH:
        raise SynapseError(400, "Depth too large", Codes.BAD_JSON)

    event = event_type_from_format_version(event_format_version)(pdu_json)

    event.internal_metadata.outlier = outlier

    return event