# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import List, cast
from unittest import TestCase

from twisted.test.proto_helpers import MemoryReactor

from synapse.api.constants import EventTypes
from synapse.api.errors import AuthError, Codes, LimitExceededError, SynapseError
from synapse.api.room_versions import RoomVersions
from synapse.events import EventBase, make_event_from_dict
from synapse.federation.federation_base import event_from_pdu_json
from synapse.logging.context import LoggingContext, run_in_background
from synapse.rest import admin
from synapse.rest.client import login, room
from synapse.server import HomeServer
from synapse.util import Clock
from synapse.util.stringutils import random_string

from tests import unittest
from tests.test_utils import event_injection

logger = logging.getLogger(__name__)


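# Added note: the helper below only has to produce something shaped like an event ID
# that no homeserver has ever seen; the backfill test further down uses such IDs as
# prev_events to manufacture backward extremities on purpose.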
def generate_fake_event_id() -> str:
    return "$fake_" + random_string(43)


class FederationTestCase(unittest.FederatingHomeserverTestCase):
    servlets = [
        admin.register_servlets,
        login.register_servlets,
        room.register_servlets,
    ]

    def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
        hs = self.setup_test_homeserver(federation_http_client=None)
        self.handler = hs.get_federation_handler()
        self.store = hs.get_datastores().main
        self.state_storage = hs.get_storage().state
        self._event_auth_handler = hs.get_event_auth_handler()
        return hs

    def test_exchange_revoked_invite(self) -> None:
        user_id = self.register_user("kermit", "test")
        tok = self.login("kermit", "test")

        room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)

        # Send a 3PID invite event with an empty body so it's considered as a revoked one.
        invite_token = "sometoken"
        self.helper.send_state(
            room_id=room_id,
            event_type=EventTypes.ThirdPartyInvite,
            state_key=invite_token,
            body={},
            tok=tok,
        )
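
        # Added note: exchanging the token against the now-empty (i.e. revoked)
        # m.room.third_party_invite state event should fail auth; the assertions
        # below check for exactly that 403 / M_FORBIDDEN response.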
        d = self.handler.on_exchange_third_party_invite_request(
            event_dict={
                "type": EventTypes.Member,
                "room_id": room_id,
                "sender": user_id,
                "state_key": "@someone:example.org",
                "content": {
                    "membership": "invite",
                    "third_party_invite": {
                        "display_name": "alice",
                        "signed": {
                            "mxid": "@alice:localhost",
                            "token": invite_token,
                            "signatures": {
                                "magic.forest": {
                                    "ed25519:3": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg"
                                }
                            },
                        },
                    },
                },
            },
        )

        failure = self.get_failure(d, AuthError).value

        self.assertEqual(failure.code, 403, failure)
        self.assertEqual(failure.errcode, Codes.FORBIDDEN, failure)
        self.assertEqual(failure.msg, "You are not invited to this room.")

    def test_rejected_message_event_state(self) -> None:
        """
        Check that we store the state group correctly for rejected non-state events.

        Regression test for #6289.
        """
        OTHER_SERVER = "otherserver"
        OTHER_USER = "@otheruser:" + OTHER_SERVER

        # create the room
        user_id = self.register_user("kermit", "test")
        tok = self.login("kermit", "test")
        room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)
        room_version = self.get_success(self.store.get_room_version(room_id))

        # pretend that another server has joined
        join_event = self._build_and_send_join_event(OTHER_SERVER, OTHER_USER, room_id)

        # check the state group
        sg = self.successResultOf(
            self.store._get_state_group_for_event(join_event.event_id)
        )

        # build and send an event which will be rejected
        ev = event_from_pdu_json(
            {
                "type": EventTypes.Message,
                "content": {},
                "room_id": room_id,
                "sender": "@yetanotheruser:" + OTHER_SERVER,
                "depth": cast(int, join_event["depth"]) + 1,
                "prev_events": [join_event.event_id],
                "auth_events": [],
                "origin_server_ts": self.clock.time_msec(),
            },
            room_version,
        )
        with LoggingContext("send_rejected"):
            d = run_in_background(
                self.hs.get_federation_event_handler().on_receive_pdu, OTHER_SERVER, ev
            )
        self.get_success(d)

        # that should have been rejected
        e = self.get_success(self.store.get_event(ev.event_id, allow_rejected=True))
        self.assertIsNotNone(e.rejected_reason)

        # ... and the state group should be the same as before
        sg2 = self.successResultOf(self.store._get_state_group_for_event(ev.event_id))

        self.assertEqual(sg, sg2)

    def test_rejected_state_event_state(self) -> None:
        """
        Check that we store the state group correctly for rejected state events.

        Regression test for #6289.
        """
        OTHER_SERVER = "otherserver"
        OTHER_USER = "@otheruser:" + OTHER_SERVER

        # create the room
        user_id = self.register_user("kermit", "test")
        tok = self.login("kermit", "test")
        room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)
        room_version = self.get_success(self.store.get_room_version(room_id))

        # pretend that another server has joined
        join_event = self._build_and_send_join_event(OTHER_SERVER, OTHER_USER, room_id)

        # check the state group
        sg = self.successResultOf(
            self.store._get_state_group_for_event(join_event.event_id)
        )

        # build and send an event which will be rejected
        ev = event_from_pdu_json(
            {
                "type": "org.matrix.test",
                "state_key": "test_key",
                "content": {},
                "room_id": room_id,
                "sender": "@yetanotheruser:" + OTHER_SERVER,
                "depth": cast(int, join_event["depth"]) + 1,
                "prev_events": [join_event.event_id],
                "auth_events": [],
                "origin_server_ts": self.clock.time_msec(),
            },
            room_version,
        )

        with LoggingContext("send_rejected"):
            d = run_in_background(
                self.hs.get_federation_event_handler().on_receive_pdu, OTHER_SERVER, ev
            )
        self.get_success(d)

        # that should have been rejected
        e = self.get_success(self.store.get_event(ev.event_id, allow_rejected=True))
        self.assertIsNotNone(e.rejected_reason)

        # ... and the state group should be the same as before
        sg2 = self.successResultOf(self.store._get_state_group_for_event(ev.event_id))

        self.assertEqual(sg, sg2)

    def test_backfill_with_many_backward_extremities(self) -> None:
        """
        Check that we can backfill with many backward extremities.

        The goal is to make sure that when we only use a portion of the backward
        extremities (the magic number is more than 5), no errors are thrown.

        Regression test, see #11027
        """
        # create the room
        user_id = self.register_user("kermit", "test")
        tok = self.login("kermit", "test")

        room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)
        room_version = self.get_success(self.store.get_room_version(room_id))

        # we need a user on the remote server to be a member, so that we can send
        # extremity-causing events.
        self.get_success(
            event_injection.inject_member_event(
                self.hs, room_id, f"@user:{self.OTHER_SERVER_NAME}", "join"
            )
        )

        send_result = self.helper.send(room_id, "first message", tok=tok)
        ev1 = self.get_success(
            self.store.get_event(send_result["event_id"], allow_none=False)
        )
        current_state = self.get_success(
            self.store.get_events_as_list(
                (self.get_success(self.store.get_current_state_ids(room_id))).values()
            )
        )

        # Create "many" backward extremities. The magic number we're trying to
        # create more than is 5 which corresponds to the number of backward
        # extremities we slice off in `_maybe_backfill_inner`
        federation_event_handler = self.hs.get_federation_event_handler()
        for _ in range(0, 8):
            event = make_event_from_dict(
                self.add_hashes_and_signatures(
                    {
                        "origin_server_ts": 1,
                        "type": "m.room.message",
                        "content": {
                            "msgtype": "m.text",
                            "body": "message connected to fake event",
                        },
                        "room_id": room_id,
                        "sender": f"@user:{self.OTHER_SERVER_NAME}",
                        "prev_events": [
                            ev1.event_id,
                            # We're creating a backward extremity each time thanks
                            # to this fake event
                            generate_fake_event_id(),
                        ],
                        # lazy: *everything* is an auth event
                        "auth_events": [ev.event_id for ev in current_state],
                        "depth": ev1.depth + 1,
                    },
                    room_version,
                ),
                room_version,
            )

            # we poke this directly into _process_received_pdu, to avoid the
            # federation handler wanting to backfill the fake event.
            self.get_success(
                federation_event_handler._process_received_pdu(
                    self.OTHER_SERVER_NAME,
                    event,
                    state_ids={
                        (e.type, e.state_key): e.event_id for e in current_state
                    },
                )
            )

        # we should now have 8 backwards extremities.
        backwards_extremities = self.get_success(
            self.store.db_pool.simple_select_list(
                "event_backward_extremities",
                keyvalues={"room_id": room_id},
                retcols=["event_id"],
            )
        )
        self.assertEqual(len(backwards_extremities), 8)
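
        # Added note: before the #11027 fix, `_maybe_backfill_inner` sliced the
        # extremity list to 5 but still iterated over the full list, so a room with 8
        # backward extremities made `/messages` fail with a KeyError; a low
        # current_depth makes backfill consider all of them here.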
        current_depth = 1
        limit = 100
        with LoggingContext("receive_pdu"):
            # Make sure backfill still works
            d = run_in_background(
                self.hs.get_federation_handler().maybe_backfill,
                room_id,
                current_depth,
                limit,
            )
        self.get_success(d)

    def test_backfill_floating_outlier_membership_auth(self) -> None:
        """
        As the local homeserver, check that we can properly process a federated
        event from the OTHER_SERVER with auth_events that include a floating
        membership event from the OTHER_SERVER.

        Regression test, see #10439.
        """
        OTHER_SERVER = "otherserver"
        OTHER_USER = "@otheruser:" + OTHER_SERVER

        # create the room
        user_id = self.register_user("kermit", "test")
        tok = self.login("kermit", "test")
        room_id = self.helper.create_room_as(
            room_creator=user_id,
            is_public=True,
            tok=tok,
            extra_content={
                "preset": "public_chat",
            },
        )
        room_version = self.get_success(self.store.get_room_version(room_id))

        prev_event_ids = self.get_success(self.store.get_prev_events_for_room(room_id))
        (
            most_recent_prev_event_id,
            most_recent_prev_event_depth,
        ) = self.get_success(self.store.get_max_depth_of(prev_event_ids))
        # mapping from (type, state_key) -> state_event_id
        assert most_recent_prev_event_id is not None
        prev_state_map = self.get_success(
            self.state_storage.get_state_ids_for_event(most_recent_prev_event_id)
        )
        # List of state event ID's
        prev_state_ids = list(prev_state_map.values())
        auth_event_ids = prev_state_ids
        auth_events = list(
            self.get_success(self.store.get_events(auth_event_ids)).values()
        )
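
        # Added note: "floating" means the membership event built below hangs off a
        # made-up prev_event that can never be fetched, so it can only be persisted as
        # an outlier via the stubbed /event_auth response further down.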

        # build a floating outlier member state event
        fake_prev_event_id = "$" + random_string(43)
        member_event_dict = {
            "type": EventTypes.Member,
            "content": {
                "membership": "join",
            },
            "state_key": OTHER_USER,
            "room_id": room_id,
            "sender": OTHER_USER,
            "depth": most_recent_prev_event_depth,
            "prev_events": [fake_prev_event_id],
            "origin_server_ts": self.clock.time_msec(),
            "signatures": {OTHER_SERVER: {"ed25519:key_version": "SomeSignatureHere"}},
        }
        builder = self.hs.get_event_builder_factory().for_room_version(
            room_version, member_event_dict
        )
        member_event = self.get_success(
            builder.build(
                prev_event_ids=member_event_dict["prev_events"],
                auth_event_ids=self._event_auth_handler.compute_auth_events(
                    builder,
                    prev_state_map,
                    for_verification=False,
                ),
                depth=member_event_dict["depth"],
            )
        )
        # Override the signature added from "test" homeserver that we created the event with
        member_event.signatures = member_event_dict["signatures"]

        # Add the new member_event to the StateMap
        updated_state_map = dict(prev_state_map)
        updated_state_map[
            (member_event.type, member_event.state_key)
        ] = member_event.event_id
        auth_events.append(member_event)

        # build and send an event authed based on the member event
        message_event_dict = {
            "type": EventTypes.Message,
            "content": {},
            "room_id": room_id,
            "sender": OTHER_USER,
            "depth": most_recent_prev_event_depth,
            "prev_events": prev_event_ids.copy(),
            "origin_server_ts": self.clock.time_msec(),
            "signatures": {OTHER_SERVER: {"ed25519:key_version": "SomeSignatureHere"}},
        }
        builder = self.hs.get_event_builder_factory().for_room_version(
            room_version, message_event_dict
        )
        message_event = self.get_success(
            builder.build(
                prev_event_ids=message_event_dict["prev_events"],
                auth_event_ids=self._event_auth_handler.compute_auth_events(
                    builder,
                    updated_state_map,
                    for_verification=False,
                ),
                depth=message_event_dict["depth"],
            )
        )
        # Override the signature added from "test" homeserver that we created the event with
        message_event.signatures = message_event_dict["signatures"]

        # Stub the /event_auth response from the OTHER_SERVER
        async def get_event_auth(
            destination: str, room_id: str, event_id: str
        ) -> List[EventBase]:
            return [
                event_from_pdu_json(ae.get_pdu_json(), room_version=room_version)
                for ae in auth_events
            ]

        self.handler.federation_client.get_event_auth = get_event_auth  # type: ignore[assignment]

        with LoggingContext("receive_pdu"):
            # Fake the OTHER_SERVER federating the message event over to our local homeserver
            d = run_in_background(
                self.hs.get_federation_event_handler().on_receive_pdu,
                OTHER_SERVER,
                message_event,
            )
        self.get_success(d)

        # Now try and get the events on our local homeserver
        stored_event = self.get_success(
            self.store.get_event(message_event.event_id, allow_none=True)
        )
        self.assertTrue(stored_event is not None)

    @unittest.override_config(
        {"rc_invites": {"per_user": {"per_second": 0.5, "burst_count": 3}}}
    )
    def test_invite_by_user_ratelimit(self) -> None:
        """Tests that invites from federation to a particular user are
        actually rate-limited.
        """
        other_server = "otherserver"
        other_user = "@otheruser:" + other_server

        # create the room
        user_id = self.register_user("kermit", "test")
        tok = self.login("kermit", "test")

        def create_invite():
            room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)
            room_version = self.get_success(self.store.get_room_version(room_id))
            return event_from_pdu_json(
                {
                    "type": EventTypes.Member,
                    "content": {"membership": "invite"},
                    "room_id": room_id,
                    "sender": other_user,
                    "state_key": "@user:test",
                    "depth": 32,
                    "prev_events": [],
                    "auth_events": [],
                    "origin_server_ts": self.clock.time_msec(),
                },
                room_version,
            )

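        # Added note: burst_count is 3 in the config override above, so three invites
        # in quick succession should be allowed and the fourth rejected.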
        for _ in range(3):
            event = create_invite()
            self.get_success(
                self.handler.on_invite_request(
                    other_server,
                    event,
                    event.room_version,
                )
            )

        event = create_invite()
        self.get_failure(
            self.handler.on_invite_request(
                other_server,
                event,
                event.room_version,
            ),
            exc=LimitExceededError,
        )

    def _build_and_send_join_event(
        self, other_server: str, other_user: str, room_id: str
    ) -> EventBase:
        """Make a join event for `other_user` and pretend that `other_server` sent
        it to us over federation, checking that the join was accepted."""
        join_event = self.get_success(
            self.handler.on_make_join_request(other_server, room_id, other_user)
        )
        # the auth code requires that a signature exists, but doesn't check that
        # signature... go figure.
        join_event.signatures[other_server] = {"x": "y"}
        with LoggingContext("send_join"):
            d = run_in_background(
                self.hs.get_federation_event_handler().on_send_membership_event,
                other_server,
                join_event,
            )
        self.get_success(d)

        # sanity-check: the room should show that the new user is a member
        r = self.get_success(self.store.get_current_state_ids(room_id))
        self.assertEqual(r[(EventTypes.Member, other_user)], join_event.event_id)

        return join_event


class EventFromPduTestCase(TestCase):
    def test_valid_json(self) -> None:
        """Valid JSON should be turned into an event."""
        ev = event_from_pdu_json(
            {
                "type": EventTypes.Message,
                "content": {"bool": True, "null": None, "int": 1, "str": "foobar"},
                "room_id": "!room:test",
                "sender": "@user:test",
                "depth": 1,
                "prev_events": [],
                "auth_events": [],
                "origin_server_ts": 1234,
            },
            RoomVersions.V6,
        )

        self.assertIsInstance(ev, EventBase)

    def test_invalid_numbers(self) -> None:
        """Invalid values for an integer should be rejected, all floats should be rejected."""
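        # Added note: the 2**53 bounds reflect Matrix's Canonical JSON rules, which
        # only allow integers in the range [-(2**53) + 1, (2**53) - 1].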
        for value in [
            -(2**53),
            2**53,
            1.0,
            float("inf"),
            float("-inf"),
            float("nan"),
        ]:
            with self.assertRaises(SynapseError):
                event_from_pdu_json(
                    {
                        "type": EventTypes.Message,
                        "content": {"foo": value},
                        "room_id": "!room:test",
                        "sender": "@user:test",
                        "depth": 1,
                        "prev_events": [],
                        "auth_events": [],
                        "origin_server_ts": 1234,
                    },
                    RoomVersions.V6,
                )

2022-03-17 07:25:50 -04:00
|
|
|
def test_invalid_nested(self) -> None:
|
2020-05-14 13:24:01 -04:00
|
|
|
"""List and dictionaries are recursively searched."""
|
|
|
|
with self.assertRaises(SynapseError):
|
|
|
|
event_from_pdu_json(
|
|
|
|
{
|
|
|
|
"type": EventTypes.Message,
|
2022-03-29 06:41:19 -04:00
|
|
|
"content": {"foo": [{"bar": 2**56}]},
|
2020-05-14 13:24:01 -04:00
|
|
|
"room_id": "!room:test",
|
|
|
|
"sender": "@user:test",
|
|
|
|
"depth": 1,
|
|
|
|
"prev_events": [],
|
|
|
|
"auth_events": [],
|
|
|
|
"origin_server_ts": 1234,
|
|
|
|
},
|
2020-05-15 09:30:10 -04:00
|
|
|
RoomVersions.V6,
|
2020-05-14 13:24:01 -04:00
|
|
|
)
|