Merge remote-tracking branch 'upstream/release-v1.45'

Commit 80adb0a6ca by Tulir Asokan, 2021-10-12 13:54:46 +03:00
196 changed files with 4122 additions and 2177 deletions


@@ -25,7 +25,7 @@ python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts-dev/update_database --database-config .ci/sqlite-config.yaml
+scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Create the PostgreSQL database.
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
@@ -46,7 +46,7 @@ echo "--- Prepare empty SQLite database"
# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db
-scripts-dev/update_database --database-config .ci/sqlite-config.yaml
+scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# re-create the PostgreSQL database.
.ci/scripts/postgres_exec.py \

.github/CODEOWNERS (vendored, new file)

@@ -0,0 +1,2 @@
# Automatically request reviews from the synapse-core team when a pull request comes in.
* @matrix-org/synapse-core


@@ -76,22 +76,25 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
-        python-version: ["3.6", "3.7", "3.8", "3.9"]
+        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
        database: ["sqlite"]
+        toxenv: ["py"]
        include:
          # Newest Python without optional deps
-          - python-version: "3.9"
-            toxenv: "py-noextras,combine"
+          - python-version: "3.10"
+            toxenv: "py-noextras"
          # Oldest Python with PostgreSQL
          - python-version: "3.6"
            database: "postgres"
            postgres-version: "9.6"
+            toxenv: "py"
-          # Newest Python with PostgreSQL
-          - python-version: "3.9"
+          # Newest Python with newest PostgreSQL
+          - python-version: "3.10"
            database: "postgres"
-            postgres-version: "13"
+            postgres-version: "14"
+            toxenv: "py"
    steps:
      - uses: actions/checkout@v2
@@ -111,7 +114,7 @@ jobs:
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
-      - run: tox -e py,combine
+      - run: tox -e ${{ matrix.toxenv }}
        env:
          TRIAL_FLAGS: "--jobs=2"
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
@@ -169,7 +172,7 @@ jobs:
        with:
          python-version: ${{ matrix.python-version }}
      - run: pip install tox
-      - run: tox -e py,combine
+      - run: tox -e py
        env:
          TRIAL_FLAGS: "--jobs=2"
      - name: Dump logs
@@ -256,8 +259,8 @@ jobs:
          - python-version: "3.6"
            postgres-version: "9.6"
-          - python-version: "3.9"
-            postgres-version: "13"
+          - python-version: "3.10"
+            postgres-version: "14"
    services:
      postgres:


@@ -1,3 +1,85 @@
Synapse 1.45.0rc1 (2021-10-12)
==============================
**Note:** We are aware of [a performance issue](https://github.com/matrix-org/synapse/issues/11049) introduced in Synapse v1.44.0, as well as [a bug](https://github.com/matrix-org/synapse/issues/11025) with the user directory when using application services. While this release candidate doesn't fix either of those issues, a second release candidate is expected to come out in a few days to address them.
Media storage provider modules that read from Synapse's configuration need changes as of this version: see the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#upgrading-to-v1450) for more information.
Features
--------
- Add [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069) support to `/account/whoami`. ([\#9655](https://github.com/matrix-org/synapse/issues/9655))
- Support autodiscovery of oEmbed previews. ([\#10822](https://github.com/matrix-org/synapse/issues/10822))
- Add a `user_may_send_3pid_invite` spam checker callback for modules to allow or deny 3PID invites. ([\#10894](https://github.com/matrix-org/synapse/issues/10894))
- Add a spam checker callback to allow or deny room joins. ([\#10910](https://github.com/matrix-org/synapse/issues/10910))
- Include an `update_synapse_database` script in the distribution. Contributed by @Fizzadar at Beeper. ([\#10954](https://github.com/matrix-org/synapse/issues/10954))
- Include exception information in JSON logging output. Contributed by @Fizzadar at Beeper. ([\#11028](https://github.com/matrix-org/synapse/issues/11028))
Bugfixes
--------
- Fix a minor bug in the response to `/_matrix/client/r0/voip/turnServer`. Contributed by @lukaslihotzki. ([\#10922](https://github.com/matrix-org/synapse/issues/10922))
- Fix a bug where empty `yyyy-mm-dd/` directories would be left behind in the media store's `url_cache_thumbnails/` directory. ([\#10924](https://github.com/matrix-org/synapse/issues/10924))
- Fix a bug introduced in Synapse v1.40.0 where the signature checks for room version 8 and 9 could be applied to earlier room versions in some situations. ([\#10927](https://github.com/matrix-org/synapse/issues/10927))
- Fix a long-standing bug wherein deactivated users still count towards the monthly active users limit. ([\#10947](https://github.com/matrix-org/synapse/issues/10947))
- Fix a long-standing bug which meant that events received over federation were sometimes incorrectly accepted into the room state. ([\#10956](https://github.com/matrix-org/synapse/issues/10956))
- Fix a long-standing bug where rebuilding the user directory wouldn't exclude support and deactivated users. ([\#10960](https://github.com/matrix-org/synapse/issues/10960))
- Fix [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` endpoint rejecting subsequent batches with unknown batch ID error in existing room versions from the room creator. ([\#10962](https://github.com/matrix-org/synapse/issues/10962))
- Fix a bug that could leak local users' per-room nicknames and avatars when the user directory is rebuilt. ([\#10981](https://github.com/matrix-org/synapse/issues/10981))
- Fix a long-standing bug where the remainder of a batch of user directory changes would be silently dropped if the server left a room early in the batch. ([\#10982](https://github.com/matrix-org/synapse/issues/10982))
- Correct a bugfix introduced in Synapse v1.44.0 that would catch the wrong error if a connection is lost before a response could be written to it. ([\#10995](https://github.com/matrix-org/synapse/issues/10995))
- Fix a long-standing bug where local users' per-room nicknames/avatars were visible to anyone who could see them in the user directory. ([\#11002](https://github.com/matrix-org/synapse/issues/11002))
- Fix a long-standing bug where a user's per-room nickname/avatar would overwrite their profile in the user directory when a room was made public. ([\#11003](https://github.com/matrix-org/synapse/issues/11003))
- Work around a regression, introduced in Synapse v1.39.0, that caused `SynapseError`s raised by the experimental third-party rules module callback `check_event_allowed` to be ignored. ([\#11042](https://github.com/matrix-org/synapse/issues/11042))
- Fix a bug in [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) insertion events in rooms that could cause cross-talk/conflicts between batches. ([\#10877](https://github.com/matrix-org/synapse/issues/10877))
Improved Documentation
----------------------
- Change wording ("reference homeserver") in Synapse repository documentation. Contributed by @maxkratz. ([\#10971](https://github.com/matrix-org/synapse/issues/10971))
- Fix a dead URL in development documentation (SAML) and change wording from "Riot" to "Element". Contributed by @maxkratz. ([\#10973](https://github.com/matrix-org/synapse/issues/10973))
- Add additional content to the Welcome and Overview page of the documentation. ([\#10990](https://github.com/matrix-org/synapse/issues/10990))
- Update links to MSCs in documentation. Contributed by @dklimpel. ([\#10991](https://github.com/matrix-org/synapse/issues/10991))
Internal Changes
----------------
- Improve type hinting in `synapse.util`. ([\#10888](https://github.com/matrix-org/synapse/issues/10888))
- Add further type hints to `synapse.storage.util`. ([\#10892](https://github.com/matrix-org/synapse/issues/10892))
- Fix type hints to be compatible with an upcoming change to Twisted. ([\#10895](https://github.com/matrix-org/synapse/issues/10895))
- Update utility code to handle C implementations of frozendict. ([\#10902](https://github.com/matrix-org/synapse/issues/10902))
- Drop old functionality which maintained database compatibility with Synapse versions before v1.31. ([\#10903](https://github.com/matrix-org/synapse/issues/10903))
- Clean-up configuration helper classes for the `ServerConfig` class. ([\#10915](https://github.com/matrix-org/synapse/issues/10915))
- Use direct references to config flags. ([\#10916](https://github.com/matrix-org/synapse/issues/10916), [\#10959](https://github.com/matrix-org/synapse/issues/10959), [\#10985](https://github.com/matrix-org/synapse/issues/10985))
- Clean up some of the federation event authentication code for clarity. ([\#10926](https://github.com/matrix-org/synapse/issues/10926), [\#10940](https://github.com/matrix-org/synapse/issues/10940), [\#10986](https://github.com/matrix-org/synapse/issues/10986), [\#10987](https://github.com/matrix-org/synapse/issues/10987), [\#10988](https://github.com/matrix-org/synapse/issues/10988), [\#11010](https://github.com/matrix-org/synapse/issues/11010), [\#11011](https://github.com/matrix-org/synapse/issues/11011))
- Refactor various parts of the codebase to use `RoomVersion` objects instead of room version identifier strings. ([\#10934](https://github.com/matrix-org/synapse/issues/10934))
- Refactor user directory tests in preparation for upcoming changes. ([\#10935](https://github.com/matrix-org/synapse/issues/10935))
- Include the event id in the logcontext when handling PDUs received over federation. ([\#10936](https://github.com/matrix-org/synapse/issues/10936))
- Fix logged errors in unit tests. ([\#10939](https://github.com/matrix-org/synapse/issues/10939))
- Fix a broken test to ensure that consent configuration works during registration. ([\#10945](https://github.com/matrix-org/synapse/issues/10945))
- Add type hints to filtering classes. ([\#10958](https://github.com/matrix-org/synapse/issues/10958))
- Add type-hint to `HomeserverTestcase.setup_test_homeserver`. ([\#10961](https://github.com/matrix-org/synapse/issues/10961))
- Fix the test utility function `create_room_as` so that `is_public=True` will explicitly set the `visibility` parameter of room creation requests to `public`. Contributed by @AndrewFerr. ([\#10963](https://github.com/matrix-org/synapse/issues/10963))
- Make the release script more robust and transparent. ([\#10966](https://github.com/matrix-org/synapse/issues/10966))
- Refactor [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` mega function into smaller handler functions. ([\#10974](https://github.com/matrix-org/synapse/issues/10974))
- Log stack traces when a missing opentracing span is detected. ([\#10983](https://github.com/matrix-org/synapse/issues/10983))
- Update GHA config to run tests against Python 3.10 and PostgreSQL 14. ([\#10992](https://github.com/matrix-org/synapse/issues/10992))
- Fix a long-standing bug where `ReadWriteLock`s could drop logging contexts on exit. ([\#10993](https://github.com/matrix-org/synapse/issues/10993))
- Add a `CODEOWNERS` file to automatically request reviews from the `@matrix-org/synapse-core` team on new pull requests. ([\#10994](https://github.com/matrix-org/synapse/issues/10994))
- Add further type hints to `synapse.state`. ([\#11004](https://github.com/matrix-org/synapse/issues/11004))
- Remove the deprecated `BaseHandler` object. ([\#11005](https://github.com/matrix-org/synapse/issues/11005))
- Bump mypy version for CI to 0.910, and pull in new type stubs for dependencies. ([\#11006](https://github.com/matrix-org/synapse/issues/11006))
- Fix CI to run the unit tests without optional deps. ([\#11017](https://github.com/matrix-org/synapse/issues/11017))
- Ensure that cache config tests do not share state. ([\#11019](https://github.com/matrix-org/synapse/issues/11019))
- Add additional type hints to `synapse.server_notices`. ([\#11021](https://github.com/matrix-org/synapse/issues/11021))
- Add additional type hints for `synapse.push`. ([\#11023](https://github.com/matrix-org/synapse/issues/11023))
- When installing the optional developer dependencies, also include the dependencies needed for type-checking and unit testing. ([\#11034](https://github.com/matrix-org/synapse/issues/11034))
- Remove unnecessary list comprehension from `synapse_port_db` to satisfy code style requirements. ([\#11043](https://github.com/matrix-org/synapse/issues/11043))
Synapse 1.44.0 (2021-10-05)
===========================


@@ -55,11 +55,8 @@ solutions. The hope is for Matrix to act as the building blocks for a new
generation of fully open and interoperable messaging and VoIP apps for the
internet.
-Synapse is a reference "homeserver" implementation of Matrix from the core
-development team at matrix.org, written in Python/Twisted. It is intended to
-showcase the concept of Matrix and let folks see the spec in the context of a
-codebase and let you run your own homeserver and generally help bootstrap the
-ecosystem.
+Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
+team, written in Python 3/Twisted.
In Matrix, every user runs one or more Matrix clients, which connect through to
a Matrix homeserver. The homeserver stores all their personal chat history and
@@ -301,7 +298,7 @@ to install using pip and a virtualenv::
    python3 -m venv ./env
    source ./env/bin/activate
-    pip install -e ".[all,test]"
+    pip install -e ".[all,dev]"
This will run a process of downloading and installing all the needed
dependencies into a virtual env. If any dependencies fail to install,

debian/changelog (vendored)

@@ -1,3 +1,13 @@
matrix-synapse-py3 (1.45.0~rc1) stable; urgency=medium
[ Nick @ Beeper ]
* Include an `update_synapse_database` script in the distribution.
[ Synapse Packaging team ]
* New synapse release 1.45.0~rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Oct 2021 10:46:27 +0100
matrix-synapse-py3 (1.44.0) stable; urgency=medium
* New synapse release 1.44.0.


@@ -3,3 +3,4 @@ opt/venvs/matrix-synapse/bin/register_new_matrix_user usr/bin/register_new_matri
opt/venvs/matrix-synapse/bin/synapse_port_db usr/bin/synapse_port_db
opt/venvs/matrix-synapse/bin/synapse_review_recent_signups usr/bin/synapse_review_recent_signups
opt/venvs/matrix-synapse/bin/synctl usr/bin/synctl
+opt/venvs/matrix-synapse/bin/update_synapse_database usr/bin/update_synapse_database


@@ -3,7 +3,7 @@
## Historical Note
This document was originally written to guide server admins through the upgrade
path towards Synapse 1.0. Specifically,
-[MSC1711](https://github.com/matrix-org/matrix-doc/blob/master/proposals/1711-x509-for-federation.md)
+[MSC1711](https://github.com/matrix-org/matrix-doc/blob/main/proposals/1711-x509-for-federation.md)
required that all servers present valid TLS certificates on their federation
API. Admins were encouraged to achieve compliance from version 0.99.0 (released
in February 2019) ahead of version 1.0 (released June 2019) enforcing the
@@ -282,7 +282,7 @@ coffin of the Perspectives project (which was already pretty dead). So, the
Spec Core Team decided that a better approach would be to mandate valid TLS
certificates for federation alongside the rest of the Web. More details can be
found in
-[MSC1711](https://github.com/matrix-org/matrix-doc/blob/master/proposals/1711-x509-for-federation.md#background-the-failure-of-the-perspectives-approach).
+[MSC1711](https://github.com/matrix-org/matrix-doc/blob/main/proposals/1711-x509-for-federation.md#background-the-failure-of-the-perspectives-approach).
This results in a breaking change, which is disruptive, but absolutely critical
for the security model. However, the existence of Let's Encrypt as a trivial


@@ -6,9 +6,9 @@ Please update any links to point to the new website instead.
## About
This directory currently holds a series of markdown files documenting how to install, use
-and develop Synapse, the reference Matrix homeserver. The documentation is readable directly
-from this repository, but it is recommended to instead browse through the
-[website](https://matrix-org.github.io/synapse) for easier discoverability.
+and develop Synapse. The documentation is readable directly from this repository, but it is
+recommended to instead browse through the [website](https://matrix-org.github.io/synapse) for
+easier discoverability.
## Adding to the documentation


@@ -50,7 +50,7 @@ setup a *virtualenv*, as follows:
cd path/where/you/have/cloned/the/repository
python3 -m venv ./env
source ./env/bin/activate
-pip install -e ".[all,lint,mypy,test]"
+pip install -e ".[all,dev]"
pip install tox
```
@@ -63,7 +63,7 @@ TBD
# 5. Get in touch.
-Join our developer community on Matrix: #synapse-dev:matrix.org !
+Join our developer community on Matrix: [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org)!
# 6. Pick an issue.


@@ -1,10 +1,9 @@
# How to test SAML as a developer without a server
-https://capriza.github.io/samling/samling.html (https://github.com/capriza/samling) is a great
-resource for being able to tinker with the SAML options within Synapse without needing to
-deploy and configure a complicated software stack.
+https://fujifish.github.io/samling/samling.html (https://github.com/fujifish/samling) is a great resource for being able to tinker with the
+SAML options within Synapse without needing to deploy and configure a complicated software stack.
-To make Synapse (and therefore Riot) use it:
+To make Synapse (and therefore Element) use it:
1. Use the samling.html URL above or deploy your own and visit the IdP Metadata tab.
2. Copy the XML to your clipboard.
@@ -26,9 +25,9 @@ To make Synapse (and therefore Riot) use it:
the dependencies are installed and ready to go.
7. Restart Synapse.
-Then in Riot:
+Then in Element:
-1. Visit the login page with a Riot pointing at your homeserver.
+1. Visit the login page and point Element towards your homeserver using the `public_baseurl` above.
2. Click the Single Sign-On button.
3. On the samling page, enter a Name Identifier and add a SAML Attribute for `uid=your_localpart`.
The response must also be signed.


@@ -19,6 +19,21 @@ either a `bool` to indicate whether the event must be rejected because of spam,
to indicate the event must be rejected because of spam and to give a rejection reason to
forward to clients.
### `user_may_join_room`
```python
async def user_may_join_room(user: str, room: str, is_invited: bool) -> bool
```
Called when a user is trying to join a room. The module must return a `bool` to indicate
whether the user can join the room. The user is represented by their Matrix user ID (e.g.
`@alice:example.com`) and the room is represented by its Matrix ID (e.g.
`!room:example.com`). The module is also given a boolean to indicate whether the user
currently has a pending invite in the room.
This callback isn't called if the join is performed by a server administrator, or in the
context of a room creation.
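For illustration, a module might implement this callback along the following lines. This is a minimal sketch, not Synapse code: the `BLOCKED_ROOMS` set is a hypothetical stand-in for configuration the module would load itself.
```python
# A minimal sketch of a user_may_join_room callback. BLOCKED_ROOMS is a
# hypothetical stand-in for configuration the module would load itself.
BLOCKED_ROOMS = {"!private:example.com"}

async def user_may_join_room(user: str, room: str, is_invited: bool) -> bool:
    # Users with a pending invite may always complete the join; everyone
    # else is kept out of the blocked rooms.
    if is_invited:
        return True
    return room not in BLOCKED_ROOMS
```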
### `user_may_invite`
```python
@@ -29,6 +44,41 @@ Called when processing an invitation. The module must return a `bool` indicating
the inviter can invite the invitee to the given room. Both inviter and invitee are
represented by their Matrix user ID (e.g. `@alice:example.com`).
### `user_may_send_3pid_invite`
```python
async def user_may_send_3pid_invite(
inviter: str,
medium: str,
address: str,
room_id: str,
) -> bool
```
Called when processing an invitation using a third-party identifier (also called a 3PID,
e.g. an email address or a phone number). The module must return a `bool` indicating
whether the inviter can invite the invitee to the given room.
The inviter is represented by their Matrix user ID (e.g. `@alice:example.com`), and the
invitee is represented by its medium (e.g. "email") and its address
(e.g. `alice@example.com`). See [the Matrix specification](https://matrix.org/docs/spec/appendices#pid-types)
for more information regarding third-party identifiers.
For example, a call to this callback to send an invitation to the email address
`alice@example.com` would look like this:
```python
await user_may_send_3pid_invite(
"@bob:example.com", # The inviter's user ID
"email", # The medium of the 3PID to invite
"alice@example.com", # The address of the 3PID to invite
"!some_room:example.com", # The ID of the room to send the invite into
)
```
**Note**: If the third-party identifier is already associated with a matrix user ID,
[`user_may_invite`](#user_may_invite) will be used instead.
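As a sketch (not part of Synapse itself), a module could use this callback to reject email invites to a blocklisted domain; the `DENIED_DOMAINS` set below is a hypothetical example value:
```python
# A minimal sketch of a user_may_send_3pid_invite callback. DENIED_DOMAINS
# is a hypothetical example value, not part of Synapse.
DENIED_DOMAINS = {"spam.example.net"}

async def user_may_send_3pid_invite(
    inviter: str,
    medium: str,
    address: str,
    room_id: str,
) -> bool:
    # Reject email invites whose address falls under a denied domain.
    if medium == "email":
        domain = address.rsplit("@", 1)[-1].lower()
        return domain not in DENIED_DOMAINS
    return True
```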
### `user_may_create_room`
```python


@@ -85,6 +85,15 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```
# Upgrading to v1.45.0
## Changes required to media storage provider modules when reading from the Synapse configuration object
Media storage provider modules that read from the Synapse configuration object (i.e. that
read the value of `hs.config.[...]`) now need to specify the configuration section they're
reading from. This means that if a module reads the value of e.g. `hs.config.media_store_path`,
it needs to replace it with `hs.config.media.media_store_path`.
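As a sketch of the change, using the `media_store_path` example above (where `hs` is the provider's `HomeServer` reference):
```python
# Before (Synapse < v1.45.0): options were read off the root config object.
media_store_path = hs.config.media_store_path

# After (Synapse >= v1.45.0): the same option is read through its
# configuration section, here "media".
media_store_path = hs.config.media.media_store_path
```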
# Upgrading to v1.44.0
## The URL preview cache is no longer mirrored to storage providers


@@ -1,7 +1,8 @@
# Registration Tokens
This API allows you to manage tokens which can be used to authenticate
-registration requests, as proposed in [MSC3231](https://github.com/govynnus/matrix-doc/blob/token-registration/proposals/3231-token-authenticated-registration.md).
+registration requests, as proposed in
+[MSC3231](https://github.com/matrix-org/matrix-doc/blob/main/proposals/3231-token-authenticated-registration.md).
To use it, you will need to enable the `registration_requires_token` config
option, and authenticate by providing an `access_token` for a server admin:
see [Admin API](../../usage/administration/admin_api).


@@ -1,4 +1,79 @@
# Introduction
-Welcome to the documentation repository for Synapse, the reference
-[Matrix](https://matrix.org) homeserver implementation.
+Welcome to the documentation repository for Synapse, a
+[Matrix](https://matrix.org) homeserver implementation developed by the matrix.org core
+team.
## Installing and using Synapse
This documentation covers topics for **installation**, **configuration** and
**maintenance** of your Synapse process:
* Learn how to [install](setup/installation.md) and
[configure](usage/configuration/index.html) your own instance, perhaps with [Single
Sign-On](usage/configuration/user_authentication/index.html).
* See how to [upgrade](upgrade.md) between Synapse versions.
* Administer your instance using the [Admin
API](usage/administration/admin_api/index.html), installing [pluggable
modules](modules/index.html), or by accessing the [manhole](manhole.md).
* Learn how to [read log lines](usage/administration/request_log.md), configure
[logging](usage/configuration/logging_sample_config.md) or set up [structured
logging](structured_logging.md).
* Scale Synapse through additional [worker processes](workers.md).
* Set up [monitoring and metrics](metrics-howto.md) to keep an eye on your
Synapse instance's performance.
## Developing on Synapse
Contributions are welcome! Synapse is primarily written in
[Python](https://python.org). As a developer, you may be interested in the
following documentation:
* Read the [Contributing Guide](development/contributing_guide.md). It is meant
to walk new contributors through the process of developing and submitting a
change to the Synapse codebase (which is [hosted on
GitHub](https://github.com/matrix-org/synapse)).
* Set up your [development
environment](development/contributing_guide.md#2-what-do-i-need), then learn
how to [lint](development/contributing_guide.md#run-the-linters) and
[test](development/contributing_guide.md#8-test-test-test) your code.
* Look at [the issue tracker](https://github.com/matrix-org/synapse/issues) for
bugs to fix or features to add. If you're new, it may be best to start with
those labeled [good first
issue](https://github.com/matrix-org/synapse/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22).
* Understand [how Synapse is
built](development/internal_documentation/index.html), how to [migrate
database schemas](development/database_schema.md), learn about
[federation](federate.md) and how to [set up a local
federation](federate.md#running-a-demo-federation-of-synapses) for development.
* We like to keep our `git` history clean. [Learn](development/git.md) how to
do so!
* And finally, contribute to this documentation! Its source is
[located here](https://github.com/matrix-org/synapse/tree/develop/docs).
## Donating to Synapse development
Want to help keep Synapse going but don't know how to code? Synapse is a
[Matrix.org Foundation](https://matrix.org) project. Consider becoming a
supporter on [Liberapay](https://liberapay.com/matrixdotorg),
[Patreon](https://patreon.com/matrixdotorg) or through
[PayPal](https://paypal.me/matrixdotorg) via a one-time donation.
If you are an organisation or enterprise and would like to sponsor development,
reach out to us over email at: support (at) matrix.org
## Reporting a security vulnerability
If you've found a security issue in Synapse or any other Matrix.org Foundation
project, please report it to us in accordance with our [Security Disclosure
Policy](https://www.matrix.org/security-disclosure-policy/). Thank you!

mypy.ini

@@ -96,15 +96,48 @@ files =
[mypy-synapse.handlers.*]
disallow_untyped_defs = True
+[mypy-synapse.push.*]
+disallow_untyped_defs = True
[mypy-synapse.rest.*]
disallow_untyped_defs = True
+[mypy-synapse.server_notices.*]
+disallow_untyped_defs = True
+[mypy-synapse.state.*]
+disallow_untyped_defs = True
+[mypy-synapse.storage.util.*]
+disallow_untyped_defs = True
+[mypy-synapse.streams.*]
+disallow_untyped_defs = True
[mypy-synapse.util.batching_queue]
disallow_untyped_defs = True
+[mypy-synapse.util.caches.cached_call]
+disallow_untyped_defs = True
[mypy-synapse.util.caches.dictionary_cache]
disallow_untyped_defs = True
+[mypy-synapse.util.caches.lrucache]
+disallow_untyped_defs = True
+[mypy-synapse.util.caches.response_cache]
+disallow_untyped_defs = True
+[mypy-synapse.util.caches.stream_change_cache]
+disallow_untyped_defs = True
+[mypy-synapse.util.caches.ttl_cache]
+disallow_untyped_defs = True
+[mypy-synapse.util.daemonize]
+disallow_untyped_defs = True
[mypy-synapse.util.file_consumer]
disallow_untyped_defs = True
@@ -141,6 +174,9 @@ disallow_untyped_defs = True
[mypy-synapse.util.msisdn]
disallow_untyped_defs = True
+[mypy-synapse.util.patch_inline_callbacks]
+disallow_untyped_defs = True
[mypy-synapse.util.ratelimitutils]
disallow_untyped_defs = True
@@ -162,98 +198,106 @@ disallow_untyped_defs = True
[mypy-synapse.util.wheel_timer]
disallow_untyped_defs = True
-[mypy-pymacaroons.*]
-ignore_missing_imports = True
+[mypy-synapse.util.versionstring]
+disallow_untyped_defs = True
-[mypy-zope]
+[mypy-tests.handlers.test_user_directory]
+disallow_untyped_defs = True
+[mypy-tests.storage.test_user_directory]
+disallow_untyped_defs = True
+;; Dependencies without annotations
+;; Before ignoring a module, check to see if type stubs are available.
+;; The `typeshed` project maintains stubs here:
+;; https://github.com/python/typeshed/tree/master/stubs
+;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
+;; which we can pull in as a dev dependency by adding to `setup.py`'s
+;; `CONDITIONAL_REQUIREMENTS["mypy"]` list.
+[mypy-authlib.*]
ignore_missing_imports = True
[mypy-bcrypt]
ignore_missing_imports = True
-[mypy-constantly]
-ignore_missing_imports = True
-[mypy-twisted.*]
-ignore_missing_imports = True
-[mypy-treq.*]
-ignore_missing_imports = True
-[mypy-hyperlink]
-ignore_missing_imports = True
-[mypy-h11]
-ignore_missing_imports = True
-[mypy-msgpack]
-ignore_missing_imports = True
-[mypy-opentracing]
-ignore_missing_imports = True
-[mypy-OpenSSL.*]
-ignore_missing_imports = True
-[mypy-netaddr]
-ignore_missing_imports = True
-[mypy-saml2.*]
-ignore_missing_imports = True
[mypy-canonicaljson]
ignore_missing_imports = True
-[mypy-jaeger_client.*]
+[mypy-constantly]
ignore_missing_imports = True
-[mypy-jsonschema]
-ignore_missing_imports = True
-[mypy-signedjson.*]
-ignore_missing_imports = True
-[mypy-prometheus_client.*]
-ignore_missing_imports = True
-[mypy-service_identity.*]
-ignore_missing_imports = True
[mypy-daemonize]
ignore_missing_imports = True
-[mypy-sentry_sdk]
+[mypy-h11]
ignore_missing_imports = True
-[mypy-PIL.*]
-ignore_missing_imports = True
-[mypy-lxml]
-ignore_missing_imports = True
-[mypy-jwt.*]
-ignore_missing_imports = True
-[mypy-authlib.*]
-ignore_missing_imports = True
-[mypy-rust_python_jaeger_reporter.*]
-ignore_missing_imports = True
-[mypy-nacl.*]
-ignore_missing_imports = True
[mypy-hiredis]
ignore_missing_imports = True
+[mypy-hyperlink]
+ignore_missing_imports = True
+[mypy-ijson.*]
+ignore_missing_imports = True
+[mypy-jaeger_client.*]
+ignore_missing_imports = True
[mypy-josepy.*]
ignore_missing_imports = True
-[mypy-pympler.*]
+[mypy-jwt.*]
ignore_missing_imports = True
+[mypy-lxml]
+ignore_missing_imports = True
+[mypy-msgpack]
+ignore_missing_imports = True
+[mypy-nacl.*]
+ignore_missing_imports = True
+[mypy-netaddr]
+ignore_missing_imports = True
+[mypy-opentracing]
+ignore_missing_imports = True
[mypy-phonenumbers.*]
ignore_missing_imports = True
-[mypy-ijson.*]
+[mypy-prometheus_client.*]
ignore_missing_imports = True
+[mypy-pymacaroons.*]
+ignore_missing_imports = True
+[mypy-pympler.*]
+ignore_missing_imports = True
+[mypy-rust_python_jaeger_reporter.*]
+ignore_missing_imports = True
+[mypy-saml2.*]
+ignore_missing_imports = True
+[mypy-sentry_sdk]
+ignore_missing_imports = True
+[mypy-service_identity.*]
+ignore_missing_imports = True
+[mypy-signedjson.*]
+ignore_missing_imports = True
+[mypy-treq.*]
+ignore_missing_imports = True
+[mypy-twisted.*]
+ignore_missing_imports = True
+[mypy-zope]
ignore_missing_imports = True


@@ -90,10 +90,10 @@ else
    "scripts/hash_password"
    "scripts/register_new_matrix_user"
    "scripts/synapse_port_db"
+    "scripts/update_synapse_database"
    "scripts-dev"
    "scripts-dev/build_debian_packages"
    "scripts-dev/sign_json"
-    "scripts-dev/update_database"
    "contrib" "synctl" "setup.py" "synmark" "stubs" ".ci"
)
fi


@@ -147,7 +147,7 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
echo "Running db background jobs..."
-scripts-dev/update_database --database-config "$SQLITE_CONFIG"
+scripts/update_synapse_database --database-config --run-background-updates "$SQLITE_CONFIG"
# Create the PostgreSQL database.
echo "Creating postgres database..."


@@ -35,6 +35,19 @@ from github import Github
from packaging import version
+def run_until_successful(command, *args, **kwargs):
+    while True:
+        completed_process = subprocess.run(command, *args, **kwargs)
+        exit_code = completed_process.returncode
+        if exit_code == 0:
+            # successful, so nothing more to do here.
+            return completed_process
+
+        print(f"The command {command!r} failed with exit code {exit_code}.")
+        print("Please try to correct the failure and then re-run.")
+        click.confirm("Try again?", abort=True)
+
@click.group()
def cli():
    """An interactive script to walk through the parts of creating a release.
@@ -197,7 +210,7 @@ def prepare():
        f.write(parsed_synapse_ast.dumps())
    # Generate changelogs
-    subprocess.run("python3 -m towncrier", shell=True)
+    run_until_successful("python3 -m towncrier", shell=True)
    # Generate debian changelogs
    if parsed_new_version.pre is not None:
@@ -209,11 +222,11 @@ def prepare():
    else:
        debian_version = new_version
-    subprocess.run(
+    run_until_successful(
        f'dch -M -v {debian_version} "New synapse release {debian_version}."',
        shell=True,
    )
-    subprocess.run('dch -M -r -D stable ""', shell=True)
+    run_until_successful('dch -M -r -D stable ""', shell=True)
    # Show the user the changes and ask if they want to edit the change log.
    repo.git.add("-u")
@@ -224,7 +237,7 @@ def prepare():
    # Commit the changes.
    repo.git.add("-u")
-    repo.git.commit(f"-m {new_version}")
+    repo.git.commit("-m", new_version)
    # We give the option to bail here in case the user wants to make sure things
    # are OK before pushing.
@@ -239,6 +252,8 @@ def prepare():
    # Otherwise, push and open the changelog in the browser.
    repo.git.push("-u", repo.remote().name, repo.active_branch.name)
+    print("Opening the changelog in your browser...")
+    print("Please ask others to give it a check.")
    click.launch(
        f"https://github.com/matrix-org/synapse/blob/{repo.active_branch.name}/CHANGES.md"
    )
@@ -290,7 +305,19 @@ def tag(gh_token: Optional[str]):
    # If no token was given, we bail here
    if not gh_token:
+        print("Launching the GitHub release page in your browser.")
+        print("Please correct the title and create a draft.")
+        if current_version.is_prerelease:
+            print("As this is an RC, remember to mark it as a pre-release!")
+        print("(by the way, this step can be automated by passing --gh-token,")
+        print("or one of the GH_TOKEN or GITHUB_TOKEN env vars.)")
        click.launch(f"https://github.com/matrix-org/synapse/releases/edit/{tag_name}")
+
+        print("Once done, you need to wait for the release assets to build.")
+        if click.confirm("Launch the release assets actions page?", default=True):
+            click.launch(
+                f"https://github.com/matrix-org/synapse/actions?query=branch%3A{tag_name}"
+            )
        return
    # Create a new draft release
@@ -305,6 +332,7 @@ def tag(gh_token: Optional[str]):
    )
    # Open the release and the actions where we are building the assets.
+    print("Launching the release page and the actions page.")
    click.launch(release.html_url)
    click.launch(
        f"https://github.com/matrix-org/synapse/actions?query=branch%3A{tag_name}"


@@ -215,7 +215,7 @@ class MockHomeserver:
    def __init__(self, config):
        self.clock = Clock(reactor)
        self.config = config
-        self.hostname = config.server_name
+        self.hostname = config.server.server_name
        self.version_string = "Synapse/" + get_version_string(synapse)
    def get_clock(self):
@@ -583,7 +583,7 @@ class Porter(object):
            return
        self.postgres_store = self.build_db_store(
-            self.hs_config.get_single_database()
+            self.hs_config.database.get_single_database()
        )
        await self.run_background_updates_on_postgres()
@@ -1069,7 +1069,7 @@ class CursesProgress(Progress):
        self.stdscr.addstr(0, 0, status, curses.A_BOLD)
-        max_len = max([len(t) for t in self.tables.keys()])
+        max_len = max(len(t) for t in self.tables.keys())
        left_margin = 5
        middle_space = 1


@@ -36,16 +36,35 @@ class MockHomeserver(HomeServer):
    def __init__(self, config, **kwargs):
        super(MockHomeserver, self).__init__(
-            config.server_name, reactor=reactor, config=config, **kwargs
+            config.server.server_name, reactor=reactor, config=config, **kwargs
        )
        self.version_string = "Synapse/" + get_version_string(synapse)
-if __name__ == "__main__":
+def run_background_updates(hs):
+    store = hs.get_datastore()
+
+    async def run_background_updates():
+        await store.db_pool.updates.run_background_updates(sleep=False)
+        # Stop the reactor to exit the script once every background update is run.
+        reactor.stop()
+
+    def run():
+        # Apply all background updates on the database.
+        defer.ensureDeferred(
+            run_as_background_process("background_updates", run_background_updates)
+        )
+
+    reactor.callWhenRunning(run)
+    reactor.run()
+
+def main():
    parser = argparse.ArgumentParser(
        description=(
-            "Updates a synapse database to the latest schema and runs background updates"
+            "Updates a synapse database to the latest schema and optionally runs background updates"
            " on it."
        )
    )
@@ -54,7 +73,13 @@ if __name__ == "__main__":
        "--database-config",
        type=argparse.FileType("r"),
        required=True,
-        help="A database config file for either a SQLite3 database or a PostgreSQL one.",
+        help="Synapse configuration file, giving the details of the database to be updated",
+    )
+    parser.add_argument(
+        "--run-background-updates",
+        action="store_true",
+        required=False,
+        help="run background updates after upgrading the database schema",
    )
    args = parser.parse_args()
@@ -82,19 +107,10 @@
    # Setup instantiates the store within the homeserver object and updates the
    # DB.
    hs.setup()
-    store = hs.get_datastore()
-    async def run_background_updates():
-        await store.db_pool.updates.run_background_updates(sleep=False)
-        # Stop the reactor to exit the script once every background update is run.
-        reactor.stop()
+    if args.run_background_updates:
+        run_background_updates(hs)
-    def run():
-        # Apply all background updates on the database.
-        defer.ensureDeferred(
-            run_as_background_process("background_updates", run_background_updates)
-        )
-    reactor.callWhenRunning(run)
-    reactor.run()
+if __name__ == "__main__":
+    main()


@@ -103,17 +103,17 @@ CONDITIONAL_REQUIREMENTS["lint"] = [
    "flake8",
]
-CONDITIONAL_REQUIREMENTS["dev"] = CONDITIONAL_REQUIREMENTS["lint"] + [
-    # The following are used by the release script
-    "click==7.1.2",
-    "redbaron==0.9.2",
-    "GitPython==3.1.14",
-    "commonmark==0.9.1",
-    "pygithub==1.55",
+CONDITIONAL_REQUIREMENTS["mypy"] = [
+    "mypy==0.910",
+    "mypy-zope==0.3.2",
+    "types-bleach>=4.1.0",
+    "types-jsonschema>=3.2.0",
+    "types-Pillow>=8.3.4",
+    "types-pyOpenSSL>=20.0.7",
+    "types-PyYAML>=5.4.10",
+    "types-setuptools>=57.4.0",
]
-CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
# Dependencies which are exclusively required by unit test code. This is
# NOT a list of all modules that are necessary to run the unit tests.
# Tests assume that all optional dependencies are installed.
@@ -121,6 +121,20 @@ CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
# parameterized_class decorator was introduced in parameterized 0.7.0
CONDITIONAL_REQUIREMENTS["test"] = ["parameterized>=0.7.0"]
+CONDITIONAL_REQUIREMENTS["dev"] = (
+    CONDITIONAL_REQUIREMENTS["lint"]
+    + CONDITIONAL_REQUIREMENTS["mypy"]
+    + CONDITIONAL_REQUIREMENTS["test"]
+    + [
+        # The following are used by the release script
+        "click==7.1.2",
+        "redbaron==0.9.2",
+        "GitPython==3.1.14",
+        "commonmark==0.9.1",
+        "pygithub==1.55",
+    ]
+)
setup(
    name="matrix-synapse",
    version=version,


@@ -47,7 +47,7 @@ try:
except ImportError:
    pass
-__version__ = "1.44.0"
+__version__ = "1.45.0rc1"
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
    # We import here so that we don't have to install a bunch of deps when


@@ -15,7 +15,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
-from typing import List
+from typing import (
+    TYPE_CHECKING,
+    Awaitable,
+    Container,
+    Iterable,
+    List,
+    Optional,
+    Set,
+    TypeVar,
+    Union,
+)
import jsonschema
from jsonschema import FormatChecker
@@ -23,7 +33,11 @@ from jsonschema import FormatChecker
from synapse.api.constants import EventContentFields
from synapse.api.errors import SynapseError
from synapse.api.presence import UserPresenceState
-from synapse.types import RoomID, UserID
+from synapse.events import EventBase
+from synapse.types import JsonDict, RoomID, UserID
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
FILTER_SCHEMA = {
    "additionalProperties": False,
@@ -120,25 +134,29 @@ USER_FILTER_SCHEMA = {
@FormatChecker.cls_checks("matrix_room_id")
-def matrix_room_id_validator(room_id_str):
+def matrix_room_id_validator(room_id_str: str) -> RoomID:
    return RoomID.from_string(room_id_str)
@FormatChecker.cls_checks("matrix_user_id")
-def matrix_user_id_validator(user_id_str):
+def matrix_user_id_validator(user_id_str: str) -> UserID:
    return UserID.from_string(user_id_str)
class Filtering:
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
        super().__init__()
        self.store = hs.get_datastore()
-    async def get_user_filter(self, user_localpart, filter_id):
+    async def get_user_filter(
+        self, user_localpart: str, filter_id: Union[int, str]
+    ) -> "FilterCollection":
        result = await self.store.get_user_filter(user_localpart, filter_id)
        return FilterCollection(result)
-    def add_user_filter(self, user_localpart, user_filter):
+    def add_user_filter(
+        self, user_localpart: str, user_filter: JsonDict
+    ) -> Awaitable[int]:
        self.check_valid_filter(user_filter)
        return self.store.add_user_filter(user_localpart, user_filter)
@@ -146,13 +164,13 @@ class Filtering:
    # replace_user_filter at some point? There's no REST API specified for
    # them however
-    def check_valid_filter(self, user_filter_json):
+    def check_valid_filter(self, user_filter_json: JsonDict) -> None:
        """Check if the provided filter is valid.
        This inspects all definitions contained within the filter.
        Args:
-            user_filter_json(dict): The filter
+            user_filter_json: The filter
        Raises:
            SynapseError: If the filter is not valid.
        """
@@ -167,8 +185,12 @@ class Filtering:
            raise SynapseError(400, str(e))
+
+# Filters work across events, presence EDUs, and account data.
+FilterEvent = TypeVar("FilterEvent", EventBase, UserPresenceState, JsonDict)
+
class FilterCollection:
-    def __init__(self, filter_json):
+    def __init__(self, filter_json: JsonDict):
        self._filter_json = filter_json
        room_filter_json = self._filter_json.get("room", {})
@@ -188,25 +210,25 @@ class FilterCollection:
        self.event_fields = filter_json.get("event_fields", [])
        self.event_format = filter_json.get("event_format", "client")
-    def __repr__(self):
+    def __repr__(self) -> str:
        return "<FilterCollection %s>" % (json.dumps(self._filter_json),)
-    def get_filter_json(self):
+    def get_filter_json(self) -> JsonDict:
        return self._filter_json
-    def timeline_limit(self):
+    def timeline_limit(self) -> int:
        return self._room_timeline_filter.limit()
-    def presence_limit(self):
+    def presence_limit(self) -> int:
        return self._presence_filter.limit()
-    def ephemeral_limit(self):
+    def ephemeral_limit(self) -> int:
        return self._room_ephemeral_filter.limit()
-    def lazy_load_members(self):
+    def lazy_load_members(self) -> bool:
        return self._room_state_filter.lazy_load_members()
-    def include_redundant_members(self):
+    def include_redundant_members(self) -> bool:
        return self._room_state_filter.include_redundant_members()
    def filter_presence(self, events):
@@ -218,29 +240,31 @@
    def filter_room_state(self, events):
        return self._room_state_filter.filter(self._room_filter.filter(events))
-    def filter_room_timeline(self, events):
+    def filter_room_timeline(self, events: Iterable[FilterEvent]) -> List[FilterEvent]:
        return self._room_timeline_filter.filter(self._room_filter.filter(events))
-    def filter_room_ephemeral(self, events):
+    def filter_room_ephemeral(self, events: Iterable[FilterEvent]) -> List[FilterEvent]:
        return self._room_ephemeral_filter.filter(self._room_filter.filter(events))
-    def filter_room_account_data(self, events):
+    def filter_room_account_data(
+        self, events: Iterable[FilterEvent]
+    ) -> List[FilterEvent]:
        return self._room_account_data.filter(self._room_filter.filter(events))
-    def blocks_all_presence(self):
+    def blocks_all_presence(self) -> bool:
        return (
            self._presence_filter.filters_all_types()
            or self._presence_filter.filters_all_senders()
        )
-    def blocks_all_room_ephemeral(self):
+    def blocks_all_room_ephemeral(self) -> bool:
        return (
            self._room_ephemeral_filter.filters_all_types()
            or self._room_ephemeral_filter.filters_all_senders()
            or self._room_ephemeral_filter.filters_all_rooms()
        )
-    def blocks_all_room_timeline(self):
+    def blocks_all_room_timeline(self) -> bool:
        return (
            self._room_timeline_filter.filters_all_types()
            or self._room_timeline_filter.filters_all_senders()
@@ -249,7 +273,7 @@
class Filter:
-    def __init__(self, filter_json):
+    def __init__(self, filter_json: JsonDict):
        self.filter_json = filter_json
        self.types = self.filter_json.get("types", None)
@@ -266,20 +290,20 @@
        self.labels = self.filter_json.get("org.matrix.labels", None)
        self.not_labels = self.filter_json.get("org.matrix.not_labels", [])
-    def filters_all_types(self):
+    def filters_all_types(self) -> bool:
        return "*" in self.not_types
-    def filters_all_senders(self):
+    def filters_all_senders(self) -> bool:
        return "*" in self.not_senders
-    def filters_all_rooms(self):
+    def filters_all_rooms(self) -> bool:
        return "*" in self.not_rooms
-    def check(self, event):
+    def check(self, event: FilterEvent) -> bool:
        """Checks whether the filter matches the given event.
        Returns:
-            bool: True if the event matches
+            True if the event matches
        """
        # We usually get the full "events" as dictionaries coming through,
        # except for presence which actually gets passed around as its own
@ -305,18 +329,25 @@ class Filter:
room_id = event.get("room_id", None) room_id = event.get("room_id", None)
ev_type = event.get("type", None) ev_type = event.get("type", None)
content = event.get("content", {}) content = event.get("content") or {}
# check if there is a string url field in the content for filtering purposes # check if there is a string url field in the content for filtering purposes
contains_url = isinstance(content.get("url"), str) contains_url = isinstance(content.get("url"), str)
labels = content.get(EventContentFields.LABELS, []) labels = content.get(EventContentFields.LABELS, [])
return self.check_fields(room_id, sender, ev_type, labels, contains_url) return self.check_fields(room_id, sender, ev_type, labels, contains_url)
def check_fields(self, room_id, sender, event_type, labels, contains_url): def check_fields(
self,
room_id: Optional[str],
sender: Optional[str],
event_type: Optional[str],
labels: Container[str],
contains_url: bool,
) -> bool:
"""Checks whether the filter matches the given event fields. """Checks whether the filter matches the given event fields.
Returns: Returns:
bool: True if the event fields match True if the event fields match
""" """
literal_keys = { literal_keys = {
"rooms": lambda v: room_id == v, "rooms": lambda v: room_id == v,
@ -343,14 +374,14 @@ class Filter:
return True return True
def filter_rooms(self, room_ids): def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]:
"""Apply the 'rooms' filter to a given list of rooms. """Apply the 'rooms' filter to a given list of rooms.
Args: Args:
room_ids (list): A list of room_ids. room_ids: A list of room_ids.
Returns: Returns:
list: A list of room_ids that match the filter A list of room_ids that match the filter
""" """
room_ids = set(room_ids) room_ids = set(room_ids)
@ -363,23 +394,23 @@ class Filter:
return room_ids return room_ids
def filter(self, events): def filter(self, events: Iterable[FilterEvent]) -> List[FilterEvent]:
return list(filter(self.check, events)) return list(filter(self.check, events))
def limit(self): def limit(self) -> int:
return self.filter_json.get("limit", 10) return self.filter_json.get("limit", 10)
def lazy_load_members(self): def lazy_load_members(self) -> bool:
return self.filter_json.get("lazy_load_members", False) return self.filter_json.get("lazy_load_members", False)
def include_redundant_members(self): def include_redundant_members(self) -> bool:
return self.filter_json.get("include_redundant_members", False) return self.filter_json.get("include_redundant_members", False)
def with_room_ids(self, room_ids): def with_room_ids(self, room_ids: Iterable[str]) -> "Filter":
"""Returns a new filter with the given room IDs appended. """Returns a new filter with the given room IDs appended.
Args: Args:
room_ids (iterable[unicode]): The room_ids to add room_ids: The room_ids to add
Returns: Returns:
filter: A new filter including the given rooms and the old filter: A new filter including the given rooms and the old
@ -390,8 +421,8 @@ class Filter:
return newFilter return newFilter
def _matches_wildcard(actual_value, filter_value): def _matches_wildcard(actual_value: Optional[str], filter_value: str) -> bool:
if filter_value.endswith("*"): if filter_value.endswith("*") and isinstance(actual_value, str):
type_prefix = filter_value[:-1] type_prefix = filter_value[:-1]
return actual_value.startswith(type_prefix) return actual_value.startswith(type_prefix)
else: else:
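A minimal standalone sketch of the wildcard rule used by the typed Filter above; the function name here is illustrative, not part of the diff:

def matches_wildcard(actual_value, filter_value):
    # "m.room.*" matches any value sharing the prefix; otherwise require
    # an exact match. A None actual_value never matches a wildcard, which
    # is what the added isinstance() guard above ensures.
    if filter_value.endswith("*") and isinstance(actual_value, str):
        return actual_value.startswith(filter_value[:-1])
    return actual_value == filter_value

assert matches_wildcard("m.room.message", "m.room.*")
assert not matches_wildcard(None, "m.room.*")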

View file

@ -17,6 +17,7 @@ from collections import OrderedDict
from typing import Hashable, Optional, Tuple from typing import Hashable, Optional, Tuple
from synapse.api.errors import LimitExceededError from synapse.api.errors import LimitExceededError
from synapse.config.ratelimiting import RateLimitConfig
from synapse.storage.databases.main import DataStore from synapse.storage.databases.main import DataStore
from synapse.types import Requester from synapse.types import Requester
from synapse.util import Clock from synapse.util import Clock
@ -233,3 +234,88 @@ class Ratelimiter:
raise LimitExceededError( raise LimitExceededError(
retry_after_ms=int(1000 * (time_allowed - time_now_s)) retry_after_ms=int(1000 * (time_allowed - time_now_s))
) )
class RequestRatelimiter:
def __init__(
self,
store: DataStore,
clock: Clock,
rc_message: RateLimitConfig,
rc_admin_redaction: Optional[RateLimitConfig],
):
self.store = store
self.clock = clock
# The rate_hz and burst_count are overridden on a per-user basis
self.request_ratelimiter = Ratelimiter(
store=self.store, clock=self.clock, rate_hz=0, burst_count=0
)
self._rc_message = rc_message
# Check whether ratelimiting room admin message redaction is enabled
# by the presence of rate limits in the config
if rc_admin_redaction:
self.admin_redaction_ratelimiter: Optional[Ratelimiter] = Ratelimiter(
store=self.store,
clock=self.clock,
rate_hz=rc_admin_redaction.per_second,
burst_count=rc_admin_redaction.burst_count,
)
else:
self.admin_redaction_ratelimiter = None
async def ratelimit(
self,
requester: Requester,
update: bool = True,
is_admin_redaction: bool = False,
) -> None:
"""Ratelimits requests.
Args:
requester: The user making the request.
update: Whether to record that a request is being processed.
Set to False when doing multiple checks for one request (e.g.
to check up front if we would reject the request), and set to
True for the last call for a given request.
is_admin_redaction: Whether this is a room admin/moderator
redacting an event. If so then we may apply different
ratelimits depending on config.
Raises:
LimitExceededError if the request should be ratelimited
"""
user_id = requester.user.to_string()
# The AS user itself is never rate limited.
app_service = self.store.get_app_service_by_user_id(user_id)
if app_service is not None:
return # do not ratelimit app service senders
messages_per_second = self._rc_message.per_second
burst_count = self._rc_message.burst_count
# Check if there is a per user override in the DB.
override = await self.store.get_ratelimit_for_user(user_id)
if override:
# If overridden with a null Hz then ratelimiting has been entirely
# disabled for the user
if not override.messages_per_second:
return
messages_per_second = override.messages_per_second
burst_count = override.burst_count
if is_admin_redaction and self.admin_redaction_ratelimiter:
# If we have separate config for admin redactions, use a separate
# ratelimiter so that user_ids don't clash
await self.admin_redaction_ratelimiter.ratelimit(requester, update=update)
else:
# Override rate and burst count per-user
await self.request_ratelimiter.ratelimit(
requester,
rate_hz=messages_per_second,
burst_count=burst_count,
update=update,
)
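A hedged sketch of the override resolution that ratelimit() performs above; the helper is illustrative, but the store call and attribute names mirror the diff:

async def resolve_message_limits(store, rc_message, user_id):
    # Per-user DB overrides win over the rc_message config; a null/zero
    # messages_per_second disables ratelimiting for that user entirely.
    override = await store.get_ratelimit_for_user(user_id)
    if override:
        if not override.messages_per_second:
            return None  # no limits apply to this user
        return override.messages_per_second, override.burst_count
    return rc_message.per_second, rc_message.burst_count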

View file

@ -86,11 +86,11 @@ def start_worker_reactor(appname, config, run_command=reactor.run):
start_reactor( start_reactor(
appname, appname,
soft_file_limit=config.soft_file_limit, soft_file_limit=config.server.soft_file_limit,
gc_thresholds=config.gc_thresholds, gc_thresholds=config.server.gc_thresholds,
pid_file=config.worker.worker_pid_file, pid_file=config.worker.worker_pid_file,
daemonize=config.worker.worker_daemonize, daemonize=config.worker.worker_daemonize,
print_pidfile=config.print_pidfile, print_pidfile=config.server.print_pidfile,
logger=logger, logger=logger,
run_command=run_command, run_command=run_command,
) )
@ -298,10 +298,10 @@ def refresh_certificate(hs):
Refresh the TLS certificates that Synapse is using by re-reading them from Refresh the TLS certificates that Synapse is using by re-reading them from
disk and updating the TLS context factories to use them. disk and updating the TLS context factories to use them.
""" """
if not hs.config.has_tls_listener(): if not hs.config.server.has_tls_listener():
return return
hs.config.read_certificate_from_disk() hs.config.tls.read_certificate_from_disk()
hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config) hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config)
if hs._listening_services: if hs._listening_services:

View file

@ -195,14 +195,14 @@ def start(config_options):
config.logging.no_redirect_stdio = True config.logging.no_redirect_stdio = True
# Explicitly disable background processes # Explicitly disable background processes
config.update_user_directory = False config.server.update_user_directory = False
config.worker.run_background_tasks = False config.worker.run_background_tasks = False
config.start_pushers = False config.worker.start_pushers = False
config.pusher_shard_config.instances = [] config.pusher_shard_config.instances = []
config.send_federation = False config.worker.send_federation = False
config.federation_shard_config.instances = [] config.federation_shard_config.instances = []
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
ss = AdminCmdServer( ss = AdminCmdServer(
config.server.server_name, config.server.server_name,

View file

@ -462,7 +462,7 @@ def start(config_options):
# For other worker types we force this to off. # For other worker types we force this to off.
config.server.update_user_directory = False config.server.update_user_directory = False
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage
if config.server.gc_seconds: if config.server.gc_seconds:

View file

@ -234,7 +234,7 @@ class SynapseHomeServer(HomeServer):
) )
if name in ["media", "federation", "client"]: if name in ["media", "federation", "client"]:
if self.config.media.enable_media_repo: if self.config.server.enable_media_repo:
media_repo = self.get_media_repository_resource() media_repo = self.get_media_repository_resource()
resources.update( resources.update(
{MEDIA_PREFIX: media_repo, LEGACY_MEDIA_PREFIX: media_repo} {MEDIA_PREFIX: media_repo, LEGACY_MEDIA_PREFIX: media_repo}
@ -248,7 +248,7 @@ class SynapseHomeServer(HomeServer):
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self) resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
if name == "webclient": if name == "webclient":
webclient_loc = self.config.web_client_location webclient_loc = self.config.server.web_client_location
if webclient_loc is None: if webclient_loc is None:
logger.warning( logger.warning(
@ -343,7 +343,7 @@ def setup(config_options):
# generating config files and shouldn't try to continue. # generating config files and shouldn't try to continue.
sys.exit(0) sys.exit(0)
events.USE_FROZEN_DICTS = config.use_frozen_dicts events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage
if config.server.gc_seconds: if config.server.gc_seconds:
@ -439,11 +439,11 @@ def run(hs):
_base.start_reactor( _base.start_reactor(
"synapse-homeserver", "synapse-homeserver",
soft_file_limit=hs.config.soft_file_limit, soft_file_limit=hs.config.server.soft_file_limit,
gc_thresholds=hs.config.gc_thresholds, gc_thresholds=hs.config.server.gc_thresholds,
pid_file=hs.config.pid_file, pid_file=hs.config.server.pid_file,
daemonize=hs.config.daemonize, daemonize=hs.config.server.daemonize,
print_pidfile=hs.config.print_pidfile, print_pidfile=hs.config.server.print_pidfile,
logger=logger, logger=logger,
) )

View file

@ -74,7 +74,7 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
store = hs.get_datastore() store = hs.get_datastore()
stats["homeserver"] = hs.config.server.server_name stats["homeserver"] = hs.config.server.server_name
stats["server_context"] = hs.config.server_context stats["server_context"] = hs.config.server.server_context
stats["timestamp"] = now stats["timestamp"] = now
stats["uptime_seconds"] = uptime stats["uptime_seconds"] = uptime
version = sys.version_info version = sys.version_info
@ -171,7 +171,7 @@ def start_phone_stats_home(hs):
current_mau_count_by_service = {} current_mau_count_by_service = {}
reserved_users = () reserved_users = ()
store = hs.get_datastore() store = hs.get_datastore()
if hs.config.limit_usage_by_mau or hs.config.mau_stats_only: if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
current_mau_count = await store.get_monthly_active_count() current_mau_count = await store.get_monthly_active_count()
current_mau_count_by_service = ( current_mau_count_by_service = (
await store.get_monthly_active_count_by_service() await store.get_monthly_active_count_by_service()
@ -183,9 +183,9 @@ def start_phone_stats_home(hs):
current_mau_by_service_gauge.labels(app_service).set(float(count)) current_mau_by_service_gauge.labels(app_service).set(float(count))
registered_reserved_users_mau_gauge.set(float(len(reserved_users))) registered_reserved_users_mau_gauge.set(float(len(reserved_users)))
max_mau_gauge.set(float(hs.config.max_mau_value)) max_mau_gauge.set(float(hs.config.server.max_mau_value))
if hs.config.limit_usage_by_mau or hs.config.mau_stats_only: if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
generate_monthly_active_users() generate_monthly_active_users()
clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000) clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
# End of monthly active user settings # End of monthly active user settings

View file

@ -118,21 +118,6 @@ class Config:
"synapse", "res/templates" "synapse", "res/templates"
) )
def __getattr__(self, item: str) -> Any:
"""
Try and fetch a configuration option that does not exist on this class.
This is so that existing configs that rely on `self.value`, where value
is actually from a different config section, continue to work.
"""
if item in ["generate_config_section", "read_config"]:
raise AttributeError(item)
if self.root is None:
raise AttributeError(item)
else:
return self.root._get_unclassed_config(self.section, item)
@staticmethod @staticmethod
def parse_size(value): def parse_size(value):
if isinstance(value, int): if isinstance(value, int):
@ -289,7 +274,9 @@ class Config:
env.filters.update( env.filters.update(
{ {
"format_ts": _format_ts_filter, "format_ts": _format_ts_filter,
"mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl), "mxc_to_http": _create_mxc_to_http_filter(
self.root.server.public_baseurl
),
} }
) )
@ -311,8 +298,6 @@ class RootConfig:
config_classes = [] config_classes = []
def __init__(self): def __init__(self):
self._configs = OrderedDict()
for config_class in self.config_classes: for config_class in self.config_classes:
if config_class.section is None: if config_class.section is None:
raise ValueError("%r requires a section name" % (config_class,)) raise ValueError("%r requires a section name" % (config_class,))
@ -321,42 +306,7 @@ class RootConfig:
conf = config_class(self) conf = config_class(self)
except Exception as e: except Exception as e:
raise Exception("Failed making %s: %r" % (config_class.section, e)) raise Exception("Failed making %s: %r" % (config_class.section, e))
self._configs[config_class.section] = conf setattr(self, config_class.section, conf)
def __getattr__(self, item: str) -> Any:
"""
Redirect lookups on this object either to config objects, or values on
config objects, so that `config.tls.blah` works, as well as legacy uses
of things like `config.server_name`. It will first look up the config
section name, and then values on those config classes.
"""
if item in self._configs.keys():
return self._configs[item]
return self._get_unclassed_config(None, item)
def _get_unclassed_config(self, asking_section: Optional[str], item: str):
"""
Fetch a config value from one of the instantiated config classes that
has not been fetched directly.
Args:
asking_section: If this check is coming from a Config child, which
one? This section will not be asked if it has the value.
item: The configuration value key.
Raises:
AttributeError if no config classes have the config key. The body
will contain what sections were checked.
"""
for key, val in self._configs.items():
if key == asking_section:
continue
if item in dir(val):
return getattr(val, item)
raise AttributeError(item, "not found in %s" % (list(self._configs.keys()),))
def invoke_all(self, func_name: str, *args, **kwargs) -> MutableMapping[str, Any]: def invoke_all(self, func_name: str, *args, **kwargs) -> MutableMapping[str, Any]:
""" """
@ -373,9 +323,11 @@ class RootConfig:
""" """
res = OrderedDict() res = OrderedDict()
for name, config in self._configs.items(): for config_class in self.config_classes:
config = getattr(self, config_class.section)
if hasattr(config, func_name): if hasattr(config, func_name):
res[name] = getattr(config, func_name)(*args, **kwargs) res[config_class.section] = getattr(config, func_name)(*args, **kwargs)
return res return res
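With the __getattr__ fallbacks gone, every option must now be read through the section that owns it. A sketch of the resulting access pattern (attribute names as they appear elsewhere in this diff):

# before: implicit cross-section lookups, now removed
# server_name = config.server_name             # AttributeError after this change
# after: explicit section attributes, set via setattr() in __init__
server_name = config.server.server_name
pid_file = config.server.pid_file
verify_certs = config.tls.federation_verify_certificates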

View file

@ -76,7 +76,7 @@ class AccountValidityConfig(Config):
) )
if self.account_validity_renew_by_email_enabled: if self.account_validity_renew_by_email_enabled:
if not self.public_baseurl: if not self.root.server.public_baseurl:
raise ConfigError("Can't send renewal emails without 'public_baseurl'") raise ConfigError("Can't send renewal emails without 'public_baseurl'")
# Load account validity templates. # Load account validity templates.

View file

@ -37,7 +37,7 @@ class CasConfig(Config):
# The public baseurl is required because it is used by the redirect # The public baseurl is required because it is used by the redirect
# template. # template.
public_baseurl = self.public_baseurl public_baseurl = self.root.server.public_baseurl
if not public_baseurl: if not public_baseurl:
raise ConfigError("cas_config requires a public_baseurl to be set") raise ConfigError("cas_config requires a public_baseurl to be set")

View file

@ -19,7 +19,6 @@ import email.utils
import logging import logging
import os import os
from enum import Enum from enum import Enum
from typing import Optional
import attr import attr
@ -135,7 +134,7 @@ class EmailConfig(Config):
# msisdn is currently always remote while Synapse does not support any method of # msisdn is currently always remote while Synapse does not support any method of
# sending SMS messages # sending SMS messages
ThreepidBehaviour.REMOTE ThreepidBehaviour.REMOTE
if self.account_threepid_delegate_email if self.root.registration.account_threepid_delegate_email
else ThreepidBehaviour.LOCAL else ThreepidBehaviour.LOCAL
) )
# Prior to Synapse v1.4.0, there was another option that defined whether Synapse would # Prior to Synapse v1.4.0, there was another option that defined whether Synapse would
@ -144,7 +143,7 @@ class EmailConfig(Config):
# identity server in the process. # identity server in the process.
self.using_identity_server_from_trusted_list = False self.using_identity_server_from_trusted_list = False
if ( if (
not self.account_threepid_delegate_email not self.root.registration.account_threepid_delegate_email
and config.get("trust_identity_server_for_password_resets", False) is True and config.get("trust_identity_server_for_password_resets", False) is True
): ):
# Use the first entry in self.trusted_third_party_id_servers instead # Use the first entry in self.trusted_third_party_id_servers instead
@ -156,7 +155,7 @@ class EmailConfig(Config):
# trusted_third_party_id_servers does not contain a scheme whereas # trusted_third_party_id_servers does not contain a scheme whereas
# account_threepid_delegate_email is expected to. Presume https # account_threepid_delegate_email is expected to. Presume https
self.account_threepid_delegate_email: Optional[str] = ( self.root.registration.account_threepid_delegate_email = (
"https://" + first_trusted_identity_server "https://" + first_trusted_identity_server
) )
self.using_identity_server_from_trusted_list = True self.using_identity_server_from_trusted_list = True
@ -335,7 +334,7 @@ class EmailConfig(Config):
"client_base_url", email_config.get("riot_base_url", None) "client_base_url", email_config.get("riot_base_url", None)
) )
if self.account_validity_renew_by_email_enabled: if self.root.account_validity.account_validity_renew_by_email_enabled:
expiry_template_html = email_config.get( expiry_template_html = email_config.get(
"expiry_template_html", "notice_expiry.html" "expiry_template_html", "notice_expiry.html"
) )

View file

@ -145,11 +145,13 @@ class KeyConfig(Config):
# list of TrustedKeyServer objects # list of TrustedKeyServer objects
self.key_servers = list( self.key_servers = list(
_parse_key_servers(key_servers, self.federation_verify_certificates) _parse_key_servers(
key_servers, self.root.tls.federation_verify_certificates
)
) )
self.macaroon_secret_key = config.get( self.macaroon_secret_key = config.get(
"macaroon_secret_key", self.registration_shared_secret "macaroon_secret_key", self.root.registration.registration_shared_secret
) )
if not self.macaroon_secret_key: if not self.macaroon_secret_key:

View file

@ -58,7 +58,7 @@ class OIDCConfig(Config):
"Multiple OIDC providers have the idp_id %r." % idp_id "Multiple OIDC providers have the idp_id %r." % idp_id
) )
public_baseurl = self.public_baseurl public_baseurl = self.root.server.public_baseurl
if public_baseurl is None: if public_baseurl is None:
raise ConfigError("oidc_config requires a public_baseurl to be set") raise ConfigError("oidc_config requires a public_baseurl to be set")
self.oidc_callback_url = public_baseurl + "_synapse/client/oidc/callback" self.oidc_callback_url = public_baseurl + "_synapse/client/oidc/callback"

View file

@ -45,7 +45,10 @@ class RegistrationConfig(Config):
account_threepid_delegates = config.get("account_threepid_delegates") or {} account_threepid_delegates = config.get("account_threepid_delegates") or {}
self.account_threepid_delegate_email = account_threepid_delegates.get("email") self.account_threepid_delegate_email = account_threepid_delegates.get("email")
self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn") self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
if self.account_threepid_delegate_msisdn and not self.public_baseurl: if (
self.account_threepid_delegate_msisdn
and not self.root.server.public_baseurl
):
raise ConfigError( raise ConfigError(
"The configuration option `public_baseurl` is required if " "The configuration option `public_baseurl` is required if "
"`account_threepid_delegate.msisdn` is set, such that " "`account_threepid_delegate.msisdn` is set, such that "
@ -85,7 +88,7 @@ class RegistrationConfig(Config):
if mxid_localpart: if mxid_localpart:
# Convert the localpart to a full mxid. # Convert the localpart to a full mxid.
self.auto_join_user_id = UserID( self.auto_join_user_id = UserID(
mxid_localpart, self.server_name mxid_localpart, self.root.server.server_name
).to_string() ).to_string()
if self.autocreate_auto_join_rooms: if self.autocreate_auto_join_rooms:

View file

@ -95,7 +95,7 @@ class ContentRepositoryConfig(Config):
# Only enable the media repo if either the media repo is enabled or the # Only enable the media repo if either the media repo is enabled or the
# current worker app is the media repo. # current worker app is the media repo.
if ( if (
self.enable_media_repo is False self.root.server.enable_media_repo is False
and config.get("worker_app") != "synapse.app.media_repository" and config.get("worker_app") != "synapse.app.media_repository"
): ):
self.can_load_media_repo = False self.can_load_media_repo = False

View file

@ -199,7 +199,7 @@ class SAML2Config(Config):
""" """
import saml2 import saml2
public_baseurl = self.public_baseurl public_baseurl = self.root.server.public_baseurl
if public_baseurl is None: if public_baseurl is None:
raise ConfigError("saml2_config requires a public_baseurl to be set") raise ConfigError("saml2_config requires a public_baseurl to be set")

View file

@ -1,6 +1,4 @@
# Copyright 2014-2016 OpenMarket Ltd # Copyright 2014-2021 The Matrix.org Foundation C.I.C.
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -19,7 +17,7 @@ import logging
import os.path import os.path
import re import re
from textwrap import indent from textwrap import indent
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
import attr import attr
import yaml import yaml
@ -184,49 +182,74 @@ KNOWN_RESOURCES = {
@attr.s(frozen=True) @attr.s(frozen=True)
class HttpResourceConfig: class HttpResourceConfig:
names = attr.ib( names: List[str] = attr.ib(
type=List[str],
factory=list, factory=list,
validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)), # type: ignore validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)), # type: ignore
) )
compress = attr.ib( compress: bool = attr.ib(
type=bool,
default=False, default=False,
validator=attr.validators.optional(attr.validators.instance_of(bool)), # type: ignore[arg-type] validator=attr.validators.optional(attr.validators.instance_of(bool)), # type: ignore[arg-type]
) )
@attr.s(frozen=True) @attr.s(slots=True, frozen=True, auto_attribs=True)
class HttpListenerConfig: class HttpListenerConfig:
"""Object describing the http-specific parts of the config of a listener""" """Object describing the http-specific parts of the config of a listener"""
x_forwarded = attr.ib(type=bool, default=False) x_forwarded: bool = False
resources = attr.ib(type=List[HttpResourceConfig], factory=list) resources: List[HttpResourceConfig] = attr.ib(factory=list)
additional_resources = attr.ib(type=Dict[str, dict], factory=dict) additional_resources: Dict[str, dict] = attr.ib(factory=dict)
tag = attr.ib(type=str, default=None) tag: Optional[str] = None
@attr.s(frozen=True) @attr.s(slots=True, frozen=True, auto_attribs=True)
class ListenerConfig: class ListenerConfig:
"""Object describing the configuration of a single listener.""" """Object describing the configuration of a single listener."""
port = attr.ib(type=int, validator=attr.validators.instance_of(int)) port: int = attr.ib(validator=attr.validators.instance_of(int))
bind_addresses = attr.ib(type=List[str]) bind_addresses: List[str]
type = attr.ib(type=str, validator=attr.validators.in_(KNOWN_LISTENER_TYPES)) type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
tls = attr.ib(type=bool, default=False) tls: bool = False
# http_options is only populated if type=http # http_options is only populated if type=http
http_options = attr.ib(type=Optional[HttpListenerConfig], default=None) http_options: Optional[HttpListenerConfig] = None
@attr.s(frozen=True) @attr.s(slots=True, frozen=True, auto_attribs=True)
class ManholeConfig: class ManholeConfig:
"""Object describing the configuration of the manhole""" """Object describing the configuration of the manhole"""
username = attr.ib(type=str, validator=attr.validators.instance_of(str)) username: str = attr.ib(validator=attr.validators.instance_of(str))
password = attr.ib(type=str, validator=attr.validators.instance_of(str)) password: str = attr.ib(validator=attr.validators.instance_of(str))
priv_key = attr.ib(type=Optional[Key]) priv_key: Optional[Key]
pub_key = attr.ib(type=Optional[Key]) pub_key: Optional[Key]
@attr.s(slots=True, frozen=True, auto_attribs=True)
class RetentionConfig:
"""Object describing the configuration of the manhole"""
interval: int
shortest_max_lifetime: Optional[int]
longest_max_lifetime: Optional[int]
@attr.s(frozen=True)
class LimitRemoteRoomsConfig:
enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
complexity: Union[float, int] = attr.ib(
validator=attr.validators.instance_of(
(float, int) # type: ignore[arg-type] # noqa
),
default=1.0,
)
complexity_error: str = attr.ib(
validator=attr.validators.instance_of(str),
default=ROOM_COMPLEXITY_TOO_GREAT,
)
admins_can_join: bool = attr.ib(
validator=attr.validators.instance_of(bool), default=False
)
class ServerConfig(Config): class ServerConfig(Config):
@ -519,7 +542,7 @@ class ServerConfig(Config):
" greater than 'allowed_lifetime_max'" " greater than 'allowed_lifetime_max'"
) )
self.retention_purge_jobs: List[Dict[str, Optional[int]]] = [] self.retention_purge_jobs: List[RetentionConfig] = []
for purge_job_config in retention_config.get("purge_jobs", []): for purge_job_config in retention_config.get("purge_jobs", []):
interval_config = purge_job_config.get("interval") interval_config = purge_job_config.get("interval")
@ -553,20 +576,12 @@ class ServerConfig(Config):
) )
self.retention_purge_jobs.append( self.retention_purge_jobs.append(
{ RetentionConfig(interval, shortest_max_lifetime, longest_max_lifetime)
"interval": interval,
"shortest_max_lifetime": shortest_max_lifetime,
"longest_max_lifetime": longest_max_lifetime,
}
) )
if not self.retention_purge_jobs: if not self.retention_purge_jobs:
self.retention_purge_jobs = [ self.retention_purge_jobs = [
{ RetentionConfig(self.parse_duration("1d"), None, None)
"interval": self.parse_duration("1d"),
"shortest_max_lifetime": None,
"longest_max_lifetime": None,
}
] ]
self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])] self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])]
@ -591,25 +606,6 @@ class ServerConfig(Config):
self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None)) self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None))
self.gc_seconds = self.read_gc_intervals(config.get("gc_min_interval", None)) self.gc_seconds = self.read_gc_intervals(config.get("gc_min_interval", None))
@attr.s
class LimitRemoteRoomsConfig:
enabled = attr.ib(
validator=attr.validators.instance_of(bool), default=False
)
complexity = attr.ib(
validator=attr.validators.instance_of(
(float, int) # type: ignore[arg-type] # noqa
),
default=1.0,
)
complexity_error = attr.ib(
validator=attr.validators.instance_of(str),
default=ROOM_COMPLEXITY_TOO_GREAT,
)
admins_can_join = attr.ib(
validator=attr.validators.instance_of(bool), default=False
)
self.limit_remote_rooms = LimitRemoteRoomsConfig( self.limit_remote_rooms = LimitRemoteRoomsConfig(
**(config.get("limit_remote_rooms") or {}) **(config.get("limit_remote_rooms") or {})
) )
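The listener classes above move from attr.ib(type=...) to annotation-driven fields. A minimal sketch of the auto_attribs pattern (the class name is illustrative):

import attr
from typing import List, Optional

@attr.s(slots=True, frozen=True, auto_attribs=True)
class ExampleListenerConfig:
    # under auto_attribs, annotated class attributes become attrs fields;
    # attr.ib() is only needed where a validator or factory is wanted
    port: int = attr.ib(validator=attr.validators.instance_of(int))
    bind_addresses: List[str] = attr.ib(factory=list)
    tls: bool = False
    tag: Optional[str] = None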

View file

@ -73,7 +73,9 @@ class ServerNoticesConfig(Config):
return return
mxid_localpart = c["system_mxid_localpart"] mxid_localpart = c["system_mxid_localpart"]
self.server_notices_mxid = UserID(mxid_localpart, self.server_name).to_string() self.server_notices_mxid = UserID(
mxid_localpart, self.root.server.server_name
).to_string()
self.server_notices_mxid_display_name = c.get("system_mxid_display_name", None) self.server_notices_mxid_display_name = c.get("system_mxid_display_name", None)
self.server_notices_mxid_avatar_url = c.get("system_mxid_avatar_url", None) self.server_notices_mxid_avatar_url = c.get("system_mxid_avatar_url", None)
# todo: i18n # todo: i18n

View file

@ -103,8 +103,10 @@ class SSOConfig(Config):
# the client's. # the client's.
# public_baseurl is an optional setting, so we only add the fallback's URL to the # public_baseurl is an optional setting, so we only add the fallback's URL to the
# list if it's provided (because we can't figure out what that URL is otherwise). # list if it's provided (because we can't figure out what that URL is otherwise).
if self.public_baseurl: if self.root.server.public_baseurl:
login_fallback_url = self.public_baseurl + "_matrix/static/client/login" login_fallback_url = (
self.root.server.public_baseurl + "_matrix/static/client/login"
)
self.sso_client_whitelist.append(login_fallback_url) self.sso_client_whitelist.append(login_fallback_url)
def generate_config_section(self, **kwargs): def generate_config_section(self, **kwargs):

View file

@ -172,9 +172,12 @@ class TlsConfig(Config):
) )
# YYYYMMDDhhmmssZ -- in UTC # YYYYMMDDhhmmssZ -- in UTC
expires_on = datetime.strptime( expiry_data = tls_certificate.get_notAfter()
tls_certificate.get_notAfter().decode("ascii"), "%Y%m%d%H%M%SZ" if expiry_data is None:
) raise ValueError(
"TLS Certificate has no expiry date, and this is not permitted"
)
expires_on = datetime.strptime(expiry_data.decode("ascii"), "%Y%m%d%H%M%SZ")
now = datetime.utcnow() now = datetime.utcnow()
days_remaining = (expires_on - now).days days_remaining = (expires_on - now).days
return days_remaining return days_remaining
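A self-contained sketch of the hardened expiry parsing above; not_after stands in for the value returned by pyOpenSSL's X509.get_notAfter(), which may be None:

from datetime import datetime
from typing import Optional

def days_remaining(not_after: Optional[bytes]) -> int:
    if not_after is None:
        raise ValueError("TLS certificate has no expiry date")
    # notAfter is encoded as YYYYMMDDhhmmssZ, in UTC
    expires_on = datetime.strptime(not_after.decode("ascii"), "%Y%m%d%H%M%SZ")
    return (expires_on - datetime.utcnow()).days

# e.g. days_remaining(b"20301231235959Z")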

View file

@ -41,42 +41,112 @@ from synapse.types import StateMap, UserID, get_domain_from_id
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def check( def validate_event_for_room_version(
room_version_obj: RoomVersion, room_version_obj: RoomVersion, event: EventBase
event: EventBase,
auth_events: StateMap[EventBase],
do_sig_check: bool = True,
do_size_check: bool = True,
) -> None: ) -> None:
"""Checks if this event is correctly authed. """Ensure that the event complies with the limits, and has the right signatures
NB: does not *validate* the signatures - it assumes that any signatures present
have already been checked.
NB: it does not check that the event satisfies the auth rules (that is done in
check_auth_rules_for_event) - these tests are independent of the rest of the state
in the room.
NB: This is used to check events that have been received over federation. As such,
it can only enforce the checks specified in the relevant room version, to avoid
a split-brain situation where some servers accept such events, and others reject
them.
TODO: consider moving this into EventValidator
Args: Args:
room_version_obj: the version of the room room_version_obj: the version of the room which contains this event
event: the event being checked. event: the event to be checked
auth_events: the existing room state.
do_sig_check: True if it should be verified that the sending server
signed the event.
do_size_check: True if the size of the event fields should be verified.
Raises: Raises:
AuthError if the checks fail SynapseError if there is a problem with the event
Returns:
if the auth checks pass.
""" """
assert isinstance(auth_events, dict) _check_size_limits(event)
if do_size_check:
_check_size_limits(event)
if not hasattr(event, "room_id"): if not hasattr(event, "room_id"):
raise AuthError(500, "Event has no room_id: %s" % event) raise AuthError(500, "Event has no room_id: %s" % event)
room_id = event.room_id # check that the event has the correct signatures
sender_domain = get_domain_from_id(event.sender)
is_invite_via_3pid = (
event.type == EventTypes.Member
and event.membership == Membership.INVITE
and "third_party_invite" in event.content
)
# Check the sender's domain has signed the event
if not event.signatures.get(sender_domain):
# We allow invites via 3pid to have a sender from a different
# HS, as the sender must match the sender of the original
# 3pid invite. This is checked further down with the
# other dedicated membership checks.
if not is_invite_via_3pid:
raise AuthError(403, "Event not signed by sender's server")
if event.format_version in (EventFormatVersions.V1,):
# Only older room versions have event IDs to check.
event_id_domain = get_domain_from_id(event.event_id)
# Check the origin domain has signed the event
if not event.signatures.get(event_id_domain):
raise AuthError(403, "Event not signed by sending server")
is_invite_via_allow_rule = (
room_version_obj.msc3083_join_rules
and event.type == EventTypes.Member
and event.membership == Membership.JOIN
and EventContentFields.AUTHORISING_USER in event.content
)
if is_invite_via_allow_rule:
authoriser_domain = get_domain_from_id(
event.content[EventContentFields.AUTHORISING_USER]
)
if not event.signatures.get(authoriser_domain):
raise AuthError(403, "Event not signed by authorising server")
def check_auth_rules_for_event(
room_version_obj: RoomVersion, event: EventBase, auth_events: StateMap[EventBase]
) -> None:
"""Check that an event complies with the auth rules
Checks whether an event passes the auth rules with a given set of state events
Assumes that we have already checked that the event is the right shape (it has
enough signatures, has a room ID, etc). In other words:
- it's fine for use in state resolution, when we have already decided whether to
accept the event or not, and are now trying to decide whether it should make it
into the room state
- when we're doing the initial event auth, it is only suitable in combination with
a bunch of other tests.
Args:
room_version_obj: the version of the room
event: the event being checked.
auth_events: the room state to check the events against.
Raises:
AuthError if the checks fail
"""
assert isinstance(auth_events, dict)
# We need to ensure that the auth events are actually for the same room, to # We need to ensure that the auth events are actually for the same room, to
# stop people from using powers they've been granted in other rooms for # stop people from using powers they've been granted in other rooms for
# example. # example.
#
# Arguably we don't need to do this when we're just doing state res, as presumably
# the state res algorithm isn't silly enough to give us events from different rooms.
# Still, it's easier to do it anyway.
room_id = event.room_id
for auth_event in auth_events.values(): for auth_event in auth_events.values():
if auth_event.room_id != room_id: if auth_event.room_id != room_id:
raise AuthError( raise AuthError(
@ -85,44 +155,12 @@ def check(
"which is in room %s" "which is in room %s"
% (event.event_id, room_id, auth_event.event_id, auth_event.room_id), % (event.event_id, room_id, auth_event.event_id, auth_event.room_id),
) )
if auth_event.rejected_reason:
if do_sig_check: raise AuthError(
sender_domain = get_domain_from_id(event.sender) 403,
"During auth for event %s: found rejected event %s in the state"
is_invite_via_3pid = ( % (event.event_id, auth_event.event_id),
event.type == EventTypes.Member
and event.membership == Membership.INVITE
and "third_party_invite" in event.content
)
# Check the sender's domain has signed the event
if not event.signatures.get(sender_domain):
# We allow invites via 3pid to have a sender from a different
# HS, as the sender must match the sender of the original
# 3pid invite. This is checked further down with the
# other dedicated membership checks.
if not is_invite_via_3pid:
raise AuthError(403, "Event not signed by sender's server")
if event.format_version in (EventFormatVersions.V1,):
# Only older room versions have event IDs to check.
event_id_domain = get_domain_from_id(event.event_id)
# Check the origin domain has signed the event
if not event.signatures.get(event_id_domain):
raise AuthError(403, "Event not signed by sending server")
is_invite_via_allow_rule = (
event.type == EventTypes.Member
and event.membership == Membership.JOIN
and EventContentFields.AUTHORISING_USER in event.content
)
if is_invite_via_allow_rule:
authoriser_domain = get_domain_from_id(
event.content[EventContentFields.AUTHORISING_USER]
) )
if not event.signatures.get(authoriser_domain):
raise AuthError(403, "Event not signed by authorising server")
# Implementation of https://matrix.org/docs/spec/rooms/v1#authorization-rules # Implementation of https://matrix.org/docs/spec/rooms/v1#authorization-rules
# #
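Since the old check() has been split, callers now make two calls where they previously made one. A hedged sketch of the calling convention implied by the new signatures:

from synapse.event_auth import (
    check_auth_rules_for_event,
    validate_event_for_room_version,
)

# shape, size and signature checks; independent of room state
validate_event_for_room_version(room_version_obj, event)

# the auth rules proper, evaluated against the supplied state
check_auth_rules_for_event(room_version_obj, event, auth_events)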

View file

@ -18,10 +18,8 @@ import attr
from nacl.signing import SigningKey from nacl.signing import SigningKey
from synapse.api.constants import MAX_DEPTH from synapse.api.constants import MAX_DEPTH
from synapse.api.errors import UnsupportedRoomVersionError
from synapse.api.room_versions import ( from synapse.api.room_versions import (
KNOWN_EVENT_FORMAT_VERSIONS, KNOWN_EVENT_FORMAT_VERSIONS,
KNOWN_ROOM_VERSIONS,
EventFormatVersions, EventFormatVersions,
RoomVersion, RoomVersion,
) )
@ -197,24 +195,6 @@ class EventBuilderFactory:
self.state = hs.get_state_handler() self.state = hs.get_state_handler()
self._event_auth_handler = hs.get_event_auth_handler() self._event_auth_handler = hs.get_event_auth_handler()
def new(self, room_version: str, key_values: dict) -> EventBuilder:
"""Generate an event builder appropriate for the given room version
Deprecated: use for_room_version with a RoomVersion object instead
Args:
room_version: Version of the room that we're creating an event builder for
key_values: Fields used as the basis of the new event
Returns:
EventBuilder
"""
v = KNOWN_ROOM_VERSIONS.get(room_version)
if not v:
# this can happen if support is withdrawn for a room version
raise UnsupportedRoomVersionError()
return self.for_room_version(v, key_values)
def for_room_version( def for_room_version(
self, room_version: RoomVersion, key_values: dict self, room_version: RoomVersion, key_values: dict
) -> EventBuilder: ) -> EventBuilder:

View file

@ -45,11 +45,11 @@ def load_legacy_presence_router(hs: "HomeServer"):
configuration, and registers the hooks they implement. configuration, and registers the hooks they implement.
""" """
if hs.config.presence_router_module_class is None: if hs.config.server.presence_router_module_class is None:
return return
module = hs.config.presence_router_module_class module = hs.config.server.presence_router_module_class
config = hs.config.presence_router_config config = hs.config.server.presence_router_config
api = hs.get_module_api() api = hs.get_module_api()
presence_router = module(config=config, module_api=api) presence_router = module(config=config, module_api=api)

View file

@ -44,7 +44,9 @@ CHECK_EVENT_FOR_SPAM_CALLBACK = Callable[
["synapse.events.EventBase"], ["synapse.events.EventBase"],
Awaitable[Union[bool, str]], Awaitable[Union[bool, str]],
] ]
USER_MAY_JOIN_ROOM_CALLBACK = Callable[[str, str, bool], Awaitable[bool]]
USER_MAY_INVITE_CALLBACK = Callable[[str, str, str], Awaitable[bool]] USER_MAY_INVITE_CALLBACK = Callable[[str, str, str], Awaitable[bool]]
USER_MAY_SEND_3PID_INVITE_CALLBACK = Callable[[str, str, str, str], Awaitable[bool]]
USER_MAY_CREATE_ROOM_CALLBACK = Callable[[str], Awaitable[bool]] USER_MAY_CREATE_ROOM_CALLBACK = Callable[[str], Awaitable[bool]]
USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK = Callable[ USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK = Callable[
[str, List[str], List[Dict[str, str]]], Awaitable[bool] [str, List[str], List[Dict[str, str]]], Awaitable[bool]
@ -165,7 +167,11 @@ def load_legacy_spam_checkers(hs: "synapse.server.HomeServer"):
class SpamChecker: class SpamChecker:
def __init__(self): def __init__(self):
self._check_event_for_spam_callbacks: List[CHECK_EVENT_FOR_SPAM_CALLBACK] = [] self._check_event_for_spam_callbacks: List[CHECK_EVENT_FOR_SPAM_CALLBACK] = []
self._user_may_join_room_callbacks: List[USER_MAY_JOIN_ROOM_CALLBACK] = []
self._user_may_invite_callbacks: List[USER_MAY_INVITE_CALLBACK] = [] self._user_may_invite_callbacks: List[USER_MAY_INVITE_CALLBACK] = []
self._user_may_send_3pid_invite_callbacks: List[
USER_MAY_SEND_3PID_INVITE_CALLBACK
] = []
self._user_may_create_room_callbacks: List[USER_MAY_CREATE_ROOM_CALLBACK] = [] self._user_may_create_room_callbacks: List[USER_MAY_CREATE_ROOM_CALLBACK] = []
self._user_may_create_room_with_invites_callbacks: List[ self._user_may_create_room_with_invites_callbacks: List[
USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK
@ -187,7 +193,9 @@ class SpamChecker:
def register_callbacks( def register_callbacks(
self, self,
check_event_for_spam: Optional[CHECK_EVENT_FOR_SPAM_CALLBACK] = None, check_event_for_spam: Optional[CHECK_EVENT_FOR_SPAM_CALLBACK] = None,
user_may_join_room: Optional[USER_MAY_JOIN_ROOM_CALLBACK] = None,
user_may_invite: Optional[USER_MAY_INVITE_CALLBACK] = None, user_may_invite: Optional[USER_MAY_INVITE_CALLBACK] = None,
user_may_send_3pid_invite: Optional[USER_MAY_SEND_3PID_INVITE_CALLBACK] = None,
user_may_create_room: Optional[USER_MAY_CREATE_ROOM_CALLBACK] = None, user_may_create_room: Optional[USER_MAY_CREATE_ROOM_CALLBACK] = None,
user_may_create_room_with_invites: Optional[ user_may_create_room_with_invites: Optional[
USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK USER_MAY_CREATE_ROOM_WITH_INVITES_CALLBACK
@ -206,9 +214,17 @@ class SpamChecker:
if check_event_for_spam is not None: if check_event_for_spam is not None:
self._check_event_for_spam_callbacks.append(check_event_for_spam) self._check_event_for_spam_callbacks.append(check_event_for_spam)
if user_may_join_room is not None:
self._user_may_join_room_callbacks.append(user_may_join_room)
if user_may_invite is not None: if user_may_invite is not None:
self._user_may_invite_callbacks.append(user_may_invite) self._user_may_invite_callbacks.append(user_may_invite)
if user_may_send_3pid_invite is not None:
self._user_may_send_3pid_invite_callbacks.append(
user_may_send_3pid_invite,
)
if user_may_create_room is not None: if user_may_create_room is not None:
self._user_may_create_room_callbacks.append(user_may_create_room) self._user_may_create_room_callbacks.append(user_may_create_room)
@ -259,6 +275,24 @@ class SpamChecker:
return False return False
async def user_may_join_room(self, user_id: str, room_id: str, is_invited: bool) -> bool:
"""Checks if a given user is allowed to join a room.
Not called when a user creates a room.
Args:
user_id: The ID of the user wanting to join the room
room_id: The ID of the room the user wants to join
is_invited: Whether the user is invited into the room
Returns:
Whether the user may join the room
"""
for callback in self._user_may_join_room_callbacks:
if await callback(user_id, room_id, is_invited) is False:
return False
return True
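A sketch of what a module-side implementation of the new hook could look like; the room ID and policy are invented for illustration, and the registration call assumes the module API's register_spam_checker_callbacks accepts the new keyword, mirroring register_callbacks above:

async def user_may_join_room(user_id: str, room_id: str, is_invited: bool) -> bool:
    # hypothetical policy: an invite-only room admits nobody who joins uninvited
    if room_id == "!inviteonly:example.com":
        return is_invited
    return True

# in a module's __init__:
# module_api.register_spam_checker_callbacks(user_may_join_room=user_may_join_room)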
async def user_may_invite( async def user_may_invite(
self, inviter_userid: str, invitee_userid: str, room_id: str self, inviter_userid: str, invitee_userid: str, room_id: str
) -> bool: ) -> bool:
@ -280,6 +314,31 @@ class SpamChecker:
return True return True
async def user_may_send_3pid_invite(
self, inviter_userid: str, medium: str, address: str, room_id: str
) -> bool:
"""Checks if a given user may invite a given threepid into the room
If this method returns false, the threepid invite will be rejected.
Note that if the threepid is already associated with a Matrix user ID, Synapse
will call user_may_invite with said user ID instead.
Args:
inviter_userid: The user ID of the sender of the invitation
medium: The 3PID's medium (e.g. "email")
address: The 3PID's address (e.g. "alice@example.com")
room_id: The room ID
Returns:
True if the user may send the invite, otherwise False
"""
for callback in self._user_may_send_3pid_invite_callbacks:
if await callback(inviter_userid, medium, address, room_id) is False:
return False
return True
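A matching sketch for the 3PID hook; the domain policy here is again purely illustrative:

async def user_may_send_3pid_invite(
    inviter_userid: str, medium: str, address: str, room_id: str
) -> bool:
    # hypothetical policy: email invites are only allowed to one trusted domain
    if medium == "email":
        return address.endswith("@example.com")
    return True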
async def user_may_create_room(self, userid: str) -> bool: async def user_may_create_room(self, userid: str) -> bool:
"""Checks if a given user may create a room """Checks if a given user may create a room

View file

@ -217,6 +217,15 @@ class ThirdPartyEventRules:
for callback in self._check_event_allowed_callbacks: for callback in self._check_event_allowed_callbacks:
try: try:
res, replacement_data = await callback(event, state_events) res, replacement_data = await callback(event, state_events)
except SynapseError as e:
# FIXME: Being able to throw SynapseErrors is relied upon by
# some modules. PR #10386 accidentally broke this ability.
# That said, we aren't keen on exposing this implementation detail
# to modules and we should one day have a proper way to do what
# is wanted.
# This module callback needs a rework so that hacks such as
# this one are not necessary.
raise e
except Exception as e: except Exception as e:
logger.warning("Failed to run module API callback %s: %s", callback, e) logger.warning("Failed to run module API callback %s: %s", callback, e)
continue continue

View file

@ -372,7 +372,7 @@ class EventClientSerializer:
def __init__(self, hs): def __init__(self, hs):
self.store = hs.get_datastore() self.store = hs.get_datastore()
self.experimental_msc1849_support_enabled = ( self.experimental_msc1849_support_enabled = (
hs.config.experimental_msc1849_support_enabled hs.config.server.experimental_msc1849_support_enabled
) )
async def serialize_event( async def serialize_event(

View file

@ -1008,7 +1008,10 @@ class FederationServer(FederationBase):
async with lock: async with lock:
logger.info("handling received PDU: %s", event) logger.info("handling received PDU: %s", event)
try: try:
await self._federation_event_handler.on_receive_pdu(origin, event) with nested_logging_context(event.event_id):
await self._federation_event_handler.on_receive_pdu(
origin, event
)
except FederationError as e: except FederationError as e:
# XXX: Ideally we'd inform the remote we failed to process # XXX: Ideally we'd inform the remote we failed to process
# the event, but we can't return an error in the transaction # the event, but we can't return an error in the transaction
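A hedged sketch of the logging pattern introduced above, so that every log line emitted while a PDU is handled carries its event ID; the wrapper function is illustrative:

from synapse.logging.context import nested_logging_context

async def handle_received_pdu(federation_event_handler, origin, event):
    with nested_logging_context(event.event_id):
        # anything logged in here inherits the event's logging context
        await federation_event_handler.on_receive_pdu(origin, event)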

View file

@ -117,7 +117,7 @@ class PublicRoomList(BaseFederationServlet):
): ):
super().__init__(hs, authenticator, ratelimiter, server_name) super().__init__(hs, authenticator, ratelimiter, server_name)
self.handler = hs.get_room_list_handler() self.handler = hs.get_room_list_handler()
self.allow_access = hs.config.allow_public_rooms_over_federation self.allow_access = hs.config.server.allow_public_rooms_over_federation
async def on_GET( async def on_GET(
self, origin: str, content: Literal[None], query: Dict[bytes, List[bytes]] self, origin: str, content: Literal[None], query: Dict[bytes, List[bytes]]

View file

@ -1,120 +0,0 @@
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Optional
from synapse.api.ratelimiting import Ratelimiter
from synapse.types import Requester
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class BaseHandler:
"""
Common base class for the event handlers.
Deprecated: new code should not use this. Instead, Handler classes should define the
fields they actually need. The utility methods should either be factored out to
standalone helper functions, or to different Handler classes.
"""
def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastore()
self.auth = hs.get_auth()
self.notifier = hs.get_notifier()
self.state_handler = hs.get_state_handler()
self.distributor = hs.get_distributor()
self.clock = hs.get_clock()
self.hs = hs
# The rate_hz and burst_count are overridden on a per-user basis
self.request_ratelimiter = Ratelimiter(
store=self.store, clock=self.clock, rate_hz=0, burst_count=0
)
self._rc_message = self.hs.config.ratelimiting.rc_message
# Check whether ratelimiting room admin message redaction is enabled
# by the presence of rate limits in the config
if self.hs.config.ratelimiting.rc_admin_redaction:
self.admin_redaction_ratelimiter: Optional[Ratelimiter] = Ratelimiter(
store=self.store,
clock=self.clock,
rate_hz=self.hs.config.ratelimiting.rc_admin_redaction.per_second,
burst_count=self.hs.config.ratelimiting.rc_admin_redaction.burst_count,
)
else:
self.admin_redaction_ratelimiter = None
self.server_name = hs.hostname
self.event_builder_factory = hs.get_event_builder_factory()
async def ratelimit(
self,
requester: Requester,
update: bool = True,
is_admin_redaction: bool = False,
) -> None:
"""Ratelimits requests.
Args:
requester
update: Whether to record that a request is being processed.
Set to False when doing multiple checks for one request (e.g.
to check up front if we would reject the request), and set to
True for the last call for a given request.
is_admin_redaction: Whether this is a room admin/moderator
redacting an event. If so then we may apply different
ratelimits depending on config.
Raises:
LimitExceededError if the request should be ratelimited
"""
user_id = requester.user.to_string()
# The AS user itself is never rate limited.
app_service = self.store.get_app_service_by_user_id(user_id)
if app_service is not None:
return # do not ratelimit app service senders
messages_per_second = self._rc_message.per_second
burst_count = self._rc_message.burst_count
# Check if there is a per user override in the DB.
override = await self.store.get_ratelimit_for_user(user_id)
if override:
# If overridden with a null Hz then ratelimiting has been entirely
# disabled for the user
if not override.messages_per_second:
return
messages_per_second = override.messages_per_second
burst_count = override.burst_count
if is_admin_redaction and self.admin_redaction_ratelimiter:
# If we have separate config for admin redactions, use a separate
# ratelimiter as to not have user_ids clash
await self.admin_redaction_ratelimiter.ratelimit(requester, update=update)
else:
# Override rate and burst count per-user
await self.request_ratelimiter.ratelimit(
requester,
rate_hz=messages_per_second,
burst_count=burst_count,
update=update,
)

View file

@ -67,12 +67,8 @@ class AccountValidityHandler:
and self._account_validity_renew_by_email_enabled and self._account_validity_renew_by_email_enabled
): ):
# Don't do email-specific configuration if renewal by email is disabled. # Don't do email-specific configuration if renewal by email is disabled.
self._template_html = ( self._template_html = hs.config.email.account_validity_template_html
hs.config.account_validity.account_validity_template_html self._template_text = hs.config.email.account_validity_template_text
)
self._template_text = (
hs.config.account_validity.account_validity_template_text
)
self._renew_email_subject = ( self._renew_email_subject = (
hs.config.account_validity.account_validity_renew_email_subject hs.config.account_validity.account_validity_renew_email_subject
) )

View file

@ -21,18 +21,15 @@ from synapse.events import EventBase
from synapse.types import JsonDict, RoomStreamToken, StateMap, UserID from synapse.types import JsonDict, RoomStreamToken, StateMap, UserID
from synapse.visibility import filter_events_for_client from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class AdminHandler(BaseHandler): class AdminHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.storage = hs.get_storage() self.storage = hs.get_storage()
self.state_store = self.storage.state self.state_store = self.storage.state
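With BaseHandler gone, each handler pulls exactly the dependencies it uses from the HomeServer, as AdminHandler does above. A sketch of the resulting shape (the handler name is illustrative):

class ExampleHandler:
    def __init__(self, hs: "HomeServer"):
        # declare dependencies explicitly instead of inheriting them
        self.store = hs.get_datastore()
        self.clock = hs.get_clock()
        self.auth = hs.get_auth()
        self.server_name = hs.hostname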

View file

@ -52,7 +52,6 @@ from synapse.api.errors import (
UserDeactivatedError, UserDeactivatedError,
) )
from synapse.api.ratelimiting import Ratelimiter from synapse.api.ratelimiting import Ratelimiter
from synapse.handlers._base import BaseHandler
from synapse.handlers.ui_auth import ( from synapse.handlers.ui_auth import (
INTERACTIVE_AUTH_CHECKERS, INTERACTIVE_AUTH_CHECKERS,
UIAuthSessionDataConstants, UIAuthSessionDataConstants,
@ -186,19 +185,20 @@ class LoginTokenAttributes:
auth_provider_id = attr.ib(type=str) auth_provider_id = attr.ib(type=str)
class AuthHandler(BaseHandler): class AuthHandler:
SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000 SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.auth = hs.get_auth()
self.clock = hs.get_clock()
self.checkers: Dict[str, UserInteractiveAuthChecker] = {} self.checkers: Dict[str, UserInteractiveAuthChecker] = {}
for auth_checker_class in INTERACTIVE_AUTH_CHECKERS: for auth_checker_class in INTERACTIVE_AUTH_CHECKERS:
inst = auth_checker_class(hs) inst = auth_checker_class(hs)
if inst.is_enabled(): if inst.is_enabled():
self.checkers[inst.AUTH_TYPE] = inst # type: ignore self.checkers[inst.AUTH_TYPE] = inst # type: ignore
self.bcrypt_rounds = hs.config.bcrypt_rounds self.bcrypt_rounds = hs.config.registration.bcrypt_rounds
# we can't use hs.get_module_api() here, because to do so will create an # we can't use hs.get_module_api() here, because to do so will create an
# import loop. # import loop.

View file

@ -19,19 +19,17 @@ from synapse.api.errors import SynapseError
from synapse.metrics.background_process_metrics import run_as_background_process from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import Requester, UserID, create_requester from synapse.types import Requester, UserID, create_requester
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class DeactivateAccountHandler(BaseHandler): class DeactivateAccountHandler:
"""Handler which deals with deactivating user accounts.""" """Handler which deals with deactivating user accounts."""
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.hs = hs self.hs = hs
self._auth_handler = hs.get_auth_handler() self._auth_handler = hs.get_auth_handler()
self._device_handler = hs.get_device_handler() self._device_handler = hs.get_device_handler()
@ -133,6 +131,10 @@ class DeactivateAccountHandler(BaseHandler):
# delete from user directory # delete from user directory
await self.user_directory_handler.handle_local_user_deactivated(user_id) await self.user_directory_handler.handle_local_user_deactivated(user_id)
# If the user is present in the monthly active users table
# remove them
await self.store.remove_deactivated_user_from_mau_table(user_id)
# Mark the user as erased, if they asked for that # Mark the user as erased, if they asked for that
if erase_data: if erase_data:
user = UserID.from_string(user_id) user = UserID.from_string(user_id)

View file

@ -40,8 +40,6 @@ from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.metrics import measure_func from synapse.util.metrics import measure_func
from synapse.util.retryutils import NotRetryingDestination from synapse.util.retryutils import NotRetryingDestination
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
@@ -50,14 +48,16 @@ logger = logging.getLogger(__name__)
MAX_DEVICE_DISPLAY_NAME_LEN = 100 MAX_DEVICE_DISPLAY_NAME_LEN = 100
class DeviceWorkerHandler(BaseHandler): class DeviceWorkerHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.clock = hs.get_clock()
self.hs = hs self.hs = hs
self.store = hs.get_datastore()
self.notifier = hs.get_notifier()
self.state = hs.get_state_handler() self.state = hs.get_state_handler()
self.state_store = hs.get_storage().state self.state_store = hs.get_storage().state
self._auth_handler = hs.get_auth_handler() self._auth_handler = hs.get_auth_handler()
self.server_name = hs.hostname
@trace @trace
async def get_devices_by_user(self, user_id: str) -> List[JsonDict]: async def get_devices_by_user(self, user_id: str) -> List[JsonDict]:

View file

@@ -31,26 +31,25 @@ from synapse.appservice import ApplicationService
from synapse.storage.databases.main.directory import RoomAliasMapping from synapse.storage.databases.main.directory import RoomAliasMapping
from synapse.types import JsonDict, Requester, RoomAlias, UserID, get_domain_from_id from synapse.types import JsonDict, Requester, RoomAlias, UserID, get_domain_from_id
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class DirectoryHandler(BaseHandler): class DirectoryHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.auth = hs.get_auth()
self.hs = hs
self.state = hs.get_state_handler() self.state = hs.get_state_handler()
self.appservice_handler = hs.get_application_service_handler() self.appservice_handler = hs.get_application_service_handler()
self.event_creation_handler = hs.get_event_creation_handler() self.event_creation_handler = hs.get_event_creation_handler()
self.store = hs.get_datastore() self.store = hs.get_datastore()
self.config = hs.config self.config = hs.config
self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search
self.require_membership = hs.config.require_membership_for_aliases self.require_membership = hs.config.server.require_membership_for_aliases
self.third_party_event_rules = hs.get_third_party_event_rules() self.third_party_event_rules = hs.get_third_party_event_rules()
self.server_name = hs.hostname
self.federation = hs.get_federation_client() self.federation = hs.get_federation_client()
hs.get_federation_registry().register_query_handler( hs.get_federation_registry().register_query_handler(

View file

@@ -22,7 +22,8 @@ from synapse.api.constants import (
RestrictedJoinRuleTypes, RestrictedJoinRuleTypes,
) )
from synapse.api.errors import AuthError, Codes, SynapseError from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion from synapse.api.room_versions import RoomVersion
from synapse.event_auth import check_auth_rules_for_event
from synapse.events import EventBase from synapse.events import EventBase
from synapse.events.builder import EventBuilder from synapse.events.builder import EventBuilder
from synapse.events.snapshot import EventContext from synapse.events.snapshot import EventContext
@@ -45,21 +46,17 @@ class EventAuthHandler:
self._store = hs.get_datastore() self._store = hs.get_datastore()
self._server_name = hs.hostname self._server_name = hs.hostname
async def check_from_context( async def check_auth_rules_from_context(
self, self,
room_version: str, room_version_obj: RoomVersion,
event: EventBase, event: EventBase,
context: EventContext, context: EventContext,
do_sig_check: bool = True,
) -> None: ) -> None:
"""Check an event passes the auth rules at its own auth events"""
auth_event_ids = event.auth_event_ids() auth_event_ids = event.auth_event_ids()
auth_events_by_id = await self._store.get_events(auth_event_ids) auth_events_by_id = await self._store.get_events(auth_event_ids)
auth_events = {(e.type, e.state_key): e for e in auth_events_by_id.values()} auth_events = {(e.type, e.state_key): e for e in auth_events_by_id.values()}
check_auth_rules_for_event(room_version_obj, event, auth_events)
room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
event_auth.check(
room_version_obj, event, auth_events=auth_events, do_sig_check=do_sig_check
)
def compute_auth_events( def compute_auth_events(
self, self,
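
The hunk above splits the old event_auth.check call (with its do_sig_check flag) into two explicit steps. A hedged sketch of the new calling convention at the call sites later in this merge, with names taken from this diff; callers first validate the event itself, then run the auth rules through the renamed handler method:

validate_event_for_room_version(room_version_obj, event)
await self._event_auth_handler.check_auth_rules_from_context(
    room_version_obj, event, context
)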

View file

@@ -25,8 +25,6 @@ from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, UserID from synapse.types import JsonDict, UserID
from synapse.visibility import filter_events_for_client from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
@@ -34,11 +32,11 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class EventStreamHandler(BaseHandler): class EventStreamHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.clock = hs.get_clock() self.clock = hs.get_clock()
self.hs = hs
self.notifier = hs.get_notifier() self.notifier = hs.get_notifier()
self.state = hs.get_state_handler() self.state = hs.get_state_handler()
@@ -138,9 +136,9 @@ class EventStreamHandler(BaseHandler):
return chunk return chunk
class EventHandler(BaseHandler): class EventHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.storage = hs.get_storage() self.storage = hs.get_storage()
async def get_event( async def get_event(

View file

@@ -45,11 +45,14 @@ from synapse.api.errors import (
) )
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions
from synapse.crypto.event_signing import compute_event_signature from synapse.crypto.event_signing import compute_event_signature
from synapse.event_auth import (
check_auth_rules_for_event,
validate_event_for_room_version,
)
from synapse.events import EventBase from synapse.events import EventBase
from synapse.events.snapshot import EventContext from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator from synapse.events.validator import EventValidator
from synapse.federation.federation_client import InvalidResponseError from synapse.federation.federation_client import InvalidResponseError
from synapse.handlers._base import BaseHandler
from synapse.http.servlet import assert_params_in_dict from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import ( from synapse.logging.context import (
make_deferred_yieldable, make_deferred_yieldable,
@@ -74,15 +77,13 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class FederationHandler(BaseHandler): class FederationHandler:
"""Handles general incoming federation requests """Handles general incoming federation requests
Incoming events are *not* handled here, for which see FederationEventHandler. Incoming events are *not* handled here, for which see FederationEventHandler.
""" """
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.hs = hs self.hs = hs
self.store = hs.get_datastore() self.store = hs.get_datastore()
@@ -95,6 +96,7 @@ class FederationHandler(BaseHandler):
self.is_mine_id = hs.is_mine_id self.is_mine_id = hs.is_mine_id
self.spam_checker = hs.get_spam_checker() self.spam_checker = hs.get_spam_checker()
self.event_creation_handler = hs.get_event_creation_handler() self.event_creation_handler = hs.get_event_creation_handler()
self.event_builder_factory = hs.get_event_builder_factory()
self._event_auth_handler = hs.get_event_auth_handler() self._event_auth_handler = hs.get_event_auth_handler()
self._server_notices_mxid = hs.config.servernotices.server_notices_mxid self._server_notices_mxid = hs.config.servernotices.server_notices_mxid
self.config = hs.config self.config = hs.config
@@ -723,8 +725,8 @@ class FederationHandler(BaseHandler):
state_ids, state_ids,
) )
builder = self.event_builder_factory.new( builder = self.event_builder_factory.for_room_version(
room_version.identifier, room_version,
{ {
"type": EventTypes.Member, "type": EventTypes.Member,
"content": event_content, "content": event_content,
@@ -747,10 +749,9 @@ class FederationHandler(BaseHandler):
# The remote hasn't signed it yet, obviously. We'll do the full checks # The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_join_request` # when we get the event back in `on_send_join_request`
await self._event_auth_handler.check_from_context( await self._event_auth_handler.check_auth_rules_from_context(
room_version.identifier, event, context, do_sig_check=False room_version, event, context
) )
return event return event
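
The join/leave/knock hunks in this file all follow the same two-part pattern: fetch a RoomVersion object rather than a version-identifier string, then build events through the renamed factory method. A hedged sketch, with names from the surrounding hunks (the event dict is abridged):

room_version_obj = await self.store.get_room_version(room_id)  # was get_room_version_id
builder = self.event_builder_factory.for_room_version(  # was .new(...)
    room_version_obj,
    {"type": EventTypes.Member, "content": event_content},  # other keys elided
)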
async def on_invite_request( async def on_invite_request(
@@ -767,7 +768,7 @@ class FederationHandler(BaseHandler):
if is_blocked: if is_blocked:
raise SynapseError(403, "This room has been blocked on this server") raise SynapseError(403, "This room has been blocked on this server")
if self.hs.config.block_non_admin_invites: if self.hs.config.server.block_non_admin_invites:
raise SynapseError(403, "This server does not accept room invites") raise SynapseError(403, "This server does not accept room invites")
if not await self.spam_checker.user_may_invite( if not await self.spam_checker.user_may_invite(
@@ -902,9 +903,9 @@ class FederationHandler(BaseHandler):
) )
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN) raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
room_version = await self.store.get_room_version_id(room_id) room_version_obj = await self.store.get_room_version(room_id)
builder = self.event_builder_factory.new( builder = self.event_builder_factory.for_room_version(
room_version, room_version_obj,
{ {
"type": EventTypes.Member, "type": EventTypes.Member,
"content": {"membership": Membership.LEAVE}, "content": {"membership": Membership.LEAVE},
@@ -921,8 +922,8 @@ class FederationHandler(BaseHandler):
try: try:
# The remote hasn't signed it yet, obviously. We'll do the full checks # The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_leave_request` # when we get the event back in `on_send_leave_request`
await self._event_auth_handler.check_from_context( await self._event_auth_handler.check_auth_rules_from_context(
room_version, event, context, do_sig_check=False room_version_obj, event, context
) )
except AuthError as e: except AuthError as e:
logger.warning("Failed to create new leave %r because %s", event, e) logger.warning("Failed to create new leave %r because %s", event, e)
@@ -954,10 +955,10 @@ class FederationHandler(BaseHandler):
) )
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN) raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
room_version = await self.store.get_room_version_id(room_id) room_version_obj = await self.store.get_room_version(room_id)
builder = self.event_builder_factory.new( builder = self.event_builder_factory.for_room_version(
room_version, room_version_obj,
{ {
"type": EventTypes.Member, "type": EventTypes.Member,
"content": {"membership": Membership.KNOCK}, "content": {"membership": Membership.KNOCK},
@@ -983,8 +984,8 @@ class FederationHandler(BaseHandler):
try: try:
# The remote hasn't signed it yet, obviously. We'll do the full checks # The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_knock_request` # when we get the event back in `on_send_knock_request`
await self._event_auth_handler.check_from_context( await self._event_auth_handler.check_auth_rules_from_context(
room_version, event, context, do_sig_check=False room_version_obj, event, context
) )
except AuthError as e: except AuthError as e:
logger.warning("Failed to create new knock %r because %s", event, e) logger.warning("Failed to create new knock %r because %s", event, e)
@@ -1173,7 +1174,8 @@ class FederationHandler(BaseHandler):
auth_for_e[(EventTypes.Create, "")] = create_event auth_for_e[(EventTypes.Create, "")] = create_event
try: try:
event_auth.check(room_version, e, auth_events=auth_for_e) validate_event_for_room_version(room_version, e)
check_auth_rules_for_event(room_version, e, auth_for_e)
except SynapseError as err: except SynapseError as err:
# we may get SynapseErrors here as well as AuthErrors. For # we may get SynapseErrors here as well as AuthErrors. For
# instance, there are a couple of (ancient) events in some # instance, there are a couple of (ancient) events in some
@@ -1250,8 +1252,10 @@ class FederationHandler(BaseHandler):
} }
if await self._event_auth_handler.check_host_in_room(room_id, self.hs.hostname): if await self._event_auth_handler.check_host_in_room(room_id, self.hs.hostname):
room_version = await self.store.get_room_version_id(room_id) room_version_obj = await self.store.get_room_version(room_id)
builder = self.event_builder_factory.new(room_version, event_dict) builder = self.event_builder_factory.for_room_version(
room_version_obj, event_dict
)
EventValidator().validate_builder(builder, self.hs.config) EventValidator().validate_builder(builder, self.hs.config)
event, context = await self.event_creation_handler.create_new_client_event( event, context = await self.event_creation_handler.create_new_client_event(
@@ -1259,7 +1263,7 @@ class FederationHandler(BaseHandler):
) )
event, context = await self.add_display_name_to_third_party_invite( event, context = await self.add_display_name_to_third_party_invite(
room_version, event_dict, event, context room_version_obj, event_dict, event, context
) )
EventValidator().validate_new(event, self.config) EventValidator().validate_new(event, self.config)
@@ -1269,8 +1273,9 @@ class FederationHandler(BaseHandler):
event.internal_metadata.send_on_behalf_of = self.hs.hostname event.internal_metadata.send_on_behalf_of = self.hs.hostname
try: try:
await self._event_auth_handler.check_from_context( validate_event_for_room_version(room_version_obj, event)
room_version, event, context await self._event_auth_handler.check_auth_rules_from_context(
room_version_obj, event, context
) )
except AuthError as e: except AuthError as e:
logger.warning("Denying new third party invite %r because %s", event, e) logger.warning("Denying new third party invite %r because %s", event, e)
@@ -1304,22 +1309,25 @@ class FederationHandler(BaseHandler):
""" """
assert_params_in_dict(event_dict, ["room_id"]) assert_params_in_dict(event_dict, ["room_id"])
room_version = await self.store.get_room_version_id(event_dict["room_id"]) room_version_obj = await self.store.get_room_version(event_dict["room_id"])
# NB: event_dict has a particular specced format we might need to fudge # NB: event_dict has a particular specced format we might need to fudge
# if we change event formats too much. # if we change event formats too much.
builder = self.event_builder_factory.new(room_version, event_dict) builder = self.event_builder_factory.for_room_version(
room_version_obj, event_dict
)
event, context = await self.event_creation_handler.create_new_client_event( event, context = await self.event_creation_handler.create_new_client_event(
builder=builder builder=builder
) )
event, context = await self.add_display_name_to_third_party_invite( event, context = await self.add_display_name_to_third_party_invite(
room_version, event_dict, event, context room_version_obj, event_dict, event, context
) )
try: try:
await self._event_auth_handler.check_from_context( validate_event_for_room_version(room_version_obj, event)
room_version, event, context await self._event_auth_handler.check_auth_rules_from_context(
room_version_obj, event, context
) )
except AuthError as e: except AuthError as e:
logger.warning("Denying third party invite %r because %s", event, e) logger.warning("Denying third party invite %r because %s", event, e)
@@ -1336,7 +1344,7 @@ class FederationHandler(BaseHandler):
async def add_display_name_to_third_party_invite( async def add_display_name_to_third_party_invite(
self, self,
room_version: str, room_version_obj: RoomVersion,
event_dict: JsonDict, event_dict: JsonDict,
event: EventBase, event: EventBase,
context: EventContext, context: EventContext,
@@ -1368,7 +1376,9 @@ class FederationHandler(BaseHandler):
# auth checks. If we need the invite and don't have it then the # auth checks. If we need the invite and don't have it then the
# auth check code will explode appropriately. # auth check code will explode appropriately.
builder = self.event_builder_factory.new(room_version, event_dict) builder = self.event_builder_factory.for_room_version(
room_version_obj, event_dict
)
EventValidator().validate_builder(builder, self.hs.config) EventValidator().validate_builder(builder, self.hs.config)
event, context = await self.event_creation_handler.create_new_client_event( event, context = await self.event_creation_handler.create_new_client_event(
builder=builder builder=builder

View file

@@ -29,7 +29,6 @@ from typing import (
from prometheus_client import Counter from prometheus_client import Counter
from synapse import event_auth
from synapse.api.constants import ( from synapse.api.constants import (
EventContentFields, EventContentFields,
EventTypes, EventTypes,
@@ -47,7 +46,11 @@ from synapse.api.errors import (
SynapseError, SynapseError,
) )
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.event_auth import auth_types_for_event from synapse.event_auth import (
auth_types_for_event,
check_auth_rules_for_event,
validate_event_for_room_version,
)
from synapse.events import EventBase from synapse.events import EventBase
from synapse.events.snapshot import EventContext from synapse.events.snapshot import EventContext
from synapse.federation.federation_client import InvalidResponseError from synapse.federation.federation_client import InvalidResponseError
@@ -68,11 +71,7 @@ from synapse.types import (
UserID, UserID,
get_domain_from_id, get_domain_from_id,
) )
from synapse.util.async_helpers import ( from synapse.util.async_helpers import Linearizer, concurrently_execute
Linearizer,
concurrently_execute,
yieldable_gather_results,
)
from synapse.util.iterutils import batch_iter from synapse.util.iterutils import batch_iter
from synapse.util.retryutils import NotRetryingDestination from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import shortstr from synapse.util.stringutils import shortstr
@@ -357,6 +356,11 @@ class FederationEventHandler:
) )
# all looks good, we can persist the event. # all looks good, we can persist the event.
# First, precalculate the joined hosts so that the federation sender doesn't
# need to.
await self._event_creation_handler.cache_joined_hosts_for_event(event, context)
await self._run_push_actions_and_persist_event(event, context) await self._run_push_actions_and_persist_event(event, context)
return event, context return event, context
@@ -890,6 +894,9 @@ class FederationEventHandler:
backfilled=backfilled, backfilled=backfilled,
) )
except AuthError as e: except AuthError as e:
# FIXME richvdh 2021/10/07 I don't think this is reachable. Let's log it
# for now
logger.exception("Unexpected AuthError from _check_event_auth")
raise FederationError("ERROR", e.code, e.msg, affected=event.event_id) raise FederationError("ERROR", e.code, e.msg, affected=event.event_id)
await self._run_push_actions_and_persist_event(event, context, backfilled) await self._run_push_actions_and_persist_event(event, context, backfilled)
@@ -1011,9 +1018,8 @@ class FederationEventHandler:
room_version = await self._store.get_room_version(marker_event.room_id) room_version = await self._store.get_room_version(marker_event.room_id)
create_event = await self._store.get_create_event_for_room(marker_event.room_id) create_event = await self._store.get_create_event_for_room(marker_event.room_id)
room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR) room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
if ( if not room_version.msc2716_historical and (
not room_version.msc2716_historical not self._config.experimental.msc2716_enabled
or not self._config.experimental.msc2716_enabled
or marker_event.sender != room_creator or marker_event.sender != room_creator
): ):
return return
@@ -1155,7 +1161,10 @@ class FederationEventHandler:
return return
logger.info( logger.info(
"Persisting %i of %i remaining events", len(roots), len(event_map) "Persisting %i of %i remaining outliers: %s",
len(roots),
len(event_map),
shortstr(e.event_id for e in roots),
) )
await self._auth_and_persist_fetched_events_inner(origin, room_id, roots) await self._auth_and_persist_fetched_events_inner(origin, room_id, roots)
@@ -1189,7 +1198,10 @@ class FederationEventHandler:
allow_rejected=True, allow_rejected=True,
) )
async def prep(event: EventBase) -> Optional[Tuple[EventBase, EventContext]]: room_version = await self._store.get_room_version_id(room_id)
room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
def prep(event: EventBase) -> Optional[Tuple[EventBase, EventContext]]:
with nested_logging_context(suffix=event.event_id): with nested_logging_context(suffix=event.event_id):
auth = {} auth = {}
for auth_event_id in event.auth_event_ids(): for auth_event_id in event.auth_event_ids():
@@ -1207,17 +1219,16 @@ class FederationEventHandler:
auth[(ae.type, ae.state_key)] = ae auth[(ae.type, ae.state_key)] = ae
context = EventContext.for_outlier() context = EventContext.for_outlier()
context = await self._check_event_auth( try:
origin, validate_event_for_room_version(room_version_obj, event)
event, check_auth_rules_for_event(room_version_obj, event, auth)
context, except AuthError as e:
claimed_auth_event_map=auth, logger.warning("Rejecting %r because %s", event, e)
) context.rejected = RejectedReason.AUTH_ERROR
return event, context return event, context
events_to_persist = ( events_to_persist = (x for x in (prep(event) for event in fetched_events) if x)
x for x in await yieldable_gather_results(prep, fetched_events) if x
)
await self.persist_events_and_notify(room_id, tuple(events_to_persist)) await self.persist_events_and_notify(room_id, tuple(events_to_persist))
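
The events_to_persist line above swaps a yieldable_gather_results call for a plain generator pipeline: map prep over the fetched events, then drop the None results. The same idiom in isolation, as a runnable toy (not Synapse code):

def prep(n: int):
    # stand-in for the real prep(); returns None for items we skip
    return (n, n * n) if n % 2 else None

items = (x for x in (prep(n) for n in range(5)) if x)
print(list(items))  # [(1, 1), (3, 9)]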
async def _check_event_auth( async def _check_event_auth(
@@ -1226,7 +1237,6 @@ class FederationEventHandler:
event: EventBase, event: EventBase,
context: EventContext, context: EventContext,
state: Optional[Iterable[EventBase]] = None, state: Optional[Iterable[EventBase]] = None,
claimed_auth_event_map: Optional[StateMap[EventBase]] = None,
backfilled: bool = False, backfilled: bool = False,
) -> EventContext: ) -> EventContext:
""" """
@@ -1242,42 +1252,45 @@ class FederationEventHandler:
The state events used to check the event for soft-fail. If this is The state events used to check the event for soft-fail. If this is
not provided the current state events will be used. not provided the current state events will be used.
claimed_auth_event_map:
A map of (type, state_key) => event for the event's claimed auth_events.
Possibly including events that were rejected, or are in the wrong room.
Only populated when populating outliers.
backfilled: True if the event was backfilled. backfilled: True if the event was backfilled.
Returns: Returns:
The updated context object. The updated context object.
""" """
# claimed_auth_event_map should be given iff the event is an outlier # This method should only be used for non-outliers
assert bool(claimed_auth_event_map) == event.internal_metadata.outlier assert not event.internal_metadata.outlier
# first of all, check that the event itself is valid.
room_version = await self._store.get_room_version_id(event.room_id) room_version = await self._store.get_room_version_id(event.room_id)
room_version_obj = KNOWN_ROOM_VERSIONS[room_version] room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
if claimed_auth_event_map: try:
# if we have a copy of the auth events from the event, use that as the validate_event_for_room_version(room_version_obj, event)
# basis for auth. except AuthError as e:
auth_events = claimed_auth_event_map logger.warning("While validating received event %r: %s", event, e)
else: # TODO: use a different rejected reason here?
# otherwise, we calculate what the auth events *should* be, and use that context.rejected = RejectedReason.AUTH_ERROR
prev_state_ids = await context.get_prev_state_ids() return context
auth_events_ids = self._event_auth_handler.compute_auth_events(
event, prev_state_ids, for_verification=True # calculate what the auth events *should* be, to use as a basis for auth.
) prev_state_ids = await context.get_prev_state_ids()
auth_events_x = await self._store.get_events(auth_events_ids) auth_events_ids = self._event_auth_handler.compute_auth_events(
auth_events = {(e.type, e.state_key): e for e in auth_events_x.values()} event, prev_state_ids, for_verification=True
)
auth_events_x = await self._store.get_events(auth_events_ids)
calculated_auth_event_map = {
(e.type, e.state_key): e for e in auth_events_x.values()
}
try: try:
( (
context, context,
auth_events_for_auth, auth_events_for_auth,
) = await self._update_auth_events_and_context_for_auth( ) = await self._update_auth_events_and_context_for_auth(
origin, event, context, auth_events origin,
event,
context,
calculated_auth_event_map=calculated_auth_event_map,
) )
except Exception: except Exception:
# We don't really mind if the above fails, so let's not fail # We don't really mind if the above fails, so let's not fail
@@ -1289,24 +1302,17 @@ class FederationEventHandler:
"Ignoring failure and continuing processing of event.", "Ignoring failure and continuing processing of event.",
event.event_id, event.event_id,
) )
auth_events_for_auth = auth_events auth_events_for_auth = calculated_auth_event_map
try: try:
event_auth.check(room_version_obj, event, auth_events=auth_events_for_auth) check_auth_rules_for_event(room_version_obj, event, auth_events_for_auth)
except AuthError as e: except AuthError as e:
logger.warning("Failed auth resolution for %r because %s", event, e) logger.warning("Failed auth resolution for %r because %s", event, e)
context.rejected = RejectedReason.AUTH_ERROR context.rejected = RejectedReason.AUTH_ERROR
return context
if not context.rejected: await self._check_for_soft_fail(event, state, backfilled, origin=origin)
await self._check_for_soft_fail(event, state, backfilled, origin=origin) await self._maybe_kick_guest_users(event)
await self._maybe_kick_guest_users(event)
# If we are going to send this event over federation we precalculate
# the joined hosts.
if event.internal_metadata.get_send_on_behalf_of():
await self._event_creation_handler.cache_joined_hosts_for_event(
event, context
)
return context return context
@@ -1404,7 +1410,7 @@ class FederationEventHandler:
} }
try: try:
event_auth.check(room_version_obj, event, auth_events=current_auth_events) check_auth_rules_for_event(room_version_obj, event, current_auth_events)
except AuthError as e: except AuthError as e:
logger.warning( logger.warning(
"Soft-failing %r (from %s) because %s", "Soft-failing %r (from %s) because %s",
@@ -1425,7 +1431,7 @@ class FederationEventHandler:
origin: str, origin: str,
event: EventBase, event: EventBase,
context: EventContext, context: EventContext,
input_auth_events: StateMap[EventBase], calculated_auth_event_map: StateMap[EventBase],
) -> Tuple[EventContext, StateMap[EventBase]]: ) -> Tuple[EventContext, StateMap[EventBase]]:
"""Helper for _check_event_auth. See there for docs. """Helper for _check_event_auth. See there for docs.
@@ -1443,19 +1449,17 @@ class FederationEventHandler:
event: event:
context: context:
input_auth_events: calculated_auth_event_map:
Map from (event_type, state_key) to event Our calculated auth_events based on the state of the room
at the event's position in the DAG.
Normally, our calculated auth_events based on the state of the room
at the event's position in the DAG, though occasionally (eg if the
event is an outlier), may be the auth events claimed by the remote
server.
Returns: Returns:
updated context, updated auth event map updated context, updated auth event map
""" """
# take a copy of input_auth_events before we modify it. assert not event.internal_metadata.outlier
auth_events: MutableStateMap[EventBase] = dict(input_auth_events)
# take a copy of calculated_auth_event_map before we modify it.
auth_events: MutableStateMap[EventBase] = dict(calculated_auth_event_map)
event_auth_events = set(event.auth_event_ids()) event_auth_events = set(event.auth_event_ids())
@@ -1475,6 +1479,11 @@ class FederationEventHandler:
logger.debug("Events %s are in the store", have_events) logger.debug("Events %s are in the store", have_events)
missing_auth.difference_update(have_events) missing_auth.difference_update(have_events)
# missing_auth is now the set of event_ids which:
# a. are listed in event.auth_events, *and*
# b. are *not* part of our calculated auth events based on room state, *and*
# c. are *not* yet in our database.
if missing_auth: if missing_auth:
# If we don't have all the auth events, we need to get them. # If we don't have all the auth events, we need to get them.
logger.info("auth_events contains unknown events: %s", missing_auth) logger.info("auth_events contains unknown events: %s", missing_auth)
@@ -1496,19 +1505,31 @@ class FederationEventHandler:
} }
) )
if event.internal_metadata.is_outlier(): # auth_events now contains
# XXX: given that, for an outlier, we'll be working with the # 1. our *calculated* auth events based on the room state, plus:
# event's *claimed* auth events rather than those we calculated: # 2. any events which:
# (a) is there any point in this test, since different_auth below will # a. are listed in `event.auth_events`, *and*
# obviously be empty # b. are not part of our calculated auth events, *and*
# (b) alternatively, why don't we do it earlier? # c. were not in our database before the call to /event_auth
logger.info("Skipping auth_event fetch for outlier") # d. have since been added to our database (most likely by /event_auth).
return context, auth_events
different_auth = event_auth_events.difference( different_auth = event_auth_events.difference(
e.event_id for e in auth_events.values() e.event_id for e in auth_events.values()
) )
# different_auth is the set of events which *are* in `event.auth_events`, but
# which are *not* in `auth_events`. Comparing with (2.) above, this means
# exclusively the set of `event.auth_events` which we already had in our
# database before any call to /event_auth.
#
# I'm reasonably sure that the fact that events returned by /event_auth are
# blindly added to auth_events (and hence excluded from different_auth) is a bug
# - though it's a very long-standing one (see
# https://github.com/matrix-org/synapse/commit/78015948a7febb18e000651f72f8f58830a55b93#diff-0bc92da3d703202f5b9be2d3f845e375f5b1a6bc6ba61705a8af9be1121f5e42R786
# from Jan 2015 which seems to add it, though it actually just moves it from
# elsewhere (before that, it gets lost in a mess of huge "various bug fixes"
# PRs).
if not different_auth: if not different_auth:
return context, auth_events return context, auth_events
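
The comments above describe missing_auth and different_auth purely in terms of set arithmetic. A runnable toy with made-up event IDs, mirroring those definitions (not Synapse code):

event_auth_ids = {"$a", "$b", "$c"}  # event.auth_event_ids()
calculated = {"$a"}                  # keys of calculated_auth_event_map
have_events = {"$b"}                 # of the rest, already in our database

missing_auth = event_auth_ids - calculated - have_events
print(missing_auth)  # {'$c'}: fetched via /event_auth and added to auth_events

auth_event_ids_now = calculated | missing_auth
different_auth = event_auth_ids - auth_event_ids_now
print(different_auth)  # {'$b'}: claimed auth events we already had locally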

View file

@@ -39,8 +39,6 @@ from synapse.util.stringutils import (
valid_id_server_location, valid_id_server_location,
) )
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
@@ -49,15 +47,14 @@ logger = logging.getLogger(__name__)
id_server_scheme = "https://" id_server_scheme = "https://"
class IdentityHandler(BaseHandler): class IdentityHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
# An HTTP client for contacting trusted URLs. # An HTTP client for contacting trusted URLs.
self.http_client = SimpleHttpClient(hs) self.http_client = SimpleHttpClient(hs)
# An HTTP client for contacting identity servers specified by clients. # An HTTP client for contacting identity servers specified by clients.
self.blacklisting_http_client = SimpleHttpClient( self.blacklisting_http_client = SimpleHttpClient(
hs, ip_blacklist=hs.config.federation_ip_range_blacklist hs, ip_blacklist=hs.config.server.federation_ip_range_blacklist
) )
self.federation_http_client = hs.get_federation_http_client() self.federation_http_client = hs.get_federation_http_client()
self.hs = hs self.hs = hs
@@ -573,9 +570,15 @@ class IdentityHandler(BaseHandler):
# Try to validate as email # Try to validate as email
if self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE: if self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
# Remote emails will only be used if a valid identity server is provided.
assert (
self.hs.config.registration.account_threepid_delegate_email is not None
)
# Ask our delegated email identity server # Ask our delegated email identity server
validation_session = await self.threepid_from_creds( validation_session = await self.threepid_from_creds(
self.hs.config.account_threepid_delegate_email, threepid_creds self.hs.config.registration.account_threepid_delegate_email,
threepid_creds,
) )
elif self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL: elif self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
# Get a validated session matching these details # Get a validated session matching these details
@@ -587,10 +590,11 @@ class IdentityHandler(BaseHandler):
return validation_session return validation_session
# Try to validate as msisdn # Try to validate as msisdn
if self.hs.config.account_threepid_delegate_msisdn: if self.hs.config.registration.account_threepid_delegate_msisdn:
# Ask our delegated msisdn identity server # Ask our delegated msisdn identity server
validation_session = await self.threepid_from_creds( validation_session = await self.threepid_from_creds(
self.hs.config.account_threepid_delegate_msisdn, threepid_creds self.hs.config.registration.account_threepid_delegate_msisdn,
threepid_creds,
) )
return validation_session return validation_session
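
A second pattern visible in this file (and most others in this merge): flat config attributes move under per-section namespaces. A before/after sketch, using attribute paths taken from the hunks above:

# before
delegate = hs.config.account_threepid_delegate_msisdn
blacklist = hs.config.federation_ip_range_blacklist
# after
delegate = hs.config.registration.account_threepid_delegate_msisdn
blacklist = hs.config.server.federation_ip_range_blacklist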

View file

@@ -31,8 +31,6 @@ from synapse.util.async_helpers import concurrently_execute
from synapse.util.caches.response_cache import ResponseCache from synapse.util.caches.response_cache import ResponseCache
from synapse.visibility import filter_events_for_client from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
@@ -40,9 +38,11 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class InitialSyncHandler(BaseHandler): class InitialSyncHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.auth = hs.get_auth()
self.state_handler = hs.get_state_handler()
self.hs = hs self.hs = hs
self.state = hs.get_state_handler() self.state = hs.get_state_handler()
self.clock = hs.get_clock() self.clock = hs.get_clock()

View file

@@ -16,6 +16,7 @@
# limitations under the License. # limitations under the License.
import logging import logging
import random import random
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple
from canonicaljson import encode_canonical_json from canonicaljson import encode_canonical_json
@@ -39,9 +40,11 @@ from synapse.api.errors import (
NotFoundError, NotFoundError,
ShadowBanError, ShadowBanError,
SynapseError, SynapseError,
UnsupportedRoomVersionError,
) )
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
from synapse.api.urls import ConsentURIBuilder from synapse.api.urls import ConsentURIBuilder
from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase from synapse.events import EventBase
from synapse.events.builder import EventBuilder from synapse.events.builder import EventBuilder
from synapse.events.snapshot import EventContext from synapse.events.snapshot import EventContext
@@ -59,8 +62,6 @@ from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.metrics import measure_func from synapse.util.metrics import measure_func
from synapse.visibility import filter_events_for_client from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.events.third_party_rules import ThirdPartyEventRules from synapse.events.third_party_rules import ThirdPartyEventRules
from synapse.server import HomeServer from synapse.server import HomeServer
@@ -79,7 +80,7 @@ class MessageHandler:
self.storage = hs.get_storage() self.storage = hs.get_storage()
self.state_store = self.storage.state self.state_store = self.storage.state
self._event_serializer = hs.get_event_client_serializer() self._event_serializer = hs.get_event_client_serializer()
self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages self._ephemeral_events_enabled = hs.config.server.enable_ephemeral_messages
# The scheduled call to self._expire_event. None if no call is currently # The scheduled call to self._expire_event. None if no call is currently
# scheduled. # scheduled.
@@ -413,7 +414,9 @@ class EventCreationHandler:
self.server_name = hs.hostname self.server_name = hs.hostname
self.notifier = hs.get_notifier() self.notifier = hs.get_notifier()
self.config = hs.config self.config = hs.config
self.require_membership_for_aliases = hs.config.require_membership_for_aliases self.require_membership_for_aliases = (
hs.config.server.require_membership_for_aliases
)
self._events_shard_config = self.config.worker.events_shard_config self._events_shard_config = self.config.worker.events_shard_config
self._instance_name = hs.get_instance_name() self._instance_name = hs.get_instance_name()
@@ -423,13 +426,12 @@ class EventCreationHandler:
Membership.JOIN, Membership.JOIN,
Membership.KNOCK, Membership.KNOCK,
} }
if self.hs.config.include_profile_data_on_invite: if self.hs.config.server.include_profile_data_on_invite:
self.membership_types_to_include_profile_data_in.add(Membership.INVITE) self.membership_types_to_include_profile_data_in.add(Membership.INVITE)
self.send_event = ReplicationSendEventRestServlet.make_client(hs) self.send_event = ReplicationSendEventRestServlet.make_client(hs)
# This is only used to get at ratelimit function self.request_ratelimiter = hs.get_request_ratelimiter()
self.base_handler = BaseHandler(hs)
# We arbitrarily limit concurrent event creation for a room to 5. # We arbitrarily limit concurrent event creation for a room to 5.
# This is to stop us from diverging history *too* much. # This is to stop us from diverging history *too* much.
@@ -461,11 +463,11 @@ class EventCreationHandler:
# #
self._rooms_to_exclude_from_dummy_event_insertion: Dict[str, int] = {} self._rooms_to_exclude_from_dummy_event_insertion: Dict[str, int] = {}
# The number of forward extremities before a dummy event is sent. # The number of forward extremities before a dummy event is sent.
self._dummy_events_threshold = hs.config.dummy_events_threshold self._dummy_events_threshold = hs.config.server.dummy_events_threshold
if ( if (
self.config.worker.run_background_tasks self.config.worker.run_background_tasks
and self.config.cleanup_extremities_with_dummy_events and self.config.server.cleanup_extremities_with_dummy_events
): ):
self.clock.looping_call( self.clock.looping_call(
lambda: run_as_background_process( lambda: run_as_background_process(
@@ -477,7 +479,7 @@ class EventCreationHandler:
self._message_handler = hs.get_message_handler() self._message_handler = hs.get_message_handler()
self._ephemeral_events_enabled = hs.config.enable_ephemeral_messages self._ephemeral_events_enabled = hs.config.server.enable_ephemeral_messages
self._external_cache = hs.get_external_cache() self._external_cache = hs.get_external_cache()
@@ -551,16 +553,22 @@ class EventCreationHandler:
await self.auth.check_auth_blocking(requester=requester) await self.auth.check_auth_blocking(requester=requester)
if event_dict["type"] == EventTypes.Create and event_dict["state_key"] == "": if event_dict["type"] == EventTypes.Create and event_dict["state_key"] == "":
room_version = event_dict["content"]["room_version"] room_version_id = event_dict["content"]["room_version"]
room_version_obj = KNOWN_ROOM_VERSIONS.get(room_version_id)
if not room_version_obj:
# this can happen if support is withdrawn for a room version
raise UnsupportedRoomVersionError(room_version_id)
else: else:
try: try:
room_version = await self.store.get_room_version_id( room_version_obj = await self.store.get_room_version(
event_dict["room_id"] event_dict["room_id"]
) )
except NotFoundError: except NotFoundError:
raise AuthError(403, "Unknown room") raise AuthError(403, "Unknown room")
builder = self.event_builder_factory.new(room_version, event_dict) builder = self.event_builder_factory.for_room_version(
room_version_obj, event_dict
)
self.validator.validate_builder(builder, self.config) self.validator.validate_builder(builder, self.config)
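
The hunk above also hardens create-event handling: the room_version string in the event content is resolved to a RoomVersion object up front, and unknown versions are rejected. A hedged sketch of that guard, names from this diff:

room_version_id = event_dict["content"]["room_version"]
room_version_obj = KNOWN_ROOM_VERSIONS.get(room_version_id)
if not room_version_obj:
    # can happen if support for a room version has been withdrawn
    raise UnsupportedRoomVersionError(room_version_id)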
@@ -1066,9 +1074,17 @@ class EventCreationHandler:
EventTypes.Create, EventTypes.Create,
"", "",
): ):
room_version = event.content.get("room_version", RoomVersions.V1.identifier) room_version_id = event.content.get(
"room_version", RoomVersions.V1.identifier
)
room_version_obj = KNOWN_ROOM_VERSIONS.get(room_version_id)
if not room_version_obj:
raise UnsupportedRoomVersionError(
"Attempt to create a room with unsupported room version %s"
% (room_version_id,)
)
else: else:
room_version = await self.store.get_room_version_id(event.room_id) room_version_obj = await self.store.get_room_version(event.room_id)
if event.internal_metadata.is_out_of_band_membership(): if event.internal_metadata.is_out_of_band_membership():
# the only sort of out-of-band-membership events we expect to see here are # the only sort of out-of-band-membership events we expect to see here are
@@ -1077,8 +1093,9 @@ class EventCreationHandler:
assert event.content["membership"] == Membership.LEAVE assert event.content["membership"] == Membership.LEAVE
else: else:
try: try:
await self._event_auth_handler.check_from_context( validate_event_for_room_version(room_version_obj, event)
room_version, event, context await self._event_auth_handler.check_auth_rules_from_context(
room_version_obj, event, context
) )
except AuthError as err: except AuthError as err:
logger.warning("Denying new event %r because %s", event, err) logger.warning("Denying new event %r because %s", event, err)
@@ -1304,7 +1321,7 @@ class EventCreationHandler:
original_event and event.sender != original_event.sender original_event and event.sender != original_event.sender
) )
await self.base_handler.ratelimit( await self.request_ratelimiter.ratelimit(
requester, is_admin_redaction=is_admin_redaction requester, is_admin_redaction=is_admin_redaction
) )
@@ -1459,6 +1476,39 @@ class EventCreationHandler:
if prev_state_ids: if prev_state_ids:
raise AuthError(403, "Changing the room create event is forbidden") raise AuthError(403, "Changing the room create event is forbidden")
if event.type == EventTypes.MSC2716_INSERTION:
room_version = await self.store.get_room_version_id(event.room_id)
room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
create_event = await self.store.get_create_event_for_room(event.room_id)
room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
# Only check an insertion event if the room version
# supports it or the event is from the room creator.
if room_version_obj.msc2716_historical or (
self.config.experimental.msc2716_enabled
and event.sender == room_creator
):
next_batch_id = event.content.get(
EventContentFields.MSC2716_NEXT_BATCH_ID
)
conflicting_insertion_event_id = (
await self.store.get_insertion_event_by_batch_id(
event.room_id, next_batch_id
)
)
if conflicting_insertion_event_id is not None:
# The current insertion event that we're processing is invalid
# because an insertion event already exists in the room with the
# same next_batch_id. We can't allow multiple because the batch
# pointing will get weird, e.g. we can't determine which insertion
# event the batch event is pointing to.
raise SynapseError(
HTTPStatus.BAD_REQUEST,
"Another insertion event already exists with the same next_batch_id",
errcode=Codes.INVALID_PARAM,
)
# Mark any `m.historical` messages as backfilled so they don't appear # Mark any `m.historical` messages as backfilled so they don't appear
# in `/sync` and have the proper decrementing `stream_ordering` as we import # in `/sync` and have the proper decrementing `stream_ordering` as we import
backfilled = False backfilled = False
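
The new MSC2716 block above enforces at most one insertion event per next_batch_id in a room, since a duplicate would make batch events ambiguous about which insertion they point to. A runnable toy model of that uniqueness rule (not Synapse code):

from typing import Dict, Tuple

insertion_events: Dict[Tuple[str, str], str] = {}  # (room_id, next_batch_id) -> event_id

def register_insertion(room_id: str, next_batch_id: str, event_id: str) -> None:
    key = (room_id, next_batch_id)
    if key in insertion_events:
        raise ValueError(
            "Another insertion event already exists with the same next_batch_id"
        )
    insertion_events[key] = event_id

register_insertion("!room:example.org", "batch-1", "$insertion1")
# registering a second event with next_batch_id "batch-1" would raise ValueError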

View file

@@ -85,23 +85,29 @@ class PaginationHandler:
self._purges_by_id: Dict[str, PurgeStatus] = {} self._purges_by_id: Dict[str, PurgeStatus] = {}
self._event_serializer = hs.get_event_client_serializer() self._event_serializer = hs.get_event_client_serializer()
self._retention_default_max_lifetime = hs.config.retention_default_max_lifetime self._retention_default_max_lifetime = (
hs.config.server.retention_default_max_lifetime
)
self._retention_allowed_lifetime_min = hs.config.retention_allowed_lifetime_min self._retention_allowed_lifetime_min = (
self._retention_allowed_lifetime_max = hs.config.retention_allowed_lifetime_max hs.config.server.retention_allowed_lifetime_min
)
self._retention_allowed_lifetime_max = (
hs.config.server.retention_allowed_lifetime_max
)
if hs.config.worker.run_background_tasks and hs.config.retention_enabled: if hs.config.worker.run_background_tasks and hs.config.server.retention_enabled:
# Run the purge jobs described in the configuration file. # Run the purge jobs described in the configuration file.
for job in hs.config.retention_purge_jobs: for job in hs.config.server.retention_purge_jobs:
logger.info("Setting up purge job with config: %s", job) logger.info("Setting up purge job with config: %s", job)
self.clock.looping_call( self.clock.looping_call(
run_as_background_process, run_as_background_process,
job["interval"], job.interval,
"purge_history_for_rooms_in_range", "purge_history_for_rooms_in_range",
self.purge_history_for_rooms_in_range, self.purge_history_for_rooms_in_range,
job["shortest_max_lifetime"], job.shortest_max_lifetime,
job["longest_max_lifetime"], job.longest_max_lifetime,
) )
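
The purge-job hunk above switches from dict lookups (job["interval"]) to attribute access (job.interval), implying the retention jobs became typed config objects. A hedged, runnable sketch under that assumption; the class name here is hypothetical and the field names are read from the call site:

from typing import Optional

import attr

@attr.s(frozen=True, slots=True, auto_attribs=True)
class PurgeJob:
    interval: int                          # ms between runs
    shortest_max_lifetime: Optional[int]   # ms; lower bound on targeted rooms
    longest_max_lifetime: Optional[int]    # ms; upper bound on targeted rooms

job = PurgeJob(
    interval=12 * 60 * 60 * 1000,
    shortest_max_lifetime=None,
    longest_max_lifetime=7 * 24 * 60 * 60 * 1000,
)
print(job.interval, job.shortest_max_lifetime, job.longest_max_lifetime)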
async def purge_history_for_rooms_in_range( async def purge_history_for_rooms_in_range(

View file

@@ -32,8 +32,6 @@ from synapse.types import (
get_domain_from_id, get_domain_from_id,
) )
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
@@ -43,7 +41,7 @@ MAX_DISPLAYNAME_LEN = 256
MAX_AVATAR_URL_LEN = 1000 MAX_AVATAR_URL_LEN = 1000
class ProfileHandler(BaseHandler): class ProfileHandler:
"""Handles fetching and updating user profile information. """Handles fetching and updating user profile information.
ProfileHandler can be instantiated directly on workers and will ProfileHandler can be instantiated directly on workers and will
@@ -54,7 +52,9 @@ class ProfileHandler(BaseHandler):
PROFILE_UPDATE_EVERY_MS = 24 * 60 * 60 * 1000 PROFILE_UPDATE_EVERY_MS = 24 * 60 * 60 * 1000
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.clock = hs.get_clock()
self.hs = hs
self.federation = hs.get_federation_client() self.federation = hs.get_federation_client()
hs.get_federation_registry().register_query_handler( hs.get_federation_registry().register_query_handler(
@@ -62,6 +62,7 @@ class ProfileHandler(BaseHandler):
) )
self.user_directory_handler = hs.get_user_directory_handler() self.user_directory_handler = hs.get_user_directory_handler()
self.request_ratelimiter = hs.get_request_ratelimiter()
if hs.config.worker.run_background_tasks: if hs.config.worker.run_background_tasks:
self.clock.looping_call( self.clock.looping_call(
@@ -178,7 +179,7 @@ class ProfileHandler(BaseHandler):
if not by_admin and target_user != requester.user: if not by_admin and target_user != requester.user:
raise AuthError(400, "Cannot set another user's displayname") raise AuthError(400, "Cannot set another user's displayname")
if not by_admin and not self.hs.config.enable_set_displayname: if not by_admin and not self.hs.config.registration.enable_set_displayname:
profile = await self.store.get_profileinfo(target_user.localpart) profile = await self.store.get_profileinfo(target_user.localpart)
if profile.display_name: if profile.display_name:
raise SynapseError( raise SynapseError(
@@ -268,7 +269,7 @@ class ProfileHandler(BaseHandler):
if not by_admin and target_user != requester.user: if not by_admin and target_user != requester.user:
raise AuthError(400, "Cannot set another user's avatar_url") raise AuthError(400, "Cannot set another user's avatar_url")
if not by_admin and not self.hs.config.enable_set_avatar_url: if not by_admin and not self.hs.config.registration.enable_set_avatar_url:
profile = await self.store.get_profileinfo(target_user.localpart) profile = await self.store.get_profileinfo(target_user.localpart)
if profile.avatar_url: if profile.avatar_url:
raise SynapseError( raise SynapseError(
@@ -346,7 +347,7 @@ class ProfileHandler(BaseHandler):
if not self.hs.is_mine(target_user): if not self.hs.is_mine(target_user):
return return
await self.ratelimit(requester) await self.request_ratelimiter.ratelimit(requester)
# Do not actually update the room state for shadow-banned users. # Do not actually update the room state for shadow-banned users.
if requester.shadow_banned: if requester.shadow_banned:
@@ -397,7 +398,7 @@ class ProfileHandler(BaseHandler):
# when building a membership event. In this case, we must allow the # when building a membership event. In this case, we must allow the
# lookup. # lookup.
if ( if (
not self.hs.config.limit_profile_requests_to_users_who_share_rooms not self.hs.config.server.limit_profile_requests_to_users_who_share_rooms
or not requester or not requester
): ):
return return
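
The ratelimit calls in this file change from the inherited BaseHandler helper to a shared object fetched from the HomeServer. A hedged fragment of the new wiring, names from this diff:

self.request_ratelimiter = hs.get_request_ratelimiter()  # in __init__
# ...
await self.request_ratelimiter.ratelimit(requester)  # at call sites; was self.ratelimit(requester)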

View file

@@ -17,17 +17,14 @@ from typing import TYPE_CHECKING
from synapse.util.async_helpers import Linearizer from synapse.util.async_helpers import Linearizer
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class ReadMarkerHandler(BaseHandler): class ReadMarkerHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.server_name = hs.config.server.server_name self.server_name = hs.config.server.server_name
self.store = hs.get_datastore() self.store = hs.get_datastore()
self.account_data_handler = hs.get_account_data_handler() self.account_data_handler = hs.get_account_data_handler()

View file

@@ -16,7 +16,6 @@ from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple
from synapse.api.constants import ReadReceiptEventFields from synapse.api.constants import ReadReceiptEventFields
from synapse.appservice import ApplicationService from synapse.appservice import ApplicationService
from synapse.handlers._base import BaseHandler
from synapse.streams import EventSource from synapse.streams import EventSource
from synapse.types import JsonDict, ReadReceipt, UserID, get_domain_from_id from synapse.types import JsonDict, ReadReceipt, UserID, get_domain_from_id
@@ -26,10 +25,9 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class ReceiptsHandler(BaseHandler): class ReceiptsHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.notifier = hs.get_notifier()
self.server_name = hs.config.server.server_name self.server_name = hs.config.server.server_name
self.store = hs.get_datastore() self.store = hs.get_datastore()
self.event_auth_handler = hs.get_event_auth_handler() self.event_auth_handler = hs.get_event_auth_handler()

View file

@@ -41,8 +41,6 @@ from synapse.spam_checker_api import RegistrationBehaviour
from synapse.storage.state import StateFilter from synapse.storage.state import StateFilter
from synapse.types import RoomAlias, UserID, create_requester from synapse.types import RoomAlias, UserID, create_requester
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
@@ -85,9 +83,10 @@ class LoginDict(TypedDict):
refresh_token: Optional[str] refresh_token: Optional[str]
class RegistrationHandler(BaseHandler): class RegistrationHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.clock = hs.get_clock()
self.hs = hs self.hs = hs
self.auth = hs.get_auth() self.auth = hs.get_auth()
self._auth_handler = hs.get_auth_handler() self._auth_handler = hs.get_auth_handler()
@@ -116,8 +115,8 @@ class RegistrationHandler(BaseHandler):
self._register_device_client = self.register_device_inner self._register_device_client = self.register_device_inner
self.pusher_pool = hs.get_pusherpool() self.pusher_pool = hs.get_pusherpool()
self.session_lifetime = hs.config.session_lifetime self.session_lifetime = hs.config.registration.session_lifetime
self.access_token_lifetime = hs.config.access_token_lifetime self.access_token_lifetime = hs.config.registration.access_token_lifetime
init_counters_for_auth_provider("") init_counters_for_auth_provider("")
@@ -347,8 +346,13 @@ class RegistrationHandler(BaseHandler):
auth_provider=(auth_provider_id or ""), auth_provider=(auth_provider_id or ""),
).inc() ).inc()
# If the user does not need to consent at registration, auto-join any
# configured rooms.
if not self.hs.config.consent.user_consent_at_registration: if not self.hs.config.consent.user_consent_at_registration:
if not self.hs.config.auto_join_rooms_for_guests and make_guest: if (
not self.hs.config.registration.auto_join_rooms_for_guests
and make_guest
):
logger.info( logger.info(
"Skipping auto-join for %s because auto-join for guests is disabled", "Skipping auto-join for %s because auto-join for guests is disabled",
user_id, user_id,
@@ -394,7 +398,7 @@ class RegistrationHandler(BaseHandler):
"preset": self.hs.config.registration.autocreate_auto_join_room_preset, "preset": self.hs.config.registration.autocreate_auto_join_room_preset,
} }
# If the configuration providers a user ID to create rooms with, use # If the configuration provides a user ID to create rooms with, use
# that instead of the first user registered. # that instead of the first user registered.
requires_join = False requires_join = False
if self.hs.config.registration.auto_join_user_id: if self.hs.config.registration.auto_join_user_id:
@@ -517,7 +521,7 @@ class RegistrationHandler(BaseHandler):
# we don't have a local user in the room to craft up an invite with. # we don't have a local user in the room to craft up an invite with.
requires_invite = await self.store.is_host_joined( requires_invite = await self.store.is_host_joined(
room_id, room_id,
self.server_name, self._server_name,
) )
if requires_invite: if requires_invite:
@@ -861,7 +865,7 @@ class RegistrationHandler(BaseHandler):
# Necessary due to auth checks prior to the threepid being # Necessary due to auth checks prior to the threepid being
# written to the db # written to the db
if is_threepid_reserved( if is_threepid_reserved(
self.hs.config.mau_limits_reserved_threepids, threepid self.hs.config.server.mau_limits_reserved_threepids, threepid
): ):
await self.store.upsert_monthly_active_user(user_id) await self.store.upsert_monthly_active_user(user_id)
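
The final hunk above upserts a newly registered user straight into the MAU table when their threepid is on the reserved list. A runnable, simplified stand-in for the is_threepid_reserved helper it calls (argument order taken from the call site; the real helper lives in Synapse and may differ):

from typing import Any, Dict, List

def is_threepid_reserved(
    reserved_threepids: List[Dict[str, Any]], threepid: Dict[str, Any]
) -> bool:
    # simplified: a threepid is reserved if its medium/address pair appears
    # in the configured mau_limits_reserved_threepids list
    return any(
        t["medium"] == threepid["medium"] and t["address"] == threepid["address"]
        for t in reserved_threepids
    )

reserved = [{"medium": "email", "address": "support@example.org"}]
print(is_threepid_reserved(reserved, {"medium": "email", "address": "support@example.org"}))  # True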

View file

@@ -52,6 +52,7 @@ from synapse.api.errors import (
) )
from synapse.api.filtering import Filter from synapse.api.filtering import Filter
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase from synapse.events import EventBase
from synapse.events.utils import copy_power_levels_contents from synapse.events.utils import copy_power_levels_contents
from synapse.rest.admin._base import assert_user_is_admin from synapse.rest.admin._base import assert_user_is_admin
@ -75,8 +76,6 @@ from synapse.util.caches.response_cache import ResponseCache
from synapse.util.stringutils import parse_and_validate_server_name from synapse.util.stringutils import parse_and_validate_server_name
from synapse.visibility import filter_events_for_client from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
@ -87,15 +86,18 @@ id_server_scheme = "https://"
FIVE_MINUTES_IN_MS = 5 * 60 * 1000 FIVE_MINUTES_IN_MS = 5 * 60 * 1000
class RoomCreationHandler(BaseHandler): class RoomCreationHandler:
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__(hs) self.store = hs.get_datastore()
self.auth = hs.get_auth()
self.clock = hs.get_clock()
self.hs = hs
self.spam_checker = hs.get_spam_checker() self.spam_checker = hs.get_spam_checker()
self.event_creation_handler = hs.get_event_creation_handler() self.event_creation_handler = hs.get_event_creation_handler()
self.room_member_handler = hs.get_room_member_handler() self.room_member_handler = hs.get_room_member_handler()
self._event_auth_handler = hs.get_event_auth_handler() self._event_auth_handler = hs.get_event_auth_handler()
self.config = hs.config self.config = hs.config
self.request_ratelimiter = hs.get_request_ratelimiter()
# Room state based off defined presets # Room state based off defined presets
self._presets_dict: Dict[str, Dict[str, Any]] = { self._presets_dict: Dict[str, Dict[str, Any]] = {
@ -161,7 +163,7 @@ class RoomCreationHandler(BaseHandler):
Raises: Raises:
ShadowBanError if the requester is shadow-banned. ShadowBanError if the requester is shadow-banned.
""" """
await self.ratelimit(requester) await self.request_ratelimiter.ratelimit(requester)
user_id = requester.user.to_string() user_id = requester.user.to_string()
@ -237,8 +239,9 @@ class RoomCreationHandler(BaseHandler):
}, },
}, },
) )
old_room_version = await self.store.get_room_version_id(old_room_id) old_room_version = await self.store.get_room_version(old_room_id)
await self._event_auth_handler.check_from_context( validate_event_for_room_version(old_room_version, tombstone_event)
await self._event_auth_handler.check_auth_rules_from_context(
old_room_version, tombstone_event, tombstone_context old_room_version, tombstone_event, tombstone_context
) )
@ -663,10 +666,10 @@ class RoomCreationHandler(BaseHandler):
raise SynapseError(403, "You are not permitted to create rooms") raise SynapseError(403, "You are not permitted to create rooms")
if ratelimit: if ratelimit:
await self.ratelimit(requester) await self.request_ratelimiter.ratelimit(requester)
room_version_id = config.get( room_version_id = config.get(
"room_version", self.config.default_room_version.identifier "room_version", self.config.server.default_room_version.identifier
) )
if not isinstance(room_version_id, str): if not isinstance(room_version_id, str):
@ -868,6 +871,7 @@ class RoomCreationHandler(BaseHandler):
"invite", "invite",
ratelimit=False, ratelimit=False,
content=content, content=content,
new_room=True,
) )
for invite_3pid in invite_3pid_list: for invite_3pid in invite_3pid_list:
@ -970,6 +974,7 @@ class RoomCreationHandler(BaseHandler):
"join", "join",
ratelimit=ratelimit, ratelimit=ratelimit,
content=creator_join_profile, content=creator_join_profile,
new_room=True,
) )
# We treat the power levels override specially as this needs to be one # We treat the power levels override specially as this needs to be one

View file

@@ -0,0 +1,423 @@
+import logging
+from typing import TYPE_CHECKING, List, Tuple
+
+from synapse.api.constants import EventContentFields, EventTypes
+from synapse.appservice import ApplicationService
+from synapse.http.servlet import assert_params_in_dict
+from synapse.types import JsonDict, Requester, UserID, create_requester
+from synapse.util.stringutils import random_string
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+logger = logging.getLogger(__name__)
+
+
+class RoomBatchHandler:
+    def __init__(self, hs: "HomeServer"):
+        self.hs = hs
+        self.store = hs.get_datastore()
+        self.state_store = hs.get_storage().state
+        self.event_creation_handler = hs.get_event_creation_handler()
+        self.room_member_handler = hs.get_room_member_handler()
+        self.auth = hs.get_auth()
+
+    async def inherit_depth_from_prev_ids(self, prev_event_ids: List[str]) -> int:
+        """Finds the depth which would sort a new event after the most-recent
+        prev_event_id but before the successors of those events. If no
+        successors are found, we assume it's a historical extremity that is
+        part of the current batch and use the same depth as the prev_event_ids.
+
+        Args:
+            prev_event_ids: List of prev event IDs
+
+        Returns:
+            Inherited depth
+        """
+        (
+            most_recent_prev_event_id,
+            most_recent_prev_event_depth,
+        ) = await self.store.get_max_depth_of(prev_event_ids)
+
+        # We want to insert the historical event after the `prev_event` but before the successor event
+        #
+        # We inherit depth from the successor event instead of the `prev_event`
+        # because events returned from `/messages` are first sorted by `topological_ordering`
+        # which is just the `depth` and then tie-break with `stream_ordering`.
+        #
+        # We mark these inserted historical events as "backfilled" which gives them a
+        # negative `stream_ordering`. If we use the same depth as the `prev_event`,
+        # then our historical event will tie-break and be sorted before the `prev_event`
+        # when it should come after.
+        #
+        # We want to use the successor event depth so they appear after `prev_event` because
+        # it has a larger `depth` but before the successor event because the `stream_ordering`
+        # is negative before the successor event.
+        successor_event_ids = await self.store.get_successor_events(
+            [most_recent_prev_event_id]
+        )
+
+        # If we can't find any successor events, then it's a forward extremity of
+        # historical messages and we can just inherit from the previous historical
+        # event which we can already assume has the correct depth where we want
+        # to insert into.
+        if not successor_event_ids:
+            depth = most_recent_prev_event_depth
+        else:
+            (
+                _,
+                oldest_successor_depth,
+            ) = await self.store.get_min_depth_of(successor_event_ids)
+
+            depth = oldest_successor_depth
+
+        return depth
+
+    def create_insertion_event_dict(
+        self, sender: str, room_id: str, origin_server_ts: int
+    ) -> JsonDict:
+        """Creates an event dict for an "insertion" event with the proper fields
+        and a random batch ID.
+
+        Args:
+            sender: The event author MXID
+            room_id: The room ID that the event belongs to
+            origin_server_ts: Timestamp when the event was sent
+
+        Returns:
+            The new event dictionary to insert.
+        """
+        next_batch_id = random_string(8)
+        insertion_event = {
+            "type": EventTypes.MSC2716_INSERTION,
+            "sender": sender,
+            "room_id": room_id,
+            "content": {
+                EventContentFields.MSC2716_NEXT_BATCH_ID: next_batch_id,
+                EventContentFields.MSC2716_HISTORICAL: True,
+            },
+            "origin_server_ts": origin_server_ts,
+        }
+
+        return insertion_event
+
+    async def create_requester_for_user_id_from_app_service(
+        self, user_id: str, app_service: ApplicationService
+    ) -> Requester:
+        """Creates a new requester for the given user_id
+        and validates that the app service is allowed to control
+        the given user.
+
+        Args:
+            user_id: The author MXID that the app service is controlling
+            app_service: The app service that controls the user
+
+        Returns:
+            Requester object
+        """
+        await self.auth.validate_appservice_can_control_user_id(app_service, user_id)
+
+        return create_requester(user_id, app_service=app_service)
+
+    async def get_most_recent_auth_event_ids_from_event_id_list(
+        self, event_ids: List[str]
+    ) -> List[str]:
+        """Find the most recent auth event ids (derived from state events) that
+        allowed that message to be sent. We will use this as a base
+        to auth our historical messages against.
+
+        Args:
+            event_ids: List of event ID's to look at
+
+        Returns:
+            List of event ID's
+        """
+        (
+            most_recent_prev_event_id,
+            _,
+        ) = await self.store.get_max_depth_of(event_ids)
+        # mapping from (type, state_key) -> state_event_id
+        prev_state_map = await self.state_store.get_state_ids_for_event(
+            most_recent_prev_event_id
+        )
+        # List of state event ID's
+        prev_state_ids = list(prev_state_map.values())
+        auth_event_ids = prev_state_ids
+
+        return auth_event_ids
+
+    async def persist_state_events_at_start(
+        self,
+        state_events_at_start: List[JsonDict],
+        room_id: str,
+        initial_auth_event_ids: List[str],
+        app_service_requester: Requester,
+    ) -> List[str]:
+        """Takes all `state_events_at_start` event dictionaries and creates/persists
+        them as floating state events which don't resolve into the current room state.
+        They are floating because they reference a fake prev_event which doesn't connect
+        to the normal DAG at all.
+
+        Args:
+            state_events_at_start: List of state event dictionaries to persist.
+            room_id: Room where you want the events persisted in.
+            initial_auth_event_ids: These will be the auth_events for the first
+                state event created. Each event created afterwards will be
+                added to the list of auth events for the next state event
+                created.
+            app_service_requester: The requester of an application service.
+
+        Returns:
+            List of state event ID's we just persisted
+        """
+        assert app_service_requester.app_service
+
+        state_event_ids_at_start = []
+        auth_event_ids = initial_auth_event_ids.copy()
+        for state_event in state_events_at_start:
+            assert_params_in_dict(
+                state_event, ["type", "origin_server_ts", "content", "sender"]
+            )
+
+            logger.debug(
+                "RoomBatchSendEventRestServlet inserting state_event=%s, auth_event_ids=%s",
+                state_event,
+                auth_event_ids,
+            )
+
+            event_dict = {
+                "type": state_event["type"],
+                "origin_server_ts": state_event["origin_server_ts"],
+                "content": state_event["content"],
+                "room_id": room_id,
+                "sender": state_event["sender"],
+                "state_key": state_event["state_key"],
+            }
+
+            # Mark all events as historical
+            event_dict["content"][EventContentFields.MSC2716_HISTORICAL] = True
+
+            # Make the state events float off on their own so we don't have a
+            # bunch of `@mxid joined the room` noise between each batch
+            fake_prev_event_id = "$" + random_string(43)
+
+            # TODO: This is pretty much the same as some other code to handle inserting state in this file
+            if event_dict["type"] == EventTypes.Member:
+                membership = event_dict["content"].get("membership", None)
+                event_id, _ = await self.room_member_handler.update_membership(
+                    await self.create_requester_for_user_id_from_app_service(
+                        state_event["sender"], app_service_requester.app_service
+                    ),
+                    target=UserID.from_string(event_dict["state_key"]),
+                    room_id=room_id,
+                    action=membership,
+                    content=event_dict["content"],
+                    outlier=True,
+                    prev_event_ids=[fake_prev_event_id],
+                    # Make sure to use a copy of this list because we modify it
+                    # later in the loop here. Otherwise it will be the same
+                    # reference and also update in the event when we append later.
+                    auth_event_ids=auth_event_ids.copy(),
+                )
+            else:
+                # TODO: Add some complement tests that adds state that is not member joins
+                # and will use this code path. Maybe we only want to support join state events
+                # and can get rid of this `else`?
+                (
+                    event,
+                    _,
+                ) = await self.event_creation_handler.create_and_send_nonmember_event(
+                    await self.create_requester_for_user_id_from_app_service(
+                        state_event["sender"], app_service_requester.app_service
+                    ),
+                    event_dict,
+                    outlier=True,
+                    prev_event_ids=[fake_prev_event_id],
+                    # Make sure to use a copy of this list because we modify it
+                    # later in the loop here. Otherwise it will be the same
+                    # reference and also update in the event when we append later.
+                    auth_event_ids=auth_event_ids.copy(),
+                )
+                event_id = event.event_id
+
+            state_event_ids_at_start.append(event_id)
+            auth_event_ids.append(event_id)
+
+        return state_event_ids_at_start
+
+    async def persist_historical_events(
+        self,
+        events_to_create: List[JsonDict],
+        room_id: str,
+        initial_prev_event_ids: List[str],
+        inherited_depth: int,
+        auth_event_ids: List[str],
+        app_service_requester: Requester,
+    ) -> List[str]:
+        """Create and persists all events provided sequentially. Handles the
+        complexity of creating events in chronological order so they can
+        reference each other by prev_event but still persists in
+        reverse-chronological order so they have the correct
+        (topological_ordering, stream_ordering) and sort correctly from
+        /messages.
+
+        Args:
+            events_to_create: List of historical events to create in JSON
+                dictionary format.
+            room_id: Room where you want the events persisted in.
+            initial_prev_event_ids: These will be the prev_events for the first
+                event created. Each event created afterwards will point to the
+                previous event created.
+            inherited_depth: The depth to create the events at (you will
+                probably get this by calling inherit_depth_from_prev_ids(...)).
+            auth_event_ids: Define which events allow you to create the given
+                event in the room.
+            app_service_requester: The requester of an application service.
+
+        Returns:
+            List of persisted event IDs
+        """
+        assert app_service_requester.app_service
+
+        prev_event_ids = initial_prev_event_ids.copy()
+
+        event_ids = []
+        events_to_persist = []
+        for ev in events_to_create:
+            assert_params_in_dict(ev, ["type", "origin_server_ts", "content", "sender"])
+
+            event_dict = {
+                "type": ev["type"],
+                "origin_server_ts": ev["origin_server_ts"],
+                "content": ev["content"],
+                "room_id": room_id,
+                "sender": ev["sender"],  # requester.user.to_string(),
+                "prev_events": prev_event_ids.copy(),
+            }
+
+            # Mark all events as historical
+            event_dict["content"][EventContentFields.MSC2716_HISTORICAL] = True
+
+            event, context = await self.event_creation_handler.create_event(
+                await self.create_requester_for_user_id_from_app_service(
+                    ev["sender"], app_service_requester.app_service
+                ),
+                event_dict,
+                prev_event_ids=event_dict.get("prev_events"),
+                auth_event_ids=auth_event_ids,
+                historical=True,
+                depth=inherited_depth,
+            )
+            logger.debug(
+                "RoomBatchSendEventRestServlet inserting event=%s, prev_event_ids=%s, auth_event_ids=%s",
+                event,
+                prev_event_ids,
+                auth_event_ids,
+            )
+
+            assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % (
+                event.sender,
+            )
+
+            events_to_persist.append((event, context))
+            event_id = event.event_id
+
+            event_ids.append(event_id)
+            prev_event_ids = [event_id]
+
+        # Persist events in reverse-chronological order so they have the
+        # correct stream_ordering as they are backfilled (which decrements).
+        # Events are sorted by (topological_ordering, stream_ordering)
+        # where topological_ordering is just depth.
+        for (event, context) in reversed(events_to_persist):
+            await self.event_creation_handler.handle_new_client_event(
+                await self.create_requester_for_user_id_from_app_service(
+                    event["sender"], app_service_requester.app_service
+                ),
+                event=event,
+                context=context,
+            )
+
+        return event_ids
+
+    async def handle_batch_of_events(
+        self,
+        events_to_create: List[JsonDict],
+        room_id: str,
+        batch_id_to_connect_to: str,
+        initial_prev_event_ids: List[str],
+        inherited_depth: int,
+        auth_event_ids: List[str],
+        app_service_requester: Requester,
+    ) -> Tuple[List[str], str]:
+        """
+        Handles creating and persisting all of the historical events as well
+        as insertion and batch meta events to make the batch navigable in the DAG.
+
+        Args:
+            events_to_create: List of historical events to create in JSON
+                dictionary format.
+            room_id: Room where you want the events created in.
+            batch_id_to_connect_to: The batch_id from the insertion event you
+                want this batch to connect to.
+            initial_prev_event_ids: These will be the prev_events for the first
+                event created. Each event created afterwards will point to the
+                previous event created.
+            inherited_depth: The depth to create the events at (you will
+                probably get this by calling inherit_depth_from_prev_ids(...)).
+            auth_event_ids: Define which events allow you to create the given
+                event in the room.
+            app_service_requester: The requester of an application service.
+
+        Returns:
+            Tuple containing a list of persisted event IDs and the next_batch_id
+        """
+
+        # Connect this current batch to the insertion event from the previous batch
+        last_event_in_batch = events_to_create[-1]
+        batch_event = {
+            "type": EventTypes.MSC2716_BATCH,
+            "sender": app_service_requester.user.to_string(),
+            "room_id": room_id,
+            "content": {
+                EventContentFields.MSC2716_BATCH_ID: batch_id_to_connect_to,
+                EventContentFields.MSC2716_HISTORICAL: True,
+            },
+            # Since the batch event is put at the end of the batch,
+            # where the newest-in-time event is, copy the origin_server_ts from
+            # the last event we're inserting
+            "origin_server_ts": last_event_in_batch["origin_server_ts"],
+        }
+        # Add the batch event to the end of the batch (newest-in-time)
+        events_to_create.append(batch_event)
+
+        # Add an "insertion" event to the start of each batch (next to the oldest-in-time
+        # event in the batch) so the next batch can be connected to this one.
+        insertion_event = self.create_insertion_event_dict(
+            sender=app_service_requester.user.to_string(),
+            room_id=room_id,
+            # Since the insertion event is put at the start of the batch,
+            # where the oldest-in-time event is, copy the origin_server_ts from
+            # the first event we're inserting
+            origin_server_ts=events_to_create[0]["origin_server_ts"],
+        )
+        next_batch_id = insertion_event["content"][
+            EventContentFields.MSC2716_NEXT_BATCH_ID
+        ]
+        # Prepend the insertion event to the start of the batch (oldest-in-time)
+        events_to_create = [insertion_event] + events_to_create
+
+        # Create and persist all of the historical events
+        event_ids = await self.persist_historical_events(
+            events_to_create=events_to_create,
+            room_id=room_id,
+            initial_prev_event_ids=initial_prev_event_ids,
+            inherited_depth=inherited_depth,
+            auth_event_ids=auth_event_ids,
+            app_service_requester=app_service_requester,
+        )
+
+        return event_ids, next_batch_id
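
To make the ordering argument in `inherit_depth_from_prev_ids` concrete, here is a small standalone sketch (not Synapse code; the event tuples are invented) of how `/messages` effectively sorts by `(topological_ordering, stream_ordering)`, and why a backfilled event with a negative stream ordering must take the successor's depth to land after its `prev_event`:

# Standalone sketch: events sort by (depth, stream_ordering).
# Backfilled (historical) events get negative stream orderings.
prev_event = ("prev", 5, 100)        # (name, depth, stream_ordering)
successor = ("successor", 6, 101)

historical_same_depth = ("historical", 5, -1)  # inherits prev_event's depth
historical_succ_depth = ("historical", 6, -1)  # inherits successor's depth

def timeline(*events):
    return [name for name, depth, stream in sorted(events, key=lambda e: (e[1], e[2]))]

# With prev_event's depth, the historical event tie-breaks *before* prev_event:
print(timeline(prev_event, successor, historical_same_depth))
# -> ['historical', 'prev', 'successor']

# With the successor's depth, it lands where we want, between the two:
print(timeline(prev_event, successor, historical_succ_depth))
# -> ['prev', 'historical', 'successor']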

View file

@@ -36,8 +36,6 @@ from synapse.types import JsonDict, ThirdPartyInstanceID
 from synapse.util.caches.descriptors import _CacheContext, cached
 from synapse.util.caches.response_cache import ResponseCache

-from ._base import BaseHandler
-
 if TYPE_CHECKING:
     from synapse.server import HomeServer

@@ -49,9 +47,10 @@ REMOTE_ROOM_LIST_POLL_INTERVAL = 60 * 1000
 EMPTY_THIRD_PARTY_ID = ThirdPartyInstanceID(None, None)


-class RoomListHandler(BaseHandler):
+class RoomListHandler:
     def __init__(self, hs: "HomeServer"):
-        super().__init__(hs)
+        self.store = hs.get_datastore()
+        self.hs = hs
         self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search
         self.response_cache: ResponseCache[
             Tuple[Optional[int], Optional[str], Optional[ThirdPartyInstanceID]]

View file

@@ -51,8 +51,6 @@ from synapse.types import (
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_left_room

-from ._base import BaseHandler
-
 if TYPE_CHECKING:
     from synapse.server import HomeServer

@@ -89,8 +87,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         self.spam_checker = hs.get_spam_checker()
         self.third_party_event_rules = hs.get_third_party_event_rules()
         self._server_notices_mxid = self.config.servernotices.server_notices_mxid
-        self._enable_lookup = hs.config.enable_3pid_lookup
-        self.allow_per_room_profiles = self.config.allow_per_room_profiles
+        self._enable_lookup = hs.config.registration.enable_3pid_lookup
+        self.allow_per_room_profiles = self.config.server.allow_per_room_profiles

         self._join_rate_limiter_local = Ratelimiter(
             store=self.store,
@@ -118,9 +116,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
             burst_count=hs.config.ratelimiting.rc_invites_per_user.burst_count,
         )

-        # This is only used to get at the ratelimit function. It's fine there are
-        # multiple of these as it doesn't store state.
-        self.base_handler = BaseHandler(hs)
+        self.request_ratelimiter = hs.get_request_ratelimiter()

     @abc.abstractmethod
     async def _remote_join(
@@ -434,6 +430,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         third_party_signed: Optional[dict] = None,
         ratelimit: bool = True,
         content: Optional[dict] = None,
+        new_room: bool = False,
         require_consent: bool = True,
         outlier: bool = False,
         prev_event_ids: Optional[List[str]] = None,
@@ -451,6 +448,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
             third_party_signed: Information from a 3PID invite.
             ratelimit: Whether to rate limit the request.
             content: The content of the created event.
+            new_room: Whether the membership update is happening in the context of a room
+                creation.
             require_consent: Whether consent is required.
             outlier: Indicates whether the event is an `outlier`, i.e. if
                 it's from an arbitrary point and floating in the DAG as
@@ -485,6 +484,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
                 third_party_signed=third_party_signed,
                 ratelimit=ratelimit,
                 content=content,
+                new_room=new_room,
                 require_consent=require_consent,
                 outlier=outlier,
                 prev_event_ids=prev_event_ids,
@@ -504,6 +504,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         third_party_signed: Optional[dict] = None,
         ratelimit: bool = True,
         content: Optional[dict] = None,
+        new_room: bool = False,
         require_consent: bool = True,
         outlier: bool = False,
         prev_event_ids: Optional[List[str]] = None,
@@ -523,6 +524,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
             third_party_signed:
             ratelimit:
             content:
+            new_room: Whether the membership update is happening in the context of a room
+                creation.
             require_consent:
             outlier: Indicates whether the event is an `outlier`, i.e. if
                 it's from an arbitrary point and floating in the DAG as
@@ -625,7 +628,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         is_requester_admin = await self.auth.is_server_admin(requester.user)
         if not is_requester_admin:
-            if self.config.block_non_admin_invites:
+            if self.config.server.block_non_admin_invites:
                 logger.info(
                     "Blocking invite: user is not admin and non-admin "
                     "invites disabled"
@@ -726,6 +729,30 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
                 # so don't really fit into the general auth process.
                 raise AuthError(403, "Guest access not allowed")

+            # Figure out whether the user is a server admin to determine whether they
+            # should be able to bypass the spam checker.
+            if (
+                self._server_notices_mxid is not None
+                and requester.user.to_string() == self._server_notices_mxid
+            ):
+                # allow the server notices mxid to join rooms
+                bypass_spam_checker = True
+            else:
+                bypass_spam_checker = await self.auth.is_server_admin(requester.user)
+
+            inviter = await self._get_inviter(target.to_string(), room_id)
+            if (
+                not bypass_spam_checker
+                # We assume that if the spam checker allowed the user to create
+                # a room then they're allowed to join it.
+                and not new_room
+                and not await self.spam_checker.user_may_join_room(
+                    target.to_string(), room_id, is_invited=inviter is not None
+                )
+            ):
+                raise SynapseError(403, "Not allowed to join this room")
+
             # Check if a remote join should be performed.
             remote_join, remote_room_hosts = await self._should_perform_remote_join(
                 target.to_string(), room_id, remote_room_hosts, content, is_host_in_room
@@ -1230,7 +1257,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         Raises:
             ShadowBanError if the requester has been shadow-banned.
         """
-        if self.config.block_non_admin_invites:
+        if self.config.server.block_non_admin_invites:
             is_requester_admin = await self.auth.is_server_admin(requester.user)
             if not is_requester_admin:
                 raise SynapseError(
@@ -1244,7 +1271,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):

         # We need to rate limit *before* we send out any 3PID invites, so we
         # can't just rely on the standard ratelimiting of events.
-        await self.base_handler.ratelimit(requester)
+        await self.request_ratelimiter.ratelimit(requester)

         can_invite = await self.third_party_event_rules.check_threepid_can_be_invited(
             medium, address, room_id
@@ -1268,10 +1295,22 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         if invitee:
             # Note that update_membership with an action of "invite" can raise
             # a ShadowBanError, but this was done above already.
+            # We don't check the invite against the spamchecker(s) here (through
+            # user_may_invite) because we'll do it further down the line anyway (in
+            # update_membership_locked).
             _, stream_id = await self.update_membership(
                 requester, UserID.from_string(invitee), room_id, "invite", txn_id=txn_id
             )
         else:
+            # Check if the spamchecker(s) allow this invite to go through.
+            if not await self.spam_checker.user_may_send_3pid_invite(
+                inviter_userid=requester.user.to_string(),
+                medium=medium,
+                address=address,
+                room_id=room_id,
+            ):
+                raise SynapseError(403, "Cannot send threepid invite")
+
             stream_id = await self._make_and_store_3pid_invite(
                 requester,
                 id_server,
@@ -1428,7 +1467,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
         Returns: bool of whether the complexity is too great, or None
             if unable to be fetched
         """
-        max_complexity = self.hs.config.limit_remote_rooms.complexity
+        max_complexity = self.hs.config.server.limit_remote_rooms.complexity
         complexity = await self.federation_handler.get_room_complexity(
             remote_room_hosts, room_id
         )
@@ -1444,7 +1483,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
         Args:
             room_id: The room ID to check for complexity.
         """
-        max_complexity = self.hs.config.limit_remote_rooms.complexity
+        max_complexity = self.hs.config.server.limit_remote_rooms.complexity
         complexity = await self.store.get_room_complexity(room_id)

         return complexity["v1"] > max_complexity
@@ -1468,8 +1507,11 @@ class RoomMemberMasterHandler(RoomMemberHandler):
         if len(remote_room_hosts) == 0:
             raise SynapseError(404, "No known servers")

-        check_complexity = self.hs.config.limit_remote_rooms.enabled
-        if check_complexity and self.hs.config.limit_remote_rooms.admins_can_join:
+        check_complexity = self.hs.config.server.limit_remote_rooms.enabled
+        if (
+            check_complexity
+            and self.hs.config.server.limit_remote_rooms.admins_can_join
+        ):
             check_complexity = not await self.auth.is_server_admin(user)

         if check_complexity:
@@ -1480,7 +1522,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
             if too_complex is True:
                 raise SynapseError(
                     code=400,
-                    msg=self.hs.config.limit_remote_rooms.complexity_error,
+                    msg=self.hs.config.server.limit_remote_rooms.complexity_error,
                     errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
                 )
@@ -1515,7 +1557,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
             )
             raise SynapseError(
                 code=400,
-                msg=self.hs.config.limit_remote_rooms.complexity_error,
+                msg=self.hs.config.server.limit_remote_rooms.complexity_error,
                 errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
             )
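
The two new gates above call into the pluggable spam checker: `user_may_join_room` during membership updates and `user_may_send_3pid_invite` before storing a third-party invite. A minimal sketch of a module exercising both, assuming the standard `register_spam_checker_callbacks` module API; the blocking rules and identifiers here are invented for illustration:

# Sketch of a spam-checker module implementing the two callbacks invoked
# above. The policy (one quarantined room, one blocked email domain) is
# purely illustrative.
class ExampleSpamChecker:
    def __init__(self, config, api):
        self._api = api
        api.register_spam_checker_callbacks(
            user_may_join_room=self.user_may_join_room,
            user_may_send_3pid_invite=self.user_may_send_3pid_invite,
        )

    async def user_may_join_room(
        self, user_id: str, room_id: str, is_invited: bool
    ) -> bool:
        # Returning False makes update_membership raise the 403 above.
        return room_id != "!quarantined:example.com"

    async def user_may_send_3pid_invite(
        self, inviter_userid: str, medium: str, address: str, room_id: str
    ) -> bool:
        # Block email invites to a made-up domain; allow everything else.
        return not (medium == "email" and address.endswith("@spam.example.com"))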

View file

@@ -22,7 +22,6 @@ from saml2.client import Saml2Client

 from synapse.api.errors import SynapseError
 from synapse.config import ConfigError
-from synapse.handlers._base import BaseHandler
 from synapse.handlers.sso import MappingException, UserAttributes
 from synapse.http.servlet import parse_string
 from synapse.http.site import SynapseRequest
@@ -51,9 +50,11 @@ class Saml2SessionData:
     ui_auth_session_id: Optional[str] = None


-class SamlHandler(BaseHandler):
+class SamlHandler:
     def __init__(self, hs: "HomeServer"):
-        super().__init__(hs)
+        self.store = hs.get_datastore()
+        self.clock = hs.get_clock()
+        self.server_name = hs.hostname
         self._saml_client = Saml2Client(hs.config.saml2.saml2_sp_config)
         self._saml_idp_entityid = hs.config.saml2.saml2_idp_entityid

View file

@@ -26,17 +26,18 @@ from synapse.storage.state import StateFilter
 from synapse.types import JsonDict, UserID
 from synapse.visibility import filter_events_for_client

-from ._base import BaseHandler
-
 if TYPE_CHECKING:
     from synapse.server import HomeServer

 logger = logging.getLogger(__name__)


-class SearchHandler(BaseHandler):
+class SearchHandler:
     def __init__(self, hs: "HomeServer"):
-        super().__init__(hs)
+        self.store = hs.get_datastore()
+        self.state_handler = hs.get_state_handler()
+        self.clock = hs.get_clock()
+        self.hs = hs
         self._event_serializer = hs.get_event_client_serializer()
         self.storage = hs.get_storage()
         self.state_store = self.storage.state
@@ -105,7 +106,7 @@ class SearchHandler(BaseHandler):
             dict to be returned to the client with results of search
         """

-        if not self.hs.config.enable_search:
+        if not self.hs.config.server.enable_search:
             raise SynapseError(400, "Search is disabled on this homeserver")

         batch_group = None

View file

@@ -105,8 +105,13 @@ async def _sendmail(
     # set to enable TLS.
     factory = build_sender_factory(hostname=smtphost if enable_tls else None)

-    # the IReactorTCP interface claims host has to be a bytes, which seems to be wrong
-    reactor.connectTCP(smtphost, smtpport, factory, timeout=30, bindAddress=None)  # type: ignore[arg-type]
+    reactor.connectTCP(
+        smtphost,  # type: ignore[arg-type]
+        smtpport,
+        factory,
+        timeout=30,
+        bindAddress=None,
+    )

     await make_deferred_yieldable(d)

View file

@@ -17,19 +17,17 @@ from typing import TYPE_CHECKING, Optional

 from synapse.api.errors import Codes, StoreError, SynapseError
 from synapse.types import Requester

-from ._base import BaseHandler
-
 if TYPE_CHECKING:
     from synapse.server import HomeServer

 logger = logging.getLogger(__name__)


-class SetPasswordHandler(BaseHandler):
+class SetPasswordHandler:
     """Handler which deals with changing user account passwords"""

     def __init__(self, hs: "HomeServer"):
-        super().__init__(hs)
+        self.store = hs.get_datastore()
         self._auth_handler = hs.get_auth_handler()
         self._device_handler = hs.get_device_handler()

View file

@@ -153,21 +153,23 @@ class _BaseThreepidAuthChecker:

         # msisdns are currently always ThreepidBehaviour.REMOTE
         if medium == "msisdn":
-            if not self.hs.config.account_threepid_delegate_msisdn:
+            if not self.hs.config.registration.account_threepid_delegate_msisdn:
                 raise SynapseError(
                     400, "Phone number verification is not enabled on this homeserver"
                 )
             threepid = await identity_handler.threepid_from_creds(
-                self.hs.config.account_threepid_delegate_msisdn, threepid_creds
+                self.hs.config.registration.account_threepid_delegate_msisdn,
+                threepid_creds,
             )
         elif medium == "email":
             if (
                 self.hs.config.email.threepid_behaviour_email
                 == ThreepidBehaviour.REMOTE
             ):
-                assert self.hs.config.account_threepid_delegate_email
+                assert self.hs.config.registration.account_threepid_delegate_email
                 threepid = await identity_handler.threepid_from_creds(
-                    self.hs.config.account_threepid_delegate_email, threepid_creds
+                    self.hs.config.registration.account_threepid_delegate_email,
+                    threepid_creds,
                 )
             elif (
                 self.hs.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL
@@ -240,7 +242,7 @@ class MsisdnAuthChecker(UserInteractiveAuthChecker, _BaseThreepidAuthChecker):
         _BaseThreepidAuthChecker.__init__(self, hs)

     def is_enabled(self) -> bool:
-        return bool(self.hs.config.account_threepid_delegate_msisdn)
+        return bool(self.hs.config.registration.account_threepid_delegate_msisdn)

     async def check_auth(self, authdict: dict, clientip: str) -> Any:
         return await self._check_threepid("msisdn", authdict)
@@ -252,7 +254,7 @@ class RegistrationTokenAuthChecker(UserInteractiveAuthChecker):
     def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
         self.hs = hs
-        self._enabled = bool(hs.config.registration_requires_token)
+        self._enabled = bool(hs.config.registration.registration_requires_token)
         self.store = hs.get_datastore()

     def is_enabled(self) -> bool:

View file

@@ -60,7 +60,7 @@ class UserDirectoryHandler(StateDeltasHandler):
         self.clock = hs.get_clock()
         self.notifier = hs.get_notifier()
         self.is_mine_id = hs.is_mine_id
-        self.update_user_directory = hs.config.update_user_directory
+        self.update_user_directory = hs.config.server.update_user_directory
         self.search_all_users = hs.config.userdirectory.user_directory_search_all_users
         self.spam_checker = hs.get_spam_checker()
         # The current position in the current_state_delta stream
@@ -132,12 +132,7 @@ class UserDirectoryHandler(StateDeltasHandler):
         # FIXME(#3714): We should probably do this in the same worker as all
         # the other changes.

-        # Support users are for diagnostics and should not appear in the user directory.
-        is_support = await self.store.is_support_user(user_id)
-        # When change profile information of deactivated user it should not appear in the user directory.
-        is_deactivated = await self.store.get_user_deactivated_status(user_id)
-
-        if not (is_support or is_deactivated):
+        if await self.store.should_include_local_user_in_dir(user_id):
             await self.store.update_profile_in_user_dir(
                 user_id, profile.display_name, profile.avatar_url
             )
@@ -208,6 +203,7 @@ class UserDirectoryHandler(StateDeltasHandler):
                     public_value=Membership.JOIN,
                 )

+                is_remote = not self.is_mine_id(state_key)
                 if change is MatchChange.now_false:
                     # Need to check if the server left the room entirely, if so
                     # we might need to remove all the users in that room
@@ -225,32 +221,36 @@ class UserDirectoryHandler(StateDeltasHandler):
                         for user_id in user_ids:
                             await self._handle_remove_user(room_id, user_id)
-                        return
+                        continue
                     else:
                         logger.debug("Server is still in room: %r", room_id)

-                is_support = await self.store.is_support_user(state_key)
-                if not is_support:
+                include_in_dir = (
+                    is_remote
+                    or await self.store.should_include_local_user_in_dir(state_key)
+                )
+                if include_in_dir:
                     if change is MatchChange.no_change:
-                        # Handle any profile changes
-                        await self._handle_profile_change(
-                            state_key, room_id, prev_event_id, event_id
-                        )
+                        # Handle any profile changes for remote users.
+                        # (For local users we are not forced to scan membership
+                        # events; instead the rest of the application calls
+                        # `handle_local_profile_change`.)
+                        if is_remote:
+                            await self._handle_profile_change(
+                                state_key, room_id, prev_event_id, event_id
+                            )
                         continue

                     if change is MatchChange.now_true:  # The user joined
-                        event = await self.store.get_event(event_id, allow_none=True)
-                        # It isn't expected for this event to not exist, but we
-                        # don't want the entire background process to break.
-                        if event is None:
-                            continue
-
-                        profile = ProfileInfo(
-                            avatar_url=event.content.get("avatar_url"),
-                            display_name=event.content.get("displayname"),
-                        )
-
-                        await self._handle_new_user(room_id, state_key, profile)
+                        # This may be the first time we've seen a remote user. If
+                        # so, ensure we have a directory entry for them. (We don't
+                        # need to do this for local users: their directory entry
+                        # is created at the point of registration.)
+                        if is_remote:
+                            await self._upsert_directory_entry_for_remote_user(
+                                state_key, event_id
+                            )
+                        await self._track_user_joined_room(room_id, state_key)
                     else:  # The user left
                         await self._handle_remove_user(room_id, state_key)
                 else:
@@ -300,7 +300,7 @@ class UserDirectoryHandler(StateDeltasHandler):
             room_id
         )

-        logger.debug("Change: %r, publicness: %r", publicness, is_public)
+        logger.debug("Publicness change: %r, is_public: %r", publicness, is_public)

         if publicness is MatchChange.now_true and not is_public:
             # If we became world readable but room isn't currently public then
@@ -311,42 +311,50 @@ class UserDirectoryHandler(StateDeltasHandler):
             # ignore the change
             return

-        other_users_in_room_with_profiles = (
-            await self.store.get_users_in_room_with_profiles(room_id)
-        )
+        users_in_room = await self.store.get_users_in_room(room_id)

         # Remove every user from the sharing tables for that room.
-        for user_id in other_users_in_room_with_profiles.keys():
+        for user_id in users_in_room:
             await self.store.remove_user_who_share_room(user_id, room_id)

         # Then, re-add them to the tables.
-        # NOTE: this is not the most efficient method, as handle_new_user sets
+        # NOTE: this is not the most efficient method, as _track_user_joined_room sets
         # up local_user -> other_user and other_user_whos_local -> local_user,
         # which when ran over an entire room, will result in the same values
         # being added multiple times. The batching upserts shouldn't make this
         # too bad, though.
-        for user_id, profile in other_users_in_room_with_profiles.items():
-            await self._handle_new_user(room_id, user_id, profile)
+        for user_id in users_in_room:
+            await self._track_user_joined_room(room_id, user_id)

-    async def _handle_new_user(
-        self, room_id: str, user_id: str, profile: ProfileInfo
+    async def _upsert_directory_entry_for_remote_user(
+        self, user_id: str, event_id: str
     ) -> None:
-        """Called when we might need to add user to directory
-
-        Args:
-            room_id: The room ID that user joined or started being public
-            user_id
-        """
+        """A remote user has just joined a room. Ensure they have an entry in
+        the user directory. The caller is responsible for making sure they're
+        remote.
+        """
+        event = await self.store.get_event(event_id, allow_none=True)
+        # It isn't expected for this event to not exist, but we
+        # don't want the entire background process to break.
+        if event is None:
+            return
+
         logger.debug("Adding new user to dir, %r", user_id)

         await self.store.update_profile_in_user_dir(
-            user_id, profile.display_name, profile.avatar_url
+            user_id, event.content.get("displayname"), event.content.get("avatar_url")
         )

+    async def _track_user_joined_room(self, room_id: str, user_id: str) -> None:
+        """Someone's just joined a room. Update `users_in_public_rooms` or
+        `users_who_share_private_rooms` as appropriate.
+
+        The caller is responsible for ensuring that the given user is not excluded
+        from the user directory.
+        """
         is_public = await self.store.is_room_world_readable_or_publicly_joinable(
             room_id
         )
-        # Now we update users who share rooms with users.
         other_users_in_room = await self.store.get_users_in_room(room_id)

         if is_public:
@@ -356,13 +364,7 @@ class UserDirectoryHandler(StateDeltasHandler):

         # First, if they're our user then we need to update for every user
         if self.is_mine_id(user_id):
-
-            is_appservice = self.store.get_if_app_services_interested_in_user(
-                user_id
-            )
-
-            # We don't care about appservice users.
-            if not is_appservice:
+            if await self.store.should_include_local_user_in_dir(user_id):
                 for other_user_id in other_users_in_room:
                     if user_id == other_user_id:
                         continue
@@ -374,10 +376,10 @@ class UserDirectoryHandler(StateDeltasHandler):
                 if user_id == other_user_id:
                     continue

-                is_appservice = self.store.get_if_app_services_interested_in_user(
-                    other_user_id
-                )
-                if self.is_mine_id(other_user_id) and not is_appservice:
+                include_other_user = self.is_mine_id(
+                    other_user_id
+                ) and await self.store.should_include_local_user_in_dir(other_user_id)
+                if include_other_user:
                     to_insert.add((other_user_id, user_id))

         if to_insert:
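
`should_include_local_user_in_dir` consolidates the three exclusion checks that this diff removes from its call sites: support users, deactivated users, and appservice-controlled users. Roughly, as a sketch inferred from the checks it replaces here (the real store method may cover more cases):

# Sketch of the consolidated check, inferred from the removed call-site
# logic above; not the actual store implementation.
async def should_include_local_user_in_dir(self, user_id: str) -> bool:
    # Support users are for diagnostics and should not appear in the directory.
    if await self.is_support_user(user_id):
        return False
    # Deactivated users should not appear in (or remain in) the directory.
    if await self.get_user_deactivated_status(user_id):
        return False
    # We don't care about appservice users.
    if self.get_if_app_services_interested_in_user(user_id):
        return False
    return True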

View file

@@ -913,7 +913,7 @@ class InsecureInterceptableContextFactory(ssl.ContextFactory):

     def __init__(self):
         self._context = SSL.Context(SSL.SSLv23_METHOD)
-        self._context.set_verify(VERIFY_NONE, lambda *_: None)
+        self._context.set_verify(VERIFY_NONE, lambda *_: False)

     def getContext(self, hostname=None, port=None):
         return self._context

View file

@@ -327,23 +327,23 @@ class MatrixFederationHttpClient:
         self.reactor = hs.get_reactor()

         user_agent = hs.version_string
-        if hs.config.user_agent_suffix:
-            user_agent = "%s %s" % (user_agent, hs.config.user_agent_suffix)
+        if hs.config.server.user_agent_suffix:
+            user_agent = "%s %s" % (user_agent, hs.config.server.user_agent_suffix)
         user_agent = user_agent.encode("ascii")

         federation_agent = MatrixFederationAgent(
             self.reactor,
             tls_client_options_factory,
             user_agent,
-            hs.config.federation_ip_range_whitelist,
-            hs.config.federation_ip_range_blacklist,
+            hs.config.server.federation_ip_range_whitelist,
+            hs.config.server.federation_ip_range_blacklist,
         )

         # Use a BlacklistingAgentWrapper to prevent circumventing the IP
         # blacklist via IP literals in server names
         self.agent = BlacklistingAgentWrapper(
             federation_agent,
-            ip_blacklist=hs.config.federation_ip_range_blacklist,
+            ip_blacklist=hs.config.server.federation_ip_range_blacklist,
         )

         self.clock = hs.get_clock()

View file

@@ -563,7 +563,10 @@ class _ByteProducer:

         try:
             self._request.registerProducer(self, True)
-        except RuntimeError as e:
+        except AttributeError as e:
+            # Calling self._request.registerProducer might raise an AttributeError since
+            # the underlying Twisted code calls self._request.channel.registerProducer,
+            # however self._request.channel will be None if the connection was lost.
             logger.info("Connection disconnected before response was written: %r", e)

             # We drop our references to data we'll not use.
View file

@@ -65,6 +65,12 @@ class JsonFormatter(logging.Formatter):
             if key not in _IGNORED_LOG_RECORD_ATTRIBUTES:
                 event[key] = value

+        if record.exc_info:
+            exc_type, exc_value, _ = record.exc_info
+            if exc_type:
+                event["exc_type"] = f"{exc_type.__name__}"
+                event["exc_value"] = f"{exc_value}"
+
         return _encoder.encode(event)
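
The effect of the new `exc_info` handling, shown with a minimal standalone formatter (a simplified stand-in for the class above, not Synapse's actual implementation):

import json
import logging

class MiniJsonFormatter(logging.Formatter):
    def format(self, record: logging.LogRecord) -> str:
        event = {"log": record.getMessage()}
        # Mirrors the added lines: surface the exception type and value
        # as top-level keys on the JSON log line.
        if record.exc_info:
            exc_type, exc_value, _ = record.exc_info
            if exc_type:
                event["exc_type"] = f"{exc_type.__name__}"
                event["exc_value"] = f"{exc_value}"
        return json.dumps(event)

handler = logging.StreamHandler()
handler.setFormatter(MiniJsonFormatter())
logger = logging.getLogger("demo")
logger.addHandler(handler)

try:
    1 / 0
except ZeroDivisionError:
    logger.exception("boom")
# -> {"log": "boom", "exc_type": "ZeroDivisionError", "exc_value": "division by zero"}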

View file

@@ -52,7 +52,7 @@ try:

     is_thread_resource_usage_supported = True

-    def get_thread_resource_usage() -> "Optional[resource._RUsage]":
+    def get_thread_resource_usage() -> "Optional[resource.struct_rusage]":
         return resource.getrusage(RUSAGE_THREAD)

@@ -61,7 +61,7 @@ except Exception:
     # won't track resource usage.
     is_thread_resource_usage_supported = False

-    def get_thread_resource_usage() -> "Optional[resource._RUsage]":
+    def get_thread_resource_usage() -> "Optional[resource.struct_rusage]":
         return None

@@ -226,10 +226,10 @@ class _Sentinel:
     def copy_to(self, record):
         pass

-    def start(self, rusage: "Optional[resource._RUsage]"):
+    def start(self, rusage: "Optional[resource.struct_rusage]"):
         pass

-    def stop(self, rusage: "Optional[resource._RUsage]"):
+    def stop(self, rusage: "Optional[resource.struct_rusage]"):
         pass

     def add_database_transaction(self, duration_sec):
@@ -289,7 +289,7 @@ class LoggingContext:

         # The thread resource usage when the logcontext became active. None
         # if the context is not currently active.
-        self.usage_start: Optional[resource._RUsage] = None
+        self.usage_start: Optional[resource.struct_rusage] = None

         self.main_thread = get_thread_id()
         self.request = None
@@ -410,7 +410,7 @@ class LoggingContext:
         # we also track the current scope:
         record.scope = self.scope

-    def start(self, rusage: "Optional[resource._RUsage]") -> None:
+    def start(self, rusage: "Optional[resource.struct_rusage]") -> None:
         """
         Record that this logcontext is currently running.

@@ -435,7 +435,7 @@ class LoggingContext:
         else:
             self.usage_start = rusage

-    def stop(self, rusage: "Optional[resource._RUsage]") -> None:
+    def stop(self, rusage: "Optional[resource.struct_rusage]") -> None:
         """
         Record that this logcontext is no longer running.

@@ -490,7 +490,7 @@ class LoggingContext:

         return res

-    def _get_cputime(self, current: "resource._RUsage") -> Tuple[float, float]:
+    def _get_cputime(self, current: "resource.struct_rusage") -> Tuple[float, float]:
         """Get the cpu usage time between start() and the given rusage

         Args:
View file

@@ -339,6 +339,7 @@ def ensure_active_span(message, ret=None):
                     "There was no active span when trying to %s."
                     " Did you forget to start one or did a context slip?",
                     message,
+                    stack_info=True,
                 )

                 return ret
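
`stack_info=True` is the standard-library `logging` flag that appends the current call stack to the record, which helps locate where the span went missing. For example:

import logging

logging.basicConfig(level=logging.INFO)
# stack_info=True appends a "Stack (most recent call last):" trace to the
# log line, without needing an exception to be in flight.
logging.getLogger(__name__).error(
    "There was no active span when trying to %s.", "set a tag", stack_info=True
)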

View file

@@ -265,7 +265,7 @@ class BackgroundProcessLoggingContext(LoggingContext):
         super().__init__("%s-%s" % (name, instance_id))
         self._proc = _BackgroundProcess(name, self)

-    def start(self, rusage: "Optional[resource._RUsage]"):
+    def start(self, rusage: "Optional[resource.struct_rusage]"):
         """Log context has started running (again)."""

         super().start(rusage)

View file

@@ -94,7 +94,7 @@ class Pusher(metaclass=abc.ABCMeta):
         self._start_processing()

     @abc.abstractmethod
-    def _start_processing(self):
+    def _start_processing(self) -> None:
         """Start processing push notifications."""
         raise NotImplementedError()

View file

@@ -290,6 +290,12 @@ def _condition_checker(
     return True


+MemberMap = Dict[str, Tuple[str, str]]
+Rule = Dict[str, dict]
+RulesByUser = Dict[str, List[Rule]]
+StateGroup = Union[object, int]
+
+
 @attr.s(slots=True)
 class RulesForRoomData:
     """The data stored in the cache by `RulesForRoom`.
@@ -299,16 +305,16 @@ class RulesForRoomData:
     """

     # event_id -> (user_id, state)
-    member_map = attr.ib(type=Dict[str, Tuple[str, str]], factory=dict)
+    member_map = attr.ib(type=MemberMap, factory=dict)
     # user_id -> rules
-    rules_by_user = attr.ib(type=Dict[str, List[Dict[str, dict]]], factory=dict)
+    rules_by_user = attr.ib(type=RulesByUser, factory=dict)

     # The last state group we updated the caches for. If the state_group of
     # a new event comes along, we know that we can just return the cached
     # result.
     # On invalidation of the rules themselves (if the user changes them),
     # we invalidate everything and set state_group to `object()`
-    state_group = attr.ib(type=Union[object, int], factory=object)
+    state_group = attr.ib(type=StateGroup, factory=object)

     # A sequence number to keep track of when we're allowed to update the
     # cache. We bump the sequence number when we invalidate the cache. If
@@ -532,7 +538,13 @@ class RulesForRoom:

         self.update_cache(sequence, members, ret_rules_by_user, state_group)

-    def update_cache(self, sequence, members, rules_by_user, state_group) -> None:
+    def update_cache(
+        self,
+        sequence: int,
+        members: MemberMap,
+        rules_by_user: RulesByUser,
+        state_group: StateGroup,
+    ) -> None:
         if sequence == self.data.sequence:
             self.data.member_map.update(members)
             self.data.rules_by_user = rules_by_user

View file

@@ -19,7 +19,9 @@ from synapse.push.rulekinds import PRIORITY_CLASS_INVERSE_MAP, PRIORITY_CLASS_MAP
 from synapse.types import UserID


-def format_push_rules_for_user(user: UserID, ruleslist) -> Dict[str, Dict[str, list]]:
+def format_push_rules_for_user(
+    user: UserID, ruleslist: List
+) -> Dict[str, Dict[str, list]]:
     """Converts a list of rawrules and an enabled map into nested dictionaries
     to match the Matrix client-server format for push rules"""

View file

@@ -396,10 +396,10 @@ class HttpPusher(Pusher):
             rejected = resp["rejected"]
         return rejected

-    async def _send_badge(self, badge):
+    async def _send_badge(self, badge: int) -> None:
         """
         Args:
-            badge (int): number of unread messages
+            badge: number of unread messages
         """
         logger.debug("Sending updated badge count %d to %s", badge, self.name)
         d = {

View file

@@ -892,7 +892,7 @@ def safe_text(raw_text: str) -> jinja2.Markup:
         A Markup object ready to safely use in a Jinja template.
     """
     return jinja2.Markup(
-        bleach.linkify(bleach.clean(raw_text, tags=[], attributes={}, strip=False))
+        bleach.linkify(bleach.clean(raw_text, tags=[], attributes=[], strip=False))
     )
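
With `tags=[]`, `bleach.clean` escapes every tag (`strip=False` escapes rather than removes), and the empty `attributes` allow-list is now passed as a list; `linkify` then wraps bare URLs in anchors. An illustrative run (the input string is invented; output shape follows bleach's documented behaviour):

import bleach

# With no allowed tags, markup is escaped rather than rendered;
# linkify then turns the bare URL into an anchor tag.
cleaned = bleach.clean(
    "<b>hi</b> https://example.com", tags=[], attributes=[], strip=False
)
print(bleach.linkify(cleaned))
# -> '&lt;b&gt;hi&lt;/b&gt; <a href="https://example.com" rel="nofollow">https://example.com</a>'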

View file

@@ -13,14 +13,14 @@
 # limitations under the License.
 from typing import List, Optional, Tuple

-from synapse.storage.types import Connection
+from synapse.storage.database import LoggingDatabaseConnection
 from synapse.storage.util.id_generators import _load_current_id


 class SlavedIdTracker:
     def __init__(
         self,
-        db_conn: Connection,
+        db_conn: LoggingDatabaseConnection,
         table: str,
         column: str,
         extra_tables: Optional[List[Tuple[str, str]]] = None,

View file

@@ -15,9 +15,8 @@
 from typing import TYPE_CHECKING

 from synapse.replication.tcp.streams import PushersStream
-from synapse.storage.database import DatabasePool
+from synapse.storage.database import DatabasePool, LoggingDatabaseConnection
 from synapse.storage.databases.main.pusher import PusherWorkerStore
-from synapse.storage.types import Connection

 from ._base import BaseSlavedStore
 from ._slaved_id_tracker import SlavedIdTracker
@@ -27,7 +26,12 @@ if TYPE_CHECKING:

 class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore):
-    def __init__(self, database: DatabasePool, db_conn: Connection, hs: "HomeServer"):
+    def __init__(
+        self,
+        database: DatabasePool,
+        db_conn: LoggingDatabaseConnection,
+        hs: "HomeServer",
+    ):
         super().__init__(database, db_conn, hs)
         self._pushers_id_gen = SlavedIdTracker(  # type: ignore
             db_conn, "pushers", "id", extra_tables=[("deleted_pushers", "stream_id")]

View file

@@ -117,7 +117,7 @@ class ReplicationDataHandler:
         self._instance_name = hs.get_instance_name()
         self._typing_handler = hs.get_typing_handler()

-        self._notify_pushers = hs.config.start_pushers
+        self._notify_pushers = hs.config.worker.start_pushers
         self._pusher_pool = hs.get_pusherpool()
         self._presence_handler = hs.get_presence_handler()
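
Many hunks in this diff are the same mechanical change: flat attributes on the root config (`hs.config.start_pushers`) move onto per-area config objects (`hs.config.worker.start_pushers`). A toy sketch of that shape, with illustrative names rather than Synapse's real config classes:

    import attr

    @attr.s(auto_attribs=True)
    class WorkerConfig:
        start_pushers: bool = True
        send_federation: bool = True

    @attr.s(auto_attribs=True)
    class RootConfig:
        # Each functional area gets its own namespaced config object.
        worker: WorkerConfig = attr.ib(factory=WorkerConfig)

    config = RootConfig()
    assert config.worker.start_pushers  # previously: config.start_pushers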

View file

@@ -171,7 +171,10 @@ class ReplicationCommandHandler:
             if hs.config.worker.worker_app is not None:
                 continue

-            if stream.NAME == FederationStream.NAME and hs.config.send_federation:
+            if (
+                stream.NAME == FederationStream.NAME
+                and hs.config.worker.send_federation
+            ):
                 # We only support federation stream if federation sending
                 # has been disabled on the master.
                 continue
@@ -225,7 +228,7 @@ class ReplicationCommandHandler:
         self._is_master = hs.config.worker.worker_app is None

         self._federation_sender = None
-        if self._is_master and not hs.config.send_federation:
+        if self._is_master and not hs.config.worker.send_federation:
             self._federation_sender = hs.get_federation_sender()

         self._server_notices_sender = None
@@ -315,7 +318,7 @@ class ReplicationCommandHandler:
                 hs, outbound_redis_connection
             )
             hs.get_reactor().connectTCP(
-                hs.config.redis.redis_host.encode(),
+                hs.config.redis.redis_host,  # type: ignore[arg-type]
                 hs.config.redis.redis_port,
                 self._factory,
             )
@@ -324,7 +327,11 @@ class ReplicationCommandHandler:
             self._factory = DirectTcpReplicationClientFactory(hs, client_name, self)
             host = hs.config.worker.worker_replication_host
             port = hs.config.worker.worker_replication_port
-            hs.get_reactor().connectTCP(host.encode(), port, self._factory)
+            hs.get_reactor().connectTCP(
+                host,  # type: ignore[arg-type]
+                port,
+                self._factory,
+            )

     def get_streams(self) -> Dict[str, Stream]:
         """Get a map from stream name to all streams."""

View file

@@ -364,6 +364,12 @@ def lazyConnection(
     factory.continueTrying = reconnect

     reactor = hs.get_reactor()
-    reactor.connectTCP(host.encode(), port, factory, timeout=30, bindAddress=None)
+    reactor.connectTCP(
+        host,  # type: ignore[arg-type]
+        port,
+        factory,
+        timeout=30,
+        bindAddress=None,
+    )

     return factory.handler
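
The `connectTCP` changes above and in the handler drop `.encode()`: Twisted's `IReactorTCP.connectTCP` takes a hostname `str` at runtime, and the `# type: ignore[arg-type]` comments only silence type stubs that disagree. A standalone sketch of the call shape (placeholder factory and port, not Synapse's real ones):

    from twisted.internet import protocol, reactor

    class NoopFactory(protocol.ClientFactory):
        # Placeholder factory for the sketch; a real one supplies a protocol
        # that actually speaks the replication or Redis wire format.
        protocol = protocol.Protocol

    reactor.connectTCP(
        "localhost",  # hostname as str, not bytes
        6379,
        NoopFactory(),
        timeout=30,
        bindAddress=None,
    )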

View file

@@ -71,7 +71,7 @@ class ReplicationStreamer:
         self.notifier = hs.get_notifier()
         self._instance_name = hs.get_instance_name()

-        self._replication_torture_level = hs.config.replication_torture_level
+        self._replication_torture_level = hs.config.server.replication_torture_level

         self.notifier.add_replication_callback(self.on_notifier_poke)

View file

@@ -442,7 +442,7 @@ class UserRegisterServlet(RestServlet):
     async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         self._clear_old_nonces()

-        if not self.hs.config.registration_shared_secret:
+        if not self.hs.config.registration.registration_shared_secret:
             raise SynapseError(400, "Shared secret registration is not enabled")

         body = parse_json_object_from_request(request)
@@ -498,7 +498,7 @@ class UserRegisterServlet(RestServlet):
         got_mac = body["mac"]

         want_mac_builder = hmac.new(
-            key=self.hs.config.registration_shared_secret.encode(),
+            key=self.hs.config.registration.registration_shared_secret.encode(),
             digestmod=hashlib.sha1,
         )
         want_mac_builder.update(nonce.encode("utf8"))
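
For reference, a client registering via this admin endpoint must build the matching MAC. A sketch based on Synapse's documented shared-secret registration scheme (HMAC-SHA1 over NUL-separated fields; treat the exact field list as an assumption to verify against the docs):

    import hashlib
    import hmac

    def registration_mac(shared_secret: str, nonce: str, username: str,
                         password: str, admin: bool = False) -> str:
        # HMAC-SHA1 keyed on the shared secret, over the NUL-separated
        # nonce, username, password and admin flag.
        mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)
        for part in (nonce, username, password):
            mac.update(part.encode("utf8"))
            mac.update(b"\x00")
        mac.update(b"admin" if admin else b"notadmin")
        return mac.hexdigest()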

View file

@@ -119,7 +119,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
         )

         if existing_user_id is None:
-            if self.config.request_token_inhibit_3pid_errors:
+            if self.config.server.request_token_inhibit_3pid_errors:
                 # Make the client think the operation succeeded. See the rationale in the
                 # comments for request_token_inhibit_3pid_errors.
                 # Also wait for some random amount of time between 100ms and 1s to make it
@@ -130,11 +130,11 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
             raise SynapseError(400, "Email not found", Codes.THREEPID_NOT_FOUND)

         if self.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
-            assert self.hs.config.account_threepid_delegate_email
+            assert self.hs.config.registration.account_threepid_delegate_email

             # Have the configured identity server handle the request
             ret = await self.identity_handler.requestEmailToken(
-                self.hs.config.account_threepid_delegate_email,
+                self.hs.config.registration.account_threepid_delegate_email,
                 email,
                 client_secret,
                 send_attempt,
@@ -403,7 +403,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
         existing_user_id = await self.store.get_user_id_by_threepid("email", email)

         if existing_user_id is not None:
-            if self.config.request_token_inhibit_3pid_errors:
+            if self.config.server.request_token_inhibit_3pid_errors:
                 # Make the client think the operation succeeded. See the rationale in the
                 # comments for request_token_inhibit_3pid_errors.
                 # Also wait for some random amount of time between 100ms and 1s to make it
@@ -414,11 +414,11 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
             raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)

         if self.config.email.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
-            assert self.hs.config.account_threepid_delegate_email
+            assert self.hs.config.registration.account_threepid_delegate_email

             # Have the configured identity server handle the request
             ret = await self.identity_handler.requestEmailToken(
-                self.hs.config.account_threepid_delegate_email,
+                self.hs.config.registration.account_threepid_delegate_email,
                 email,
                 client_secret,
                 send_attempt,
@@ -486,7 +486,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
         existing_user_id = await self.store.get_user_id_by_threepid("msisdn", msisdn)

         if existing_user_id is not None:
-            if self.hs.config.request_token_inhibit_3pid_errors:
+            if self.hs.config.server.request_token_inhibit_3pid_errors:
                 # Make the client think the operation succeeded. See the rationale in the
                 # comments for request_token_inhibit_3pid_errors.
                 # Also wait for some random amount of time between 100ms and 1s to make it
@@ -496,7 +496,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
             raise SynapseError(400, "MSISDN is already in use", Codes.THREEPID_IN_USE)

-        if not self.hs.config.account_threepid_delegate_msisdn:
+        if not self.hs.config.registration.account_threepid_delegate_msisdn:
             logger.warning(
                 "No upstream msisdn account_threepid_delegate configured on the server to "
                 "handle this request"
@@ -507,7 +507,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
         )

         ret = await self.identity_handler.requestMsisdnToken(
-            self.hs.config.account_threepid_delegate_msisdn,
+            self.hs.config.registration.account_threepid_delegate_msisdn,
             country,
             phone_number,
             client_secret,
@@ -604,7 +604,7 @@ class AddThreepidMsisdnSubmitTokenServlet(RestServlet):
         self.identity_handler = hs.get_identity_handler()

     async def on_POST(self, request: Request) -> Tuple[int, JsonDict]:
-        if not self.config.account_threepid_delegate_msisdn:
+        if not self.config.registration.account_threepid_delegate_msisdn:
             raise SynapseError(
                 400,
                 "This homeserver is not validating phone numbers. Use an identity server "
@@ -617,7 +617,7 @@ class AddThreepidMsisdnSubmitTokenServlet(RestServlet):
         # Proxy submit_token request to msisdn threepid delegate
         response = await self.identity_handler.proxy_msisdn_submit_token(
-            self.config.account_threepid_delegate_msisdn,
+            self.config.registration.account_threepid_delegate_msisdn,
             body["client_secret"],
             body["sid"],
             body["token"],
@@ -644,7 +644,7 @@ class ThreepidRestServlet(RestServlet):
         return 200, {"threepids": threepids}

     async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
-        if not self.hs.config.enable_3pid_changes:
+        if not self.hs.config.registration.enable_3pid_changes:
             raise SynapseError(
                 400, "3PID changes are disabled on this server", Codes.FORBIDDEN
             )
@@ -693,7 +693,7 @@ class ThreepidAddRestServlet(RestServlet):
     @interactive_auth_handler
     async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
-        if not self.hs.config.enable_3pid_changes:
+        if not self.hs.config.registration.enable_3pid_changes:
             raise SynapseError(
                 400, "3PID changes are disabled on this server", Codes.FORBIDDEN
             )
@@ -801,7 +801,7 @@ class ThreepidDeleteRestServlet(RestServlet):
         self.auth_handler = hs.get_auth_handler()

     async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
-        if not self.hs.config.enable_3pid_changes:
+        if not self.hs.config.registration.enable_3pid_changes:
             raise SynapseError(
                 400, "3PID changes are disabled on this server", Codes.FORBIDDEN
             )
@@ -857,8 +857,8 @@ def assert_valid_next_link(hs: "HomeServer", next_link: str) -> None:
     # If the domain whitelist is set, the domain must be in it
     if (
         valid
-        and hs.config.next_link_domain_whitelist is not None
-        and next_link_parsed.hostname not in hs.config.next_link_domain_whitelist
+        and hs.config.server.next_link_domain_whitelist is not None
+        and next_link_parsed.hostname not in hs.config.server.next_link_domain_whitelist
     ):
         valid = False
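
The whitelist check above reduces to: if `next_link_domain_whitelist` is configured, the link's hostname must appear in it. A standalone sketch with illustrative names:

    from typing import Optional, Sequence
    from urllib.parse import urlparse

    def next_link_allowed(next_link: str,
                          whitelist: Optional[Sequence[str]]) -> bool:
        if whitelist is None:
            # No whitelist configured: any domain is acceptable.
            return True
        return urlparse(next_link).hostname in whitelist

    assert next_link_allowed("https://example.com/done", ["example.com"])
    assert not next_link_allowed("https://evil.test/done", ["example.com"])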
@@ -878,9 +878,13 @@ class WhoamiRestServlet(RestServlet):
         self.auth = hs.get_auth()

     async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
-        requester = await self.auth.get_user_by_req(request)
+        requester = await self.auth.get_user_by_req(request, allow_guest=True)

-        response = {"user_id": requester.user.to_string()}
+        response = {
+            "user_id": requester.user.to_string(),
+            # MSC: https://github.com/matrix-org/matrix-doc/pull/3069
+            "org.matrix.msc3069.is_guest": bool(requester.is_guest),
+        }

         # Appservices and similar accounts do not have device IDs
         # that we can report on, so exclude them for compliance.
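
With this change, `/account/whoami` also answers for guest users and reports guest status under an unstable MSC3069 field. A hypothetical client call (endpoint path and field name come from the change above; the homeserver and token are placeholders):

    import requests

    resp = requests.get(
        "https://example.org/_matrix/client/r0/account/whoami",
        headers={"Authorization": "Bearer <access_token>"},
    )
    # Expected shape, e.g.:
    # {"user_id": "@alice:example.org", "org.matrix.msc3069.is_guest": false}
    print(resp.json())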

Some files were not shown because too many files have changed in this diff.