Mirror of https://git.anonymousland.org/anonymousland/synapse-product.git (synced 2025-04-25 14:39:09 -04:00)

Commit 2337ca829d: Merge remote-tracking branch 'upstream/release-v1.71'
.flake8 (9 changes)

````diff
@@ -8,4 +8,11 @@
 # E203: whitespace before ':' (which is contrary to pep8?)
 # E731: do not assign a lambda expression, use a def
 # E501: Line too long (black enforces this for us)
-ignore=W503,W504,E203,E731,E501
+#
+# flake8-bugbear runs extra checks. Its error codes are described at
+# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
+# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
+# B023: Functions defined inside a loop must not use variables redefined in the loop
+# B024: Abstract base class with no abstract method.
+
+ignore=W503,W504,E203,E731,E501,B019,B023,B024
````
.github/workflows/docs.yaml (4 changes)

````diff
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Setup mdbook
-        uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
+        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
         with:
           mdbook-version: '0.4.17'
 
@@ -58,7 +58,7 @@ jobs:
 
       # Deploy to the target directory.
       - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
+        uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
````
.github/workflows/release-artifacts.yml (6 changes)

````diff
@@ -99,7 +99,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-20.04, macos-10.15]
+        os: [ubuntu-20.04, macos-11]
        arch: [x86_64, aarch64]
        # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
        # It is not read by the rest of the workflow.
@@ -109,9 +109,9 @@ jobs:
        exclude:
          # Don't build macos wheels on PR CI.
          - is_pr: true
-            os: "macos-10.15"
+            os: "macos-11"
          # Don't build aarch64 wheels on mac.
-          - os: "macos-10.15"
+          - os: "macos-11"
            arch: aarch64
          # Don't build aarch64 wheels on PR CI.
          - is_pr: true
````
.github/workflows/tests.yml (8 changes)

````diff
@@ -167,6 +167,14 @@ jobs:
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.job.postgres-version }}
 
+      - name: Install Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: 1.58.1
+          override: true
+      - uses: Swatinem/rust-cache@v2
+
       - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.job.python-version }}
````
.github/workflows/triage-incoming.yml (27 changes)

````diff
@@ -5,24 +5,11 @@ on:
   types: [ opened ]
 
 jobs:
-  add_new_issues:
-    name: Add new issues to the triage board
-    runs-on: ubuntu-latest
-    steps:
-      - uses: octokit/graphql-action@v2.x
-        id: add_to_project
-        with:
-          headers: '{"GraphQL-Features": "projects_next_graphql"}'
-          query: |
-            mutation add_to_project($projectid:ID!,$contentid:ID!) {
-              addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) {
-                item {
-                  id
-                }
-              }
-            }
-          projectid: ${{ env.PROJECT_ID }}
-          contentid: ${{ github.event.issue.node_id }}
-        env:
-          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
-          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+  triage:
+    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
+    with:
+      project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
+      content_id: ${{ github.event.issue.node_id }}
+    secrets:
+      github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
````
.github/workflows/twisted_trunk.yml (3 changes)

````diff
@@ -151,12 +151,11 @@ jobs:
       run: |
         set -x
         DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
-        pipx install poetry==1.1.14
+        pipx install poetry==1.2.0
 
         poetry remove -n twisted
         poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
         poetry lock --no-update
-        # NOT IN 1.1.14 poetry lock --check
       working-directory: synapse
 
     - run: |
````
CHANGES.md (90 changes)

````diff
@@ -1,6 +1,96 @@
+Synapse 1.71.0rc1 (2022-11-01)
+==============================
+
+Features
+--------
+
+- Support back-channel logouts from OpenID Connect providers. ([\#11414](https://github.com/matrix-org/synapse/issues/11414))
+- Allow use of Postgres and SQLite full-text search operators in search queries. ([\#11635](https://github.com/matrix-org/synapse/issues/11635), [\#14310](https://github.com/matrix-org/synapse/issues/14310), [\#14311](https://github.com/matrix-org/synapse/issues/14311))
+- Implement [MSC3664](https://github.com/matrix-org/matrix-doc/pull/3664), Pushrules for relations. Contributed by Nico. ([\#11804](https://github.com/matrix-org/synapse/issues/11804))
+- Improve aesthetics of HTML templates. Note that these changes do not retroactively apply to templates which have been [customised](https://matrix-org.github.io/synapse/latest/templates.html#templates) by server admins. ([\#13652](https://github.com/matrix-org/synapse/issues/13652))
+- Enable write-ahead logging for SQLite installations. Contributed by [@asymmetric](https://github.com/asymmetric). ([\#13897](https://github.com/matrix-org/synapse/issues/13897))
+- Show erasure status when [listing users](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html#query-user-account) in the Admin API. ([\#14205](https://github.com/matrix-org/synapse/issues/14205))
+- Provide a specific error code when a `/sync` request provides a filter which doesn't represent a JSON object. ([\#14262](https://github.com/matrix-org/synapse/issues/14262))
+
+
+Bugfixes
+--------
+
+- Fix a long-standing bug where the `update_synapse_database` script could not be run with multiple databases. Contributed by @thefinn93 @ Beeper. ([\#13422](https://github.com/matrix-org/synapse/issues/13422))
+- Fix a bug which prevented setting an avatar on homeservers which have an explicit port in their `server_name` and have `max_avatar_size` and/or `allowed_avatar_mimetypes` configuration. Contributed by @ashfame. ([\#13927](https://github.com/matrix-org/synapse/issues/13927))
+- Check appservice user interest against the local users instead of all users in the room to align with [MSC3905](https://github.com/matrix-org/matrix-spec-proposals/pull/3905). ([\#13958](https://github.com/matrix-org/synapse/issues/13958))
+- Fix a long-standing bug where Synapse would accidentally include extra information in the response to [`PUT /_matrix/federation/v2/invite/{roomId}/{eventId}`](https://spec.matrix.org/v1.4/server-server-api/#put_matrixfederationv2inviteroomideventid). ([\#14064](https://github.com/matrix-org/synapse/issues/14064))
+- Fix a bug introduced in Synapse 1.64.0 where presence updates could be missing from `/sync` responses. ([\#14243](https://github.com/matrix-org/synapse/issues/14243))
+- Fix a bug introduced in Synapse 1.60.0 which caused an error to be logged when Synapse received a SIGHUP signal if debug logging was enabled. ([\#14258](https://github.com/matrix-org/synapse/issues/14258))
+- Prevent history insertion ([MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716)) during a partial join ([MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706)). ([\#14291](https://github.com/matrix-org/synapse/issues/14291))
+- Fix a bug introduced in Synapse 1.34.0 where device names would be returned via a federation user key query request when `allow_device_name_lookup_over_federation` was set to `false`. ([\#14304](https://github.com/matrix-org/synapse/issues/14304))
+- Fix a bug introduced in Synapse 0.34.0 where logs could include error spam when background processes are measured as taking a negative amount of time. ([\#14323](https://github.com/matrix-org/synapse/issues/14323))
+- Fix a bug introduced in Synapse 1.70.0 where clients were unable to PUT new [dehydrated devices](https://github.com/matrix-org/matrix-spec-proposals/pull/2697). ([\#14336](https://github.com/matrix-org/synapse/issues/14336))
+
+
+Improved Documentation
+----------------------
+
+- Explain how to disable the use of [`trusted_key_servers`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#trusted_key_servers). ([\#13999](https://github.com/matrix-org/synapse/issues/13999))
+- Add workers settings to [configuration manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#individual-worker-configuration). ([\#14086](https://github.com/matrix-org/synapse/issues/14086))
+- Correct the name of the config option [`encryption_enabled_by_default_for_room_type`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#encryption_enabled_by_default_for_room_type). ([\#14110](https://github.com/matrix-org/synapse/issues/14110))
+- Update docstrings of `SynapseError` and `FederationError` to better describe what they are used for and what the effects of using them are. ([\#14191](https://github.com/matrix-org/synapse/issues/14191))
+
+
+Internal Changes
+----------------
+
+- Remove unused `@lru_cache` decorator. ([\#13595](https://github.com/matrix-org/synapse/issues/13595))
+- Save login tokens in database and prevent login token reuse. ([\#13844](https://github.com/matrix-org/synapse/issues/13844))
+- Refactor OIDC tests to better mimic an actual OIDC provider. ([\#13910](https://github.com/matrix-org/synapse/issues/13910))
+- Fix type annotation causing import time error in the Complement forking launcher. ([\#14084](https://github.com/matrix-org/synapse/issues/14084))
+- Refactor [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to loop over federation destinations with standard pattern and error handling. ([\#14096](https://github.com/matrix-org/synapse/issues/14096))
+- Add initial power level event to batch of bulk persisted events when creating a new room. ([\#14228](https://github.com/matrix-org/synapse/issues/14228))
+- Refactor `/key/` endpoints to use `RestServlet` classes. ([\#14229](https://github.com/matrix-org/synapse/issues/14229))
+- Switch to using the `matrix-org/backend-meta` version of `triage-incoming` for new issues in CI. ([\#14230](https://github.com/matrix-org/synapse/issues/14230))
+- Build wheels on macos 11, not 10.15. ([\#14249](https://github.com/matrix-org/synapse/issues/14249))
+- Add debugging to help diagnose lost device list updates. ([\#14268](https://github.com/matrix-org/synapse/issues/14268))
+- Add Rust cache to CI for `trial` runs. ([\#14287](https://github.com/matrix-org/synapse/issues/14287))
+- Improve type hinting of `RawHeaders`. ([\#14303](https://github.com/matrix-org/synapse/issues/14303))
+- Use Poetry 1.2.0 in the Twisted Trunk CI job. ([\#14305](https://github.com/matrix-org/synapse/issues/14305))
+
+<details>
+<summary>Dependency updates</summary>
+
+Runtime:
+
+- Bump anyhow from 1.0.65 to 1.0.66. ([\#14278](https://github.com/matrix-org/synapse/issues/14278))
+- Bump jinja2 from 3.0.3 to 3.1.2. ([\#14271](https://github.com/matrix-org/synapse/issues/14271))
+- Bump prometheus-client from 0.14.0 to 0.15.0. ([\#14274](https://github.com/matrix-org/synapse/issues/14274))
+- Bump psycopg2 from 2.9.4 to 2.9.5. ([\#14331](https://github.com/matrix-org/synapse/issues/14331))
+- Bump pysaml2 from 7.1.2 to 7.2.1. ([\#14270](https://github.com/matrix-org/synapse/issues/14270))
+- Bump sentry-sdk from 1.5.11 to 1.10.1. ([\#14330](https://github.com/matrix-org/synapse/issues/14330))
+- Bump serde from 1.0.145 to 1.0.147. ([\#14277](https://github.com/matrix-org/synapse/issues/14277))
+- Bump serde_json from 1.0.86 to 1.0.87. ([\#14279](https://github.com/matrix-org/synapse/issues/14279))
+
+Tooling and CI:
+
+- Bump black from 22.3.0 to 22.10.0. ([\#14328](https://github.com/matrix-org/synapse/issues/14328))
+- Bump flake8-bugbear from 21.3.2 to 22.9.23. ([\#14042](https://github.com/matrix-org/synapse/issues/14042))
+- Bump peaceiris/actions-gh-pages from 3.8.0 to 3.9.0. ([\#14276](https://github.com/matrix-org/synapse/issues/14276))
+- Bump peaceiris/actions-mdbook from 1.1.14 to 1.2.0. ([\#14275](https://github.com/matrix-org/synapse/issues/14275))
+- Bump setuptools-rust from 1.5.1 to 1.5.2. ([\#14273](https://github.com/matrix-org/synapse/issues/14273))
+- Bump twine from 3.8.0 to 4.0.1. ([\#14332](https://github.com/matrix-org/synapse/issues/14332))
+- Bump types-opentracing from 2.4.7 to 2.4.10. ([\#14133](https://github.com/matrix-org/synapse/issues/14133))
+- Bump types-requests from 2.28.11 to 2.28.11.2. ([\#14272](https://github.com/matrix-org/synapse/issues/14272))
+</details>
+
 Synapse 1.70.1 (2022-10-28)
 ===========================
+
+This release fixes some regressions that were discovered in 1.70.0.
+
+[#14300](https://github.com/matrix-org/synapse/issues/14300)
+was previously reported to be a regression in 1.70.0 as well. However, we have
+since concluded that it was limited to the reporter and thus have not needed
+to include any fix for it in 1.70.1.
+
 
 Bugfixes
 --------
 
````
Cargo.lock (16 changes)

````diff
@@ -13,9 +13,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.65"
+version = "1.0.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602"
+checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
 
 [[package]]
 name = "arc-swap"
@@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "serde"
-version = "1.0.145"
+version = "1.0.147"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.145"
+version = "1.0.147"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -343,9 +343,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.86"
+version = "1.0.87"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074"
+checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45"
 dependencies = [
  "itoa",
  "ryu",
````
debian/changelog (6 changes)

````diff
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.71.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.71.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Nov 2022 12:10:17 +0000
+
 matrix-synapse-py3 (1.70.1) stable; urgency=medium
 
   * New Synapse release 1.70.1.
````
docs/admin_api/user_admin_api.md

````diff
@@ -37,6 +37,7 @@ It returns a JSON body like the following:
     "is_guest": 0,
     "admin": 0,
     "deactivated": 0,
+    "erased": false,
     "shadow_banned": 0,
     "creation_ts": 1560432506,
     "appservice_id": null,
@@ -167,6 +168,7 @@ A response body like the following is returned:
             "admin": 0,
             "user_type": null,
             "deactivated": 0,
+            "erased": false,
             "shadow_banned": 0,
             "displayname": "<User One>",
             "avatar_url": null,
@@ -177,6 +179,7 @@ A response body like the following is returned:
             "admin": 1,
             "user_type": null,
             "deactivated": 0,
+            "erased": false,
             "shadow_banned": 0,
             "displayname": "<User Two>",
             "avatar_url": "<avatar_url>",
@@ -247,6 +250,7 @@ The following fields are returned in the JSON response body:
 - `user_type` - string - Type of the user. Normal users are type `None`.
   This allows user type specific behaviour. There are also types `support` and `bot`.
 - `deactivated` - bool - Status if that user has been marked as deactivated.
+- `erased` - bool - Status if that user has been marked as erased.
 - `shadow_banned` - bool - Status if that user has been marked as shadow banned.
 - `displayname` - string - The user's display name if they have set one.
 - `avatar_url` - string - The user's avatar URL if they have set one.
````
docs/openid.md

````diff
@@ -49,6 +49,13 @@ setting in your configuration file.
 See the [configuration manual](usage/configuration/config_documentation.md#oidc_providers) for some sample settings, as well as
 the text below for example configurations for specific providers.
 
+## OIDC Back-Channel Logout
+
+Synapse supports receiving [OpenID Connect Back-Channel Logout](https://openid.net/specs/openid-connect-backchannel-1_0.html) notifications.
+
+This lets the OpenID Connect Provider notify Synapse when a user logs out, so that Synapse can end that user session.
+This feature can be enabled by setting the `backchannel_logout_enabled` property to `true` in the provider configuration, and setting the following URL as destination for Back-Channel Logout notifications in your OpenID Connect Provider: `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout`
+
 ## Sample configs
 
 Here are a few configs for providers that should work with Synapse.
@@ -123,6 +130,9 @@ oidc_providers:
 
 [Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat.
 
+Keycloak supports OIDC Back-Channel Logout, which sends logout notification to Synapse, so that Synapse users get logged out when they log out from Keycloak.
+This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak.
+
 Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm.
 
 1. Click `Clients` in the sidebar and click `Create`
@@ -144,6 +154,8 @@ Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to
 | Client Protocol | `openid-connect` |
 | Access Type | `confidential` |
 | Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` |
+| Backchannel Logout URL (optional) | `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout` |
+| Backchannel Logout Session Required (optional) | `On` |
 
 5. Click `Save`
 6. On the Credentials tab, update the fields:
@@ -167,7 +179,9 @@ oidc_providers:
     config:
       localpart_template: "{{ user.preferred_username }}"
      display_name_template: "{{ user.name }}"
+      backchannel_logout_enabled: true # Optional
 ```
 
 ### Auth0
 
 [Auth0][auth0] is a hosted SaaS IdP solution.
````
docs/sample_log_config.yaml

````diff
@@ -6,7 +6,7 @@
 # Synapse also supports structured logging for machine readable logs which can
 # be ingested by ELK stacks. See [2] for details.
 #
-# [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
+# [1]: https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
 # [2]: https://matrix-org.github.io/synapse/latest/structured_logging.html
 
 version: 1
````
docs/upgrade.md

````diff
@@ -88,6 +88,34 @@ process, for example:
 dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
 ```
 
+# Upgrading to v1.71.0
+
+## Removal of the `generate_short_term_login_token` module API method
+
+As announced with the release of [Synapse 1.69.0](#deprecation-of-the-generate_short_term_login_token-module-api-method), the deprecated `generate_short_term_login_token` module method has been removed.
+
+Modules relying on it can instead use the `create_login_token` method.
+
+
+## Changes to the events received by application services (interest)
+
+To align with spec (changed in
+[MSC3905](https://github.com/matrix-org/matrix-spec-proposals/pull/3905)), Synapse now
+only considers local users to be interesting. In other words, the `users` namespace
+regex is only applied against local users of the homeserver.
+
+Please note, this probably doesn't affect the expected behavior of your application
+service, since an interesting local user in a room still means all messages in the room
+(from local or remote users) will still be considered interesting. And matching a room
+with the `rooms` or `aliases` namespace regex will still consider all events sent in the
+room to be interesting to the application service.
+
+If one of your application service's `users` regex was intending to match a remote user,
+this will no longer match as you expect. The behavioral mismatch between matching all
+local users and some remote users is why the spec was changed/clarified and this
+caveat is no longer supported.
+
+
 # Upgrading to v1.69.0
 
 ## Changes to the receipts replication streams
````
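The appservice change above is easier to see against a concrete registration. A rough sketch (the id, tokens and regex here are invented for the example):

```yaml
# Hypothetical application service registration file.
id: example-bridge
url: "http://localhost:9000"
as_token: <AS_TOKEN>
hs_token: <HS_TOKEN>
sender_localpart: example-bridge
namespaces:
  users:
    # Under MSC3905 semantics this regex is matched against local users only.
    # Rooms containing a matching local user remain interesting, so messages
    # from remote users in those rooms are still delivered to the service.
    - exclusive: true
      regex: "@bridge_.*:example\\.org"
  aliases: []
  rooms: []
```

A `users` regex that was written to match accounts on a remote homeserver (say `@.*:remote\.example\.com`) no longer matches anything under the new behaviour.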
docs/usage/configuration/config_documentation.md

````diff
@@ -99,7 +99,7 @@ modules:
     config: {}
 ```
 ---
-## Server ##
+## Server
 
 Define your homeserver name and other base options.
 
@@ -159,7 +159,7 @@ including _matrix/...). This is the same URL a user might enter into the
 'Custom Homeserver URL' field on their client. If you use Synapse with a
 reverse proxy, this should be the URL to reach Synapse via the proxy.
 Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
-'listeners' below).
+['listeners'](#listeners) below).
 
 Defaults to `https://<server_name>/`.
 
@@ -570,7 +570,7 @@ Example configuration:
 delete_stale_devices_after: 1y
 ```
 
-## Homeserver blocking ##
+## Homeserver blocking
 Useful options for Synapse admins.
 
 ---
@@ -922,7 +922,7 @@ retention:
   interval: 1d
 ```
 ---
-## TLS ##
+## TLS
 
 Options related to TLS.
 
@@ -1012,7 +1012,7 @@ federation_custom_ca_list:
   - myCA3.pem
 ```
 ---
-## Federation ##
+## Federation
 
 Options related to federation.
 
@@ -1071,7 +1071,7 @@ Example configuration:
 allow_device_name_lookup_over_federation: true
 ```
 ---
-## Caching ##
+## Caching
 
 Options related to caching.
 
@@ -1185,7 +1185,7 @@ file in Synapse's `contrib` directory, you can send a `SIGHUP` signal by using
 `systemctl reload matrix-synapse`.
 
 ---
-## Database ##
+## Database
 Config options related to database settings.
 
 ---
@@ -1332,20 +1332,21 @@ databases:
     cp_max: 10
 ```
 ---
-## Logging ##
+## Logging
 Config options related to logging.
 
 ---
 ### `log_config`
 
-This option specifies a yaml python logging config file as described [here](https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema).
+This option specifies a yaml python logging config file as described
+[here](https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema).
 
 Example configuration:
 ```yaml
 log_config: "CONFDIR/SERVERNAME.log.config"
 ```
 ---
-## Ratelimiting ##
+## Ratelimiting
 Options related to ratelimiting in Synapse.
 
 Each ratelimiting configuration is made of two parameters:
@@ -1576,7 +1577,7 @@ Example configuration:
 federation_rr_transactions_per_room_per_second: 40
 ```
 ---
-## Media Store ##
+## Media Store
 Config options related to Synapse's media store.
 
 ---
@@ -1766,7 +1767,7 @@ url_preview_ip_range_blacklist:
 - 'ff00::/8'
 - 'fec0::/10'
 ```
-----
+---
 ### `url_preview_ip_range_whitelist`
 
 This option sets a list of IP address CIDR ranges that the URL preview spider is allowed
@@ -1860,7 +1861,7 @@ Example configuration:
 - 'fr;q=0.8'
 - '*;q=0.7'
 ```
-----
+---
 ### `oembed`
 
 oEmbed allows for easier embedding content from a website. It can be
@@ -1877,7 +1878,7 @@ oembed:
   - oembed/my_providers.json
 ```
 ---
-## Captcha ##
+## Captcha
 
 See [here](../../CAPTCHA_SETUP.md) for full details on setting up captcha.
 
@@ -1926,7 +1927,7 @@ Example configuration:
 recaptcha_siteverify_api: "https://my.recaptcha.site"
 ```
 ---
-## TURN ##
+## TURN
 Options related to adding a TURN server to Synapse.
 
 ---
@@ -1947,7 +1948,7 @@ Example configuration:
 ```yaml
 turn_shared_secret: "YOUR_SHARED_SECRET"
 ```
-----
+---
 ### `turn_username` and `turn_password`
 
 The Username and password if the TURN server needs them and does not use a token.
@@ -2366,7 +2367,7 @@ Example configuration:
 ```yaml
 session_lifetime: 24h
 ```
-----
+---
 ### `refresh_access_token_lifetime`
 
 Time that an access token remains valid for, if the session is using refresh tokens.
@@ -2422,7 +2423,7 @@ nonrefreshable_access_token_lifetime: 24h
 ```
 
 ---
-## Metrics ###
+## Metrics
 Config options related to metrics.
 
 ---
@@ -2519,7 +2520,7 @@ Example configuration:
 report_stats_endpoint: https://example.com/report-usage-stats/push
 ```
 ---
-## API Configuration ##
+## API Configuration
 Config settings related to the client/server API
 
 ---
@@ -2619,7 +2620,7 @@ Example configuration:
 form_secret: <PRIVATE STRING>
 ```
 ---
-## Signing Keys ##
+## Signing Keys
 Config options relating to signing keys
 
 ---
@@ -2680,6 +2681,12 @@ is still supported for backwards-compatibility, but it is deprecated.
 warning on start-up. To suppress this warning, set
 `suppress_key_server_warning` to true.
 
+If the use of a trusted key server has to be deactivated, e.g. in a private
+federation or for privacy reasons, this can be realised by setting
+an empty array (`trusted_key_servers: []`). Then Synapse will request the keys
+directly from the server that owns the keys. If Synapse does not get keys directly
+from the server, the events of this server will be rejected.
+
 Options for each entry in the list include:
 * `server_name`: the name of the server. Required.
 * `verify_keys`: an optional map from key id to base64-encoded public key.
````
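A minimal sketch of the two setups the added paragraph describes (the notary-based default shown commented out):

```yaml
# Default-style setup: use matrix.org as a notary server.
#trusted_key_servers:
#  - server_name: "matrix.org"

# Private-federation setup: no notary at all. Synapse then fetches signing
# keys directly from the server that owns them, and rejects that server's
# events if the keys cannot be fetched.
trusted_key_servers: []
```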
@ -2728,7 +2735,7 @@ Example configuration:
|
|||||||
key_server_signing_keys_path: "key_server_signing_keys.key"
|
key_server_signing_keys_path: "key_server_signing_keys.key"
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
## Single sign-on integration ##
|
## Single sign-on integration
|
||||||
|
|
||||||
The following settings can be used to make Synapse use a single sign-on
|
The following settings can be used to make Synapse use a single sign-on
|
||||||
provider for authentication, instead of its internal password database.
|
provider for authentication, instead of its internal password database.
|
||||||
@ -3014,6 +3021,15 @@ Options for each entry include:
|
|||||||
which is set to the claims returned by the UserInfo Endpoint and/or
|
which is set to the claims returned by the UserInfo Endpoint and/or
|
||||||
in the ID Token.
|
in the ID Token.
|
||||||
|
|
||||||
|
* `backchannel_logout_enabled`: set to `true` to process OIDC Back-Channel Logout notifications.
|
||||||
|
Those notifications are expected to be received on `/_synapse/client/oidc/backchannel_logout`.
|
||||||
|
Defaults to `false`.
|
||||||
|
|
||||||
|
* `backchannel_logout_ignore_sub`: by default, the OIDC Back-Channel Logout feature checks that the
|
||||||
|
`sub` claim matches the subject claim received during login. This check can be disabled by setting
|
||||||
|
this to `true`. Defaults to `false`.
|
||||||
|
|
||||||
|
You might want to disable this if the `subject_claim` returned by the mapping provider is not `sub`.
|
||||||
|
|
||||||
It is possible to configure Synapse to only allow logins if certain attributes
|
It is possible to configure Synapse to only allow logins if certain attributes
|
||||||
match particular values in the OIDC userinfo. The requirements can be listed under
|
match particular values in the OIDC userinfo. The requirements can be listed under
|
||||||
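Putting the two new sub-options in context, a provider entry might look like this sketch (the issuer, client id and secret are invented):

```yaml
oidc_providers:
  - idp_id: my_idp
    idp_name: "My OpenID provider"
    issuer: "https://idp.example.com"
    client_id: "synapse"
    client_secret: "copied-from-the-provider"
    # Accept Back-Channel Logout notifications on
    # /_synapse/client/oidc/backchannel_logout (defaults to false).
    backchannel_logout_enabled: true
    # Leave the default `sub` check enabled unless the mapping provider's
    # subject_claim is not `sub`.
    backchannel_logout_ignore_sub: false
    user_mapping_provider:
      config:
        localpart_template: "{{ user.preferred_username }}"
```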
@ -3348,7 +3364,7 @@ email:
|
|||||||
email_validation: "[%(server_name)s] Validate your email"
|
email_validation: "[%(server_name)s] Validate your email"
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
## Push ##
|
## Push
|
||||||
Configuration settings related to push notifications
|
Configuration settings related to push notifications
|
||||||
|
|
||||||
---
|
---
|
||||||
@ -3381,11 +3397,11 @@ push:
|
|||||||
group_unread_count_by_room: false
|
group_unread_count_by_room: false
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
## Rooms ##
|
## Rooms
|
||||||
Config options relating to rooms.
|
Config options relating to rooms.
|
||||||
|
|
||||||
---
|
---
|
||||||
### `encryption_enabled_by_default`
|
### `encryption_enabled_by_default_for_room_type`
|
||||||
|
|
||||||
Controls whether locally-created rooms should be end-to-end encrypted by
|
Controls whether locally-created rooms should be end-to-end encrypted by
|
||||||
default.
|
default.
|
||||||
@ -3627,7 +3643,7 @@ default_power_level_content_override:
|
|||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
## Opentracing ##
|
## Opentracing
|
||||||
Configuration options related to Opentracing support.
|
Configuration options related to Opentracing support.
|
||||||
|
|
||||||
---
|
---
|
||||||
@ -3670,14 +3686,71 @@ opentracing:
|
|||||||
false
|
false
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
## Workers ##
|
## Coordinating workers
|
||||||
Configuration options related to workers.
|
Configuration options related to workers which belong in the main config file
|
||||||
|
(usually called `homeserver.yaml`).
|
||||||
|
A Synapse deployment can scale horizontally by running multiple Synapse processes
|
||||||
|
called _workers_. Incoming requests are distributed between workers to handle higher
|
||||||
|
loads. Some workers are privileged and can accept requests from other workers.
|
||||||
|
|
||||||
|
As a result, the worker configuration is divided into two parts.
|
||||||
|
|
||||||
|
1. The first part (in this section of the manual) defines which shardable tasks
|
||||||
|
are delegated to privileged workers. This allows unprivileged workers to make
|
||||||
|
request a privileged worker to act on their behalf.
|
||||||
|
1. [The second part](#individual-worker-configuration)
|
||||||
|
controls the behaviour of individual workers in isolation.
|
||||||
|
|
||||||
|
For guidance on setting up workers, see the [worker documentation](../../workers.md).
|
||||||
|
|
||||||
|
---
|
||||||
|
### `worker_replication_secret`
|
||||||
|
|
||||||
|
A shared secret used by the replication APIs on the main process to authenticate
|
||||||
|
HTTP requests from workers.
|
||||||
|
|
||||||
|
The default, this value is omitted (equivalently `null`), which means that
|
||||||
|
traffic between the workers and the main process is not authenticated.
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_replication_secret: "secret_secret"
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `start_pushers`
|
||||||
|
|
||||||
|
Controls sending of push notifications on the main process. Set to `false`
|
||||||
|
if using a [pusher worker](../../workers.md#synapseapppusher). Defaults to `true`.
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
start_pushers: false
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `pusher_instances`
|
||||||
|
|
||||||
|
It is possible to run multiple [pusher workers](../../workers.md#synapseapppusher),
|
||||||
|
in which case the work is balanced across them. Use this setting to list the pushers by
|
||||||
|
[`worker_name`](#worker_name). Ensure the main process and all pusher workers are
|
||||||
|
restarted after changing this option.
|
||||||
|
|
||||||
|
If no or only one pusher worker is configured, this setting is not necessary.
|
||||||
|
The main process will send out push notifications by default if you do not disable
|
||||||
|
it by setting [`start_pushers: false`](#start_pushers).
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
start_pushers: false
|
||||||
|
pusher_instances:
|
||||||
|
- pusher_worker1
|
||||||
|
- pusher_worker2
|
||||||
|
```
|
||||||
---
|
---
|
||||||
### `send_federation`
|
### `send_federation`
|
||||||
|
|
||||||
Controls sending of outbound federation transactions on the main process.
|
Controls sending of outbound federation transactions on the main process.
|
||||||
Set to false if using a federation sender worker. Defaults to true.
|
Set to `false` if using a [federation sender worker](../../workers.md#synapseappfederation_sender).
|
||||||
|
Defaults to `true`.
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
@ -3686,8 +3759,9 @@ send_federation: false
|
|||||||
---
|
---
|
||||||
### `federation_sender_instances`
|
### `federation_sender_instances`
|
||||||
|
|
||||||
It is possible to run multiple federation sender workers, in which case the
|
It is possible to run multiple
|
||||||
work is balanced across them. Use this setting to list the senders.
|
[federation sender worker](../../workers.md#synapseappfederation_sender), in which
|
||||||
|
case the work is balanced across them. Use this setting to list the senders.
|
||||||
|
|
||||||
This configuration setting must be shared between all federation sender workers, and if
|
This configuration setting must be shared between all federation sender workers, and if
|
||||||
changed all federation sender workers must be stopped at the same time and then
|
changed all federation sender workers must be stopped at the same time and then
|
||||||
@ -3696,14 +3770,19 @@ events may be dropped).
|
|||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
|
send_federation: false
|
||||||
federation_sender_instances:
|
federation_sender_instances:
|
||||||
- federation_sender1
|
- federation_sender1
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
### `instance_map`
|
### `instance_map`
|
||||||
|
|
||||||
When using workers this should be a map from worker name to the
|
When using workers this should be a map from [`worker_name`](#worker_name) to the
|
||||||
HTTP replication listener of the worker, if configured.
|
HTTP replication listener of the worker, if configured.
|
||||||
|
Each worker declared under [`stream_writers`](../../workers.md#stream-writers) needs
|
||||||
|
a HTTP replication listener, and that listener should be included in the `instance_map`.
|
||||||
|
(The main process also needs an HTTP replication listener, but it should not be
|
||||||
|
listed in the `instance_map`.)
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
@ -3716,8 +3795,11 @@ instance_map:
|
|||||||
### `stream_writers`
|
### `stream_writers`
|
||||||
|
|
||||||
Experimental: When using workers you can define which workers should
|
Experimental: When using workers you can define which workers should
|
||||||
handle event persistence and typing notifications. Any worker
|
handle writing to streams such as event persistence and typing notifications.
|
||||||
specified here must also be in the `instance_map`.
|
Any worker specified here must also be in the [`instance_map`](#instance_map).
|
||||||
|
|
||||||
|
See the list of available streams in the
|
||||||
|
[worker documentation](../../workers.md#stream-writers).
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
@ -3728,29 +3810,18 @@ stream_writers:
|
|||||||
---
|
---
|
||||||
### `run_background_tasks_on`
|
### `run_background_tasks_on`
|
||||||
|
|
||||||
The worker that is used to run background tasks (e.g. cleaning up expired
|
The [worker](../../workers.md#background-tasks) that is used to run
|
||||||
data). If not provided this defaults to the main process.
|
background tasks (e.g. cleaning up expired data). If not provided this
|
||||||
|
defaults to the main process.
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
run_background_tasks_on: worker1
|
run_background_tasks_on: worker1
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
### `worker_replication_secret`
|
|
||||||
|
|
||||||
A shared secret used by the replication APIs to authenticate HTTP requests
|
|
||||||
from workers.
|
|
||||||
|
|
||||||
By default this is unused and traffic is not authenticated.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
worker_replication_secret: "secret_secret"
|
|
||||||
```
|
|
||||||
### `redis`
|
### `redis`
|
||||||
|
|
||||||
Configuration for Redis when using workers. This *must* be enabled when
|
Configuration for Redis when using workers. This *must* be enabled when using workers.
|
||||||
using workers (unless using old style direct TCP configuration).
|
|
||||||
This setting has the following sub-options:
|
This setting has the following sub-options:
|
||||||
* `enabled`: whether to use Redis support. Defaults to false.
|
* `enabled`: whether to use Redis support. Defaults to false.
|
||||||
* `host` and `port`: Optional host and port to use to connect to redis. Defaults to
|
* `host` and `port`: Optional host and port to use to connect to redis. Defaults to
|
||||||
@ -3765,7 +3836,123 @@ redis:
|
|||||||
port: 6379
|
port: 6379
|
||||||
password: <secret_password>
|
password: <secret_password>
|
||||||
```
|
```
|
||||||
## Background Updates ##
|
---
|
||||||
|
## Individual worker configuration
|
||||||
|
These options configure an individual worker, in its worker configuration file.
|
||||||
|
They should be not be provided when configuring the main process.
|
||||||
|
|
||||||
|
Note also the configuration above for
|
||||||
|
[coordinating a cluster of workers](#coordinating-workers).
|
||||||
|
|
||||||
|
For guidance on setting up workers, see the [worker documentation](../../workers.md).
|
||||||
|
|
||||||
|
---
|
||||||
|
### `worker_app`
|
||||||
|
|
||||||
|
The type of worker. The currently available worker applications are listed
|
||||||
|
in [worker documentation](../../workers.md#available-worker-applications).
|
||||||
|
|
||||||
|
The most common worker is the
|
||||||
|
[`synapse.app.generic_worker`](../../workers.md#synapseappgeneric_worker).
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_app: synapse.app.generic_worker
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `worker_name`
|
||||||
|
|
||||||
|
A unique name for the worker. The worker needs a name to be addressed in
|
||||||
|
further parameters and identification in log files. We strongly recommend
|
||||||
|
giving each worker a unique `worker_name`.
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_name: generic_worker1
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `worker_replication_host`
|
||||||
|
|
||||||
|
The HTTP replication endpoint that it should talk to on the main Synapse process.
|
||||||
|
The main Synapse process defines this with a `replication` resource in
|
||||||
|
[`listeners` option](#listeners).
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_replication_host: 127.0.0.1
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `worker_replication_http_port`
|
||||||
|
|
||||||
|
The HTTP replication port that it should talk to on the main Synapse process.
|
||||||
|
The main Synapse process defines this with a `replication` resource in
|
||||||
|
[`listeners` option](#listeners).
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_replication_http_port: 9093
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `worker_listeners`
|
||||||
|
|
||||||
|
A worker can handle HTTP requests. To do so, a `worker_listeners` option
|
||||||
|
must be declared, in the same way as the [`listeners` option](#listeners)
|
||||||
|
in the shared config.
|
||||||
|
|
||||||
|
Workers declared in [`stream_writers`](#stream_writers) will need to include a
|
||||||
|
`replication` listener here, in order to accept internal HTTP requests from
|
||||||
|
other workers.
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_listeners:
|
||||||
|
- type: http
|
||||||
|
port: 8083
|
||||||
|
resources:
|
||||||
|
- names: [client, federation]
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `worker_daemonize`
|
||||||
|
|
||||||
|
Specifies whether the worker should be started as a daemon process.
|
||||||
|
If Synapse is being managed by [systemd](../../systemd-with-workers/README.md), this option
|
||||||
|
must be omitted or set to `false`.
|
||||||
|
|
||||||
|
Defaults to `false`.
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_daemonize: true
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `worker_pid_file`
|
||||||
|
|
||||||
|
When running a worker as a daemon, we need a place to store the
|
||||||
|
[PID](https://en.wikipedia.org/wiki/Process_identifier) of the worker.
|
||||||
|
This option defines the location of that "pid file".
|
||||||
|
|
||||||
|
This option is required if `worker_daemonize` is `true` and ignored
|
||||||
|
otherwise. It has no default.
|
||||||
|
|
||||||
|
See also the [`pid_file` option](#pid_file) option for the main Synapse process.
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_pid_file: DATADIR/generic_worker1.pid
|
||||||
|
```
|
||||||
|
---
|
||||||
|
### `worker_log_config`
|
||||||
|
|
||||||
|
This option specifies a yaml python logging config file as described
|
||||||
|
[here](https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema).
|
||||||
|
See also the [`log_config` option](#log_config) option for the main Synapse process.
|
||||||
|
|
||||||
|
Example configuration:
|
||||||
|
```yaml
|
||||||
|
worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
|
||||||
|
```
|
||||||
|
---
|
||||||
|
## Background Updates
|
||||||
Configuration settings related to background updates.
|
Configuration settings related to background updates.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
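Read together, the per-option examples above add up to a worker config file roughly like this sketch (the path and port choices are arbitrary):

```yaml
# e.g. /etc/matrix-synapse/workers/generic_worker1.yaml (hypothetical path)
worker_app: synapse.app.generic_worker
worker_name: generic_worker1

# Where to reach the main process's `replication` listener.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

# Serve client and federation endpoints on port 8083.
worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]

worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
```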
100
docs/workers.md
100
docs/workers.md
@ -88,10 +88,12 @@ shared configuration file.
|
|||||||
### Shared configuration
|
### Shared configuration
|
||||||
|
|
||||||
Normally, only a couple of changes are needed to make an existing configuration
|
Normally, only a couple of changes are needed to make an existing configuration
|
||||||
file suitable for use with workers. First, you need to enable an "HTTP replication
|
file suitable for use with workers. First, you need to enable an
|
||||||
listener" for the main process; and secondly, you need to enable redis-based
|
["HTTP replication listener"](usage/configuration/config_documentation.md#listeners)
|
||||||
replication. Optionally, a shared secret can be used to authenticate HTTP
|
for the main process; and secondly, you need to enable
|
||||||
traffic between workers. For example:
|
[redis-based replication](usage/configuration/config_documentation.md#redis).
|
||||||
|
Optionally, a [shared secret](usage/configuration/config_documentation.md#worker_replication_secret)
|
||||||
|
can be used to authenticate HTTP traffic between workers. For example:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
# extend the existing `listeners` section. This defines the ports that the
|
# extend the existing `listeners` section. This defines the ports that the
|
||||||
@ -111,25 +113,28 @@ redis:
|
|||||||
enabled: true
|
enabled: true
|
||||||
```
|
```
|
||||||
|
|
||||||
See the [configuration manual](usage/configuration/config_documentation.html) for the full documentation of each option.
|
See the [configuration manual](usage/configuration/config_documentation.md)
|
||||||
|
for the full documentation of each option.
|
||||||
|
|
||||||
Under **no circumstances** should the replication listener be exposed to the
|
Under **no circumstances** should the replication listener be exposed to the
|
||||||
public internet; replication traffic is:
|
public internet; replication traffic is:
|
||||||
|
|
||||||
* always unencrypted
|
* always unencrypted
|
||||||
* unauthenticated, unless `worker_replication_secret` is configured
|
* unauthenticated, unless [`worker_replication_secret`](usage/configuration/config_documentation.md#worker_replication_secret)
|
||||||
|
is configured
|
||||||
|
|
||||||
|
|
 ### Worker configuration

 In the config file for each worker, you must specify:
-* The type of worker (`worker_app`). The currently available worker applications are listed below.
-* A unique name for the worker (`worker_name`).
+* The type of worker ([`worker_app`](usage/configuration/config_documentation.md#worker_app)).
+  The currently available worker applications are listed [below](#available-worker-applications).
+* A unique name for the worker ([`worker_name`](usage/configuration/config_documentation.md#worker_name)).
 * The HTTP replication endpoint that it should talk to on the main synapse process
-  (`worker_replication_host` and `worker_replication_http_port`)
-* If handling HTTP requests, a `worker_listeners` option with an `http`
-  listener, in the same way as the [`listeners`](usage/configuration/config_documentation.md#listeners)
-  option in the shared config.
+  ([`worker_replication_host`](usage/configuration/config_documentation.md#worker_replication_host) and
+  [`worker_replication_http_port`](usage/configuration/config_documentation.md#worker_replication_http_port)).
+* If handling HTTP requests, a [`worker_listeners`](usage/configuration/config_documentation.md#worker_listeners) option
+  with an `http` listener.
 * If handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for
   the main process (`worker_main_http_uri`).

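To make the required options concrete, a complete worker file might look like the following sketch (application type, name, ports and paths are all illustrative):

```yaml
# e.g. generic_worker1.yaml; a sketch, not taken from this diff
worker_app: synapse.app.generic_worker
worker_name: generic_worker1

# must match the main process's HTTP replication listener
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

# only needed if this worker serves HTTP requests
worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]
```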
@@ -146,7 +151,6 @@ plain HTTP endpoint on port 8083 separately serving various endpoints, e.g.
 Obviously you should configure your reverse-proxy to route the relevant
 endpoints to the worker (`localhost:8083` in the above example).

 ### Running Synapse with workers

 Finally, you need to start your worker processes. This can be done with either
@@ -288,7 +292,8 @@ For multiple workers not handling the SSO endpoints properly, see
 [#9427](https://github.com/matrix-org/synapse/issues/9427).

 Note that a [HTTP listener](usage/configuration/config_documentation.md#listeners)
-with `client` and `federation` `resources` must be configured in the `worker_listeners`
+with `client` and `federation` `resources` must be configured in the
+[`worker_listeners`](usage/configuration/config_documentation.md#worker_listeners)
 option in the worker config.

 #### Load balancing
@@ -331,9 +336,10 @@ of the main process to a particular worker.

 To enable this, the worker must have a
 [HTTP `replication` listener](usage/configuration/config_documentation.md#listeners) configured,
-have a `worker_name` and be listed in the `instance_map` config. The same worker
-can handle multiple streams, but unless otherwise documented, each stream can only
-have a single writer.
+have a [`worker_name`](usage/configuration/config_documentation.md#worker_name)
+and be listed in the [`instance_map`](usage/configuration/config_documentation.md#instance_map)
+config. The same worker can handle multiple streams, but unless otherwise documented,
+each stream can only have a single writer.

 For example, to move event persistence off to a dedicated worker, the shared
 configuration would include:
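A sketch of such a shared configuration, with an illustrative worker name and replication port, might be:

```yaml
instance_map:
  event_persister1:
    host: 127.0.0.1
    port: 9091

stream_writers:
  events: event_persister1
```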
@@ -360,9 +366,26 @@ streams and the endpoints associated with them:

 ##### The `events` stream

-The `events` stream experimentally supports having multiple writers, where work
-is sharded between them by room ID. Note that you *must* restart all worker
-instances when adding or removing event persisters. An example `stream_writers`
+The `events` stream experimentally supports having multiple writer workers, where load
+is sharded between them by room ID. Each writer is called an _event persister_. They are
+responsible for
+- receiving new events,
+- linking them to those already in the room [DAG](development/room-dag-concepts.md),
+- persisting them to the DB, and finally
+- updating the events stream.
+
+Because load is sharded in this way, you *must* restart all worker instances when
+adding or removing event persisters.
+
+An `event_persister` should not be mistaken for an `event_creator`.
+An `event_creator` listens for requests from clients to create new events and does
+so. It will then pass those events over HTTP replication to any configured event
+persisters (or the main process if none are configured).
+
+Note that `event_creator`s and `event_persister`s are implemented using the same
+[`synapse.app.generic_worker`](#synapse.app.generic_worker).
+
+An example [`stream_writers`](usage/configuration/config_documentation.md#stream_writers)
 configuration with multiple writers:

 ```yaml
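# (the example's body lies beyond this hunk; a multi-writer sketch with
# illustrative worker names, each of which must also appear in
# `instance_map`, would be:)
stream_writers:
  events:
    - event_persister1
    - event_persister2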
@@ -416,16 +439,18 @@ worker. Background tasks are run periodically or started via replication. Exactly
 which tasks are configured to run depends on your Synapse configuration (e.g. if
 stats is enabled). This worker doesn't handle any REST endpoints itself.

-To enable this, the worker must have a `worker_name` and can be configured to run
-background tasks. For example, to move background tasks to a dedicated worker,
-the shared configuration would include:
+To enable this, the worker must have a unique
+[`worker_name`](usage/configuration/config_documentation.md#worker_name)
+and can be configured to run background tasks. For example, to move background tasks
+to a dedicated worker, the shared configuration would include:

 ```yaml
 run_background_tasks_on: background_worker
 ```

-You might also wish to investigate the `update_user_directory_from_worker` and
-`media_instance_running_background_jobs` settings.
+You might also wish to investigate the
+[`update_user_directory_from_worker`](#updating-the-user-directory) and
+[`media_instance_running_background_jobs`](#synapseappmedia_repository) settings.

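As a sketch, those settings sit alongside the option above in the shared configuration, each naming a worker (the names here are illustrative):

```yaml
run_background_tasks_on: background_worker
update_user_directory_from_worker: background_worker
media_instance_running_background_jobs: media_worker
```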
 An example for a dedicated background worker instance:

@@ -478,13 +503,17 @@ worker application type.
 ### `synapse.app.pusher`

 Handles sending push notifications to sygnal and email. Doesn't handle any
-REST endpoints itself, but you should set `start_pushers: False` in the
+REST endpoints itself, but you should set
+[`start_pushers: false`](usage/configuration/config_documentation.md#start_pushers) in the
 shared configuration file to stop the main synapse sending push notifications.

-To run multiple instances at once the `pusher_instances` option should list all
-pusher instances by their worker name, e.g.:
+To run multiple instances at once the
+[`pusher_instances`](usage/configuration/config_documentation.md#pusher_instances)
+option should list all pusher instances by their
+[`worker_name`](usage/configuration/config_documentation.md#worker_name), e.g.:

 ```yaml
+start_pushers: false
 pusher_instances:
     - pusher_worker1
     - pusher_worker2
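# Each name listed here must match a running worker whose own file sets,
# as a sketch: `worker_app: synapse.app.pusher` and
# `worker_name: pusher_worker1`.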
@@ -512,15 +541,20 @@ Note this worker cannot be load-balanced: only one instance should be active.
 ### `synapse.app.federation_sender`

 Handles sending federation traffic to other servers. Doesn't handle any
-REST endpoints itself, but you should set `send_federation: False` in the
-shared configuration file to stop the main synapse sending this traffic.
+REST endpoints itself, but you should set
+[`send_federation: false`](usage/configuration/config_documentation.md#send_federation)
+in the shared configuration file to stop the main synapse sending this traffic.

 If running multiple federation senders then you must list each
-instance in the `federation_sender_instances` option by their `worker_name`.
+instance in the
+[`federation_sender_instances`](usage/configuration/config_documentation.md#federation_sender_instances)
+option by their
+[`worker_name`](usage/configuration/config_documentation.md#worker_name).
 All instances must be stopped and started when adding or removing instances.
 For example:

 ```yaml
+send_federation: false
 federation_sender_instances:
     - federation_sender1
     - federation_sender2
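# As with pushers, each listed name must match a running worker whose own
# file sets, as a sketch: `worker_app: synapse.app.federation_sender`
# and `worker_name: federation_sender1`.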
@@ -547,7 +581,9 @@ Handles the media repository. It can handle all endpoints starting with:
     ^/_synapse/admin/v1/quarantine_media/.*$
     ^/_synapse/admin/v1/users/.*/media$

-You should also set `enable_media_repo: False` in the shared configuration
+You should also set
+[`enable_media_repo: False`](usage/configuration/config_documentation.md#enable_media_repo)
+in the shared configuration
 file to stop the main synapse running background jobs related to managing the
 media repository. Note that doing so will prevent the main process from being
 able to handle the above endpoints.
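Putting that together, a sketch of the relevant shared-configuration lines when delegating the media repository to a worker (the worker name is illustrative):

```yaml
enable_media_repo: false
media_instance_running_background_jobs: media_worker
```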
mypy.ini (4 changed lines)
@@ -56,7 +56,6 @@ exclude = (?x)
   |tests/rest/media/v1/test_media_storage.py
   |tests/server.py
   |tests/server_notices/test_resource_limits_server_notices.py
-  |tests/test_metrics.py
   |tests/test_state.py
   |tests/test_terms_auth.py
   |tests/util/caches/test_cached_call.py
@@ -106,6 +105,9 @@ disallow_untyped_defs = False
 [mypy-tests.handlers.test_user_directory]
 disallow_untyped_defs = True

+[mypy-tests.metrics.test_background_process_metrics]
+disallow_untyped_defs = True
+
 [mypy-tests.push.test_bulk_push_rule_evaluator]
 disallow_untyped_defs = True
poetry.lock (207 changed lines; generated)
@@ -52,18 +52,18 @@ typecheck = ["mypy"]

 [[package]]
 name = "black"
-version = "22.3.0"
+version = "22.10.0"
 description = "The uncompromising code formatter."
 category = "dev"
 optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7"

 [package.dependencies]
 click = ">=8.0.0"
 mypy-extensions = ">=0.4.3"
 pathspec = ">=0.9.0"
 platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
 typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
 typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}

@@ -260,7 +260,7 @@ pyflakes = ">=2.4.0,<2.5.0"

 [[package]]
 name = "flake8-bugbear"
-version = "21.3.2"
+version = "22.9.23"
 description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
 category = "dev"
 optional = false
@@ -271,7 +271,7 @@ attrs = ">=19.2.0"
 flake8 = ">=3.0.0"

 [package.extras]
-dev = ["black", "coverage", "hypothesis", "hypothesmith"]
+dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"]

 [[package]]
 name = "flake8-comprehensions"
@@ -438,11 +438,11 @@ trio = ["async_generator", "trio"]

 [[package]]
 name = "jinja2"
-version = "3.0.3"
+version = "3.1.2"
 description = "A very fast and expressive template engine."
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"

 [package.dependencies]
 MarkupSafe = ">=2.0"
@@ -710,7 +710,7 @@ test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock

 [[package]]
 name = "prometheus-client"
-version = "0.14.0"
+version = "0.15.0"
 description = "Python client for the Prometheus monitoring system."
 category = "main"
 optional = false
@@ -721,7 +721,7 @@ twisted = ["twisted"]

 [[package]]
 name = "psycopg2"
-version = "2.9.4"
+version = "2.9.5"
 description = "psycopg2 - Python-PostgreSQL Database Adapter"
 category = "main"
 optional = true
@@ -918,14 +918,14 @@ python-versions = ">=3.7"

 [[package]]
 name = "pysaml2"
-version = "7.1.2"
+version = "7.2.1"
 description = "Python implementation of SAML Version 2 Standard"
 category = "main"
 optional = true
 python-versions = "<4,>=3.6"

 [package.dependencies]
-cryptography = ">=1.4"
+cryptography = ">=3.1"
 defusedxml = "*"
 importlib-resources = {version = "*", markers = "python_version < \"3.9\""}
 pyOpenSSL = "*"
@@ -976,11 +976,11 @@ python-versions = ">=3.6"

 [[package]]
 name = "readme-renderer"
-version = "33.0"
+version = "37.2"
 description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse"
 category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"

 [package.dependencies]
 bleach = ">=2.1.0"
@@ -1030,6 +1030,22 @@ python-versions = ">=3.7"
 [package.extras]
 idna2008 = ["idna"]

+[[package]]
+name = "rich"
+version = "12.6.0"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+category = "dev"
+optional = false
+python-versions = ">=3.6.3,<4.0.0"
+
+[package.dependencies]
+commonmark = ">=0.9.0,<0.10.0"
+pygments = ">=2.6.0,<3.0.0"
+typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
+
 [[package]]
 name = "secretstorage"
 version = "3.3.1"
@@ -1056,7 +1072,7 @@ doc = ["Sphinx", "sphinx-rtd-theme"]

 [[package]]
 name = "sentry-sdk"
-version = "1.5.11"
+version = "1.10.1"
 description = "Python client for Sentry (https://sentry.io)"
 category = "main"
 optional = true
@@ -1064,7 +1080,7 @@ python-versions = "*"

 [package.dependencies]
 certifi = "*"
-urllib3 = ">=1.10.0"
+urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""}

 [package.extras]
 aiohttp = ["aiohttp (>=3.5)"]
@@ -1074,6 +1090,7 @@ celery = ["celery (>=3)"]
 chalice = ["chalice (>=1.16.0)"]
 django = ["django (>=1.8)"]
 falcon = ["falcon (>=1.4)"]
+fastapi = ["fastapi (>=0.79.0)"]
 flask = ["blinker (>=1.1)", "flask (>=0.11)"]
 httpx = ["httpx (>=0.16.0)"]
 pure-eval = ["asttokens", "executing", "pure-eval"]
@@ -1082,6 +1099,7 @@ quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
 rq = ["rq (>=0.6)"]
 sanic = ["sanic (>=0.8)"]
 sqlalchemy = ["sqlalchemy (>=1.2)"]
+starlette = ["starlette (>=0.19.1)"]
 tornado = ["tornado (>=5)"]

 [[package]]
@@ -1120,7 +1138,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (

 [[package]]
 name = "setuptools-rust"
-version = "1.5.1"
+version = "1.5.2"
 description = "Setuptools Rust extension plugin"
 category = "main"
 optional = false
@@ -1251,22 +1269,6 @@ tomli = {version = "*", markers = "python_version >= \"3.6\""}
 [package.extras]
 dev = ["packaging"]

-[[package]]
-name = "tqdm"
-version = "4.63.0"
-description = "Fast, Extensible Progress Meter"
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[package.extras]
-dev = ["py-make (>=0.1.0)", "twine", "wheel"]
-notebook = ["ipywidgets (>=6)"]
-telegram = ["requests"]
-
 [[package]]
 name = "treq"
 version = "22.2.0"
@@ -1288,22 +1290,21 @@ docs = ["sphinx (>=1.4.8)"]

 [[package]]
 name = "twine"
-version = "3.8.0"
+version = "4.0.1"
 description = "Collection of utilities for publishing packages on PyPI"
 category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"

 [package.dependencies]
-colorama = ">=0.4.3"
 importlib-metadata = ">=3.6"
 keyring = ">=15.1"
 pkginfo = ">=1.8.1"
-readme-renderer = ">=21.0"
+readme-renderer = ">=35.0"
 requests = ">=2.20"
 requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0"
 rfc3986 = ">=1.4.0"
-tqdm = ">=4.14"
+rich = ">=12.0.0"
 urllib3 = ">=1.26.0"

 [[package]]
@@ -1426,7 +1427,7 @@ python-versions = "*"

 [[package]]
 name = "types-opentracing"
-version = "2.4.7"
+version = "2.4.10"
 description = "Typing stubs for opentracing"
 category = "dev"
 optional = false
@@ -1469,7 +1470,7 @@ python-versions = "*"

 [[package]]
 name = "types-requests"
-version = "2.28.11"
+version = "2.28.11.2"
 description = "Typing stubs for requests"
 category = "dev"
 optional = false
@@ -1512,15 +1513,15 @@ python-versions = ">=3.6,<4.0"

 [[package]]
 name = "urllib3"
-version = "1.26.8"
+version = "1.26.12"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"

 [package.extras]
-brotli = ["brotlipy (>=0.6.0)"]
-secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
 socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

 [[package]]
@@ -1633,7 +1634,7 @@ url-preview = ["lxml"]
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7.1"
-content-hash = "9400cb5c92bb4648238f652f5e7f81df51cdcf9b7c69d645f35beaa4acb2f420"
+content-hash = "27811bd21d56ceeb0f68ded5a00375efcd1a004928f0736f5b02927ce8594cb0"

 [metadata.files]
 attrs = [
@@ -1672,29 +1673,27 @@ bcrypt = [
     {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
 ]
 black = [
-    {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"},
-    {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"},
-    {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"},
-    {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"},
-    {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"},
-    {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"},
-    {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"},
-    {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"},
-    {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"},
-    {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"},
-    {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"},
-    {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"},
-    {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"},
-    {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"},
-    {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"},
-    {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"},
-    {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"},
-    {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"},
-    {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"},
-    {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"},
-    {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"},
-    {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"},
-    {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"},
+    {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
+    {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
+    {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
+    {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
+    {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
+    {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
+    {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
+    {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
+    {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
+    {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
+    {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
+    {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
+    {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
+    {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
+    {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
+    {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
+    {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
+    {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
+    {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
+    {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
+    {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
 ]
 bleach = [
     {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"},
@@ -1826,8 +1825,8 @@ flake8 = [
     {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
 ]
 flake8-bugbear = [
-    {file = "flake8-bugbear-21.3.2.tar.gz", hash = "sha256:cadce434ceef96463b45a7c3000f23527c04ea4b531d16c7ac8886051f516ca0"},
-    {file = "flake8_bugbear-21.3.2-py36.py37.py38-none-any.whl", hash = "sha256:5d6ccb0c0676c738a6e066b4d50589c408dcc1c5bf1d73b464b18b73cd6c05c2"},
+    {file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"},
+    {file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"},
 ]
 flake8-comprehensions = [
     {file = "flake8-comprehensions-3.8.0.tar.gz", hash = "sha256:8e108707637b1d13734f38e03435984f6b7854fa6b5a4e34f93e69534be8e521"},
@@ -1999,8 +1998,8 @@ jeepney = [
     {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"},
 ]
 jinja2 = [
-    {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"},
-    {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"},
+    {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+    {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
 ]
 jsonschema = [
     {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"},
@@ -2301,21 +2300,21 @@ platformdirs = [
     {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"},
 ]
 prometheus-client = [
-    {file = "prometheus_client-0.14.0-py3-none-any.whl", hash = "sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2"},
-    {file = "prometheus_client-0.14.0.tar.gz", hash = "sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750"},
+    {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"},
+    {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"},
 ]
 psycopg2 = [
-    {file = "psycopg2-2.9.4-cp310-cp310-win32.whl", hash = "sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797"},
-    {file = "psycopg2-2.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:1da77c061bdaab450581458932ae5e469cc6e36e0d62f988376e9f513f11cb5c"},
-    {file = "psycopg2-2.9.4-cp36-cp36m-win32.whl", hash = "sha256:a11946bad3557ca254f17357d5a4ed63bdca45163e7a7d2bfb8e695df069cc3a"},
-    {file = "psycopg2-2.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:46361c054df612c3cc813fdb343733d56543fb93565cff0f8ace422e4da06acb"},
-    {file = "psycopg2-2.9.4-cp37-cp37m-win32.whl", hash = "sha256:aafa96f2da0071d6dd0cbb7633406d99f414b40ab0f918c9d9af7df928a1accb"},
-    {file = "psycopg2-2.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:aa184d551a767ad25df3b8d22a0a62ef2962e0e374c04f6cbd1204947f540d61"},
-    {file = "psycopg2-2.9.4-cp38-cp38-win32.whl", hash = "sha256:839f9ea8f6098e39966d97fcb8d08548fbc57c523a1e27a1f0609addf40f777c"},
-    {file = "psycopg2-2.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:c7fa041b4acb913f6968fce10169105af5200f296028251d817ab37847c30184"},
-    {file = "psycopg2-2.9.4-cp39-cp39-win32.whl", hash = "sha256:07b90a24d5056687781ddaef0ea172fd951f2f7293f6ffdd03d4f5077801f426"},
-    {file = "psycopg2-2.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308"},
-    {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
+    {file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"},
+    {file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"},
+    {file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"},
+    {file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"},
+    {file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"},
+    {file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"},
+    {file = "psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"},
+    {file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"},
+    {file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"},
+    {file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"},
+    {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"},
 ]
 psycopg2cffi = [
     {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
@@ -2445,8 +2444,8 @@ pyrsistent = [
     {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"},
 ]
 pysaml2 = [
-    {file = "pysaml2-7.1.2-py2.py3-none-any.whl", hash = "sha256:d915961aaa4d4d97d952b30fe5d18d64cf053465acf3e38d8090b36c5ff08325"},
-    {file = "pysaml2-7.1.2.tar.gz", hash = "sha256:1ec94442306511b93fe7a5710f224e05e0aba948682d506614d1e04f3232f827"},
+    {file = "pysaml2-7.2.1-py2.py3-none-any.whl", hash = "sha256:2ca155f4eeb1471b247a7b0cc79ccfd5780046d33d0b201e1199a00698dce795"},
+    {file = "pysaml2-7.2.1.tar.gz", hash = "sha256:f40f9576dce9afef156469179277ffeeca36829248be333252af0517a26d0b1f"},
 ]
 python-dateutil = [
     {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
@@ -2503,8 +2502,8 @@ pyyaml = [
     {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
 ]
 readme-renderer = [
-    {file = "readme_renderer-33.0-py3-none-any.whl", hash = "sha256:f02cee0c4de9636b5a62b6be50c9742427ba1b956aad1d938bfb087d0d72ccdf"},
-    {file = "readme_renderer-33.0.tar.gz", hash = "sha256:e3b53bc84bd6af054e4cc1fe3567dc1ae19f554134221043a3f8c674e22209db"},
+    {file = "readme_renderer-37.2-py3-none-any.whl", hash = "sha256:d3f06a69e8c40fca9ab3174eca48f96d9771eddb43517b17d96583418427b106"},
+    {file = "readme_renderer-37.2.tar.gz", hash = "sha256:e8ad25293c98f781dbc2c5a36a309929390009f902f99e1798c761aaf04a7923"},
 ]
 requests = [
     {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
@@ -2518,6 +2517,10 @@ rfc3986 = [
     {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"},
     {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"},
 ]
+rich = [
+    {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"},
+    {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"},
+]
 secretstorage = [
     {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"},
     {file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"},
@@ -2527,8 +2530,8 @@ semantic-version = [
     {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"},
 ]
 sentry-sdk = [
-    {file = "sentry-sdk-1.5.11.tar.gz", hash = "sha256:6c01d9d0b65935fd275adc120194737d1df317dce811e642cbf0394d0d37a007"},
-    {file = "sentry_sdk-1.5.11-py2.py3-none-any.whl", hash = "sha256:c17179183cac614e900cbd048dab03f49a48e2820182ec686c25e7ce46f8548f"},
+    {file = "sentry-sdk-1.10.1.tar.gz", hash = "sha256:105faf7bd7b7fa25653404619ee261527266b14103fe1389e0ce077bd23a9691"},
+    {file = "sentry_sdk-1.10.1-py2.py3-none-any.whl", hash = "sha256:06c0fa9ccfdc80d7e3b5d2021978d6eb9351fa49db9b5847cf4d1f2a473414ad"},
 ]
 service-identity = [
     {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"},
@@ -2539,8 +2542,8 @@ setuptools = [
     {file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"},
 ]
 setuptools-rust = [
-    {file = "setuptools-rust-1.5.1.tar.gz", hash = "sha256:0e05e456645d59429cb1021370aede73c0760e9360bbfdaaefb5bced530eb9d7"},
-    {file = "setuptools_rust-1.5.1-py3-none-any.whl", hash = "sha256:306b236ff3aa5229180e58292610d0c2c51bb488191122d2fc559ae4caeb7d5e"},
+    {file = "setuptools-rust-1.5.2.tar.gz", hash = "sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7"},
+    {file = "setuptools_rust-1.5.2-py3-none-any.whl", hash = "sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206"},
 ]
 signedjson = [
     {file = "signedjson-1.1.4-py3-none-any.whl", hash = "sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228"},
@@ -2682,17 +2685,13 @@ towncrier = [
     {file = "towncrier-21.9.0-py2.py3-none-any.whl", hash = "sha256:fc5a88a2a54988e3a8ed2b60d553599da8330f65722cc607c839614ed87e0f92"},
     {file = "towncrier-21.9.0.tar.gz", hash = "sha256:9cb6f45c16e1a1eec9d0e7651165e7be60cd0ab81d13a5c96ca97a498ae87f48"},
 ]
-tqdm = [
-    {file = "tqdm-4.63.0-py2.py3-none-any.whl", hash = "sha256:e643e071046f17139dea55b880dc9b33822ce21613b4a4f5ea57f202833dbc29"},
-    {file = "tqdm-4.63.0.tar.gz", hash = "sha256:1d9835ede8e394bb8c9dcbffbca02d717217113adc679236873eeaac5bc0b3cd"},
-]
 treq = [
     {file = "treq-22.2.0-py3-none-any.whl", hash = "sha256:27d95b07c5c14be3e7b280416139b036087617ad5595be913b1f9b3ce981b9b2"},
     {file = "treq-22.2.0.tar.gz", hash = "sha256:df757e3f141fc782ede076a604521194ffcb40fa2645cf48e5a37060307f52ec"},
 ]
 twine = [
-    {file = "twine-3.8.0-py3-none-any.whl", hash = "sha256:d0550fca9dc19f3d5e8eadfce0c227294df0a2a951251a4385797c8a6198b7c8"},
-    {file = "twine-3.8.0.tar.gz", hash = "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19"},
+    {file = "twine-4.0.1-py3-none-any.whl", hash = "sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e"},
+    {file = "twine-4.0.1.tar.gz", hash = "sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0"},
 ]
 twisted = [
     {file = "Twisted-22.8.0-py3-none-any.whl", hash = "sha256:8d4718d1e48dcc28933f8beb48dc71cfe77a125e37ad1eb7a3d0acc49baf6c99"},
@@ -2767,8 +2766,8 @@ types-jsonschema = [
     {file = "types_jsonschema-4.4.6-py3-none-any.whl", hash = "sha256:1db9031ca49a8444d01bd2ce8cf2f89318382b04610953b108321e6f8fb03390"},
 ]
 types-opentracing = [
-    {file = "types-opentracing-2.4.7.tar.gz", hash = "sha256:be60e9618355aa892571ace002e6b353702538b1c0dc4fbc1c921219d6658830"},
-    {file = "types_opentracing-2.4.7-py3-none-any.whl", hash = "sha256:861fb8103b07cf717f501dd400cb274ca9992552314d4d6c7a824b11a215e512"},
+    {file = "types-opentracing-2.4.10.tar.gz", hash = "sha256:6101414f3b6d3b9c10f1c510a261e8439b6c8d67c723d5c2872084697b4580a7"},
+    {file = "types_opentracing-2.4.10-py3-none-any.whl", hash = "sha256:66d9cfbbdc4a6f8ca8189a15ad26f0fe41cee84c07057759c5d194e2505b84c2"},
 ]
 types-pillow = [
     {file = "types-Pillow-9.2.2.1.tar.gz", hash = "sha256:85c139e06e1c46ec5f9c634d5c54a156b0958d5d0e8be024ed353db0c804b426"},
@@ -2787,8 +2786,8 @@ types-PyYAML = [
     {file = "types_PyYAML-6.0.12-py3-none-any.whl", hash = "sha256:29228db9f82df4f1b7febee06bbfb601677882e98a3da98132e31c6874163e15"},
 ]
 types-requests = [
-    {file = "types-requests-2.28.11.tar.gz", hash = "sha256:7ee827eb8ce611b02b5117cfec5da6455365b6a575f5e3ff19f655ba603e6b4e"},
-    {file = "types_requests-2.28.11-py3-none-any.whl", hash = "sha256:af5f55e803cabcfb836dad752bd6d8a0fc8ef1cd84243061c0e27dee04ccf4fd"},
+    {file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"},
+    {file = "types_requests-2.28.11.2-py3-none-any.whl", hash = "sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"},
 ]
 types-setuptools = [
     {file = "types-setuptools-65.5.0.1.tar.gz", hash = "sha256:5b297081c8f1fbd992cd8b305a97ed96ee6ffc765e9115124029597dd10b8a71"},
@@ -2807,8 +2806,8 @@ unpaddedbase64 = [
     {file = "unpaddedbase64-2.1.0.tar.gz", hash = "sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005"},
 ]
 urllib3 = [
-    {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"},
-    {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"},
+    {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
+    {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
 ]
 webencodings = [
     {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
pyproject.toml
@@ -57,7 +57,7 @@ manifest-path = "rust/Cargo.toml"

 [tool.poetry]
 name = "matrix-synapse"
-version = "1.70.1"
+version = "1.71.0rc1"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"
@@ -192,7 +192,7 @@ psycopg2 = { version = ">=2.8", markers = "platform_python_implementation != 'PyPy'", optional = true }
 psycopg2cffi = { version = ">=2.8", markers = "platform_python_implementation == 'PyPy'", optional = true }
 psycopg2cffi-compat = { version = "==1.1", markers = "platform_python_implementation == 'PyPy'", optional = true }
 pysaml2 = { version = ">=4.5.0", optional = true }
-authlib = { version = ">=0.14.0", optional = true }
+authlib = { version = ">=0.15.1", optional = true }
 # systemd-python is necessary for logging to the systemd journal via
 # `systemd.journal.JournalHandler`, as is documented in
 # `contrib/systemd/log_config.yaml`.
@@ -254,9 +254,9 @@ importlib-resources==5.4.0 ; python_full_version >= "3.7.1" and python_version <
 incremental==21.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
     --hash=sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57 \
     --hash=sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321
-jinja2==3.0.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
-    --hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \
-    --hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7
+jinja2==3.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
+    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
+    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
 jsonschema==4.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
     --hash=sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23 \
     --hash=sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9
@@ -479,21 +479,21 @@ pillow==9.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
 pkgutil-resolve-name==1.3.10 ; python_full_version >= "3.7.1" and python_version < "3.9" \
     --hash=sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174 \
     --hash=sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e
-prometheus-client==0.14.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
-    --hash=sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750 \
-    --hash=sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2
+prometheus-client==0.15.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
+    --hash=sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1 \
+    --hash=sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2
-psycopg2==2.9.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
-    --hash=sha256:07b90a24d5056687781ddaef0ea172fd951f2f7293f6ffdd03d4f5077801f426 \
-    --hash=sha256:1da77c061bdaab450581458932ae5e469cc6e36e0d62f988376e9f513f11cb5c \
-    --hash=sha256:46361c054df612c3cc813fdb343733d56543fb93565cff0f8ace422e4da06acb \
-    --hash=sha256:839f9ea8f6098e39966d97fcb8d08548fbc57c523a1e27a1f0609addf40f777c \
-    --hash=sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308 \
-    --hash=sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797 \
-    --hash=sha256:a11946bad3557ca254f17357d5a4ed63bdca45163e7a7d2bfb8e695df069cc3a \
-    --hash=sha256:aa184d551a767ad25df3b8d22a0a62ef2962e0e374c04f6cbd1204947f540d61 \
-    --hash=sha256:aafa96f2da0071d6dd0cbb7633406d99f414b40ab0f918c9d9af7df928a1accb \
-    --hash=sha256:c7fa041b4acb913f6968fce10169105af5200f296028251d817ab37847c30184 \
-    --hash=sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f
+psycopg2==2.9.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
+    --hash=sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa \
+    --hash=sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e \
+    --hash=sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a \
+    --hash=sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5 \
+    --hash=sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee \
+    --hash=sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0 \
+    --hash=sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a \
+    --hash=sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d \
+    --hash=sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f \
+    --hash=sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2 \
+    --hash=sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5
 psycopg2cffi-compat==1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
     --hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05
 psycopg2cffi==2.9.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
@@ -636,9 +636,9 @@ semantic-version==2.10.0 ; python_full_version >= "3.7.1" and python_full_versio
 service-identity==21.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
     --hash=sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34 \
     --hash=sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db
-setuptools-rust==1.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
-    --hash=sha256:0e05e456645d59429cb1021370aede73c0760e9360bbfdaaefb5bced530eb9d7 \
-    --hash=sha256:306b236ff3aa5229180e58292610d0c2c51bb488191122d2fc559ae4caeb7d5e
+setuptools-rust==1.5.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
+    --hash=sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206 \
+    --hash=sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7
 setuptools==65.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
     --hash=sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82 \
     --hash=sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57
@@ -744,9 +744,9 @@ typing-extensions==4.4.0 ; python_full_version >= "3.7.1" and python_full_versio
 unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \
     --hash=sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6 \
     --hash=sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005
|
||||||
urllib3==1.26.8 ; python_full_version >= "3.7.1" and python_version < "4" \
|
urllib3==1.26.12 ; python_full_version >= "3.7.1" and python_version < "4" \
|
||||||
--hash=sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed \
|
--hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
|
||||||
--hash=sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c
|
--hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
|
||||||
webencodings==0.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
webencodings==0.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||||
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
|
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
|
||||||
--hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
|
--hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
|
||||||
|
@@ -20,15 +20,15 @@ crate-type = ["lib", "cdylib"]
 name = "synapse.synapse_rust"
 
 [dependencies]
-anyhow = "1.0.63"
+anyhow = "1.0.66"
 lazy_static = "1.4.0"
 log = "0.4.17"
 pyo3 = { version = "0.17.1", features = ["extension-module", "macros", "anyhow", "abi3", "abi3-py37"] }
 pyo3-log = "0.7.0"
 pythonize = "0.17.0"
 regex = "1.6.0"
-serde = { version = "1.0.144", features = ["derive"] }
-serde_json = "1.0.85"
+serde = { version = "1.0.147", features = ["derive"] }
+serde_json = "1.0.87"
 
 [build-dependencies]
 blake2 = "0.10.4"
@@ -25,6 +25,7 @@ use crate::push::Action;
 use crate::push::Condition;
 use crate::push::EventMatchCondition;
 use crate::push::PushRule;
+use crate::push::RelatedEventMatchCondition;
 use crate::push::SetTweak;
 use crate::push::TweakValue;
 
@@ -114,6 +115,22 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
         default: true,
         default_enabled: true,
     },
+    PushRule {
+        rule_id: Cow::Borrowed("global/override/.im.nheko.msc3664.reply"),
+        priority_class: 5,
+        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatch(
+            RelatedEventMatchCondition {
+                key: Some(Cow::Borrowed("sender")),
+                pattern: None,
+                pattern_type: Some(Cow::Borrowed("user_id")),
+                rel_type: Cow::Borrowed("m.in_reply_to"),
+                include_fallbacks: None,
+            },
+        ))]),
+        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
+        default: true,
+        default_enabled: true,
+    },
     PushRule {
         rule_id: Cow::Borrowed("global/override/.m.rule.contains_display_name"),
         priority_class: 5,
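Note: the new `.im.nheko.msc3664.reply` rule above is driven entirely by the MSC3664 condition kind. As a rough illustration (not part of the diff; the dict envelope is an assumption), the condition serializes to the JSON shape exercised by the serde test added further down:

    # Sketch of the serialized MSC3664 condition for the new default rule.
    # "kind" comes from the serde rename below; unset optional fields
    # (pattern, include_fallbacks) are skipped during serialization.
    condition = {
        "kind": "im.nheko.msc3664.related_event_match",
        "key": "sender",
        "pattern_type": "user_id",
        "rel_type": "m.in_reply_to",
    }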
@@ -23,6 +23,7 @@ use regex::Regex;
 use super::{
     utils::{get_glob_matcher, get_localpart_from_id, GlobMatchType},
     Action, Condition, EventMatchCondition, FilteredPushRules, KnownCondition,
+    RelatedEventMatchCondition,
 };
 
 lazy_static! {
@@ -49,6 +50,13 @@ pub struct PushRuleEvaluator {
     /// The power level of the sender of the event, or None if event is an
     /// outlier.
     sender_power_level: Option<i64>,
+
+    /// The related events, indexed by relation type. Flattened in the same manner as
+    /// `flattened_keys`.
+    related_events_flattened: BTreeMap<String, BTreeMap<String, String>>,
+
+    /// If msc3664, push rules for related events, is enabled.
+    related_event_match_enabled: bool,
 }
 
 #[pymethods]
@@ -60,6 +68,8 @@ impl PushRuleEvaluator {
         room_member_count: u64,
         sender_power_level: Option<i64>,
         notification_power_levels: BTreeMap<String, i64>,
+        related_events_flattened: BTreeMap<String, BTreeMap<String, String>>,
+        related_event_match_enabled: bool,
     ) -> Result<Self, Error> {
         let body = flattened_keys
             .get("content.body")
@@ -72,6 +82,8 @@ impl PushRuleEvaluator {
             room_member_count,
             notification_power_levels,
             sender_power_level,
+            related_events_flattened,
+            related_event_match_enabled,
         })
     }
 
@@ -156,6 +168,9 @@ impl PushRuleEvaluator {
             KnownCondition::EventMatch(event_match) => {
                 self.match_event_match(event_match, user_id)?
             }
+            KnownCondition::RelatedEventMatch(event_match) => {
+                self.match_related_event_match(event_match, user_id)?
+            }
             KnownCondition::ContainsDisplayName => {
                 if let Some(dn) = display_name {
                     if !dn.is_empty() {
@@ -239,6 +254,79 @@ impl PushRuleEvaluator {
         compiled_pattern.is_match(haystack)
     }
 
+    /// Evaluates a `related_event_match` condition. (MSC3664)
+    fn match_related_event_match(
+        &self,
+        event_match: &RelatedEventMatchCondition,
+        user_id: Option<&str>,
+    ) -> Result<bool, Error> {
+        // First check if related event matching is enabled...
+        if !self.related_event_match_enabled {
+            return Ok(false);
+        }
+
+        // get the related event, fail if there is none.
+        let event = if let Some(event) = self.related_events_flattened.get(&*event_match.rel_type) {
+            event
+        } else {
+            return Ok(false);
+        };
+
+        // If we are not matching fallbacks, don't match if our special key indicating this is a
+        // fallback relation is not present.
+        if !event_match.include_fallbacks.unwrap_or(false)
+            && event.contains_key("im.vector.is_falling_back")
+        {
+            return Ok(false);
+        }
+
+        // if we have no key, accept the event as matching, if it existed without matching any
+        // fields.
+        let key = if let Some(key) = &event_match.key {
+            key
+        } else {
+            return Ok(true);
+        };
+
+        let pattern = if let Some(pattern) = &event_match.pattern {
+            pattern
+        } else if let Some(pattern_type) = &event_match.pattern_type {
+            // The `pattern_type` can either be "user_id" or "user_localpart",
+            // either way if we don't have a `user_id` then the condition can't
+            // match.
+            let user_id = if let Some(user_id) = user_id {
+                user_id
+            } else {
+                return Ok(false);
+            };
+
+            match &**pattern_type {
+                "user_id" => user_id,
+                "user_localpart" => get_localpart_from_id(user_id)?,
+                _ => return Ok(false),
+            }
+        } else {
+            return Ok(false);
+        };
+
+        let haystack = if let Some(haystack) = event.get(&**key) {
+            haystack
+        } else {
+            return Ok(false);
+        };
+
+        // For the content.body we match against "words", but for everything
+        // else we match against the entire value.
+        let match_type = if key == "content.body" {
+            GlobMatchType::Word
+        } else {
+            GlobMatchType::Whole
+        };
+
+        let mut compiled_pattern = get_glob_matcher(pattern, match_type)?;
+        compiled_pattern.is_match(haystack)
+    }
+
     /// Match the member count against an 'is' condition
     /// The `is` condition can be things like '>2', '==3' or even just '4'.
     fn match_member_count(&self, is: &str) -> Result<bool, Error> {
@@ -267,8 +355,15 @@ impl PushRuleEvaluator {
 fn push_rule_evaluator() {
     let mut flattened_keys = BTreeMap::new();
     flattened_keys.insert("content.body".to_string(), "foo bar bob hello".to_string());
-    let evaluator =
-        PushRuleEvaluator::py_new(flattened_keys, 10, Some(0), BTreeMap::new()).unwrap();
+    let evaluator = PushRuleEvaluator::py_new(
+        flattened_keys,
+        10,
+        Some(0),
+        BTreeMap::new(),
+        BTreeMap::new(),
+        true,
+    )
+    .unwrap();
 
     let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"));
     assert_eq!(result.len(), 3);
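For context on the two new constructor arguments (a hedged sketch, not taken from the diff): `related_events_flattened` is keyed by relation type and uses the same dotted-key flattening as `flattened_keys`, so a reply relation could arrive at the evaluator looking like the mapping below. The exact producer of this mapping lives on the Python side and is an assumption here:

    related_events_flattened = {
        "m.in_reply_to": {
            "sender": "@alice:example.org",
            "content.body": "morning coffee?",
        }
    }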
@@ -267,6 +267,8 @@ pub enum Condition {
 #[serde(tag = "kind")]
 pub enum KnownCondition {
     EventMatch(EventMatchCondition),
+    #[serde(rename = "im.nheko.msc3664.related_event_match")]
+    RelatedEventMatch(RelatedEventMatchCondition),
     ContainsDisplayName,
     RoomMemberCount {
         #[serde(skip_serializing_if = "Option::is_none")]
@@ -299,6 +301,20 @@ pub struct EventMatchCondition {
     pub pattern_type: Option<Cow<'static, str>>,
 }
 
+/// The body of a [`Condition::RelatedEventMatch`]
+#[derive(Serialize, Deserialize, Debug, Clone)]
+pub struct RelatedEventMatchCondition {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub key: Option<Cow<'static, str>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub pattern: Option<Cow<'static, str>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub pattern_type: Option<Cow<'static, str>>,
+    pub rel_type: Cow<'static, str>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub include_fallbacks: Option<bool>,
+}
+
 /// The collection of push rules for a user.
 #[derive(Debug, Clone, Default)]
 #[pyclass(frozen)]
@@ -391,15 +407,21 @@ impl PushRules {
 pub struct FilteredPushRules {
     push_rules: PushRules,
     enabled_map: BTreeMap<String, bool>,
+    msc3664_enabled: bool,
 }
 
 #[pymethods]
 impl FilteredPushRules {
     #[new]
-    pub fn py_new(push_rules: PushRules, enabled_map: BTreeMap<String, bool>) -> Self {
+    pub fn py_new(
+        push_rules: PushRules,
+        enabled_map: BTreeMap<String, bool>,
+        msc3664_enabled: bool,
+    ) -> Self {
         Self {
             push_rules,
             enabled_map,
+            msc3664_enabled,
         }
     }
 
@@ -414,7 +436,19 @@ impl FilteredPushRules {
     /// Iterates over all the rules and their enabled state, including base
     /// rules, in the order they should be executed in.
     fn iter(&self) -> impl Iterator<Item = (&PushRule, bool)> {
-        self.push_rules.iter().map(|r| {
+        self.push_rules
+            .iter()
+            .filter(|rule| {
+                // Ignore disabled experimental push rules
+                if !self.msc3664_enabled
+                    && rule.rule_id == "global/override/.im.nheko.msc3664.reply"
+                {
+                    return false;
+                }
+
+                true
+            })
+            .map(|r| {
             let enabled = *self
                 .enabled_map
                 .get(&*r.rule_id)
@@ -446,6 +480,17 @@ fn test_deserialize_condition() {
     let _: Condition = serde_json::from_str(json).unwrap();
 }
 
+#[test]
+fn test_deserialize_unstable_msc3664_condition() {
+    let json = r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern":"coffee","rel_type":"m.in_reply_to"}"#;
+
+    let condition: Condition = serde_json::from_str(json).unwrap();
+    assert!(matches!(
+        condition,
+        Condition::Known(KnownCondition::RelatedEventMatch(_))
+    ));
+}
+
 #[test]
 fn test_deserialize_custom_condition() {
     let json = r#"{"kind":"custom_tag"}"#;
@@ -25,7 +25,9 @@ class PushRules:
     def rules(self) -> Collection[PushRule]: ...
 
 class FilteredPushRules:
-    def __init__(self, push_rules: PushRules, enabled_map: Dict[str, bool]): ...
+    def __init__(
+        self, push_rules: PushRules, enabled_map: Dict[str, bool], msc3664_enabled: bool
+    ): ...
     def rules(self) -> Collection[Tuple[PushRule, bool]]: ...
 
 def get_base_rule_ids() -> Collection[str]: ...
@@ -37,6 +39,8 @@ class PushRuleEvaluator:
         room_member_count: int,
         sender_power_level: Optional[int],
         notification_power_levels: Mapping[str, int],
+        related_events_flattened: Mapping[str, Mapping[str, str]],
+        related_event_match_enabled: bool,
     ): ...
     def run(
         self,
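A hedged usage sketch against the updated stubs (argument order as declared above; the placeholder values, and the assumption that the pyo3-generated constructors accept these positionally, are not from the diff):

    filtered_rules = FilteredPushRules(push_rules, enabled_map, True)  # msc3664_enabled
    evaluator = PushRuleEvaluator(
        flattened_keys,              # e.g. {"content.body": "hello"}
        room_member_count,           # int
        sender_power_level,          # Optional[int]
        notification_power_levels,   # Mapping[str, int]
        related_events_flattened,    # Mapping[str, Mapping[str, str]]
        True,                        # related_event_match_enabled
    )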
8
synapse/_scripts/update_synapse_database.py (Executable file → Normal file)
@@ -15,7 +15,6 @@
 
 import argparse
 import logging
-import sys
 from typing import cast
 
 import yaml
@@ -100,13 +99,6 @@ def main() -> None:
     # Load, process and sanity-check the config.
     hs_config = yaml.safe_load(args.database_config)
 
-    if "database" not in hs_config and "databases" not in hs_config:
-        sys.stderr.write(
-            "The configuration file must have a 'database' or 'databases' section. "
-            "See https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#database"
-        )
-        sys.exit(4)
-
     config = HomeServerConfig()
     config.parse_config_dict(hs_config, "", "")
 
@@ -155,7 +155,13 @@ class RedirectException(CodeMessageException):
 
 class SynapseError(CodeMessageException):
     """A base exception type for matrix errors which have an errcode and error
-    message (as well as an HTTP status code).
+    message (as well as an HTTP status code). These often bubble all the way up to the
+    client API response so the error code and status often reach the client directly as
+    defined here. If the error doesn't make sense to present to a client, then it
+    probably shouldn't be a `SynapseError`. For example, if we contact another
+    homeserver over federation, we shouldn't automatically ferry response errors back to
+    the client on our end (a 500 from a remote server does not make sense to a client
+    when our server did not experience a 500).
 
     Attributes:
         errcode: Matrix error code e.g 'M_FORBIDDEN'
@@ -600,8 +606,20 @@ def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs: Any) -> "JsonDict":
 
 
 class FederationError(RuntimeError):
-    """This class is used to inform remote homeservers about erroneous
-    PDUs they sent us.
+    """
+    Raised when we process an erroneous PDU.
+
+    There are two kinds of scenarios where this exception can be raised:
+
+    1. We may pull an invalid PDU from a remote homeserver (e.g. during backfill). We
+       raise this exception to signal an error to the rest of the application.
+    2. We may be pushed an invalid PDU as part of a `/send` transaction from a remote
+       homeserver. We raise so that we can respond to the transaction and include the
+       error string in the "PDU Processing Result". The message will likely be
+       ignored by the remote homeserver and is not machine parse-able since it's just a
+       string.
+
+    TODO: In the future, we should split these usage scenarios into their own error types.
 
     FATAL: The remote server could not interpret the source event.
         (e.g., it was missing a required field)
@@ -28,7 +28,7 @@ FEDERATION_V1_PREFIX = FEDERATION_PREFIX + "/v1"
 FEDERATION_V2_PREFIX = FEDERATION_PREFIX + "/v2"
 FEDERATION_UNSTABLE_PREFIX = FEDERATION_PREFIX + "/unstable"
 STATIC_PREFIX = "/_matrix/static"
-SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
+SERVER_KEY_PREFIX = "/_matrix/key"
 MEDIA_R0_PREFIX = "/_matrix/media/r0"
 MEDIA_V3_PREFIX = "/_matrix/media/v3"
 LEGACY_MEDIA_PREFIX = "/_matrix/media/v1"
@@ -558,7 +558,7 @@ def reload_cache_config(config: HomeServerConfig) -> None:
         logger.warning(f)
     else:
         logger.debug(
-            "New cache config. Was:\n %s\nNow:\n",
+            "New cache config. Was:\n %s\nNow:\n %s",
             previous_cache_config.__dict__,
             config.caches.__dict__,
         )
@@ -55,13 +55,13 @@ import os
 import signal
 import sys
 from types import FrameType
-from typing import Any, Callable, List, Optional
+from typing import Any, Callable, Dict, List, Optional
 
 from twisted.internet.main import installReactor
 
 # a list of the original signal handlers, before we installed our custom ones.
 # We restore these in our child processes.
-_original_signal_handlers: dict[int, Any] = {}
+_original_signal_handlers: Dict[int, Any] = {}
 
 
 class ProxiedReactor:
@@ -28,7 +28,7 @@ from synapse.api.urls import (
     LEGACY_MEDIA_PREFIX,
     MEDIA_R0_PREFIX,
     MEDIA_V3_PREFIX,
-    SERVER_KEY_V2_PREFIX,
+    SERVER_KEY_PREFIX,
 )
 from synapse.app import _base
 from synapse.app._base import (
@@ -89,7 +89,7 @@ from synapse.rest.client.register import (
     RegistrationTokenValidityRestServlet,
 )
 from synapse.rest.health import HealthResource
-from synapse.rest.key.v2 import KeyApiV2Resource
+from synapse.rest.key.v2 import KeyResource
 from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.rest.well_known import well_known_resource
 from synapse.server import HomeServer
@@ -178,13 +178,13 @@ class KeyUploadServlet(RestServlet):
         # Proxy headers from the original request, such as the auth headers
         # (in case the access token is there) and the original IP /
         # User-Agent of the request.
-        headers = {
-            header: request.requestHeaders.getRawHeaders(header, [])
+        headers: Dict[bytes, List[bytes]] = {
+            header: list(request.requestHeaders.getRawHeaders(header, []))
             for header in (b"Authorization", b"User-Agent")
         }
         # Add the previous hop to the X-Forwarded-For header.
-        x_forwarded_for = request.requestHeaders.getRawHeaders(
-            b"X-Forwarded-For", []
+        x_forwarded_for = list(
+            request.requestHeaders.getRawHeaders(b"X-Forwarded-For", [])
         )
         # we use request.client here, since we want the previous hop, not the
         # original client (as returned by request.getClientAddress()).
@@ -325,13 +325,13 @@ class GenericWorkerServer(HomeServer):
 
                     presence.register_servlets(self, resource)
 
-                    resources.update({CLIENT_API_PREFIX: resource})
+                    resources[CLIENT_API_PREFIX] = resource
 
                     resources.update(build_synapse_client_resource_tree(self))
-                    resources.update({"/.well-known": well_known_resource(self)})
+                    resources["/.well-known"] = well_known_resource(self)
 
                 elif name == "federation":
-                    resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
+                    resources[FEDERATION_PREFIX] = TransportLayerServer(self)
                 elif name == "media":
                     if self.config.media.can_load_media_repo:
                         media_repo = self.get_media_repository_resource()
@@ -359,16 +359,12 @@ class GenericWorkerServer(HomeServer):
                     # Only load the openid resource separately if federation resource
                     # is not specified since federation resource includes openid
                     # resource.
-                    resources.update(
-                        {
-                            FEDERATION_PREFIX: TransportLayerServer(
-                                self, servlet_groups=["openid"]
-                            )
-                        }
-                    )
+                    resources[FEDERATION_PREFIX] = TransportLayerServer(
+                        self, servlet_groups=["openid"]
+                    )
 
                 if name in ["keys", "federation"]:
-                    resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
+                    resources[SERVER_KEY_PREFIX] = KeyResource(self)
 
                 if name == "replication":
                     resources[REPLICATION_PREFIX] = ReplicationRestResource(self)
@@ -31,7 +31,7 @@ from synapse.api.urls import (
     LEGACY_MEDIA_PREFIX,
     MEDIA_R0_PREFIX,
     MEDIA_V3_PREFIX,
-    SERVER_KEY_V2_PREFIX,
+    SERVER_KEY_PREFIX,
     STATIC_PREFIX,
 )
 from synapse.app import _base
@@ -60,7 +60,7 @@ from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
 from synapse.rest import ClientRestResource
 from synapse.rest.admin import AdminRestResource
 from synapse.rest.health import HealthResource
-from synapse.rest.key.v2 import KeyApiV2Resource
+from synapse.rest.key.v2 import KeyResource
 from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.rest.well_known import well_known_resource
 from synapse.server import HomeServer
@@ -215,31 +215,23 @@ class SynapseHomeServer(HomeServer):
             consent_resource: Resource = ConsentResource(self)
             if compress:
                 consent_resource = gz_wrap(consent_resource)
-            resources.update({"/_matrix/consent": consent_resource})
+            resources["/_matrix/consent"] = consent_resource
 
         if name == "federation":
             federation_resource: Resource = TransportLayerServer(self)
             if compress:
                 federation_resource = gz_wrap(federation_resource)
-            resources.update({FEDERATION_PREFIX: federation_resource})
+            resources[FEDERATION_PREFIX] = federation_resource
 
         if name == "openid":
-            resources.update(
-                {
-                    FEDERATION_PREFIX: TransportLayerServer(
-                        self, servlet_groups=["openid"]
-                    )
-                }
-            )
+            resources[FEDERATION_PREFIX] = TransportLayerServer(
+                self, servlet_groups=["openid"]
+            )
 
         if name in ["static", "client"]:
-            resources.update(
-                {
-                    STATIC_PREFIX: StaticResource(
-                        os.path.join(os.path.dirname(synapse.__file__), "static")
-                    )
-                }
-            )
+            resources[STATIC_PREFIX] = StaticResource(
+                os.path.join(os.path.dirname(synapse.__file__), "static")
+            )
 
         if name in ["media", "federation", "client"]:
             if self.config.server.enable_media_repo:
@@ -257,7 +249,7 @@ class SynapseHomeServer(HomeServer):
             )
 
         if name in ["keys", "federation"]:
-            resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
+            resources[SERVER_KEY_PREFIX] = KeyResource(self)
 
         if name == "metrics" and self.config.metrics.enable_metrics:
             metrics_resource: Resource = MetricsResource(RegistryProxy)
@@ -172,12 +172,24 @@ class ApplicationService:
         Returns:
             True if this service would like to know about this room.
         """
-        member_list = await store.get_users_in_room(
+        # We can use `get_local_users_in_room(...)` here because an application service
+        # can only be interested in local users of the server it's on (ignore any remote
+        # users that might match the user namespace regex).
+        #
+        # In the future, we can consider re-using
+        # `store.get_app_service_users_in_room` which is very similar to this
+        # function but has a slightly worse performance than this because we
+        # have an early escape-hatch if we find a single user that the
+        # appservice is interested in. The juice would be worth the squeeze if
+        # `store.get_app_service_users_in_room` was used in more places besides
+        # an experimental MSC. But for now we can avoid doing more work and
+        # barely using it later.
+        local_user_ids = await store.get_local_users_in_room(
            room_id, on_invalidate=cache_context.invalidate
         )
 
         # check joined member events
-        for user_id in member_list:
+        for user_id in local_user_ids:
             if self.is_interested_in_user(user_id):
                 return True
         return False
@@ -98,6 +98,9 @@ class ExperimentalConfig(Config):
         # MSC3773: Thread notifications
         self.msc3773_enabled: bool = experimental.get("msc3773_enabled", False)
 
+        # MSC3664: Pushrules to match on related events
+        self.msc3664_enabled: bool = experimental.get("msc3664_enabled", False)
+
         # MSC3848: Introduce errcodes for specific event sending failures
         self.msc3848_enabled: bool = experimental.get("msc3848_enabled", False)
 
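Operators opt in through the experimental features section of homeserver.yaml; a minimal sketch (the surrounding file is assumed, the key name comes from the diff):

    experimental_features:
      msc3664_enabled: true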
@@ -53,7 +53,7 @@ DEFAULT_LOG_CONFIG = Template(
 # Synapse also supports structured logging for machine readable logs which can
 # be ingested by ELK stacks. See [2] for details.
 #
-# [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
+# [1]: https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
 # [2]: https://matrix-org.github.io/synapse/latest/structured_logging.html
 
 version: 1
@@ -123,6 +123,8 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
         "userinfo_endpoint": {"type": "string"},
         "jwks_uri": {"type": "string"},
         "skip_verification": {"type": "boolean"},
+        "backchannel_logout_enabled": {"type": "boolean"},
+        "backchannel_logout_ignore_sub": {"type": "boolean"},
         "user_profile_method": {
             "type": "string",
             "enum": ["auto", "userinfo_endpoint"],
@@ -292,6 +294,10 @@ def _parse_oidc_config_dict(
         token_endpoint=oidc_config.get("token_endpoint"),
         userinfo_endpoint=oidc_config.get("userinfo_endpoint"),
         jwks_uri=oidc_config.get("jwks_uri"),
+        backchannel_logout_enabled=oidc_config.get("backchannel_logout_enabled", False),
+        backchannel_logout_ignore_sub=oidc_config.get(
+            "backchannel_logout_ignore_sub", False
+        ),
         skip_verification=oidc_config.get("skip_verification", False),
         user_profile_method=oidc_config.get("user_profile_method", "auto"),
         allow_existing_users=oidc_config.get("allow_existing_users", False),
@@ -368,6 +374,12 @@ class OidcProviderConfig:
     # "openid" scope is used.
     jwks_uri: Optional[str]
 
+    # Whether Synapse should react to backchannel logouts
+    backchannel_logout_enabled: bool
+
+    # Whether Synapse should ignore the `sub` claim in backchannel logouts or not.
+    backchannel_logout_ignore_sub: bool
+
     # Whether to skip metadata verification
     skip_verification: bool
 
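A hedged example of the new provider options in homeserver.yaml (the other required provider fields are elided; the two key names come from the schema above):

    oidc_providers:
      - idp_id: my_idp
        # ... issuer, client_id, client_secret, etc. ...
        backchannel_logout_enabled: true
        backchannel_logout_ignore_sub: false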
@@ -80,6 +80,18 @@ PDU_RETRY_TIME_MS = 1 * 60 * 1000
 T = TypeVar("T")
 
 
+@attr.s(frozen=True, slots=True, auto_attribs=True)
+class PulledPduInfo:
+    """
+    A result object that stores the PDU and info about it like which homeserver we
+    pulled it from (`pull_origin`)
+    """
+
+    pdu: EventBase
+    # Which homeserver we pulled the PDU from
+    pull_origin: str
+
+
 class InvalidResponseError(RuntimeError):
     """Helper for _try_destination_list: indicates that the server returned a response
     we couldn't parse
@@ -114,7 +126,9 @@ class FederationClient(FederationBase):
         self.hostname = hs.hostname
         self.signing_key = hs.signing_key
 
-        self._get_pdu_cache: ExpiringCache[str, EventBase] = ExpiringCache(
+        # Cache mapping `event_id` to a tuple of the event itself and the `pull_origin`
+        # (which server we pulled the event from)
+        self._get_pdu_cache: ExpiringCache[str, Tuple[EventBase, str]] = ExpiringCache(
             cache_name="get_pdu_cache",
             clock=self._clock,
             max_len=1000,
@@ -352,11 +366,11 @@ class FederationClient(FederationBase):
     @tag_args
     async def get_pdu(
         self,
-        destinations: Iterable[str],
+        destinations: Collection[str],
         event_id: str,
         room_version: RoomVersion,
         timeout: Optional[int] = None,
-    ) -> Optional[EventBase]:
+    ) -> Optional[PulledPduInfo]:
         """Requests the PDU with given origin and ID from the remote home
         servers.
 
@@ -371,11 +385,11 @@ class FederationClient(FederationBase):
             moving to the next destination. None indicates no timeout.
 
         Returns:
-            The requested PDU, or None if we were unable to find it.
+            The requested PDU wrapped in `PulledPduInfo`, or None if we were unable to find it.
         """
 
         logger.debug(
-            "get_pdu: event_id=%s from destinations=%s", event_id, destinations
+            "get_pdu(event_id=%s): from destinations=%s", event_id, destinations
         )
 
         # TODO: Rate limit the number of times we try and get the same event.
@@ -384,19 +398,25 @@ class FederationClient(FederationBase):
         # it gets persisted to the database), so we cache the results of the lookup.
         # Note that this is separate to the regular get_event cache which caches
         # events once they have been persisted.
-        event = self._get_pdu_cache.get(event_id)
+        get_pdu_cache_entry = self._get_pdu_cache.get(event_id)
 
+        event = None
+        pull_origin = None
+        if get_pdu_cache_entry:
+            event, pull_origin = get_pdu_cache_entry
         # If we don't see the event in the cache, go try to fetch it from the
         # provided remote federated destinations
-        if not event:
+        else:
             pdu_attempts = self.pdu_destination_tried.setdefault(event_id, {})
 
+            # TODO: We can probably refactor this to use `_try_destination_list`
             for destination in destinations:
                 now = self._clock.time_msec()
                 last_attempt = pdu_attempts.get(destination, 0)
                 if last_attempt + PDU_RETRY_TIME_MS > now:
                     logger.debug(
-                        "get_pdu: skipping destination=%s because we tried it recently last_attempt=%s and we only check every %s (now=%s)",
+                        "get_pdu(event_id=%s): skipping destination=%s because we tried it recently last_attempt=%s and we only check every %s (now=%s)",
+                        event_id,
                         destination,
                         last_attempt,
                         PDU_RETRY_TIME_MS,
@@ -411,43 +431,48 @@ class FederationClient(FederationBase):
                         room_version=room_version,
                         timeout=timeout,
                     )
+                    pull_origin = destination
 
                     pdu_attempts[destination] = now
 
                     if event:
                         # Prime the cache
-                        self._get_pdu_cache[event.event_id] = event
+                        self._get_pdu_cache[event.event_id] = (event, pull_origin)
 
                         # Now that we have an event, we can break out of this
                         # loop and stop asking other destinations.
                         break
 
+                except NotRetryingDestination as e:
+                    logger.info("get_pdu(event_id=%s): %s", event_id, e)
+                    continue
+                except FederationDeniedError:
+                    logger.info(
+                        "get_pdu(event_id=%s): Not attempting to fetch PDU from %s because the homeserver is not on our federation whitelist",
+                        event_id,
+                        destination,
+                    )
+                    continue
                 except SynapseError as e:
                     logger.info(
-                        "Failed to get PDU %s from %s because %s",
+                        "get_pdu(event_id=%s): Failed to get PDU from %s because %s",
                         event_id,
                         destination,
                         e,
                     )
                     continue
-                except NotRetryingDestination as e:
-                    logger.info(str(e))
-                    continue
-                except FederationDeniedError as e:
-                    logger.info(str(e))
-                    continue
                 except Exception as e:
                     pdu_attempts[destination] = now
 
                     logger.info(
-                        "Failed to get PDU %s from %s because %s",
+                        "get_pdu(event_id=%s): Failed to get PDU from %s because %s",
                        event_id,
                         destination,
                         e,
                     )
                     continue
 
-        if not event:
+        if not event or not pull_origin:
             return None
 
         # `event` now refers to an object stored in `get_pdu_cache`. Our
@@ -459,7 +484,7 @@ class FederationClient(FederationBase):
             event.room_version,
         )
 
-        return event_copy
+        return PulledPduInfo(event_copy, pull_origin)
 
     @trace
     @tag_args
|
|||||||
pdu_origin = get_domain_from_id(pdu.sender)
|
pdu_origin = get_domain_from_id(pdu.sender)
|
||||||
if not res and pdu_origin != origin:
|
if not res and pdu_origin != origin:
|
||||||
try:
|
try:
|
||||||
res = await self.get_pdu(
|
pulled_pdu_info = await self.get_pdu(
|
||||||
destinations=[pdu_origin],
|
destinations=[pdu_origin],
|
||||||
event_id=pdu.event_id,
|
event_id=pdu.event_id,
|
||||||
room_version=room_version,
|
room_version=room_version,
|
||||||
timeout=10000,
|
timeout=10000,
|
||||||
)
|
)
|
||||||
|
if pulled_pdu_info is not None:
|
||||||
|
res = pulled_pdu_info.pdu
|
||||||
except SynapseError:
|
except SynapseError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -806,6 +833,7 @@ class FederationClient(FederationBase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
for destination in destinations:
|
for destination in destinations:
|
||||||
|
# We don't want to ask our own server for information we don't have
|
||||||
if destination == self.server_name:
|
if destination == self.server_name:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -814,9 +842,21 @@ class FederationClient(FederationBase):
|
|||||||
except (
|
except (
|
||||||
RequestSendFailed,
|
RequestSendFailed,
|
||||||
InvalidResponseError,
|
InvalidResponseError,
|
||||||
NotRetryingDestination,
|
|
||||||
) as e:
|
) as e:
|
||||||
logger.warning("Failed to %s via %s: %s", description, destination, e)
|
logger.warning("Failed to %s via %s: %s", description, destination, e)
|
||||||
|
# Skip to the next homeserver in the list to try.
|
||||||
|
continue
|
||||||
|
except NotRetryingDestination as e:
|
||||||
|
logger.info("%s: %s", description, e)
|
||||||
|
continue
|
||||||
|
except FederationDeniedError:
|
||||||
|
logger.info(
|
||||||
|
"%s: Not attempting to %s from %s because the homeserver is not on our federation whitelist",
|
||||||
|
description,
|
||||||
|
description,
|
||||||
|
destination,
|
||||||
|
)
|
||||||
|
continue
|
||||||
except UnsupportedRoomVersionError:
|
except UnsupportedRoomVersionError:
|
||||||
raise
|
raise
|
||||||
except HttpResponseException as e:
|
except HttpResponseException as e:
|
||||||
@ -1609,6 +1649,54 @@ class FederationClient(FederationBase):
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
async def timestamp_to_event(
|
async def timestamp_to_event(
|
||||||
|
self, *, destinations: List[str], room_id: str, timestamp: int, direction: str
|
||||||
|
) -> Optional["TimestampToEventResponse"]:
|
||||||
|
"""
|
||||||
|
Calls each remote federating server from `destinations` asking for their closest
|
||||||
|
event to the given timestamp in the given direction until we get a response.
|
||||||
|
Also validates the response to always return the expected keys or raises an
|
||||||
|
error.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
destinations: The domains of homeservers to try fetching from
|
||||||
|
room_id: Room to fetch the event from
|
||||||
|
timestamp: The point in time (inclusive) we should navigate from in
|
||||||
|
the given direction to find the closest event.
|
||||||
|
direction: ["f"|"b"] to indicate whether we should navigate forward
|
||||||
|
or backward from the given timestamp to find the closest event.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A parsed TimestampToEventResponse including the closest event_id
|
||||||
|
and origin_server_ts or None if no destination has a response.
|
||||||
|
"""
|
||||||
|
|
||||||
|
async def _timestamp_to_event_from_destination(
|
||||||
|
destination: str,
|
||||||
|
) -> TimestampToEventResponse:
|
||||||
|
return await self._timestamp_to_event_from_destination(
|
||||||
|
destination, room_id, timestamp, direction
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Loop through each homeserver candidate until we get a succesful response
|
||||||
|
timestamp_to_event_response = await self._try_destination_list(
|
||||||
|
"timestamp_to_event",
|
||||||
|
destinations,
|
||||||
|
# TODO: The requested timestamp may lie in a part of the
|
||||||
|
# event graph that the remote server *also* didn't have,
|
||||||
|
# in which case they will have returned another event
|
||||||
|
# which may be nowhere near the requested timestamp. In
|
||||||
|
# the future, we may need to reconcile that gap and ask
|
||||||
|
# other homeservers, and/or extend `/timestamp_to_event`
|
||||||
|
# to return events on *both* sides of the timestamp to
|
||||||
|
# help reconcile the gap faster.
|
||||||
|
_timestamp_to_event_from_destination,
|
||||||
|
)
|
||||||
|
return timestamp_to_event_response
|
||||||
|
except SynapseError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def _timestamp_to_event_from_destination(
|
||||||
self, destination: str, room_id: str, timestamp: int, direction: str
|
self, destination: str, room_id: str, timestamp: int, direction: str
|
||||||
) -> "TimestampToEventResponse":
|
) -> "TimestampToEventResponse":
|
||||||
"""
|
"""
|
||||||
|
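A hedged usage sketch of the new public wrapper (argument names come from the docstring above; the concrete values are placeholders):

    response = await federation_client.timestamp_to_event(
        destinations=["remote.example.org"],
        room_id="!room:example.org",
        timestamp=1667304000000,  # ms since the epoch
        direction="f",
    )
    if response is not None:
        closest_event_id = response.event_id
        origin_server_ts = response.origin_server_ts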
@@ -481,6 +481,14 @@ class FederationServer(FederationBase):
                 pdu_results[pdu.event_id] = await process_pdu(pdu)
 
         async def process_pdu(pdu: EventBase) -> JsonDict:
+            """
+            Processes a pushed PDU sent to us via a `/send` transaction
+
+            Returns:
+                JsonDict representing a "PDU Processing Result" that will be bundled up
+                with the other processed PDUs in the `/send` transaction and sent back
+                to the remote homeserver.
+            """
             event_id = pdu.event_id
             with nested_logging_context(event_id):
                 try:
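For reference (a sketch based on the docstring above, not part of the diff), the per-PDU results are keyed by event ID in the /send response, with an empty dict for success and a free-form error string otherwise:

    pdu_results = {
        "$accepted_event_id": {},
        "$rejected_event_id": {"error": "Event failed signature check"},
    }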
@@ -499,6 +499,11 @@ class FederationV2InviteServlet(BaseFederationServerServlet):
         result = await self.handler.on_invite_request(
             origin, event, room_version_id=room_version
         )
+
+        # We only store invite_room_state for internal use, so remove it before
+        # returning the event to the remote homeserver.
+        result["event"].get("unsigned", {}).pop("invite_room_state", None)
+
         return 200, result
 
 
@@ -100,6 +100,7 @@ class AdminHandler:
         user_info_dict["avatar_url"] = profile.avatar_url
         user_info_dict["threepids"] = threepids
         user_info_dict["external_ids"] = external_ids
+        user_info_dict["erased"] = await self.store.is_user_erased(user.to_string())
 
         return user_info_dict
 
@ -38,6 +38,7 @@ from typing import (
|
|||||||
import attr
|
import attr
|
||||||
import bcrypt
|
import bcrypt
|
||||||
import unpaddedbase64
|
import unpaddedbase64
|
||||||
|
from prometheus_client import Counter
|
||||||
|
|
||||||
from twisted.internet.defer import CancelledError
|
from twisted.internet.defer import CancelledError
|
||||||
from twisted.web.server import Request
|
from twisted.web.server import Request
|
||||||
@ -48,6 +49,7 @@ from synapse.api.errors import (
|
|||||||
Codes,
|
Codes,
|
||||||
InteractiveAuthIncompleteError,
|
InteractiveAuthIncompleteError,
|
||||||
LoginError,
|
LoginError,
|
||||||
|
NotFoundError,
|
||||||
StoreError,
|
StoreError,
|
||||||
SynapseError,
|
SynapseError,
|
||||||
UserDeactivatedError,
|
UserDeactivatedError,
|
||||||
@ -63,10 +65,14 @@ from synapse.http.server import finish_request, respond_with_html
|
|||||||
from synapse.http.site import SynapseRequest
|
from synapse.http.site import SynapseRequest
|
||||||
from synapse.logging.context import defer_to_thread
|
from synapse.logging.context import defer_to_thread
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
|
from synapse.storage.databases.main.registration import (
|
||||||
|
LoginTokenExpired,
|
||||||
|
LoginTokenLookupResult,
|
||||||
|
LoginTokenReused,
|
||||||
|
)
|
||||||
from synapse.types import JsonDict, Requester, UserID
|
from synapse.types import JsonDict, Requester, UserID
|
||||||
from synapse.util import stringutils as stringutils
|
from synapse.util import stringutils as stringutils
|
||||||
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
|
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
|
||||||
from synapse.util.macaroons import LoginTokenAttributes
|
|
||||||
from synapse.util.msisdn import phone_number_to_msisdn
|
from synapse.util.msisdn import phone_number_to_msisdn
|
||||||
from synapse.util.stringutils import base62_encode
|
from synapse.util.stringutils import base62_encode
|
||||||
from synapse.util.threepids import canonicalise_email
|
from synapse.util.threepids import canonicalise_email
|
||||||
@ -80,6 +86,12 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
INVALID_USERNAME_OR_PASSWORD = "Invalid username or password"
|
INVALID_USERNAME_OR_PASSWORD = "Invalid username or password"
|
||||||
|
|
||||||
|
invalid_login_token_counter = Counter(
|
||||||
|
"synapse_user_login_invalid_login_tokens",
|
||||||
|
"Counts the number of rejected m.login.token on /login",
|
||||||
|
["reason"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def convert_client_dict_legacy_fields_to_identifier(
|
def convert_client_dict_legacy_fields_to_identifier(
|
||||||
submission: JsonDict,
|
submission: JsonDict,
|
||||||
@@ -883,6 +895,25 @@ class AuthHandler:

         return True

+    async def create_login_token_for_user_id(
+        self,
+        user_id: str,
+        duration_ms: int = (2 * 60 * 1000),
+        auth_provider_id: Optional[str] = None,
+        auth_provider_session_id: Optional[str] = None,
+    ) -> str:
+        login_token = self.generate_login_token()
+        now = self._clock.time_msec()
+        expiry_ts = now + duration_ms
+        await self.store.add_login_token_to_user(
+            user_id=user_id,
+            token=login_token,
+            expiry_ts=expiry_ts,
+            auth_provider_id=auth_provider_id,
+            auth_provider_session_id=auth_provider_session_id,
+        )
+        return login_token
+
     async def create_refresh_token_for_user_id(
         self,
         user_id: str,
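
Aside: a sketch of how a caller might drive the new helper; the wiring is hypothetical and `auth_handler` stands in for `hs.get_auth_handler()`.

    async def issue_sso_login_token(auth_handler, registered_user_id: str) -> str:
        # short-term token the client exchanges via m.login.token on /login;
        # duration_ms restates the default from the method signature above
        return await auth_handler.create_login_token_for_user_id(
            registered_user_id,
            duration_ms=2 * 60 * 1000,
            auth_provider_id="oidc",
        )
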
@@ -1401,6 +1432,18 @@ class AuthHandler:
             return None
         return user_id

+    def generate_login_token(self) -> str:
+        """Generates an opaque string, for use as an short-term login token"""
+
+        # we use the following format for access tokens:
+        #    syl_<random string>_<base62 crc check>
+
+        random_string = stringutils.random_string(20)
+        base = f"syl_{random_string}"
+
+        crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
+        return f"{base}_{crc}"
+
     def generate_access_token(self, for_user: UserID) -> str:
         """Generates an opaque string, for use as an access token"""

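
Aside: the `syl_` tokens above are self-checking; the trailing six characters are a base62-encoded CRC32 of the prefix. A standalone sketch of that check, re-implementing `base62_encode` purely for illustration (Synapse's own helper lives in synapse.util.stringutils):

    from zlib import crc32

    BASE62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"

    def base62_encode(num: int, minwidth: int = 1) -> str:
        # repeated divmod; digits come out least significant first
        digits = []
        while num:
            num, rem = divmod(num, 62)
            digits.append(BASE62[rem])
        return ("".join(reversed(digits)) or "0").rjust(minwidth, "0")

    def has_valid_crc(token: str) -> bool:
        base, _, crc = token.rpartition("_")
        return base.startswith("syl_") and crc == base62_encode(
            crc32(base.encode("ascii")), minwidth=6
        )
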
@@ -1427,16 +1470,17 @@ class AuthHandler:
         crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
         return f"{base}_{crc}"

-    async def validate_short_term_login_token(
-        self, login_token: str
-    ) -> LoginTokenAttributes:
+    async def consume_login_token(self, login_token: str) -> LoginTokenLookupResult:
         try:
-            res = self.macaroon_gen.verify_short_term_login_token(login_token)
-        except Exception:
-            raise AuthError(403, "Invalid login token", errcode=Codes.FORBIDDEN)
+            return await self.store.consume_login_token(login_token)
+        except LoginTokenExpired:
+            invalid_login_token_counter.labels("expired").inc()
+        except LoginTokenReused:
+            invalid_login_token_counter.labels("reused").inc()
+        except NotFoundError:
+            invalid_login_token_counter.labels("not found").inc()

-        await self.auth_blocking.check_auth_blocking(res.user_id)
-        return res
+        raise AuthError(403, "Invalid login token", errcode=Codes.FORBIDDEN)

     async def delete_access_token(self, access_token: str) -> None:
         """Invalidate a single access token
@@ -1711,7 +1755,7 @@ class AuthHandler:
         )

         # Create a login token
-        login_token = self.macaroon_gen.generate_short_term_login_token(
+        login_token = await self.create_login_token_for_user_id(
             registered_user_id,
             auth_provider_id=auth_provider_id,
             auth_provider_session_id=auth_provider_session_id,
synapse/handlers/e2e_keys.py

@@ -49,6 +49,7 @@ logger = logging.getLogger(__name__)

 class E2eKeysHandler:
     def __init__(self, hs: "HomeServer"):
+        self.config = hs.config
         self.store = hs.get_datastores().main
         self.federation = hs.get_federation_client()
         self.device_handler = hs.get_device_handler()
@@ -431,13 +432,17 @@ class E2eKeysHandler:
     @trace
     @cancellable
     async def query_local_devices(
-        self, query: Mapping[str, Optional[List[str]]]
+        self,
+        query: Mapping[str, Optional[List[str]]],
+        include_displaynames: bool = True,
     ) -> Dict[str, Dict[str, dict]]:
         """Get E2E device keys for local users

         Args:
             query: map from user_id to a list
                 of devices to query (None for all devices)
+            include_displaynames: Whether to include device displaynames in the returned
+                device details.

         Returns:
             A map from user_id -> device_id -> device details
@@ -469,7 +474,9 @@ class E2eKeysHandler:
             # make sure that each queried user appears in the result dict
             result_dict[user_id] = {}

-        results = await self.store.get_e2e_device_keys_for_cs_api(local_query)
+        results = await self.store.get_e2e_device_keys_for_cs_api(
+            local_query, include_displaynames
+        )

         # Build the result structure
         for user_id, device_keys in results.items():
@@ -482,11 +489,33 @@ class E2eKeysHandler:
     async def on_federation_query_client_keys(
         self, query_body: Dict[str, Dict[str, Optional[List[str]]]]
     ) -> JsonDict:
-        """Handle a device key query from a federated server"""
+        """Handle a device key query from a federated server:
+
+        Handles the path: GET /_matrix/federation/v1/users/keys/query
+
+        Args:
+            query_body: The body of the query request. Should contain a key
+                "device_keys" that map to a dictionary of user ID's -> list of
+                device IDs. If the list of device IDs is empty, all devices of
+                that user will be queried.
+
+        Returns:
+            A json dictionary containing the following:
+                - device_keys: A dictionary containing the requested device information.
+                - master_keys: An optional dictionary of user ID -> master cross-signing
+                    key info.
+                - self_signing_key: An optional dictionary of user ID -> self-signing
+                    key info.
+        """
         device_keys_query: Dict[str, Optional[List[str]]] = query_body.get(
             "device_keys", {}
         )
-        res = await self.query_local_devices(device_keys_query)
+        res = await self.query_local_devices(
+            device_keys_query,
+            include_displaynames=(
+                self.config.federation.allow_device_name_lookup_over_federation
+            ),
+        )
         ret = {"device_keys": res}

         # add in the cross-signing keys
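
Aside: for orientation, the request body shape the handler above accepts, with illustrative user and device IDs:

    query_body = {
        "device_keys": {
            "@alice:example.org": [],          # empty list: query every device
            "@bob:example.org": ["DEVICEID"],  # or name specific devices
        }
    }
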
synapse/handlers/federation.py

@@ -442,6 +442,15 @@ class FederationHandler:
                 # appropriate stuff.
                 # TODO: We can probably do something more intelligent here.
                 return True
+            except NotRetryingDestination as e:
+                logger.info("_maybe_backfill_inner: %s", e)
+                continue
+            except FederationDeniedError:
+                logger.info(
+                    "_maybe_backfill_inner: Not attempting to backfill from %s because the homeserver is not on our federation whitelist",
+                    dom,
+                )
+                continue
             except (SynapseError, InvalidResponseError) as e:
                 logger.info("Failed to backfill from %s because %s", dom, e)
                 continue
@@ -477,15 +486,9 @@ class FederationHandler:

                 logger.info("Failed to backfill from %s because %s", dom, e)
                 continue
-            except NotRetryingDestination as e:
-                logger.info(str(e))
-                continue
             except RequestSendFailed as e:
                 logger.info("Failed to get backfill from %s because %s", dom, e)
                 continue
-            except FederationDeniedError as e:
-                logger.info(e)
-                continue
             except Exception as e:
                 logger.exception("Failed to backfill from %s because %s", dom, e)
                 continue
@@ -1017,7 +1020,9 @@ class FederationHandler:

         context = EventContext.for_outlier(self._storage_controllers)

-        await self._bulk_push_rule_evaluator.action_for_event_by_user(event, context)
+        await self._bulk_push_rule_evaluator.action_for_events_by_user(
+            [(event, context)]
+        )
         try:
             await self._federation_event_handler.persist_events_and_notify(
                 event.room_id, [(event, context)]
synapse/handlers/federation_event.py

@@ -58,7 +58,7 @@ from synapse.event_auth import (
 )
 from synapse.events import EventBase
 from synapse.events.snapshot import EventContext
-from synapse.federation.federation_client import InvalidResponseError
+from synapse.federation.federation_client import InvalidResponseError, PulledPduInfo
 from synapse.logging.context import nested_logging_context
 from synapse.logging.opentracing import (
     SynapseTags,
@@ -1517,8 +1517,8 @@ class FederationEventHandler:
         )

     async def backfill_event_id(
-        self, destination: str, room_id: str, event_id: str
-    ) -> EventBase:
+        self, destinations: List[str], room_id: str, event_id: str
+    ) -> PulledPduInfo:
         """Backfill a single event and persist it as a non-outlier which means
         we also pull in all of the state and auth events necessary for it.

@@ -1530,24 +1530,21 @@ class FederationEventHandler:
         Raises:
             FederationError if we are unable to find the event from the destination
         """
-        logger.info(
-            "backfill_event_id: event_id=%s from destination=%s", event_id, destination
-        )
+        logger.info("backfill_event_id: event_id=%s", event_id)

         room_version = await self._store.get_room_version(room_id)

-        event_from_response = await self._federation_client.get_pdu(
-            [destination],
+        pulled_pdu_info = await self._federation_client.get_pdu(
+            destinations,
             event_id,
             room_version,
         )

-        if not event_from_response:
+        if not pulled_pdu_info:
             raise FederationError(
                 "ERROR",
                 404,
-                "Unable to find event_id=%s from destination=%s to backfill."
-                % (event_id, destination),
+                f"Unable to find event_id={event_id} from remote servers to backfill.",
                 affected=event_id,
             )

@@ -1555,13 +1552,13 @@ class FederationEventHandler:
         # and auth events to de-outlier it. This also sets up the necessary
         # `state_groups` for the event.
         await self._process_pulled_events(
-            destination,
-            [event_from_response],
+            pulled_pdu_info.pull_origin,
+            [pulled_pdu_info.pdu],
             # Prevent notifications going to clients
             backfilled=True,
         )

-        return event_from_response
+        return pulled_pdu_info

     @trace
     @tag_args
@@ -1584,19 +1581,19 @@ class FederationEventHandler:
         async def get_event(event_id: str) -> None:
             with nested_logging_context(event_id):
                 try:
-                    event = await self._federation_client.get_pdu(
+                    pulled_pdu_info = await self._federation_client.get_pdu(
                         [destination],
                         event_id,
                         room_version,
                     )
-                    if event is None:
+                    if pulled_pdu_info is None:
                         logger.warning(
                             "Server %s didn't return event %s",
                             destination,
                             event_id,
                         )
                         return
-                    events.append(event)
+                    events.append(pulled_pdu_info.pdu)

                 except Exception as e:
                     logger.warning(
@@ -2171,8 +2168,8 @@ class FederationEventHandler:
                 min_depth,
             )
         else:
-            await self._bulk_push_rule_evaluator.action_for_event_by_user(
-                event, context
+            await self._bulk_push_rule_evaluator.action_for_events_by_user(
+                [(event, context)]
             )

         try:
synapse/handlers/message.py

@@ -1442,16 +1442,8 @@ class EventCreationHandler:
            a room that has been un-partial stated.
         """

-        for event, context in events_and_context:
-            # Skip push notification actions for historical messages
-            # because we don't want to notify people about old history back in time.
-            # The historical messages also do not have the proper `context.current_state_ids`
-            # and `state_groups` because they have `prev_events` that aren't persisted yet
-            # (historical messages persisted in reverse-chronological order).
-            if not event.internal_metadata.is_historical() and not event.content.get(EventContentFields.MSC2716_HISTORICAL):
-                with opentracing.start_active_span("calculate_push_actions"):
-                    await self._bulk_push_rule_evaluator.action_for_event_by_user(
-                        event, context
+        await self._bulk_push_rule_evaluator.action_for_events_by_user(
+            events_and_context
         )

         try:
synapse/handlers/oidc.py

@@ -12,14 +12,28 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import binascii
 import inspect
+import json
 import logging
-from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, TypeVar, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Generic,
+    List,
+    Optional,
+    Type,
+    TypeVar,
+    Union,
+)
 from urllib.parse import urlencode, urlparse

 import attr
+import unpaddedbase64
 from authlib.common.security import generate_token
-from authlib.jose import JsonWebToken, jwt
+from authlib.jose import JsonWebToken, JWTClaims
+from authlib.jose.errors import InvalidClaimError, JoseError, MissingClaimError
 from authlib.oauth2.auth import ClientAuth
 from authlib.oauth2.rfc6749.parameters import prepare_grant_uri
 from authlib.oidc.core import CodeIDToken, UserInfo
@@ -35,9 +49,12 @@ from typing_extensions import TypedDict
 from twisted.web.client import readBody
 from twisted.web.http_headers import Headers

+from synapse.api.errors import SynapseError
 from synapse.config import ConfigError
 from synapse.config.oidc import OidcProviderClientSecretJwtKey, OidcProviderConfig
 from synapse.handlers.sso import MappingException, UserAttributes
+from synapse.http.server import finish_request
+from synapse.http.servlet import parse_string
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable
 from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart
@@ -88,6 +105,8 @@ class Token(TypedDict):
 #: there is no real point of doing this in our case.
 JWK = Dict[str, str]

+C = TypeVar("C")
+

 #: A JWK Set, as per RFC7517 sec 5.
 class JWKS(TypedDict):
@@ -247,6 +266,80 @@ class OidcHandler:

         await oidc_provider.handle_oidc_callback(request, session_data, code)

+    async def handle_backchannel_logout(self, request: SynapseRequest) -> None:
+        """Handle an incoming request to /_synapse/client/oidc/backchannel_logout
+
+        This extracts the logout_token from the request and tries to figure out
+        which OpenID Provider it is comming from. This works by matching the iss claim
+        with the issuer and the aud claim with the client_id.
+
+        Since at this point we don't know who signed the JWT, we can't just
+        decode it using authlib since it will always verifies the signature. We
+        have to decode it manually without validating the signature. The actual JWT
+        verification is done in the `OidcProvider.handler_backchannel_logout` method,
+        once we figured out which provider sent the request.
+
+        Args:
+            request: the incoming request from the browser.
+        """
+        logout_token = parse_string(request, "logout_token")
+        if logout_token is None:
+            raise SynapseError(400, "Missing logout_token in request")
+
+        # A JWT looks like this:
+        #    header.payload.signature
+        # where all parts are encoded with urlsafe base64.
+        # The aud and iss claims we care about are in the payload part, which
+        # is a JSON object.
+        try:
+            # By destructuring the list after splitting, we ensure that we have
+            # exactly 3 segments
+            _, payload, _ = logout_token.split(".")
+        except ValueError:
+            raise SynapseError(400, "Invalid logout_token in request")
+
+        try:
+            payload_bytes = unpaddedbase64.decode_base64(payload)
+            claims = json_decoder.decode(payload_bytes.decode("utf-8"))
+        except (json.JSONDecodeError, binascii.Error, UnicodeError):
+            raise SynapseError(400, "Invalid logout_token payload in request")
+
+        try:
+            # Let's extract the iss and aud claims
+            iss = claims["iss"]
+            aud = claims["aud"]
+            # The aud claim can be either a string or a list of string. Here we
+            # normalize it as a list of strings.
+            if isinstance(aud, str):
+                aud = [aud]
+
+            # Check that we have the right types for the aud and the iss claims
+            if not isinstance(iss, str) or not isinstance(aud, list):
+                raise TypeError()
+            for a in aud:
+                if not isinstance(a, str):
+                    raise TypeError()
+
+            # At this point we properly checked both claims types
+            issuer: str = iss
+            audience: List[str] = aud
+        except (TypeError, KeyError):
+            raise SynapseError(400, "Invalid issuer/audience in logout_token")
+
+        # Now that we know the audience and the issuer, we can figure out from
+        # what provider it is coming from
+        oidc_provider: Optional[OidcProvider] = None
+        for provider in self._providers.values():
+            if provider.issuer == issuer and provider.client_id in audience:
+                oidc_provider = provider
+                break
+
+        if oidc_provider is None:
+            raise SynapseError(400, "Could not find the OP that issued this event")
+
+        # Ask the provider to handle the logout request.
+        await oidc_provider.handle_backchannel_logout(request, logout_token)
+
+
 class OidcError(Exception):
     """Used to catch errors when calling the token_endpoint"""
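
Aside: the "decode without verifying" step above can be reproduced with only the standard library; a minimal sketch (the handler itself uses unpaddedbase64 and Synapse's json_decoder instead):

    import base64
    import json

    def peek_claims(jwt: str) -> dict:
        # destructuring enforces exactly three dot-separated segments
        _, payload, _ = jwt.split(".")
        # restore the base64url padding that JWTs strip
        padded = payload + "=" * (-len(payload) % 4)
        # the signature is NOT checked here; the result is only used to
        # route the token to the right provider, which then verifies it
        return json.loads(base64.urlsafe_b64decode(padded))
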
@@ -275,6 +368,7 @@ class OidcProvider:
         provider: OidcProviderConfig,
     ):
         self._store = hs.get_datastores().main
+        self._clock = hs.get_clock()

         self._macaroon_generaton = macaroon_generator

@@ -341,6 +435,7 @@ class OidcProvider:
         self.idp_brand = provider.idp_brand

         self._sso_handler = hs.get_sso_handler()
+        self._device_handler = hs.get_device_handler()

         self._sso_handler.register_identity_provider(self)

@@ -399,6 +494,41 @@ class OidcProvider:
             # If we're not using userinfo, we need a valid jwks to validate the ID token
             m.validate_jwks_uri()

+        if self._config.backchannel_logout_enabled:
+            if not m.get("backchannel_logout_supported", False):
+                logger.warning(
+                    "OIDC Back-Channel Logout is enabled for issuer %r"
+                    "but it does not advertise support for it",
+                    self.issuer,
+                )
+
+            elif not m.get("backchannel_logout_session_supported", False):
+                logger.warning(
+                    "OIDC Back-Channel Logout is enabled and supported "
+                    "by issuer %r but it might not send a session ID with "
+                    "logout tokens, which is required for the logouts to work",
+                    self.issuer,
+                )
+
+            if not self._config.backchannel_logout_ignore_sub:
+                # If OIDC backchannel logouts are enabled, the provider mapping provider
+                # should use the `sub` claim. We verify that by mapping a dumb user and
+                # see if we get back the sub claim
+                user = UserInfo({"sub": "thisisasubject"})
+                try:
+                    subject = self._user_mapping_provider.get_remote_user_id(user)
+                    if subject != user["sub"]:
+                        raise ValueError("Unexpected subject")
+                except Exception:
+                    logger.warning(
+                        f"OIDC Back-Channel Logout is enabled for issuer {self.issuer!r} "
+                        "but it looks like the configured `user_mapping_provider` "
+                        "does not use the `sub` claim as subject. If it is the case, "
+                        "and you want Synapse to ignore the `sub` claim in OIDC "
+                        "Back-Channel Logouts, set `backchannel_logout_ignore_sub` "
+                        "to `true` in the issuer config."
+                    )
+
     @property
     def _uses_userinfo(self) -> bool:
         """Returns True if the ``userinfo_endpoint`` should be used.
@@ -414,6 +544,16 @@ class OidcProvider:
             or self._user_profile_method == "userinfo_endpoint"
         )

+    @property
+    def issuer(self) -> str:
+        """The issuer identifying this provider."""
+        return self._config.issuer
+
+    @property
+    def client_id(self) -> str:
+        """The client_id used when interacting with this provider."""
+        return self._config.client_id
+
     async def load_metadata(self, force: bool = False) -> OpenIDProviderMetadata:
         """Return the provider metadata.

@@ -661,6 +801,59 @@ class OidcProvider:

         return UserInfo(resp)

+    async def _verify_jwt(
+        self,
+        alg_values: List[str],
+        token: str,
+        claims_cls: Type[C],
+        claims_options: Optional[dict] = None,
+        claims_params: Optional[dict] = None,
+    ) -> C:
+        """Decode and validate a JWT, re-fetching the JWKS as needed.
+
+        Args:
+            alg_values: list of `alg` values allowed when verifying the JWT.
+            token: the JWT.
+            claims_cls: the JWTClaims class to use to validate the claims.
+            claims_options: dict of options passed to the `claims_cls` constructor.
+            claims_params: dict of params passed to the `claims_cls` constructor.
+
+        Returns:
+            The decoded claims in the JWT.
+        """
+        jwt = JsonWebToken(alg_values)
+
+        logger.debug("Attempting to decode JWT (%s) %r", claims_cls.__name__, token)
+
+        # Try to decode the keys in cache first, then retry by forcing the keys
+        # to be reloaded
+        jwk_set = await self.load_jwks()
+        try:
+            claims = jwt.decode(
+                token,
+                key=jwk_set,
+                claims_cls=claims_cls,
+                claims_options=claims_options,
+                claims_params=claims_params,
+            )
+        except ValueError:
+            logger.info("Reloading JWKS after decode error")
+            jwk_set = await self.load_jwks(force=True)  # try reloading the jwks
+            claims = jwt.decode(
+                token,
+                key=jwk_set,
+                claims_cls=claims_cls,
+                claims_options=claims_options,
+                claims_params=claims_params,
+            )
+
+        logger.debug("Decoded JWT (%s) %r; validating", claims_cls.__name__, claims)
+
+        claims.validate(
+            now=self._clock.time(), leeway=120
+        )  # allows 2 min of clock skew
+        return claims
+
     async def _parse_id_token(self, token: Token, nonce: str) -> CodeIDToken:
         """Return an instance of UserInfo from token's ``id_token``.

@@ -673,7 +866,14 @@ class OidcProvider:
         Returns:
             The decoded claims in the ID token.
         """
+        id_token = token.get("id_token")
+
+        # That has been theoritically been checked by the caller, so even though
+        # assertion are not enabled in production, it is mainly here to appease mypy
+        assert id_token is not None
+
         metadata = await self.load_metadata()

         claims_params = {
             "nonce": nonce,
             "client_id": self._client_auth.client_id,
@@ -683,39 +883,17 @@ class OidcProvider:
             # in the `id_token` that we can check against.
             claims_params["access_token"] = token["access_token"]

+        claims_options = {"iss": {"values": [metadata["issuer"]]}}
+
         alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"])
-        jwt = JsonWebToken(alg_values)
-
-        claim_options = {"iss": {"values": [metadata["issuer"]]}}
-
-        id_token = token["id_token"]
-        logger.debug("Attempting to decode JWT id_token %r", id_token)
-
-        # Try to decode the keys in cache first, then retry by forcing the keys
-        # to be reloaded
-        jwk_set = await self.load_jwks()
-        try:
-            claims = jwt.decode(
-                id_token,
-                key=jwk_set,
-                claims_cls=CodeIDToken,
-                claims_options=claim_options,
-                claims_params=claims_params,
-            )
-        except ValueError:
-            logger.info("Reloading JWKS after decode error")
-            jwk_set = await self.load_jwks(force=True)  # try reloading the jwks
-            claims = jwt.decode(
-                id_token,
-                key=jwk_set,
-                claims_cls=CodeIDToken,
-                claims_options=claim_options,
-                claims_params=claims_params,
-            )
-
-        logger.debug("Decoded id_token JWT %r; validating", claims)
-
-        claims.validate(leeway=120)  # allows 2 min of clock skew
+        claims = await self._verify_jwt(
+            alg_values=alg_values,
+            token=id_token,
+            claims_cls=CodeIDToken,
+            claims_options=claims_options,
+            claims_params=claims_params,
+        )

         return claims

@@ -1036,6 +1214,146 @@ class OidcProvider:
         # to be strings.
         return str(remote_user_id)

+    async def handle_backchannel_logout(
+        self, request: SynapseRequest, logout_token: str
+    ) -> None:
+        """Handle an incoming request to /_synapse/client/oidc/backchannel_logout
+
+        The OIDC Provider posts a logout token to this endpoint when a user
+        session ends. That token is a JWT signed with the same keys as
+        ID tokens. The OpenID Connect Back-Channel Logout draft explains how to
+        validate the JWT and figure out what session to end.
+
+        Args:
+            request: The request to respond to
+            logout_token: The logout token (a JWT) extracted from the request body
+        """
+        # Back-Channel Logout can be disabled in the config, hence this check.
+        # This is not that important for now since Synapse is registered
+        # manually to the OP, so not specifying the backchannel-logout URI is
+        # as effective than disabling it here. It might make more sense if we
+        # support dynamic registration in Synapse at some point.
+        if not self._config.backchannel_logout_enabled:
+            logger.warning(
+                f"Received an OIDC Back-Channel Logout request from issuer {self.issuer!r} but it is disabled in config"
+            )
+
+            # TODO: this responds with a 400 status code, which is what the OIDC
+            # Back-Channel Logout spec expects, but spec also suggests answering with
+            # a JSON object, with the `error` and `error_description` fields set, which
+            # we are not doing here.
+            # See https://openid.net/specs/openid-connect-backchannel-1_0.html#BCResponse
+            raise SynapseError(
+                400, "OpenID Connect Back-Channel Logout is disabled for this provider"
+            )
+
+        metadata = await self.load_metadata()
+
+        # As per OIDC Back-Channel Logout 1.0 sec. 2.4:
+        #   A Logout Token MUST be signed and MAY also be encrypted. The same
+        #   keys are used to sign and encrypt Logout Tokens as are used for ID
+        #   Tokens. If the Logout Token is encrypted, it SHOULD replicate the
+        #   iss (issuer) claim in the JWT Header Parameters, as specified in
+        #   Section 5.3 of [JWT].
+        alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"])
+
+        # As per sec. 2.6:
+        #    3. Validate the iss, aud, and iat Claims in the same way they are
+        #       validated in ID Tokens.
+        # Which means the audience should contain Synapse's client_id and the
+        # issuer should be the IdP issuer
+        claims_options = {
+            "iss": {"values": [metadata["issuer"]]},
+            "aud": {"values": [self.client_id]},
+        }
+
+        try:
+            claims = await self._verify_jwt(
+                alg_values=alg_values,
+                token=logout_token,
+                claims_cls=LogoutToken,
+                claims_options=claims_options,
+            )
+        except JoseError:
+            logger.exception("Invalid logout_token")
+            raise SynapseError(400, "Invalid logout_token")
+
+        # As per sec. 2.6:
+        #    4. Verify that the Logout Token contains a sub Claim, a sid Claim,
+        #       or both.
+        #    5. Verify that the Logout Token contains an events Claim whose
+        #       value is JSON object containing the member name
+        #       http://schemas.openid.net/event/backchannel-logout.
+        #    6. Verify that the Logout Token does not contain a nonce Claim.
+        # This is all verified by the LogoutToken claims class, so at this
+        # point the `sid` claim exists and is a string.
+        sid: str = claims.get("sid")
+
+        # If the `sub` claim was included in the logout token, we check that it matches
+        # that it matches the right user. We can have cases where the `sub` claim is not
+        # the ID saved in database, so we let admins disable this check in config.
+        sub: Optional[str] = claims.get("sub")
+        expected_user_id: Optional[str] = None
+        if sub is not None and not self._config.backchannel_logout_ignore_sub:
+            expected_user_id = await self._store.get_user_by_external_id(
+                self.idp_id, sub
+            )
+
+        # Invalidate any running user-mapping sessions, in-flight login tokens and
+        # active devices
+        await self._sso_handler.revoke_sessions_for_provider_session_id(
+            auth_provider_id=self.idp_id,
+            auth_provider_session_id=sid,
+            expected_user_id=expected_user_id,
+        )
+
+        request.setResponseCode(200)
+        request.setHeader(b"Cache-Control", b"no-cache, no-store")
+        request.setHeader(b"Pragma", b"no-cache")
+        finish_request(request)
+
+
+class LogoutToken(JWTClaims):
+    """
+    Holds and verify claims of a logout token, as per
+    https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken
+    """
+
+    REGISTERED_CLAIMS = ["iss", "sub", "aud", "iat", "jti", "events", "sid"]
+
+    def validate(self, now: Optional[int] = None, leeway: int = 0) -> None:
+        """Validate everything in claims payload."""
+        super().validate(now, leeway)
+        self.validate_sid()
+        self.validate_events()
+        self.validate_nonce()
+
+    def validate_sid(self) -> None:
+        """Ensure the sid claim is present"""
+        sid = self.get("sid")
+        if not sid:
+            raise MissingClaimError("sid")
+
+        if not isinstance(sid, str):
+            raise InvalidClaimError("sid")
+
+    def validate_nonce(self) -> None:
+        """Ensure the nonce claim is absent"""
+        if "nonce" in self:
+            raise InvalidClaimError("nonce")
+
+    def validate_events(self) -> None:
+        """Ensure the events claim is present and with the right value"""
+        events = self.get("events")
+        if not events:
+            raise MissingClaimError("events")
+
+        if not isinstance(events, dict):
+            raise InvalidClaimError("events")
+
+        if "http://schemas.openid.net/event/backchannel-logout" not in events:
+            raise InvalidClaimError("events")
+
+
 # number of seconds a newly-generated client secret should be valid for
 CLIENT_SECRET_VALIDITY_SECONDS = 3600
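
Aside: for orientation, the claims a spec-shaped logout token carries, as the LogoutToken class above enforces (example values only, loosely following the OIDC Back-Channel Logout spec's sample token):

    logout_token_claims = {
        "iss": "https://op.example.com",  # must match the provider's issuer
        "aud": "synapse-client-id",       # must contain our client_id
        "iat": 1667304000,
        "jti": "bWJq",
        "sid": "08a5019c-17e1-4977-8f42-65a12843ea02",  # the session to revoke
        "events": {"http://schemas.openid.net/event/backchannel-logout": {}},
        # and, per validate_nonce(), no "nonce" claim at all
    }
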
@@ -1105,6 +1423,7 @@ class JwtClientSecret:
         logger.info(
             "Generating new JWT for %s: %s %s", self._oauth_issuer, header, payload
         )
+        jwt = JsonWebToken(header["alg"])
         self._cached_secret = jwt.encode(header, payload, self._key.key)
         self._cached_secret_replacement_time = (
             expires_at - CLIENT_SECRET_MIN_VALIDITY_SECONDS
@@ -1119,9 +1438,6 @@ class UserAttributeDict(TypedDict):
     emails: List[str]


-C = TypeVar("C")
-
-
 class OidcMappingProvider(Generic[C]):
     """A mapping provider maps a UserInfo object to user attributes.

synapse/handlers/profile.py

@@ -307,7 +307,11 @@ class ProfileHandler:
         if not self.max_avatar_size and not self.allowed_avatar_mimetypes:
             return True

-        server_name, _, media_id = parse_and_validate_mxc_uri(mxc)
+        host, port, media_id = parse_and_validate_mxc_uri(mxc)
+        if port is not None:
+            server_name = host + ":" + str(port)
+        else:
+            server_name = host

         if server_name == self.server_name:
             media_info = await self.store.get_local_media(media_id)
@ -49,7 +49,6 @@ from synapse.api.constants import (
|
|||||||
from synapse.api.errors import (
|
from synapse.api.errors import (
|
||||||
AuthError,
|
AuthError,
|
||||||
Codes,
|
Codes,
|
||||||
HttpResponseException,
|
|
||||||
LimitExceededError,
|
LimitExceededError,
|
||||||
NotFoundError,
|
NotFoundError,
|
||||||
StoreError,
|
StoreError,
|
||||||
@@ -60,7 +59,6 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
 from synapse.event_auth import validate_event_for_room_version
 from synapse.events import EventBase
 from synapse.events.utils import copy_and_fixup_power_levels_contents
-from synapse.federation.federation_client import InvalidResponseError
 from synapse.handlers.relations import BundledAggregations
 from synapse.module_api import NOT_SPAM
 from synapse.rest.admin._base import assert_user_is_admin
@@ -1070,9 +1068,6 @@ class RoomCreationHandler:
         event_keys = {"room_id": room_id, "sender": creator_id, "state_key": ""}
         depth = 1

-        # the last event sent/persisted to the db
-        last_sent_event_id: Optional[str] = None
-
         # the most recently created event
         prev_event: List[str] = []
         # a map of event types, state keys -> event_ids. We collect these mappings this as events are
@@ -1117,26 +1112,6 @@ class RoomCreationHandler:

             return new_event, new_context

-        async def send(
-            event: EventBase,
-            context: synapse.events.snapshot.EventContext,
-            creator: Requester,
-        ) -> int:
-            nonlocal last_sent_event_id
-
-            ev = await self.event_creation_handler.handle_new_client_event(
-                requester=creator,
-                events_and_context=[(event, context)],
-                ratelimit=False,
-                ignore_shadow_ban=True,
-            )
-
-            last_sent_event_id = ev.event_id
-
-            # we know it was persisted, so must have a stream ordering
-            assert ev.internal_metadata.stream_ordering
-            return ev.internal_metadata.stream_ordering
-
         try:
             config = self._presets_dict[preset_config]
         except KeyError:
@@ -1150,10 +1125,14 @@ class RoomCreationHandler:
         )

         logger.debug("Sending %s in new room", EventTypes.Member)
-        await send(creation_event, creation_context, creator)
+        ev = await self.event_creation_handler.handle_new_client_event(
+            requester=creator,
+            events_and_context=[(creation_event, creation_context)],
+            ratelimit=False,
+            ignore_shadow_ban=True,
+        )
+        last_sent_event_id = ev.event_id

-        # Room create event must exist at this point
-        assert last_sent_event_id is not None
         member_event_id, _ = await self.room_member_handler.update_membership(
             creator,
             creator.user,
@@ -1172,6 +1151,7 @@ class RoomCreationHandler:
         depth += 1
         state_map[(EventTypes.Member, creator.user.to_string())] = member_event_id

+        events_to_send = []
         # We treat the power levels override specially as this needs to be one
         # of the first events that get sent into a room.
         pl_content = initial_state.pop((EventTypes.PowerLevels, ""), None)
@@ -1180,7 +1160,7 @@ class RoomCreationHandler:
                 EventTypes.PowerLevels, pl_content, False
             )
             current_state_group = power_context._state_group
-            await send(power_event, power_context, creator)
+            events_to_send.append((power_event, power_context))
         else:
             power_level_content: JsonDict = {
                 "users": {creator_id: 9001},
@@ -1229,9 +1209,8 @@ class RoomCreationHandler:
                 False,
             )
             current_state_group = pl_context._state_group
-            await send(pl_event, pl_context, creator)
+            events_to_send.append((pl_event, pl_context))

-        events_to_send = []
         if room_alias and (EventTypes.CanonicalAlias, "") not in initial_state:
             room_alias_event, room_alias_context = await create_event(
                 EventTypes.CanonicalAlias, {"alias": room_alias.to_string()}, True
@@ -1509,7 +1488,12 @@ class TimestampLookupHandler:
         Raises:
             SynapseError if unable to find any event locally in the given direction
         """
+        logger.debug(
+            "get_event_for_timestamp(room_id=%s, timestamp=%s, direction=%s) Finding closest event...",
+            room_id,
+            timestamp,
+            direction,
+        )
         local_event_id = await self.store.get_event_id_for_timestamp(
             room_id, timestamp, direction
         )
@@ -1561,19 +1545,15 @@ class TimestampLookupHandler:
                 )
             )

-            # Loop through each homeserver candidate until we get a succesful response
-            for domain in likely_domains:
-                # We don't want to ask our own server for information we don't have
-                if domain == self.server_name:
-                    continue
-
-                try:
             remote_response = await self.federation_client.timestamp_to_event(
-                domain, room_id, timestamp, direction
+                destinations=likely_domains,
+                room_id=room_id,
+                timestamp=timestamp,
+                direction=direction,
             )
+            if remote_response is not None:
                 logger.debug(
-                    "get_event_for_timestamp: response from domain(%s)=%s",
-                    domain,
+                    "get_event_for_timestamp: remote_response=%s",
                     remote_response,
                 )

@@ -1583,27 +1563,17 @@ class TimestampLookupHandler:
                     # Backfill this event so we can get a pagination token for
                     # it with `/context` and paginate `/messages` from this
                     # point.
-                    #
-                    # TODO: The requested timestamp may lie in a part of the
-                    # event graph that the remote server *also* didn't have,
-                    # in which case they will have returned another event
-                    # which may be nowhere near the requested timestamp. In
-                    # the future, we may need to reconcile that gap and ask
-                    # other homeservers, and/or extend `/timestamp_to_event`
-                    # to return events on *both* sides of the timestamp to
-                    # help reconcile the gap faster.
-                    remote_event = (
-                        await self.federation_event_handler.backfill_event_id(
-                            domain, room_id, remote_event_id
-                        )
+                    pulled_pdu_info = await self.federation_event_handler.backfill_event_id(
+                        likely_domains, room_id, remote_event_id
                     )
+                    remote_event = pulled_pdu_info.pdu

                     # XXX: When we see that the remote server is not trustworthy,
                     # maybe we should not ask them first in the future.
                     if remote_origin_server_ts != remote_event.origin_server_ts:
                         logger.info(
                             "get_event_for_timestamp: Remote server (%s) claimed that remote_event_id=%s occured at remote_origin_server_ts=%s but that isn't true (actually occured at %s). Their claims are dubious and we should consider not trusting them.",
-                            domain,
+                            pulled_pdu_info.pull_origin,
                             remote_event_id,
                             remote_origin_server_ts,
                             remote_event.origin_server_ts,
@@ -1623,23 +1593,6 @@ class TimestampLookupHandler:
                         local_event.origin_server_ts if local_event else None,
                     )
                     return remote_event_id, remote_origin_server_ts
-                except (HttpResponseException, InvalidResponseError) as ex:
-                    # Let's not put a high priority on some other homeserver
-                    # failing to respond or giving a random response
-                    logger.debug(
-                        "get_event_for_timestamp: Failed to fetch /timestamp_to_event from %s because of exception(%s) %s args=%s",
-                        domain,
-                        type(ex).__name__,
-                        ex,
-                        ex.args,
-                    )
-                except Exception:
-                    # But we do want to see some exceptions in our code
-                    logger.warning(
-                        "get_event_for_timestamp: Failed to fetch /timestamp_to_event from %s because of exception",
-                        domain,
-                        exc_info=True,
-                    )

         # To appease mypy, we have to add both of these conditions to check for
         # `None`. We only expect `local_event` to be `None` when
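
Aside: the per-domain loop deleted above now lives behind `federation_client.timestamp_to_event`, which takes the whole `destinations` list. The general shape of such try-each-destination helpers, sketched with hypothetical names:

    from typing import Awaitable, Callable, List, Optional, TypeVar

    T = TypeVar("T")

    async def first_success(
        destinations: List[str],
        attempt: Callable[[str], Awaitable[Optional[T]]],
    ) -> Optional[T]:
        for destination in destinations:
            try:
                result = await attempt(destination)
                if result is not None:
                    return result
            except Exception:
                continue  # move on to the next candidate homeserver
        return None
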
synapse/handlers/sso.py

@@ -191,6 +191,7 @@ class SsoHandler:
         self._server_name = hs.hostname
         self._registration_handler = hs.get_registration_handler()
         self._auth_handler = hs.get_auth_handler()
+        self._device_handler = hs.get_device_handler()
         self._error_template = hs.config.sso.sso_error_template
         self._bad_user_template = hs.config.sso.sso_auth_bad_user_template
         self._profile_handler = hs.get_profile_handler()
@@ -1026,6 +1027,76 @@ class SsoHandler:

         return True

+    async def revoke_sessions_for_provider_session_id(
+        self,
+        auth_provider_id: str,
+        auth_provider_session_id: str,
+        expected_user_id: Optional[str] = None,
+    ) -> None:
+        """Revoke any devices and in-flight logins tied to a provider session.
+
+        Args:
+            auth_provider_id: A unique identifier for this SSO provider, e.g.
+                "oidc" or "saml".
+            auth_provider_session_id: The session ID from the provider to logout
+            expected_user_id: The user we're expecting to logout. If set, it will ignore
+                sessions belonging to other users and log an error.
+        """
+        # Invalidate any running user-mapping sessions
+        to_delete = []
+        for session_id, session in self._username_mapping_sessions.items():
+            if (
+                session.auth_provider_id == auth_provider_id
+                and session.auth_provider_session_id == auth_provider_session_id
+            ):
+                to_delete.append(session_id)
+
+        for session_id in to_delete:
+            logger.info("Revoking mapping session %s", session_id)
+            del self._username_mapping_sessions[session_id]
+
+        # Invalidate any in-flight login tokens
+        await self._store.invalidate_login_tokens_by_session_id(
+            auth_provider_id=auth_provider_id,
+            auth_provider_session_id=auth_provider_session_id,
+        )
+
+        # Fetch any device(s) in the store associated with the session ID.
+        devices = await self._store.get_devices_by_auth_provider_session_id(
+            auth_provider_id=auth_provider_id,
+            auth_provider_session_id=auth_provider_session_id,
+        )
+
+        # We have no guarantee that all the devices of that session are for the same
+        # `user_id`. Hence, we have to iterate over the list of devices and log them out
+        # one by one.
+        for device in devices:
+            user_id = device["user_id"]
+            device_id = device["device_id"]
+
+            # If the user_id associated with that device/session is not the one we got
+            # out of the `sub` claim, skip that device and show log an error.
+            if expected_user_id is not None and user_id != expected_user_id:
+                logger.error(
+                    "Received a logout notification from SSO provider "
+                    f"{auth_provider_id!r} for the user {expected_user_id!r}, but with "
+                    f"a session ID ({auth_provider_session_id!r}) which belongs to "
+                    f"{user_id!r}. This may happen when the SSO provider user mapper "
+                    "uses something else than the standard attribute as mapping ID. "
+                    "For OIDC providers, set `backchannel_logout_ignore_sub` to `true` "
+                    "in the provider config if that is the case."
+                )
+                continue
+
+            logger.info(
+                "Logging out %r (device %r) via SSO (%r) logout notification (session %r).",
+                user_id,
+                device_id,
+                auth_provider_id,
+                auth_provider_session_id,
+            )
+            await self._device_handler.delete_devices(user_id, [device_id])
+
+
 def get_username_mapping_session_cookie_from_request(request: IRequest) -> str:
     """Extract the session ID from the cookie
@ -25,7 +25,6 @@ from typing import (
|
|||||||
List,
|
List,
|
||||||
Mapping,
|
Mapping,
|
||||||
Optional,
|
Optional,
|
||||||
Sequence,
|
|
||||||
Tuple,
|
Tuple,
|
||||||
Union,
|
Union,
|
||||||
)
|
)
|
||||||
@@ -90,14 +89,29 @@ incoming_responses_counter = Counter(
     "synapse_http_client_responses", "", ["method", "code"]
 )

-# the type of the headers list, to be passed to the t.w.h.Headers.
-# Actually we can mix str and bytes keys, but Mapping treats 'key' as invariant so
-# we simplify.
+# the type of the headers map, to be passed to the t.w.h.Headers.
+#
+# The actual type accepted by Twisted is
+#   Mapping[Union[str, bytes], Sequence[Union[str, bytes]]],
+# allowing us to mix and match str and bytes freely. However: any str is also a
+# Sequence[str]; passing a header string value which is a standalone str is
+# interpreted as a sequence of 1-codepoint strings. This is a disastrous footgun.
+# We use a narrower value type (RawHeaderValue) to avoid this footgun.
+#
+# We also simplify the keys to be either all str or all bytes. This helps because
+# Dict[K, V] is invariant in K (and indeed V).
 RawHeaders = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValue"]]

 # the value actually has to be a List, but List is invariant so we can't specify that
 # the entries can either be Lists or bytes.
-RawHeaderValue = Sequence[Union[str, bytes]]
+RawHeaderValue = Union[
+    List[str],
+    List[bytes],
+    List[Union[str, bytes]],
+    Tuple[str, ...],
+    Tuple[bytes, ...],
+    Tuple[Union[str, bytes], ...],
+]


 def check_against_blacklist(
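The footgun described in that comment is easy to demonstrate outside Twisted: because a str is itself a Sequence[str], a bare string passed where a sequence of header values is expected type-checks fine but gets iterated codepoint by codepoint. A minimal sketch (the render_headers helper is invented for illustration; it is not Synapse or Twisted API):

    from typing import Sequence, Union

    def render_headers(values: Sequence[Union[str, bytes]]) -> list:
        # Treat each element of the sequence as one header value.
        return list(values)

    print(render_headers(["text/html"]))  # intended: ['text/html']
    # A bare str also satisfies Sequence[str], so this type-checks...
    print(render_headers("text/html"))    # ...but yields ['t', 'e', 'x', 't', '/', ...]

Narrowing the value type to the concrete List/Tuple variants above turns the bare-str call into a type error while still allowing mixed str/bytes entries.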
@@ -174,8 +174,10 @@ class _BackgroundProcess:
         diff = new_stats - self._reported_stats
         self._reported_stats = new_stats

-        _background_process_ru_utime.labels(self.desc).inc(diff.ru_utime)
-        _background_process_ru_stime.labels(self.desc).inc(diff.ru_stime)
+        # For unknown reasons, the difference in times can be negative. See comment in
+        # synapse.http.request_metrics.RequestMetrics.update_metrics.
+        _background_process_ru_utime.labels(self.desc).inc(max(diff.ru_utime, 0))
+        _background_process_ru_stime.labels(self.desc).inc(max(diff.ru_stime, 0))
         _background_process_db_txn_count.labels(self.desc).inc(diff.db_txn_count)
         _background_process_db_txn_duration.labels(self.desc).inc(
             diff.db_txn_duration_sec
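The max(..., 0) clamp is load-bearing here: Prometheus counters are monotonically increasing, and prometheus_client's Counter.inc() raises ValueError for negative amounts. A small sketch of the failure mode this hunk avoids (the metric name is illustrative):

    from prometheus_client import Counter

    ru_utime = Counter("example_ru_utime_seconds", "CPU seconds consumed (illustration)")

    def report_cpu_diff(diff_seconds: float) -> None:
        # Clamp at zero in case the measured difference comes out negative.
        ru_utime.inc(max(diff_seconds, 0))

    report_cpu_diff(0.25)   # fine
    report_cpu_diff(-0.01)  # without the clamp, Counter.inc(-0.01) raises ValueError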
@@ -771,50 +771,11 @@ class ModuleApi:
             auth_provider_session_id: The session ID got during login from the SSO IdP,
                 if any.
         """
-        # The deprecated `generate_short_term_login_token` method defaulted to an empty
-        # string for the `auth_provider_id` because of how the underlying macaroon was
-        # generated. This will change to a proper NULL-able field when the tokens get
-        # moved to the database.
-        return self._hs.get_macaroon_generator().generate_short_term_login_token(
+        return await self._hs.get_auth_handler().create_login_token_for_user_id(
             user_id,
-            auth_provider_id or "",
-            auth_provider_session_id,
             duration_in_ms,
-        )
-
-    def generate_short_term_login_token(
-        self,
-        user_id: str,
-        duration_in_ms: int = (2 * 60 * 1000),
-        auth_provider_id: str = "",
-        auth_provider_session_id: Optional[str] = None,
-    ) -> str:
-        """Generate a login token suitable for m.login.token authentication
-
-        Added in Synapse v1.9.0.
-
-        This was deprecated in Synapse v1.69.0 in favor of create_login_token, and will
-        be removed in Synapse 1.71.0.
-
-        Args:
-            user_id: gives the ID of the user that the token is for
-
-            duration_in_ms: the time that the token will be valid for
-
-            auth_provider_id: the ID of the SSO IdP that the user used to authenticate
-                to get this token, if any. This is encoded in the token so that
-                /login can report stats on number of successful logins by IdP.
-        """
-        logger.warn(
-            "A module configured on this server uses ModuleApi.generate_short_term_login_token(), "
-            "which is deprecated in favor of ModuleApi.create_login_token(), and will be removed in "
-            "Synapse 1.71.0",
-        )
-        return self._hs.get_macaroon_generator().generate_short_term_login_token(
-            user_id,
             auth_provider_id,
             auth_provider_session_id,
-            duration_in_ms,
         )

     @defer.inlineCallbacks
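For module authors, the supported entry point is now create_login_token(); the deprecated generate_short_term_login_token() is removed on the schedule announced in its docstring. A hedged usage sketch (assumes module_api is the ModuleApi instance handed to a module at setup, and follows the argument names in the docstring above):

    # Inside an async Synapse module callback:
    token = await module_api.create_login_token(
        user_id="@alice:example.org",  # illustrative user
        duration_in_ms=2 * 60 * 1000,  # token valid for two minutes
        auth_provider_id="oidc",       # optional: which SSO IdP issued it
    )
    # A client can then exchange the token via the m.login.token login flow.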
@@ -28,7 +28,7 @@ from typing import (

 from prometheus_client import Counter

-from synapse.api.constants import MAIN_TIMELINE, EventTypes, Membership, RelationTypes
+from synapse.api.constants import MAIN_TIMELINE, EventTypes, Membership, RelationTypes, EventContentFields
 from synapse.event_auth import auth_types_for_event, get_user_power_level
 from synapse.events import EventBase, relation_from_event
 from synapse.events.snapshot import EventContext
@@ -45,7 +45,6 @@ if TYPE_CHECKING:

 logger = logging.getLogger(__name__)

-
 push_rules_invalidation_counter = Counter(
     "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter", ""
 )
@@ -107,6 +106,8 @@ class BulkPushRuleEvaluator:
         self.clock = hs.get_clock()
         self._event_auth_handler = hs.get_event_auth_handler()

+        self._related_event_match_enabled = self.hs.config.experimental.msc3664_enabled
+
         self.room_push_rule_cache_metrics = register_cache(
             "cache",
             "room_push_rule_cache",
@@ -165,8 +166,21 @@ class BulkPushRuleEvaluator:
         return rules_by_user

     async def _get_power_levels_and_sender_level(
-        self, event: EventBase, context: EventContext
+        self,
+        event: EventBase,
+        context: EventContext,
+        event_id_to_event: Mapping[str, EventBase],
     ) -> Tuple[dict, Optional[int]]:
+        """
+        Given an event and an event context, get the power level event relevant to the event
+        and the power level of the sender of the event.
+        Args:
+            event: event to check
+            context: context of event to check
+            event_id_to_event: a mapping of event_id to event for a set of events being
+                batch persisted. This is needed as the sought-after power level event may
+                be in this batch rather than the DB
+        """
         # There are no power levels and sender levels possible to get from outlier
         if event.internal_metadata.is_outlier():
             return {}, None
@@ -177,15 +191,26 @@ class BulkPushRuleEvaluator:
         )
         pl_event_id = prev_state_ids.get(POWER_KEY)

-        if pl_event_id:
         # fastpath: if there's a power level event, that's all we need, and
         # not having a power level event is an extreme edge case
+        if pl_event_id:
+            # Get the power level event from the batch, or fall back to the database.
+            pl_event = event_id_to_event.get(pl_event_id)
+            if pl_event:
+                auth_events = {POWER_KEY: pl_event}
+            else:
                 auth_events = {POWER_KEY: await self.store.get_event(pl_event_id)}
         else:
             auth_events_ids = self._event_auth_handler.compute_auth_events(
                 event, prev_state_ids, for_verification=False
             )
             auth_events_dict = await self.store.get_events(auth_events_ids)
+            # Some needed auth events might be in the batch, combine them with those
+            # fetched from the database.
+            for auth_event_id in auth_events_ids:
+                auth_event = event_id_to_event.get(auth_event_id)
+                if auth_event:
+                    auth_events_dict[auth_event_id] = auth_event
             auth_events = {(e.type, e.state_key): e for e in auth_events_dict.values()}

         sender_level = get_user_power_level(event.sender, auth_events)
@@ -194,16 +219,81 @@ class BulkPushRuleEvaluator:

         return pl_event.content if pl_event else {}, sender_level

-    @measure_func("action_for_event_by_user")
-    async def action_for_event_by_user(
-        self, event: EventBase, context: EventContext
-    ) -> None:
-        """Given an event and context, evaluate the push rules, check if the message
-        should increment the unread count, and insert the results into the
-        event_push_actions_staging table.
-        """
-        if not event.internal_metadata.is_notifiable():
-            # Push rules for events that aren't notifiable can't be processed by this
+    async def _related_events(self, event: EventBase) -> Dict[str, Dict[str, str]]:
+        """Fetches the related events for 'event'. Sets the im.vector.is_falling_back key if the event is from a fallback relation
+
+        Returns:
+            Mapping of relation type to flattened events.
+        """
+        related_events: Dict[str, Dict[str, str]] = {}
+        if self._related_event_match_enabled:
+            related_event_id = event.content.get("m.relates_to", {}).get("event_id")
+            relation_type = event.content.get("m.relates_to", {}).get("rel_type")
+            if related_event_id is not None and relation_type is not None:
+                related_event = await self.store.get_event(
+                    related_event_id, allow_none=True
+                )
+                if related_event is not None:
+                    related_events[relation_type] = _flatten_dict(related_event)
+
+            reply_event_id = (
+                event.content.get("m.relates_to", {})
+                .get("m.in_reply_to", {})
+                .get("event_id")
+            )
+
+            # convert replies to pseudo relations
+            if reply_event_id is not None:
+                related_event = await self.store.get_event(
+                    reply_event_id, allow_none=True
+                )
+
+                if related_event is not None:
+                    related_events["m.in_reply_to"] = _flatten_dict(related_event)
+
+                    # indicate that this is from a fallback relation.
+                    if relation_type == "m.thread" and event.content.get(
+                        "m.relates_to", {}
+                    ).get("is_falling_back", False):
+                        related_events["m.in_reply_to"][
+                            "im.vector.is_falling_back"
+                        ] = ""
+
+        return related_events
+
+    async def action_for_events_by_user(
+        self, events_and_context: List[Tuple[EventBase, EventContext]]
+    ) -> None:
+        """Given a list of events and their associated contexts, evaluate the push rules
+        for each event, check if the message should increment the unread count, and
+        insert the results into the event_push_actions_staging table.
+        """
+        # For batched events the power level events may not have been persisted yet,
+        # so we pass in the batched events. Thus if the event cannot be found in the
+        # database we can check in the batch.
+        event_id_to_event = {e.event_id: e for e, _ in events_and_context}
+        for event, context in events_and_context:
+            await self._action_for_event_by_user(event, context, event_id_to_event)
+
+    @measure_func("action_for_event_by_user")
+    async def _action_for_event_by_user(
+        self,
+        event: EventBase,
+        context: EventContext,
+        event_id_to_event: Mapping[str, EventBase],
+    ) -> None:
+
+        if (
+            not event.internal_metadata.is_notifiable()
+            or event.internal_metadata.is_historical()
+            or event.content.get(EventContentFields.MSC2716_HISTORICAL)
+        ):
+            # Push rules for events that aren't notifiable can't be processed by this and
+            # we want to skip push notification actions for historical messages
+            # because we don't want to notify people about old history back in time.
+            # The historical messages also do not have the proper `context.current_state_ids`
+            # and `state_groups` because they have `prev_events` that aren't persisted yet
+            # (historical messages persisted in reverse-chronological order).
             return

         # Disable counting as unread unless the experimental configuration is
@@ -223,7 +313,9 @@ class BulkPushRuleEvaluator:
         (
             power_levels,
             sender_power_level,
-        ) = await self._get_power_levels_and_sender_level(event, context)
+        ) = await self._get_power_levels_and_sender_level(
+            event, context, event_id_to_event
+        )

         # Find the event's thread ID.
         relation = relation_from_event(event)
@@ -238,6 +330,8 @@ class BulkPushRuleEvaluator:
             # the parent is part of a thread.
             thread_id = await self.store.get_thread_id(relation.parent_id)

+        related_events = await self._related_events(event)
+
         # It's possible that old room versions have non-integer power levels (floats or
         # strings). Workaround this by explicitly converting to int.
         notification_levels = power_levels.get("notifications", {})
@@ -250,6 +344,8 @@ class BulkPushRuleEvaluator:
             room_member_count,
             sender_power_level,
             notification_levels,
+            related_events,
+            self._related_event_match_enabled,
         )

         users = rules_by_user.keys()
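For context on what related_events feeds into: MSC3664 (experimental, gated behind msc3664_enabled above) lets push rules match on the event a message relates to, not just the message itself. The flattened related events built by _related_events() are what such a condition is evaluated against. A hedged sketch of one condition, written as a Python dict (the field values are illustrative):

    condition = {
        "kind": "im.nheko.msc3664.related_event_match",
        "rel_type": "m.in_reply_to",    # which relation to follow
        "key": "sender",                # key in the flattened related event
        "pattern": "@bob:example.org",  # glob to match against
    }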
29  synapse/res/templates/_base.html  Normal file
@@ -0,0 +1,29 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>{% block title %}{% endblock %}</title>
+    <style type="text/css">
+      {%- include 'style.css' without context %}
+    </style>
+    {% block header %}{% endblock %}
+</head>
+<body>
+<header class="mx_Header">
+    {% if app_name == "Riot" %}
+        <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
+    {% elif app_name == "Vector" %}
+        <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
+    {% elif app_name == "Element" %}
+        <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
+    {% else %}
+        <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
+    {% endif %}
+</header>
+
+{% block body %}{% endblock %}
+
+</body>
+</html>
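_base.html uses standard Jinja2 template inheritance: each child template supplies title, optional header, and body blocks, which the template rewrites below all follow. A self-contained sketch of the mechanism, using an in-memory loader rather than Synapse's real template loading:

    from jinja2 import DictLoader, Environment

    env = Environment(
        loader=DictLoader({
            "_base.html": (
                "<title>{% block title %}{% endblock %}</title>"
                "<body>{% block body %}{% endblock %}</body>"
            ),
            "child.html": (
                '{% extends "_base.html" %}'
                "{% block title %}Hello{% endblock %}"
                "{% block body %}<p>World</p>{% endblock %}"
            ),
        })
    )

    # The child's blocks are substituted into the base skeleton:
    print(env.get_template("child.html").render())
    # -> <title>Hello</title><body><p>World</p></body>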
@@ -1,12 +1,6 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <title>Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</title>
-</head>
-<body>
-    Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.
-</body>
-</html>
+{% extends "_base.html" %}
+{% block title %}Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.{% endblock %}
+
+{% block body %}
+<p>Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</p>
+{% endblock %}
@@ -1,12 +1,6 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <title>Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</title>
-</head>
-<body>
-    Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.
-</body>
-</html>
+{% extends "_base.html" %}
+{% block title %}Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.{% endblock %}
+
+{% block body %}
+<p>Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</p>
+{% endblock %}
@@ -1,14 +1,8 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <title>Request to add an email address to your Matrix account</title>
-</head>
-<body>
+{% extends "_base.html" %}
+{% block title %}Request to add an email address to your Matrix account{% endblock %}
+
+{% block body %}
 <p>A request to add an email address to your Matrix account has been received. If this was you, please click the link below to confirm adding this email:</p>
 <a href="{{ link }}">{{ link }}</a>
 <p>If this was not you, you can safely ignore this email. Thank you.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,13 +1,7 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <title>Request failed</title>
-</head>
-<body>
+{% extends "_base.html" %}
+{% block title %}Request failed{% endblock %}
+
+{% block body %}
 <p>The request failed for the following reason: {{ failure_reason }}.</p>
 <p>No changes have been made to your account.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,12 +1,6 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <title>Your email has now been validated</title>
-</head>
-<body>
+{% extends "_base.html" %}
+{% block title %}Your email has now been validated{% endblock %}
+
+{% block body %}
 <p>Your email has now been validated, please return to your client. You may now close this window.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,8 +1,7 @@
-<html>
-<head>
-<title>Success!</title>
-<meta http-equiv="X-UA-Compatible" content="IE=edge">
-<meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% extends "_base.html" %}
+{% block title %}Success!{% endblock %}
+
+{% block header %}
 <link rel="stylesheet" href="/_matrix/static/client/register/style.css">
 <script>
 if (window.onAuthDone) {
@@ -11,11 +10,12 @@ if (window.onAuthDone) {
     window.opener.postMessage("authDone", "*");
 }
 </script>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <div>
     <p>Thank you</p>
     <p>You may now close this window and return to the application</p>
 </div>
-</body>
-</html>
+{% endblock %}
@@ -1,12 +1,5 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <title>Invalid renewal token.</title>
-</head>
-<body>
-    Invalid renewal token.
-</body>
-</html>
+{% block title %}Invalid renewal token.{% endblock %}
+
+{% block body %}
+<p>Invalid renewal token.</p>
+{% endblock %}
@@ -1,15 +1,15 @@
-<!doctype html>
-<html lang="en">
-<head>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% extends "_base.html" %}
+{% block title %}Notice of expiry{% endblock %}
+
+{% block header %}
     <style type="text/css">
         {% include 'mail.css' without context %}
         {% include "mail-%s.css" % app_name ignore missing without context %}
         {% include 'mail-expiry.css' without context %}
     </style>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
     <table id="page">
         <tr>
             <td> </td>
@@ -43,5 +43,4 @@
             <td> </td>
         </tr>
     </table>
-</body>
-</html>
+{% endblock %}
@@ -1,14 +1,13 @@
-<!doctype html>
-<html lang="en">
-<head>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}New activity in room{% endblock %}
+
+{% block header %}
     <style type="text/css">
         {%- include 'mail.css' without context %}
        {%- include "mail-%s.css" % app_name ignore missing without context %}
     </style>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
     <table id="page">
         <tr>
             <td> </td>
@@ -55,5 +54,4 @@
             <td> </td>
         </tr>
     </table>
-</body>
-</html>
+{% endblock %}
@@ -1,14 +1,9 @@
-<html lang="en">
-<head>
-    <title>Password reset</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-</head>
-<body>
+{% block title %}Password reset{% endblock %}
+
+{% block body %}
 <p>A password reset request has been received for your Matrix account. If this was you, please click the link below to confirm resetting your password:</p>

 <a href="{{ link }}">{{ link }}</a>

 <p>If this was not you, <strong>do not</strong> click the link above and instead contact your server administrator. Thank you.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,10 +1,6 @@
-<html lang="en">
-<head>
-    <title>Password reset confirmation</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-</head>
-<body>
+{% block title %}Password reset confirmation{% endblock %}
+
+{% block body %}
 <!--Use a hidden form to resubmit the information necessary to reset the password-->
 <form method="post">
     <input type="hidden" name="sid" value="{{ sid }}">
@@ -15,6 +11,4 @@
     If you did not mean to do this, please close this page and your password will not be changed.</p>
     <p><button type="submit">Confirm changing my password</button></p>
 </form>
-</body>
-</html>
+{% endblock %}
@@ -1,12 +1,6 @@
-<html lang="en">
-<head>
-    <title>Password reset failure</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-</head>
-<body>
-<p>The request failed for the following reason: {{ failure_reason }}.</p>
-
+{% block title %}Password reset failure{% endblock %}
+
+{% block body %}
+<p>The request failed for the following reason: {{ failure_reason }}.</p>
 <p>Your password has not been reset.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,9 +1,5 @@
-<html lang="en">
-<head>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-</head>
-<body>
+{% block title %}Password reset success{% endblock %}
+
+{% block body %}
 <p>Your email has now been validated, please return to your client to reset your password. You may now close this window.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,10 +1,7 @@
-<html>
-<head>
-<title>Authentication</title>
-<meta http-equiv="X-UA-Compatible" content="IE=edge">
-<meta name="viewport" content="width=device-width, initial-scale=1.0">
-<script src="https://www.recaptcha.net/recaptcha/api.js"
-    async defer></script>
+{% block title %}Authentication{% endblock %}
+
+{% block header %}
+<script src="https://www.recaptcha.net/recaptcha/api.js" async defer></script>
 <script src="//code.jquery.com/jquery-1.11.2.min.js"></script>
 <link rel="stylesheet" href="/_matrix/static/client/register/style.css">
 <script>
@@ -12,8 +9,9 @@ function captchaDone() {
     $('#registrationForm').submit();
 }
 </script>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <form id="registrationForm" method="post" action="{{ myurl }}">
     <div>
         {% if error is defined %}
@@ -37,5 +35,4 @@ function captchaDone() {
         </div>
     </div>
 </form>
-</body>
-</html>
+{% endblock %}
@@ -1,10 +1,6 @@
-<html lang="en">
-<head>
-    <title>Registration</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-</head>
-<body>
+{% block title %}Registration{% endblock %}
+
+{% block body %}
 <p>You have asked us to register this email with a new Matrix account. If this was you, please click the link below to confirm your email address:</p>

 <a href="{{ link }}">Verify Your Email Address</a>
@@ -12,5 +8,4 @@
 <p>If this was not you, you can safely disregard this email.</p>

 <p>Thank you.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,9 +1,5 @@
-<html lang="en">
-<head>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-</head>
-<body>
+{% block title %}Registration failure{% endblock %}
+
+{% block body %}
 <p>Validation failed for the following reason: {{ failure_reason }}.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,10 +1,5 @@
-<html lang="en">
-<head>
-    <title>Your email has now been validated</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-</head>
-<body>
+{% block title %}Your email has now been validated{% endblock %}
+
+{% block body %}
 <p>Your email has now been validated, please return to your client. You may now close this window.</p>
-</body>
-</html>
+{% endblock %}
@@ -1,11 +1,10 @@
-<html lang="en">
-<head>
-    <title>Authentication</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}Authentication{% endblock %}
+
+{% block header %}
 <link rel="stylesheet" href="/_matrix/static/client/register/style.css">
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <form id="registrationForm" method="post" action="{{ myurl }}">
     <div>
         {% if error is defined %}
@@ -19,5 +18,4 @@
         <input type="submit" value="Authenticate" />
     </div>
 </form>
-</body>
-</html>
+{% endblock %}
@@ -1,14 +1,13 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>SSO account deactivated</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0"> <style type="text/css">
+{% block title %}SSO account deactivated{% endblock %}
+
+{% block header %}
+<style type="text/css">
     {% include "sso.css" without context %}
 </style>
-</head>
-<body class="error_page">
+{% endblock %}
+
+{% block body %}
+<div class="error_page">
 <header>
     <h1>Your account has been deactivated</h1>
     <p>
@@ -20,6 +19,6 @@
         administrator.
     </p>
 </header>
+</div>
 {% include "sso_footer.html" without context %}
-</body>
-</html>
+{% endblock %}
|
|||||||
<!DOCTYPE html>
|
{% block title %}Create your account{% endblock %}
|
||||||
<html lang="en">
|
|
||||||
<head>
|
{% block header %}
|
||||||
<title>Create your account</title>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
|
||||||
<script type="text/javascript">
|
<script type="text/javascript">
|
||||||
let wasKeyboard = false;
|
let wasKeyboard = false;
|
||||||
document.addEventListener("mousedown", function() { wasKeyboard = false; });
|
document.addEventListener("mousedown", function() { wasKeyboard = false; });
|
||||||
@ -128,8 +124,9 @@
|
|||||||
color: #FE2928;
|
color: #FE2928;
|
||||||
}
|
}
|
||||||
</style>
|
</style>
|
||||||
</head>
|
{% endblock %}
|
||||||
<body>
|
|
||||||
|
{% block body %}
|
||||||
<header>
|
<header>
|
||||||
<h1>Create your account</h1>
|
<h1>Create your account</h1>
|
||||||
<p>This is required. Continue to create your account on {{ server_name }}. You can't change this later.</p>
|
<p>This is required. Continue to create your account on {{ server_name }}. You can't change this later.</p>
|
||||||
@ -185,5 +182,4 @@
|
|||||||
<script type="text/javascript">
|
<script type="text/javascript">
|
||||||
{% include "sso_auth_account_details.js" without context %}
|
{% include "sso_auth_account_details.js" without context %}
|
||||||
</script>
|
</script>
|
||||||
</body>
|
{% endblock %}
|
||||||
</html>
|
|
||||||
|
@@ -1,15 +1,13 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Authentication failed</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}Authentication failed{% endblock %}
+
+{% block header %}
     <style type="text/css">
         {% include "sso.css" without context %}
     </style>
-</head>
-<body class="error_page">
+{% endblock %}
+
+{% block body %}
+<div class="error_page">
 <header>
     <h1>That doesn't look right</h1>
     <p>
@@ -22,6 +20,6 @@
         the Identity Provider as when you log into your account.
     </p>
 </header>
+</div>
 {% include "sso_footer.html" without context %}
-</body>
-</html>
+{% endblock %}
@@ -1,15 +1,12 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Confirm it's you</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}Confirm it's you{% endblock %}
+
+{% block header %}
     <style type="text/css">
         {% include "sso.css" without context %}
     </style>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <header>
     <h1>Confirm it's you to continue</h1>
     <p>
@@ -26,5 +23,4 @@
     </a>
 </main>
 {% include "sso_footer.html" without context %}
-</body>
-</html>
+{% endblock %}
@@ -1,10 +1,6 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Authentication successful</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}Authentication successful{% endblock %}
+
+{% block header %}
     <style type="text/css">
         {% include "sso.css" without context %}
     </style>
@@ -15,8 +11,9 @@
     window.opener.postMessage("authDone", "*");
 }
 </script>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <header>
     <h1>Thank you</h1>
     <p>
@@ -25,5 +22,4 @@
     </p>
 </header>
 {% include "sso_footer.html" without context %}
-</body>
-</html>
+{% endblock %}
@@ -1,10 +1,7 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Authentication failed</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}Authentication failed{% endblock %}
+
+{% block header %}
+{% if error == "unauthorised" %}
 <style type="text/css">
     {% include "sso.css" without context %}

@@ -12,8 +9,11 @@
     margin-top: 56px;
 }
 </style>
-</head>
-<body class="error_page">
+{% endif %}
+{% endblock %}
+
+{% block body %}
+<div class="error_page">
 {# If an error of unauthorised is returned it means we have actively rejected their login #}
 {% if error == "unauthorised" %}
 <header>
@@ -66,5 +66,5 @@
 }
 </script>
 {% endif %}
-</body>
-</html>
+</div>
+{% endblock %}
@@ -1,10 +1,6 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <meta charset="UTF-8">
-    <title>Choose identity provider</title>
+{% block title %}Choose identity provider{% endblock %}
+
+{% block header %}
     <style type="text/css">
         {% include "sso.css" without context %}

@@ -38,8 +34,9 @@
         flex: 1;
     }
     </style>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <header>
     <h1>Log in to {{ server_name }} </h1>
     <p>Choose an identity provider to log in</p>
@@ -59,5 +56,4 @@
     </ul>
 </main>
 {% include "sso_footer.html" without context %}
-</body>
-</html>
+{% endblock %}
@@ -1,10 +1,6 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Agree to terms and conditions</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}Agree to terms and conditions{% endblock %}
+
+{% block header %}
    <style type="text/css">
        {% include "sso.css" without context %}

@@ -12,8 +8,9 @@
         margin-top: 56px;
     }
     </style>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <header>
     <h1>Your account is nearly ready</h1>
     <p>Agree to the terms to create your account.</p>
@@ -29,5 +26,4 @@
 </form>
 </main>
 {% include "sso_footer.html" without context %}
-</body>
-</html>
+{% endblock %}
@@ -1,10 +1,6 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Continue to your account</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}Continue to your account{% endblock %}
+
+{% block header %}
     <style type="text/css">
         {% include "sso.css" without context %}

@@ -26,8 +22,9 @@
         float: left;
     }
     </style>
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <header>
     <h1>Continue to your account</h1>
 </header>
@@ -37,5 +34,5 @@
 <a href="{{ redirect_url }}" class="primary-button">Continue</a>
 </main>
 {% include "sso_footer.html" without context %}
-</body>
-</html>
+{% endblock %}
29  synapse/res/templates/style.css  Normal file
@@ -0,0 +1,29 @@
+html {
+    height: 100%;
+}
+
+body {
+    background: #f9fafb;
+    max-width: 680px;
+    margin: auto;
+    font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
+}
+
+.mx_Header {
+    border-bottom: 3px solid #ddd;
+    margin-bottom: 1rem;
+    padding-top: 1rem;
+    padding-bottom: 1rem;
+    text-align: center;
+}
+
+@media screen and (max-width: 1120px) {
+    body {
+        font-size: 20px;
+    }
+
+    h1 { font-size: 1rem; }
+    h2 { font-size: .9rem; }
+    h3 { font-size: .85rem; }
+    h4 { font-size: .8rem; }
+}
@@ -1,11 +1,10 @@
-<html>
-<head>
-    <title>Authentication</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+{% block title %}Authentication{% endblock %}
+
+{% block header %}
 <link rel="stylesheet" href="/_matrix/static/client/register/style.css">
-</head>
-<body>
+{% endblock %}
+
+{% block body %}
 <form id="registrationForm" method="post" action="{{ myurl }}">
     <div>
         {% if error is defined %}
@@ -19,5 +18,4 @@
         <input type="submit" value="Agree" />
     </div>
 </form>
-</body>
-</html>
+{% endblock %}
@@ -77,6 +77,11 @@ class CapabilitiesRestServlet(RestServlet):
                 "enabled": True,
             }

+        if self.config.experimental.msc3664_enabled:
+            response["capabilities"]["im.nheko.msc3664.related_event_match"] = {
+                "enabled": self.config.experimental.msc3664_enabled,
+            }
+
         return HTTPStatus.OK, response
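A client querying GET /_matrix/client/v3/capabilities would then see the experimental capability advertised. A sketch of the relevant fragment of the response body (surrounding capabilities elided):

    response_fragment = {
        "capabilities": {
            # ...standard capabilities such as m.change_password elided...
            "im.nheko.msc3664.related_event_match": {"enabled": True},
        }
    }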
@@ -231,7 +231,7 @@ class DehydratedDeviceServlet(RestServlet):
             }
         }

-    PUT /org.matrix.msc2697/dehydrated_device
+    PUT /org.matrix.msc2697.v2/dehydrated_device
     Content-Type: application/json

     {
@@ -271,7 +271,6 @@ class DehydratedDeviceServlet(RestServlet):
         raise errors.NotFoundError("No dehydrated device available")

     class PutBody(RequestBodyModel):
-        device_id: StrictStr
         device_data: DehydratedDeviceDataModel
         initial_device_display_name: Optional[StrictStr]

@@ -281,7 +280,7 @@ class DehydratedDeviceServlet(RestServlet):

         device_id = await self.device_handler.store_dehydrated_device(
             requester.user.to_string(),
-            submission.device_data,
+            submission.device_data.dict(),
             submission.initial_device_display_name,
         )
         return 200, {"device_id": device_id}
@@ -436,8 +436,7 @@ class LoginRestServlet(RestServlet):
             The body of the JSON response.
         """
         token = login_submission["token"]
-        auth_handler = self.auth_handler
-        res = await auth_handler.validate_short_term_login_token(token)
+        res = await self.auth_handler.consume_login_token(token)

         return await self._complete_login(
             res.user_id,
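consume_login_token sits behind the standard m.login.token flow and, as the rename suggests, consuming a token invalidates it, so each token is single-use. The request body a client sends to POST /_matrix/client/v3/login looks like this (the token value is illustrative):

    login_request_body = {
        "type": "m.login.token",
        "token": "<short-lived login token>",  # e.g. obtained via SSO or MSC3882
    }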
@@ -57,7 +57,6 @@ class LoginTokenRequestServlet(RestServlet):
         self.store = hs.get_datastores().main
         self.clock = hs.get_clock()
         self.server_name = hs.config.server.server_name
-        self.macaroon_gen = hs.get_macaroon_generator()
         self.auth_handler = hs.get_auth_handler()
         self.token_timeout = hs.config.experimental.msc3882_token_timeout
         self.ui_auth = hs.config.experimental.msc3882_ui_auth
@@ -76,10 +75,10 @@ class LoginTokenRequestServlet(RestServlet):
             can_skip_ui_auth=False,  # Don't allow skipping of UI auth
         )

-        login_token = self.macaroon_gen.generate_short_term_login_token(
+        login_token = await self.auth_handler.create_login_token_for_user_id(
             user_id=requester.user.to_string(),
             auth_provider_id="org.matrix.msc3882.login_token_request",
-            duration_in_ms=self.token_timeout,
+            duration_ms=self.token_timeout,
         )

         return (
@@ -110,6 +110,13 @@ class RoomBatchSendEventRestServlet(RestServlet):
                 errcode=Codes.MISSING_PARAM,
             )

+        if await self.store.is_partial_state_room(room_id):
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "Cannot insert history batches until we have fully joined the room",
+                errcode=Codes.UNABLE_DUE_TO_PARTIAL_STATE,
+            )
+
         # Verify the batch_id_from_query corresponds to an actual insertion event
         # and have the batch connected.
         if batch_id_from_query:
@@ -146,12 +146,12 @@ class SyncRestServlet(RestServlet):
         elif filter_id.startswith("{"):
             try:
                 filter_object = json_decoder.decode(filter_id)
+            except Exception:
+                raise SynapseError(400, "Invalid filter JSON", errcode=Codes.NOT_JSON)
+            self.filtering.check_valid_filter(filter_object)
             set_timeline_upper_limit(
                 filter_object, self.hs.config.server.filter_timeline_limit
             )
-            except Exception:
-                raise SynapseError(400, "Invalid filter JSON")
-            self.filtering.check_valid_filter(filter_object)
             filter_collection = FilterCollection(self.hs, filter_object)
         else:
             try:
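The branch above handles a filter query parameter that is an inline JSON object rather than a stored filter ID; after the reordering, a decode failure is reported immediately (now with the M_NOT_JSON error code) before the filter is validated and its timeline limit capped. For example, a client might pass, URL-encoded:

    # ?filter={"room":{"timeline":{"limit":10}}}
    inline_filter = {"room": {"timeline": {"limit": 10}}}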
@@ -14,17 +14,20 @@

 from typing import TYPE_CHECKING

-from twisted.web.resource import Resource
-
-from .local_key_resource import LocalKey
-from .remote_key_resource import RemoteKey
+from synapse.http.server import HttpServer, JsonResource
+from synapse.rest.key.v2.local_key_resource import LocalKey
+from synapse.rest.key.v2.remote_key_resource import RemoteKey

 if TYPE_CHECKING:
     from synapse.server import HomeServer


-class KeyApiV2Resource(Resource):
+class KeyResource(JsonResource):
     def __init__(self, hs: "HomeServer"):
-        Resource.__init__(self)
-        self.putChild(b"server", LocalKey(hs))
-        self.putChild(b"query", RemoteKey(hs))
+        super().__init__(hs, canonical_json=True)
+        self.register_servlets(self, hs)
+
+    @staticmethod
+    def register_servlets(http_server: HttpServer, hs: "HomeServer") -> None:
+        LocalKey(hs).register(http_server)
+        RemoteKey(hs).register(http_server)
@ -13,16 +13,15 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from typing import TYPE_CHECKING, Optional
|
import re
|
||||||
|
from typing import TYPE_CHECKING, Optional, Tuple
|
||||||
|
|
||||||
from canonicaljson import encode_canonical_json
|
|
||||||
from signedjson.sign import sign_json
|
from signedjson.sign import sign_json
|
||||||
from unpaddedbase64 import encode_base64
|
from unpaddedbase64 import encode_base64
|
||||||
|
|
||||||
from twisted.web.resource import Resource
|
from twisted.web.server import Request
|
||||||
|
|
||||||
from synapse.http.server import respond_with_json_bytes
|
from synapse.http.servlet import RestServlet
|
||||||
from synapse.http.site import SynapseRequest
|
|
||||||
from synapse.types import JsonDict
|
from synapse.types import JsonDict
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@ -31,7 +30,7 @@ if TYPE_CHECKING:
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class LocalKey(Resource):
|
class LocalKey(RestServlet):
|
||||||
"""HTTP resource containing encoding the TLS X.509 certificate and NACL
|
"""HTTP resource containing encoding the TLS X.509 certificate and NACL
|
||||||
signature verification keys for this server::
|
signature verification keys for this server::
|
||||||
|
|
||||||
@ -61,18 +60,17 @@ class LocalKey(Resource):
|
|||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
|
|
||||||
isLeaf = True
|
PATTERNS = (re.compile("^/_matrix/key/v2/server(/(?P<key_id>[^/]*))?$"),)
|
||||||
|
|
||||||
def __init__(self, hs: "HomeServer"):
|
def __init__(self, hs: "HomeServer"):
|
||||||
self.config = hs.config
|
self.config = hs.config
|
||||||
self.clock = hs.get_clock()
|
self.clock = hs.get_clock()
|
||||||
self.update_response_body(self.clock.time_msec())
|
self.update_response_body(self.clock.time_msec())
|
||||||
Resource.__init__(self)
|
|
||||||
|
|
||||||
def update_response_body(self, time_now_msec: int) -> None:
|
def update_response_body(self, time_now_msec: int) -> None:
|
||||||
refresh_interval = self.config.key.key_refresh_interval
|
refresh_interval = self.config.key.key_refresh_interval
|
||||||
self.valid_until_ts = int(time_now_msec + refresh_interval)
|
self.valid_until_ts = int(time_now_msec + refresh_interval)
|
||||||
self.response_body = encode_canonical_json(self.response_json_object())
|
self.response_body = self.response_json_object()
|
||||||
|
|
||||||
def response_json_object(self) -> JsonDict:
|
def response_json_object(self) -> JsonDict:
|
||||||
verify_keys = {}
|
verify_keys = {}
|
||||||
@ -99,9 +97,11 @@ class LocalKey(Resource):
|
|||||||
json_object = sign_json(json_object, self.config.server.server_name, key)
|
json_object = sign_json(json_object, self.config.server.server_name, key)
|
||||||
return json_object
|
return json_object
|
||||||
|
|
||||||
def render_GET(self, request: SynapseRequest) -> Optional[int]:
|
def on_GET(
|
||||||
|
self, request: Request, key_id: Optional[str] = None
|
||||||
|
) -> Tuple[int, JsonDict]:
|
||||||
time_now = self.clock.time_msec()
|
time_now = self.clock.time_msec()
|
||||||
# Update the expiry time if less than half the interval remains.
|
# Update the expiry time if less than half the interval remains.
|
||||||
if time_now + self.config.key.key_refresh_interval / 2 > self.valid_until_ts:
|
if time_now + self.config.key.key_refresh_interval / 2 > self.valid_until_ts:
|
||||||
self.update_response_body(time_now)
|
self.update_response_body(time_now)
|
||||||
return respond_with_json_bytes(request, 200, self.response_body)
|
return 200, self.response_body
|
||||||
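
A quick check of the `PATTERNS` regex that replaces `isLeaf = True` above: the `key_id` group is optional, so a single route serves both `/_matrix/key/v2/server` and `/_matrix/key/v2/server/<key_id>` (the old `Resource` needed manual path handling for that).

# The key_id group is optional, so one pattern covers both request shapes.
import re

pattern = re.compile("^/_matrix/key/v2/server(/(?P<key_id>[^/]*))?$")

for path in ("/_matrix/key/v2/server", "/_matrix/key/v2/server/ed25519:a1b2"):
    match = pattern.match(path)
    assert match is not None
    print(path, "->", match.group("key_id"))
# /_matrix/key/v2/server -> None
# /_matrix/key/v2/server/ed25519:a1b2 -> ed25519:a1b2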
@@ -13,15 +13,20 @@
 # limitations under the License.
 
 import logging
-from typing import TYPE_CHECKING, Dict, Set
+import re
+from typing import TYPE_CHECKING, Dict, Optional, Set, Tuple
 
 from signedjson.sign import sign_json
 
-from synapse.api.errors import Codes, SynapseError
+from twisted.web.server import Request
+
 from synapse.crypto.keyring import ServerKeyFetcher
-from synapse.http.server import DirectServeJsonResource, respond_with_json
-from synapse.http.servlet import parse_integer, parse_json_object_from_request
-from synapse.http.site import SynapseRequest
+from synapse.http.server import HttpServer
+from synapse.http.servlet import (
+    RestServlet,
+    parse_integer,
+    parse_json_object_from_request,
+)
 from synapse.types import JsonDict
 from synapse.util import json_decoder
 from synapse.util.async_helpers import yieldable_gather_results
@@ -32,7 +37,7 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
-class RemoteKey(DirectServeJsonResource):
+class RemoteKey(RestServlet):
     """HTTP resource for retrieving the TLS certificate and NACL signature
     verification keys for a collection of servers. Checks that the reported
     X.509 TLS certificate matches the one used in the HTTPS connection. Checks
@@ -88,11 +93,7 @@ class RemoteKey(DirectServeJsonResource):
     }
     """
 
-    isLeaf = True
-
     def __init__(self, hs: "HomeServer"):
-        super().__init__()
-
         self.fetcher = ServerKeyFetcher(hs)
         self.store = hs.get_datastores().main
         self.clock = hs.get_clock()
@@ -101,36 +102,48 @@ class RemoteKey(DirectServeJsonResource):
         )
         self.config = hs.config
 
-    async def _async_render_GET(self, request: SynapseRequest) -> None:
-        assert request.postpath is not None
-        if len(request.postpath) == 1:
-            (server,) = request.postpath
-            query: dict = {server.decode("ascii"): {}}
-        elif len(request.postpath) == 2:
-            server, key_id = request.postpath
+    def register(self, http_server: HttpServer) -> None:
+        http_server.register_paths(
+            "GET",
+            (
+                re.compile(
+                    "^/_matrix/key/v2/query/(?P<server>[^/]*)(/(?P<key_id>[^/]*))?$"
+                ),
+            ),
+            self.on_GET,
+            self.__class__.__name__,
+        )
+        http_server.register_paths(
+            "POST",
+            (re.compile("^/_matrix/key/v2/query$"),),
+            self.on_POST,
+            self.__class__.__name__,
+        )
+
+    async def on_GET(
+        self, request: Request, server: str, key_id: Optional[str] = None
+    ) -> Tuple[int, JsonDict]:
+        if server and key_id:
             minimum_valid_until_ts = parse_integer(request, "minimum_valid_until_ts")
             arguments = {}
             if minimum_valid_until_ts is not None:
                 arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
-            query = {server.decode("ascii"): {key_id.decode("ascii"): arguments}}
+            query = {server: {key_id: arguments}}
         else:
-            raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND)
+            query = {server: {}}
 
-        await self.query_keys(request, query, query_remote_on_cache_miss=True)
+        return 200, await self.query_keys(query, query_remote_on_cache_miss=True)
 
-    async def _async_render_POST(self, request: SynapseRequest) -> None:
+    async def on_POST(self, request: Request) -> Tuple[int, JsonDict]:
         content = parse_json_object_from_request(request)
 
         query = content["server_keys"]
 
-        await self.query_keys(request, query, query_remote_on_cache_miss=True)
+        return 200, await self.query_keys(query, query_remote_on_cache_miss=True)
 
     async def query_keys(
-        self,
-        request: SynapseRequest,
-        query: JsonDict,
-        query_remote_on_cache_miss: bool = False,
-    ) -> None:
+        self, query: JsonDict, query_remote_on_cache_miss: bool = False
+    ) -> JsonDict:
         logger.info("Handling query for keys %r", query)
 
         store_queries = []
@@ -232,7 +245,7 @@ class RemoteKey(DirectServeJsonResource):
                     for server_name, keys in cache_misses.items()
                 ),
             )
-            await self.query_keys(request, query, query_remote_on_cache_miss=False)
+            return await self.query_keys(query, query_remote_on_cache_miss=False)
         else:
             signed_keys = []
             for key_json_raw in json_results:
@@ -244,6 +257,4 @@ class RemoteKey(DirectServeJsonResource):
 
                 signed_keys.append(key_json)
 
-            response = {"server_keys": signed_keys}
-
-            respond_with_json(request, 200, response, canonical_json=True)
+            return {"server_keys": signed_keys}
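
The new `on_GET` receives `server` and `key_id` straight from the route's named groups instead of decoding `request.postpath` by hand, and folds them into the nested query structure that `query_keys` expects. A minimal standalone rendition of that query-building step (the values are fabricated for illustration):

# Standalone sketch of how on_GET now assembles the key query.
from typing import Optional

def build_query(server: str, key_id: Optional[str], minimum_valid_until_ts: Optional[int]):
    if server and key_id:
        arguments = {}
        if minimum_valid_until_ts is not None:
            arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
        return {server: {key_id: arguments}}
    # No key_id captured: ask for all of this server's keys.
    return {server: {}}

print(build_query("matrix.org", "ed25519:auto", 1667000000000))
# {'matrix.org': {'ed25519:auto': {'minimum_valid_until_ts': 1667000000000}}}
print(build_query("matrix.org", None, None))
# {'matrix.org': {}}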
@@ -17,6 +17,9 @@ from typing import TYPE_CHECKING
 
 from twisted.web.resource import Resource
 
+from synapse.rest.synapse.client.oidc.backchannel_logout_resource import (
+    OIDCBackchannelLogoutResource,
+)
 from synapse.rest.synapse.client.oidc.callback_resource import OIDCCallbackResource
 
 if TYPE_CHECKING:
@@ -29,6 +32,7 @@ class OIDCResource(Resource):
     def __init__(self, hs: "HomeServer"):
         Resource.__init__(self)
         self.putChild(b"callback", OIDCCallbackResource(hs))
+        self.putChild(b"backchannel_logout", OIDCBackchannelLogoutResource(hs))
 
 
 __all__ = ["OIDCResource"]
@@ -0,0 +1,35 @@
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import TYPE_CHECKING
+
+from synapse.http.server import DirectServeJsonResource
+from synapse.http.site import SynapseRequest
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+logger = logging.getLogger(__name__)
+
+
+class OIDCBackchannelLogoutResource(DirectServeJsonResource):
+    isLeaf = 1
+
+    def __init__(self, hs: "HomeServer"):
+        super().__init__()
+        self._oidc_handler = hs.get_oidc_handler()
+
+    async def _async_render_POST(self, request: SynapseRequest) -> None:
+        await self._oidc_handler.handle_backchannel_logout(request)
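
The new resource is mounted at `backchannel_logout` under the OIDC client resource, so an identity provider would POST its signed `logout_token` there. A hedged sketch of what such a request could look like, assuming the usual `/_synapse/client/oidc/` mount point and a placeholder token value:

# Hypothetical example of the form-encoded request an OIDC provider would
# send to the new endpoint; the hostname and token are placeholders.
import urllib.parse
import urllib.request

data = urllib.parse.urlencode({"logout_token": "<signed JWT from the IdP>"}).encode()
req = urllib.request.Request(
    "https://synapse.example.org/_synapse/client/oidc/backchannel_logout",
    data=data,
    method="POST",
)
# urllib.request.urlopen(req)  # would be rejected unless the token verifies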
@@ -201,7 +201,7 @@ class DataStore(
         name: Optional[str] = None,
         guests: bool = True,
         deactivated: bool = False,
-        order_by: str = UserSortOrder.USER_ID.value,
+        order_by: str = UserSortOrder.NAME.value,
         direction: str = "f",
         approved: bool = True,
     ) -> Tuple[List[JsonDict], int]:
@@ -261,6 +261,7 @@ class DataStore(
            sql_base = f"""
                FROM users as u
                LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ?
+               LEFT JOIN erased_users AS eu ON u.name = eu.user_id
                {where_clause}
                """
            sql = "SELECT COUNT(*) as total_users " + sql_base
@@ -269,7 +270,8 @@ class DataStore(
 
            sql = f"""
                SELECT name, user_type, is_guest, admin, deactivated, shadow_banned,
-               displayname, avatar_url, creation_ts * 1000 as creation_ts, approved
+               displayname, avatar_url, creation_ts * 1000 as creation_ts, approved,
+               eu.user_id is not null as erased
                {sql_base}
                ORDER BY {order_by_column} {order}, u.name ASC
                LIMIT ? OFFSET ?
@@ -277,6 +279,13 @@ class DataStore(
            args += [limit, start]
            txn.execute(sql, args)
            users = self.db_pool.cursor_to_dict(txn)
+
+           # some of those boolean values are returned as integers when we're on SQLite
+           columns_to_boolify = ["erased"]
+           for user in users:
+               for column in columns_to_boolify:
+                   user[column] = bool(user[column])
+
            return users, count
 
        return await self.db_pool.runInteraction(
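
The boolification loop added above exists because SQLite has no boolean column type: an expression such as `eu.user_id is not null` comes back as `0`/`1`. A minimal stdlib reproduction:

# Why bool() is applied to the new "erased" column on SQLite.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE erased_users (user_id TEXT)")
conn.execute("INSERT INTO erased_users VALUES ('@alice:test')")
row = conn.execute(
    "SELECT user_id IS NOT NULL AS erased FROM erased_users"
).fetchone()
print(row[0], bool(row[0]))  # 1 True  -> hence user["erased"] = bool(...)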
@@ -157,10 +157,23 @@ class ApplicationServiceWorkerStore(RoomMemberWorkerStore):
         app_service: "ApplicationService",
         cache_context: _CacheContext,
     ) -> List[str]:
-        users_in_room = await self.get_users_in_room(
+        """
+        Get all users in a room that the appservice controls.
+
+        Args:
+            room_id: The room to check in.
+            app_service: The application service to check interest/control against
+
+        Returns:
+            List of user IDs that the appservice controls.
+        """
+        # We can use `get_local_users_in_room(...)` here because an application service
+        # can only be interested in local users of the server it's on (ignore any remote
+        # users that might match the user namespace regex).
+        local_users_in_room = await self.get_local_users_in_room(
             room_id, on_invalidate=cache_context.invalidate
         )
-        return list(filter(app_service.is_interested_in_user, users_in_room))
+        return list(filter(app_service.is_interested_in_user, local_users_in_room))
 
 
 class ApplicationServiceStore(ApplicationServiceWorkerStore):
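
The switch to `get_local_users_in_room` relies on the fact that an application service's user namespace can only match local users, so filtering the smaller local membership list gives the same answer more cheaply. A rough illustration of the filter step, with an invented `FakeAppService` stand-in:

# Illustrative stand-in for the appservice interest check; not Synapse's
# ApplicationService class.
import re

class FakeAppService:
    def __init__(self, user_regex: str) -> None:
        self._re = re.compile(user_regex)

    def is_interested_in_user(self, user_id: str) -> bool:
        return bool(self._re.match(user_id))

app_service = FakeAppService(r"@irc_.*:example\.org")
local_users_in_room = ["@irc_alice:example.org", "@bob:example.org"]
print(list(filter(app_service.is_interested_in_user, local_users_in_room)))
# ['@irc_alice:example.org']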
@@ -274,6 +274,13 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
             destination, int(from_stream_id)
         )
         if not has_changed:
+            # debugging for https://github.com/matrix-org/synapse/issues/14251
+            issue_8631_logger.debug(
+                "%s: no change between %i and %i",
+                destination,
+                from_stream_id,
+                now_stream_id,
+            )
             return now_stream_id, []
 
         updates = await self.db_pool.runInteraction(
@@ -1848,7 +1855,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
         self,
         txn: LoggingTransaction,
         user_id: str,
-        device_ids: Iterable[str],
+        device_id: str,
         hosts: Collection[str],
         stream_ids: List[int],
         context: Optional[Dict[str, str]],
@@ -1864,6 +1871,21 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
         stream_id_iterator = iter(stream_ids)
 
         encoded_context = json_encoder.encode(context)
+        mark_sent = not self.hs.is_mine_id(user_id)
+
+        values = [
+            (
+                destination,
+                next(stream_id_iterator),
+                user_id,
+                device_id,
+                mark_sent,
+                now,
+                encoded_context if whitelisted_homeserver(destination) else "{}",
+            )
+            for destination in hosts
+        ]
+
         self.db_pool.simple_insert_many_txn(
             txn,
             table="device_lists_outbound_pokes",
@@ -1876,21 +1898,19 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
                 "ts",
                 "opentracing_context",
             ),
-            values=[
-                (
-                    destination,
-                    next(stream_id_iterator),
-                    user_id,
-                    device_id,
-                    not self.hs.is_mine_id(
-                        user_id
-                    ),  # We only need to send out update for *our* users
-                    now,
-                    encoded_context if whitelisted_homeserver(destination) else "{}",
-                )
-                for destination in hosts
-                for device_id in device_ids
-            ],
+            values=values,
         )
+
+        # debugging for https://github.com/matrix-org/synapse/issues/14251
+        if issue_8631_logger.isEnabledFor(logging.DEBUG):
+            issue_8631_logger.debug(
+                "Recorded outbound pokes for %s:%s with device stream ids %s",
+                user_id,
+                device_id,
+                {
+                    stream_id: destination
+                    for (destination, stream_id, _, _, _, _, _) in values
+                },
+            )
 
     def _add_device_outbound_room_poke_txn(
@@ -1997,7 +2017,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
             self._add_device_outbound_poke_to_stream_txn(
                 txn,
                 user_id=user_id,
-                device_ids=[device_id],
+                device_id=device_id,
                 hosts=hosts,
                 stream_ids=stream_ids,
                 context=context,
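
The refactor hoists the insert rows into a `values` list so the new debug log can reuse them to report which stream ID went to which destination. The comprehension, run standalone with fabricated rows:

# The stream_id -> destination map built by the debug log above, using
# made-up rows in the same (destination, stream_id, user_id, device_id,
# sent, ts, opentracing_context) shape.
values = [
    ("remote1.example", 101, "@user:local", "DEVICEID", True, 1667000000000, "{}"),
    ("remote2.example", 102, "@user:local", "DEVICEID", True, 1667000000000, "{}"),
]
print({stream_id: destination for (destination, stream_id, _, _, _, _, _) in values})
# {101: 'remote1.example', 102: 'remote2.example'}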
@@ -139,11 +139,15 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
     @trace
     @cancellable
     async def get_e2e_device_keys_for_cs_api(
-        self, query_list: List[Tuple[str, Optional[str]]]
+        self,
+        query_list: List[Tuple[str, Optional[str]]],
+        include_displaynames: bool = True,
     ) -> Dict[str, Dict[str, JsonDict]]:
         """Fetch a list of device keys, formatted suitably for the C/S API.
         Args:
-            query_list(list): List of pairs of user_ids and device_ids.
+            query_list: List of pairs of user_ids and device_ids.
+            include_displaynames: Whether to include the displayname of returned devices
+                (if one exists).
         Returns:
             Dict mapping from user-id to dict mapping from device_id to
             key data. The key data will be a dict in the same format as the
@@ -166,9 +170,12 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
                 continue
 
             r["unsigned"] = {}
-            display_name = device_info.display_name
-            if display_name is not None:
-                r["unsigned"]["device_display_name"] = display_name
+            if include_displaynames:
+                # Include the device's display name in the "unsigned" dictionary
+                display_name = device_info.display_name
+                if display_name is not None:
+                    r["unsigned"]["device_display_name"] = display_name
 
             rv[user_id][device_id] = r
 
         return rv
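
Behaviour of the new `include_displaynames` flag, sketched with plain dicts (the key data and device fields below are fabricated stand-ins, not the store's real objects):

# Minimal model of the unsigned/display-name handling added above.
from typing import Optional

def format_device(display_name: Optional[str], include_displaynames: bool = True) -> dict:
    r = {"algorithms": ["m.olm.v1.curve25519-aes-sha2"]}  # stand-in key data
    r["unsigned"] = {}
    if include_displaynames:
        # Only attach a name when the device actually has one.
        if display_name is not None:
            r["unsigned"]["device_display_name"] = display_name
    return r

print(format_device("My phone"))         # unsigned carries the display name
print(format_device("My phone", False))  # unsigned stays empty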
@@ -29,6 +29,7 @@ from typing import (
 )
 
 from synapse.api.errors import StoreError
+from synapse.config.homeserver import ExperimentalConfig
 from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import (
@@ -62,7 +63,9 @@ logger = logging.getLogger(__name__)
 
 
 def _load_rules(
-    rawrules: List[JsonDict], enabled_map: Dict[str, bool]
+    rawrules: List[JsonDict],
+    enabled_map: Dict[str, bool],
+    experimental_config: ExperimentalConfig,
 ) -> FilteredPushRules:
     """Take the DB rows returned from the DB and convert them into a full
     `FilteredPushRules` object.
@@ -80,7 +83,9 @@ def _load_rules(
 
     push_rules = PushRules(ruleslist)
 
-    filtered_rules = FilteredPushRules(push_rules, enabled_map)
+    filtered_rules = FilteredPushRules(
+        push_rules, enabled_map, msc3664_enabled=experimental_config.msc3664_enabled
+    )
 
     return filtered_rules
 
@@ -160,7 +165,7 @@ class PushRulesWorkerStore(
 
         enabled_map = await self.get_push_rules_enabled_for_user(user_id)
 
-        return _load_rules(rows, enabled_map)
+        return _load_rules(rows, enabled_map, self.hs.config.experimental)
 
     async def get_push_rules_enabled_for_user(self, user_id: str) -> Dict[str, bool]:
         results = await self.db_pool.simple_select_list(
@@ -219,7 +224,9 @@ class PushRulesWorkerStore(
         results: Dict[str, FilteredPushRules] = {}
 
         for user_id, rules in raw_rules.items():
-            results[user_id] = _load_rules(rules, enabled_map_by_user.get(user_id, {}))
+            results[user_id] = _load_rules(
+                rules, enabled_map_by_user.get(user_id, {}), self.hs.config.experimental
+            )
 
         return results
 
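
Threading the experimental config into `_load_rules` lets `FilteredPushRules` decide whether MSC3664-gated rules are surfaced at all. The sketch below models that gating with invented names and a simplified rule representation; it is not Synapse's actual push-rule machinery:

# Hypothetical model of config-gated rule filtering; names are stand-ins.
from dataclasses import dataclass

@dataclass
class ExperimentalConfigSketch:
    msc3664_enabled: bool = False

def load_rules(raw_rules, experimental: ExperimentalConfigSketch):
    # Drop rules flagged as experimental unless the config opts in.
    return [
        r for r in raw_rules
        if not r.get("msc3664", False) or experimental.msc3664_enabled
    ]

rules = [
    {"rule_id": ".m.rule.master"},
    {"rule_id": ".im.nheko.msc3664.reply", "msc3664": True},
]
print(load_rules(rules, ExperimentalConfigSketch(msc3664_enabled=False)))
print(load_rules(rules, ExperimentalConfigSketch(msc3664_enabled=True)))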
@@ -21,7 +21,13 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast
 import attr
 
 from synapse.api.constants import UserTypes
-from synapse.api.errors import Codes, StoreError, SynapseError, ThreepidValidationError
+from synapse.api.errors import (
+    Codes,
+    NotFoundError,
+    StoreError,
+    SynapseError,
+    ThreepidValidationError,
+)
 from synapse.config.homeserver import HomeServerConfig
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.storage.database import (
@@ -50,6 +56,14 @@ class ExternalIDReuseException(Exception):
     because this external id is given to an other user."""
 
 
+class LoginTokenExpired(Exception):
+    """Exception if the login token sent expired"""
+
+
+class LoginTokenReused(Exception):
+    """Exception if the login token sent was already used"""
+
+
 @attr.s(frozen=True, slots=True, auto_attribs=True)
 class TokenLookupResult:
     """Result of looking up an access token.
@@ -115,6 +129,20 @@ class RefreshTokenLookupResult:
     If None, the session can be refreshed indefinitely."""
 
 
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class LoginTokenLookupResult:
+    """Result of looking up a login token."""
+
+    user_id: str
+    """The user this token belongs to."""
+
+    auth_provider_id: Optional[str]
+    """The SSO Identity Provider that the user authenticated with, to get this token."""
+
+    auth_provider_session_id: Optional[str]
+    """The session ID advertised by the SSO Identity Provider."""
+
+
 class RegistrationWorkerStore(CacheInvalidationWorkerStore):
     def __init__(
         self,
@@ -1789,6 +1817,130 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
             "replace_refresh_token", _replace_refresh_token_txn
         )
 
+    async def add_login_token_to_user(
+        self,
+        user_id: str,
+        token: str,
+        expiry_ts: int,
+        auth_provider_id: Optional[str],
+        auth_provider_session_id: Optional[str],
+    ) -> None:
+        """Adds a short-term login token for the given user.
+
+        Args:
+            user_id: The user ID.
+            token: The new login token to add.
+            expiry_ts (milliseconds since the epoch): Time after which the login token
+                cannot be used.
+            auth_provider_id: The SSO Identity Provider that the user authenticated with
+                to get this token, if any
+            auth_provider_session_id: The session ID advertised by the SSO Identity
+                Provider, if any.
+        """
+        await self.db_pool.simple_insert(
+            "login_tokens",
+            {
+                "token": token,
+                "user_id": user_id,
+                "expiry_ts": expiry_ts,
+                "auth_provider_id": auth_provider_id,
+                "auth_provider_session_id": auth_provider_session_id,
+            },
+            desc="add_login_token_to_user",
+        )
+
+    def _consume_login_token(
+        self,
+        txn: LoggingTransaction,
+        token: str,
+        ts: int,
+    ) -> LoginTokenLookupResult:
+        values = self.db_pool.simple_select_one_txn(
+            txn,
+            "login_tokens",
+            keyvalues={"token": token},
+            retcols=(
+                "user_id",
+                "expiry_ts",
+                "used_ts",
+                "auth_provider_id",
+                "auth_provider_session_id",
+            ),
+            allow_none=True,
+        )
+
+        if values is None:
+            raise NotFoundError()
+
+        self.db_pool.simple_update_one_txn(
+            txn,
+            "login_tokens",
+            keyvalues={"token": token},
+            updatevalues={"used_ts": ts},
+        )
+        user_id = values["user_id"]
+        expiry_ts = values["expiry_ts"]
+        used_ts = values["used_ts"]
+        auth_provider_id = values["auth_provider_id"]
+        auth_provider_session_id = values["auth_provider_session_id"]
+
+        # Token was already used
+        if used_ts is not None:
+            raise LoginTokenReused()
+
+        # Token expired
+        if ts > int(expiry_ts):
+            raise LoginTokenExpired()
+
+        return LoginTokenLookupResult(
+            user_id=user_id,
+            auth_provider_id=auth_provider_id,
+            auth_provider_session_id=auth_provider_session_id,
+        )
+
+    async def consume_login_token(self, token: str) -> LoginTokenLookupResult:
+        """Lookup a login token and consume it.
+
+        Args:
+            token: The login token.
+
+        Returns:
+            The data stored with that token, including the `user_id`. Returns `None` if
+            the token does not exist or if it expired.
+
+        Raises:
+            NotFound if the login token was not found in database
+            LoginTokenExpired if the login token expired
+            LoginTokenReused if the login token was already used
+        """
+        return await self.db_pool.runInteraction(
+            "consume_login_token",
+            self._consume_login_token,
+            token,
+            self._clock.time_msec(),
+        )
+
+    async def invalidate_login_tokens_by_session_id(
+        self, auth_provider_id: str, auth_provider_session_id: str
+    ) -> None:
+        """Invalidate login tokens with the given IdP session ID.
+
+        Args:
+            auth_provider_id: The SSO Identity Provider that the user authenticated with
+                to get this token
+            auth_provider_session_id: The session ID advertised by the SSO Identity
+                Provider
+        """
+        await self.db_pool.simple_update(
+            table="login_tokens",
+            keyvalues={
+                "auth_provider_id": auth_provider_id,
+                "auth_provider_session_id": auth_provider_session_id,
+            },
+            updatevalues={"used_ts": self._clock.time_msec()},
+            desc="invalidate_login_tokens_by_session_id",
+        )
+
     @cached()
     async def is_guest(self, user_id: str) -> bool:
         res = await self.db_pool.simple_select_one_onecol(
@@ -2019,6 +2171,12 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
             and hs.config.experimental.msc3866.require_approval_for_new_accounts
         )
 
+        # Create a background job for removing expired login tokens
+        if hs.config.worker.run_background_tasks:
+            self._clock.looping_call(
+                self._delete_expired_login_tokens, THIRTY_MINUTES_IN_MS
+            )
+
     async def add_access_token_to_user(
         self,
         user_id: str,
@@ -2617,6 +2775,23 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
             approved,
         )
 
+    @wrap_as_background_process("delete_expired_login_tokens")
+    async def _delete_expired_login_tokens(self) -> None:
+        """Remove login tokens with expiry dates that have passed."""
+
+        def _delete_expired_login_tokens_txn(txn: LoggingTransaction, ts: int) -> None:
+            sql = "DELETE FROM login_tokens WHERE expiry_ts <= ?"
+            txn.execute(sql, (ts,))
+
+        # We keep the expired tokens for an extra 5 minutes so we can measure how many
+        # times a token is being used after its expiry
+        now = self._clock.time_msec()
+        await self.db_pool.runInteraction(
+            "delete_expired_login_tokens",
+            _delete_expired_login_tokens_txn,
+            now - (5 * 60 * 1000),
+        )
+
 
 def find_max_generated_user_id_localpart(cur: Cursor) -> int:
     """
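
The consume-token logic above is strictly single-use: the row is marked used before the expiry and reuse checks run, and expired rows are kept around (for five extra minutes) rather than deleted immediately, so replays can be detected and measured. A self-contained model of the same flow, with an in-memory dict standing in for the `login_tokens` table:

# Self-contained model of consume_login_token; the dict replaces the DB.
from typing import Dict, Optional

class LoginTokenExpired(Exception): ...
class LoginTokenReused(Exception): ...
class NotFoundError(Exception): ...

_tokens: Dict[str, dict] = {
    "syl_abc": {"user_id": "@alice:test", "expiry_ts": 2_000, "used_ts": None},
}

def consume_login_token(token: str, now_ms: int) -> str:
    values = _tokens.get(token)
    if values is None:
        raise NotFoundError()
    # Mark the token used first, mirroring the store's update-then-check order.
    used_ts, values["used_ts"] = values["used_ts"], now_ms
    if used_ts is not None:
        raise LoginTokenReused()
    if now_ms > values["expiry_ts"]:
        raise LoginTokenExpired()
    return values["user_id"]

print(consume_login_token("syl_abc", 1_000))  # '@alice:test'
try:
    consume_login_token("syl_abc", 1_001)
except LoginTokenReused:
    print("second use rejected")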
@@ -152,6 +152,9 @@ class RoomMemberWorkerStore(EventsWorkerStore):
         the forward extremities of those rooms will exclude most members. We may also
         calculate room state incorrectly for such rooms and believe that a member is or
         is not in the room when the opposite is true.
+
+        Note: If you only care about users in the room local to the homeserver, use
+        `get_local_users_in_room(...)` instead which will be more performant.
         """
         return await self.db_pool.simple_select_onecol(
             table="current_state_events",
@@ -707,8 +710,8 @@ class RoomMemberWorkerStore(EventsWorkerStore):
 
         # 250 users is pretty arbitrary but the data can be quite large if users
         # are in many rooms.
-        for user_ids in batch_iter(user_ids, 250):
-            all_user_rooms.update(await self._get_rooms_for_users(user_ids))
+        for batch_user_ids in batch_iter(user_ids, 250):
+            all_user_rooms.update(await self._get_rooms_for_users(batch_user_ids))
 
         return all_user_rooms
 
@@ -742,7 +745,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
         # user and the set of other users, and then checking if there is any
         # overlap.
         sql = f"""
-            SELECT b.state_key
+            SELECT DISTINCT b.state_key
             FROM (
                 SELECT room_id FROM current_state_events
                 WHERE type = 'm.room.member' AND membership = 'join' AND state_key = ?
@@ -751,7 +754,6 @@ class RoomMemberWorkerStore(EventsWorkerStore):
                 SELECT room_id, state_key FROM current_state_events
                 WHERE type = 'm.room.member' AND membership = 'join' AND {clause}
             ) AS b using (room_id)
-            LIMIT 1
         """
 
         txn.execute(sql, (user_id, *args))
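
The loop-variable rename above stops `user_ids` from being rebound inside its own loop. That rebinding is harmless to `batch_iter` itself, which has already captured the iterable, but it is confusing to readers and to linters. A stand-in `batch_iter` with what appears to be the same observable behaviour as Synapse's helper, to show the batching:

# Stand-in batch_iter (Synapse's real one lives in synapse.util.iterutils);
# yields fixed-size tuples, with a shorter final batch if needed.
def batch_iter(iterable, size):
    batch = []
    for item in iterable:
        batch.append(item)
        if len(batch) == size:
            yield tuple(batch)
            batch = []
    if batch:
        yield tuple(batch)

user_ids = [f"@u{i}:test" for i in range(5)]
seen = []
for batch_user_ids in batch_iter(user_ids, 2):  # distinct name, no shadowing
    seen.extend(batch_user_ids)
assert seen == user_ids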
Some files were not shown because too many files have changed in this diff.