mirror of https://git.anonymousland.org/anonymousland/synapse.git
synced 2025-01-22 07:01:00 -05:00

Merge remote-tracking branch 'upstream/release-v1.81'

commit 638ea15f8e
@@ -35,9 +35,9 @@ sed -i \
 # compatible (as far the package metadata declares, anyway); pip's package resolver
 # is more lax.
 #
-# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
-# toml file. This means we don't have to ensure compatibility between old deps and
-# dev tools.
+# Rather than `poetry install --no-dev`, we drop all dev dependencies and the dev-docs
+# group from the toml file. This means we don't have to ensure compatibility between
+# old deps and dev tools.

 pip install toml wheel

@@ -47,6 +47,7 @@ with open('pyproject.toml', 'r') as f:
     data = toml.loads(f.read())

 del data['tool']['poetry']['dev-dependencies']
+del data['tool']['poetry']['group']['dev-docs']

 with open('pyproject.toml', 'w') as f:
     toml.dump(data, f)
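Read as one piece, the edited snippet amounts to the following standalone sketch. The `import toml` line and the file framing are assumptions for illustration; in context the snippet runs with the `toml` package provided by the `pip install toml wheel` step above.

```python
# Sketch of the dependency-stripping step after this change (assumed framing).
import toml

with open('pyproject.toml', 'r') as f:
    data = toml.loads(f.read())

# Drop the dev dependencies and the new optional dev-docs group so the image
# build never has to resolve them against the pinned runtime dependencies.
del data['tool']['poetry']['dev-dependencies']
del data['tool']['poetry']['group']['dev-docs']

with open('pyproject.toml', 'w') as f:
    toml.dump(data, f)
```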
.github/workflows/docs.yaml (vendored): 79 lines changed
@@ -13,25 +13,10 @@ on:
   workflow_dispatch:

 jobs:
-  pages:
-    name: GitHub Pages
+  pre:
+    name: Calculate variables for GitHub Pages deployment
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3

-      - name: Setup mdbook
-        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
-        with:
-          mdbook-version: '0.4.17'
-
-      - name: Build the documentation
-        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
-        # However, we're using docs/README.md for other purposes and need to pick a new page
-        # as the default. Let's opt for the welcome page instead.
-        run: |
-          mdbook build
-          cp book/welcome_and_overview.html book/index.html
-
       # Figure out the target directory.
       #
       # The target directory depends on the name of the branch
@@ -55,11 +40,65 @@ jobs:
           # finally, set the 'branch-version' var.
           echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
+    outputs:
+      branch-version: ${{ steps.vars.outputs.branch-version }}
+
+  ################################################################################
+  pages-docs:
+    name: GitHub Pages
+    runs-on: ubuntu-latest
+    needs:
+      - pre
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup mdbook
+        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+        with:
+          mdbook-version: '0.4.17'
+
+      - name: Build the documentation
+        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
+        # However, we're using docs/README.md for other purposes and need to pick a new page
+        # as the default. Let's opt for the welcome page instead.
+        run: |
+          mdbook build
+          cp book/welcome_and_overview.html book/index.html
+
       # Deploy to the target directory.
       - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2
+        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
-          destination_dir: ./${{ steps.vars.outputs.branch-version }}
+          destination_dir: ./${{ needs.pre.outputs.branch-version }}
+
+  ################################################################################
+  pages-devdocs:
+    name: GitHub Pages (developer docs)
+    runs-on: ubuntu-latest
+    needs:
+      - pre
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: "Set up Sphinx"
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          python-version: "3.x"
+          poetry-version: "1.3.2"
+          groups: "dev-docs"
+          extras: ""
+
+      - name: Build the documentation
+        run: |
+          cd dev-docs
+          poetry run make html
+
+      # Deploy to the target directory.
+      - name: Deploy to gh pages
+        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          publish_dir: ./dev-docs/_build/html
+          destination_dir: ./dev-docs/${{ needs.pre.outputs.branch-version }}
.github/workflows/latest_deps.yml (vendored): 6 lines changed
@@ -27,7 +27,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2
@@ -61,7 +61,7 @@ jobs:
      - uses: actions/checkout@v3

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2
@@ -134,7 +134,7 @@ jobs:
      - uses: actions/checkout@v3

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2
.github/workflows/tests.yml (vendored): 34 lines changed
@@ -34,6 +34,14 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
+      - name: Install Rust
+        # There don't seem to be versioned releases of this action per se: for each rust
+        # version there is a branch which gets constantly rebased on top of master.
+        # We pin to a specific commit for paranoia's sake.
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        with:
+          toolchain: 1.58.1
+      - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
@@ -95,6 +103,14 @@ jobs:
       - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
+      - name: Install Rust
+        # There don't seem to be versioned releases of this action per se: for each rust
+        # version there is a branch which gets constantly rebased on top of master.
+        # We pin to a specific commit for paranoia's sake.
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        with:
+          toolchain: 1.58.1
+      - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
           poetry-version: "1.3.2"
@@ -113,7 +129,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: 1.58.1
          components: clippy
@@ -135,7 +151,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: nightly-2022-12-01
          components: clippy
@@ -155,7 +171,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          # We use nightly so that it correctly groups together imports
          toolchain: nightly-2022-12-01
@@ -223,7 +239,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2
@@ -268,7 +284,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2
@@ -389,7 +405,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2
@@ -534,7 +550,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2
@@ -565,7 +581,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2
@@ -588,7 +604,7 @@ jobs:
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: nightly-2022-12-01
      - uses: Swatinem/rust-cache@v2
.github/workflows/twisted_trunk.yml (vendored): 15 lines changed
@@ -5,6 +5,13 @@ on:
     - cron: 0 8 * * *

   workflow_dispatch:
+    inputs:
+      twisted_ref:
+        description: Commit, branch or tag to checkout from upstream Twisted.
+        required: false
+        default: 'trunk'
+        type: string
+

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -18,7 +25,7 @@ jobs:
      - uses: actions/checkout@v3

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2
@@ -29,7 +36,7 @@ jobs:
          extras: "all"
      - run: |
          poetry remove twisted
-          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref }}
          poetry install --no-interaction --extras "all test"
      - name: Remove warn_unused_ignores from mypy config
        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
@@ -43,7 +50,7 @@ jobs:
      - run: sudo apt-get -qq install xmlsec1

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2
@@ -82,7 +89,7 @@ jobs:
      - uses: actions/checkout@v3

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2
.gitignore (vendored): 3 lines changed
@@ -53,6 +53,7 @@ __pycache__/
 /coverage.*
 /dist/
 /docs/build/
+/dev-docs/_build/
 /htmlcov
 /pip-wheel-metadata/

@@ -61,7 +62,7 @@ book/

 # complement
 /complement-*
-/master.tar.gz
+/main.tar.gz

 # rust
 /target/
CHANGES.md: 84 lines changed
@@ -1,3 +1,85 @@
+Synapse 1.81.0rc1 (2023-04-04)
+==============================
+
+Synapse now attempts the versioned appservice paths before falling back to the
+[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
+Usage of the legacy routes should be considered deprecated.
+
+Additionally, Synapse has supported sending the application service access token
+via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization)
+since v1.70.0. For backwards compatibility it is *also* sent as the `access_token`
+query parameter. This is insecure and should be considered deprecated.
+
+A future version of Synapse (v1.88.0 or later) will remove support for legacy
+application service routes and query parameter authorization.
+
+
+Features
+--------
+
+- Add the ability to enable/disable registrations when in the OIDC flow. ([\#14978](https://github.com/matrix-org/synapse/issues/14978))
+- Add a primitive helper script for listing worker endpoints. ([\#15243](https://github.com/matrix-org/synapse/issues/15243))
+- Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). ([\#15314](https://github.com/matrix-org/synapse/issues/15314), [\#15321](https://github.com/matrix-org/synapse/issues/15321))
+- Allow loading `/password_policy` endpoint on workers. ([\#15331](https://github.com/matrix-org/synapse/issues/15331))
+- Add experimental support for Unix sockets. Contributed by Jason Little. ([\#15353](https://github.com/matrix-org/synapse/issues/15353))
+- Build Debian packages for Ubuntu 23.04 (Lunar Lobster). ([\#15381](https://github.com/matrix-org/synapse/issues/15381))
+
+
+Bugfixes
+--------
+
+- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled. ([\#15295](https://github.com/matrix-org/synapse/issues/15295))
+- Fix a bug introduced in Synapse v1.55.0 which could delay remote homeservers being able to decrypt encrypted messages sent by local users. ([\#15297](https://github.com/matrix-org/synapse/issues/15297))
+- Add a check to [SQLite port_db script](https://matrix-org.github.io/synapse/latest/postgres.html#porting-from-sqlite)
+  to ensure that the sqlite database passed to the script exists before trying to port from it. ([\#15306](https://github.com/matrix-org/synapse/issues/15306))
+- Fix a bug introduced in Synapse 1.76.0 where responses from worker deployments could include an internal `_INT_STREAM_POS` key. ([\#15309](https://github.com/matrix-org/synapse/issues/15309))
+- Fix a long-standing bug that Synapse only used the [legacy appservice routes](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes). ([\#15317](https://github.com/matrix-org/synapse/issues/15317))
+- Fix a long-standing bug preventing users from rejoining rooms after being banned and unbanned over federation. Contributed by Nico. ([\#15323](https://github.com/matrix-org/synapse/issues/15323))
+- Fix bug in worker mode where on a rolling restart of workers the "typing" worker would consume 100% CPU until it got restarted. ([\#15332](https://github.com/matrix-org/synapse/issues/15332))
+- Fix a long-standing bug where some to_device messages could be dropped when using workers. ([\#15349](https://github.com/matrix-org/synapse/issues/15349))
+- Fix a bug introduced in Synapse 1.70.0 where the background sync from a faster join could spin for hours when one of the events involved had been marked for backoff. ([\#15351](https://github.com/matrix-org/synapse/issues/15351))
+- Fix missing app variable in mail subject for password resets. Contributed by Cyberes. ([\#15352](https://github.com/matrix-org/synapse/issues/15352))
+- Fix a rare bug introduced in Synapse 1.66.0 where initial syncs would fail when the user had been kicked from a faster joined room that had not finished syncing. ([\#15383](https://github.com/matrix-org/synapse/issues/15383))
+
+
+Improved Documentation
+----------------------
+
+- Fix a typo in login requests ratelimit defaults. ([\#15341](https://github.com/matrix-org/synapse/issues/15341))
+- Add some clarification to the doc/comments regarding TCP replication. ([\#15354](https://github.com/matrix-org/synapse/issues/15354))
+- Note that Synapse 1.74 queued a rebuild of the user directory tables. ([\#15386](https://github.com/matrix-org/synapse/issues/15386))
+
+
+Internal Changes
+----------------
+
+- Use `immutabledict` instead of `frozendict`. ([\#15113](https://github.com/matrix-org/synapse/issues/15113))
+- Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. ([\#15265](https://github.com/matrix-org/synapse/issues/15265), [\#15336](https://github.com/matrix-org/synapse/issues/15336))
+- Make the pushers rely on the `device_id` instead of the `access_token_id` for various operations. ([\#15280](https://github.com/matrix-org/synapse/issues/15280))
+- Bump sentry-sdk from 1.15.0 to 1.17.0. ([\#15285](https://github.com/matrix-org/synapse/issues/15285))
+- Allow running the Twisted trunk job against other branches. ([\#15302](https://github.com/matrix-org/synapse/issues/15302))
+- Remind the releaser to ask for changelog feedback in [#synapse-dev](https://matrix.to/#/#synapse-dev:matrix.org). ([\#15303](https://github.com/matrix-org/synapse/issues/15303))
+- Bump dtolnay/rust-toolchain from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d. ([\#15304](https://github.com/matrix-org/synapse/issues/15304))
+- Reject events with an invalid "mentions" property per [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). ([\#15311](https://github.com/matrix-org/synapse/issues/15311))
+- As an optimisation, use `TRUNCATE` on Postgres when clearing the user directory tables. ([\#15316](https://github.com/matrix-org/synapse/issues/15316))
+- Fix `.gitignore` rule for the Complement source tarball downloaded automatically by `complement.sh`. ([\#15319](https://github.com/matrix-org/synapse/issues/15319))
+- Bump serde from 1.0.157 to 1.0.158. ([\#15324](https://github.com/matrix-org/synapse/issues/15324))
+- Bump regex from 1.7.1 to 1.7.3. ([\#15325](https://github.com/matrix-org/synapse/issues/15325))
+- Bump types-pyopenssl from 23.0.0.4 to 23.1.0.0. ([\#15326](https://github.com/matrix-org/synapse/issues/15326))
+- Bump furo from 2022.12.7 to 2023.3.23. ([\#15327](https://github.com/matrix-org/synapse/issues/15327))
+- Bump ruff from 0.0.252 to 0.0.259. ([\#15328](https://github.com/matrix-org/synapse/issues/15328))
+- Bump cryptography from 40.0.0 to 40.0.1. ([\#15329](https://github.com/matrix-org/synapse/issues/15329))
+- Bump mypy-zope from 0.9.0 to 0.9.1. ([\#15330](https://github.com/matrix-org/synapse/issues/15330))
+- Speed up unit tests when using SQLite3. ([\#15334](https://github.com/matrix-org/synapse/issues/15334))
+- Speed up pydantic CI job. ([\#15339](https://github.com/matrix-org/synapse/issues/15339))
+- Speed up sample config CI job. ([\#15340](https://github.com/matrix-org/synapse/issues/15340))
+- Fix copyright year in SSO footer template. ([\#15358](https://github.com/matrix-org/synapse/issues/15358))
+- Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3. ([\#15369](https://github.com/matrix-org/synapse/issues/15369))
+- Bump serde from 1.0.158 to 1.0.159. ([\#15370](https://github.com/matrix-org/synapse/issues/15370))
+- Bump serde_json from 1.0.94 to 1.0.95. ([\#15371](https://github.com/matrix-org/synapse/issues/15371))
+- Speed up membership queries for users with forgotten rooms. ([\#15385](https://github.com/matrix-org/synapse/issues/15385))
+
+
 Synapse 1.80.0 (2023-03-28)
 ===========================

@@ -394,7 +476,7 @@ Those who are `poetry install`ing from source using our lockfile should ensure t
 Notes on faster joins
 ---------------------

-The faster joins project sees the most benefit when joining a room with a large number of members (joined or historical). We expect it to be particularly useful for joining large public rooms like the [Matrix HQ](https://matrix.to/#/#matrix:matrix.org) or [Synapse Admins](https://matrix.to/#/#synapse:matrix.org) rooms.
+The faster joins project sees the most benefit when joining a room with a large number of members (joined or historical). We expect it to be particularly useful for joining large public rooms like the [Matrix HQ](https://matrix.to/#/#matrix:matrix.org) or [Synapse Admins](https://matrix.to/#/#synapse:matrix.org) rooms.

 After a faster join, Synapse considers that room "partially joined". In this state, you should be able to
Cargo.lock (generated): 26 lines changed
@@ -294,9 +294,9 @@ dependencies = [

 [[package]]
 name = "regex"
-version = "1.7.1"
+version = "1.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
+checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -305,9 +305,9 @@ dependencies = [

 [[package]]
 name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"

 [[package]]
 name = "ryu"
@@ -323,29 +323,29 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

 [[package]]
 name = "serde"
-version = "1.0.157"
+version = "1.0.159"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "707de5fcf5df2b5788fca98dd7eab490bc2fd9b7ef1404defc462833b83f25ca"
+checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065"
 dependencies = [
  "serde_derive",
 ]

 [[package]]
 name = "serde_derive"
-version = "1.0.157"
+version = "1.0.159"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78997f4555c22a7971214540c4a661291970619afd56de19f77e0de86296e1e5"
+checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.2",
+ "syn 2.0.10",
 ]

 [[package]]
 name = "serde_json"
-version = "1.0.94"
+version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea"
+checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744"
 dependencies = [
  "itoa",
  "ryu",
@@ -377,9 +377,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.2"
+version = "2.0.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59d3276aee1fa0c33612917969b5172b5be2db051232a6e4826f1a1a9191b045"
+checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40"
 dependencies = [
  "proc-macro2",
  "quote",
debian/changelog (vendored): 6 lines changed
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.81.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Apr 2023 14:29:03 +0100
+
 matrix-synapse-py3 (1.80.0) stable; urgency=medium

   * New Synapse release 1.80.0.
dev-docs/Makefile (new file): 20 lines
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
dev-docs/conf.py (new file): 50 lines
@@ -0,0 +1,50 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = "Synapse development"
+copyright = "2023, The Matrix.org Foundation C.I.C."
+author = "The Synapse Maintainers and Community"
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = [
+    "autodoc2",
+    "myst_parser",
+]
+
+templates_path = ["_templates"]
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+
+
+# -- Options for Autodoc2 ----------------------------------------------------
+
+autodoc2_docstring_parser_regexes = [
+    # this will render all docstrings as 'MyST' Markdown
+    (r".*", "myst"),
+]
+
+autodoc2_packages = [
+    {
+        "path": "../synapse",
+        # Don't render documentation for everything as a matter of course
+        "auto_mode": False,
+    },
+]
+
+
+# -- Options for MyST (Markdown) ---------------------------------------------
+
+# myst_heading_anchors = 2
+
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = "furo"
+html_static_path = ["_static"]
dev-docs/index.rst (new file): 22 lines
@@ -0,0 +1,22 @@
+.. Synapse Developer Documentation documentation master file, created by
+   sphinx-quickstart on Mon Mar 13 08:59:51 2023.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to the Synapse Developer Documentation!
+===========================================================
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   modules/federation_sender
+
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
dev-docs/modules/federation_sender.md (new file): 5 lines
@@ -0,0 +1,5 @@
+Federation Sender
+=================
+
+```{autodoc2-docstring} synapse.federation.sender
+```
@@ -172,6 +172,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
         "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
         "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
+        "^/_matrix/client/(r0|v3|unstable)/password_policy$",
     ],
     "shared_extra_conf": {},
     "worker_extra_conf": "",
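For illustration, a minimal sketch (not Synapse's actual request routing; the helper name is hypothetical) of how endpoint patterns like the ones above are matched against an incoming request path:

```python
import re

# Patterns copied from the worker config above; the last one is the new entry.
CLIENT_READER_PATTERNS = [
    "^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
    "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
    "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
    "^/_matrix/client/(r0|v3|unstable)/password_policy$",
]

def worker_handles(path: str) -> bool:
    # re.match anchors at the start; the patterns carry their own ^/$.
    return any(re.match(pattern, path) for pattern in CLIENT_READER_PATTERNS)

assert worker_handles("/_matrix/client/v3/password_policy")
assert not worker_handles("/_matrix/client/v3/login")
```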
@@ -25,7 +25,7 @@ position of all streams. The server then periodically sends `RDATA` commands
 which have the format `RDATA <stream_name> <instance_name> <token> <row>`, where
 the format of `<row>` is defined by the individual streams. The
 `<instance_name>` is the name of the Synapse process that generated the data
-(usually "master").
+(usually "master"). We expect an RDATA for every row in the DB.

 Error reporting happens by either the client or server sending an ERROR
 command, and usually the connection will be closed.
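As a concrete illustration of that wire format, here is a hypothetical parser; Synapse's real implementation lives in its replication command classes, and the example row is made up:

```python
import json

def parse_rdata(line: str):
    # RDATA <stream_name> <instance_name> <token> <row>
    command, stream_name, instance_name, token, row = line.split(" ", 4)
    assert command == "RDATA"
    # The row format is stream-specific; JSON is assumed here for the example.
    return stream_name, instance_name, token, json.loads(row)

# e.g. a typing-stream row generated by the main process at token 59:
print(parse_rdata('RDATA typing master 59 ["!room:example.org", ["@user:example.org"]]'))
```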
@@ -107,7 +107,7 @@ reconnect, following the steps above.
 If the server sends messages faster than the client can consume them the
 server will first buffer a (fairly large) number of commands and then
 disconnect the client. This ensures that we don't queue up an unbounded
-number of commands in memory and gives us a potential oppurtunity to
+number of commands in memory and gives us a potential opportunity to
 squawk loudly. When/if the client recovers it can reconnect to the
 server and ask for missed messages.

@@ -122,7 +122,7 @@ since these include tokens which can be used to restart the stream on
 connection errors.

 The client should keep track of the token in the last RDATA command
-received for each stream so that on reconneciton it can start streaming
+received for each stream so that on reconnection it can start streaming
 from the correct place. Note: not all RDATA have valid tokens due to
 batching. See `RdataCommand` for more details.
@@ -188,7 +188,8 @@ client (C):
 Two positions are included, the "new" position and the last position sent respectively.
 This allows servers to tell instances that the positions have advanced but no
 data has been written, without clients needlessly checking to see if they
-have missed any updates.
+have missed any updates. Instances will only fetch stuff if there is a gap between
+their current position and the given last position.

 #### ERROR (S, C)
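A hedged sketch of that gap check (the names are illustrative, not Synapse's):

```python
def should_fetch_missing_rows(current_token: int, prev_token: int) -> bool:
    # `prev_token` is the "last position sent" advertised in POSITION;
    # `current_token` is what this instance has already processed.
    return prev_token > current_token

assert should_fetch_missing_rows(current_token=10, prev_token=12)      # rows 11-12 were missed
assert not should_fetch_missing_rows(current_token=12, prev_token=12)  # nothing missed; just advance
```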
@@ -88,6 +88,22 @@ process, for example:
     dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
 ```

+# Upgrading to v1.81.0
+
+## Application service path & authentication deprecations
+
+Synapse now attempts the versioned appservice paths before falling back to the
+[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
+Usage of the legacy routes should be considered deprecated.
+
+Additionally, Synapse has supported sending the application service access token
+via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization)
+since v1.70.0. For backwards compatibility it is *also* sent as the `access_token`
+query parameter. This is insecure and should be considered deprecated.
+
+A future version of Synapse (v1.88.0 or later) will remove support for legacy
+application service routes and query parameter authorization.
+
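To make the two deprecations concrete, here is an illustrative sketch (not Synapse's code; the host and token are placeholders) of a homeserver-to-appservice transaction using the versioned path and header authentication versus the legacy style:

```python
import requests

HS_TOKEN = "hs_token_from_registration"  # placeholder

# Deprecated: legacy (unprefixed) path plus token in the query string,
# where it can leak into logs and proxies.
requests.put(
    "https://appservice.example.com/transactions/1",
    params={"access_token": HS_TOKEN},
    json={"events": []},
)

# Preferred: versioned path plus the Authorization header.
requests.put(
    "https://appservice.example.com/_matrix/app/v1/transactions/1",
    headers={"Authorization": f"Bearer {HS_TOKEN}"},
    json={"events": []},
)
```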
 # Upgrading to v1.80.0

 ## Reporting events error code change

@@ -183,6 +199,17 @@ Docker images and Debian packages need nothing specific as they already
 include or specify ICU as an explicit dependency.

+## User directory rebuild
+
+Synapse 1.74 queues a background update
+[to rebuild the user directory](https://github.com/matrix-org/synapse/pull/14643),
+in order to fix missing or erroneous entries.
+
+When this update begins, the user directory will be cleared out and rebuilt from
+scratch. User directory lookups will be incomplete until the rebuild completes.
+Admins can monitor the rebuild's progress by using the
+[Background update Admin API](usage/administration/admin_api/background_updates.md#status).
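For example, the status can be polled roughly like this (a sketch against the Admin API page linked above; the endpoint shape may vary between versions and requires an admin access token):

```python
import requests

resp = requests.get(
    "https://homeserver.example.com/_synapse/admin/v1/background_updates/status",
    headers={"Authorization": "Bearer <admin_access_token>"},
)
print(resp.json())  # shows whether updates are enabled and which one is running
```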

 # Upgrading to v1.73.0

 ## Legacy Prometheus metric names have now been removed
@@ -1521,7 +1521,7 @@ This option specifies several limits for login:
   address. Defaults to `per_second: 0.003`, `burst_count: 5`.

 * `account` ratelimits login requests based on the account the
-  client is attempting to log into. Defaults to `per_second: 0.03`,
+  client is attempting to log into. Defaults to `per_second: 0.003`,
   `burst_count: 5`.

 * `failed_attempts` ratelimits login requests based on the account the
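As a rough illustration of these two knobs (a hedged token-bucket sketch, not Synapse's ratelimiter): `burst_count: 5` allows five immediate attempts, after which capacity refills at `per_second: 0.003`, i.e. about one further attempt every five and a half minutes.

```python
import time

class TokenBucket:
    """Minimal token bucket mirroring per_second/burst_count semantics."""

    def __init__(self, per_second: float, burst_count: int):
        self.rate = per_second
        self.capacity = burst_count
        self.tokens = float(burst_count)
        self.last = time.monotonic()

    def allow(self) -> bool:
        now = time.monotonic()
        # Refill proportionally to elapsed time, capped at the burst size.
        self.tokens = min(self.capacity, self.tokens + (now - self.last) * self.rate)
        self.last = now
        if self.tokens >= 1.0:
            self.tokens -= 1.0
            return True
        return False

account_limiter = TokenBucket(per_second=0.003, burst_count=5)
assert all(account_limiter.allow() for _ in range(5))  # burst is allowed
assert not account_limiter.allow()                     # sixth attempt is limited
```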
@@ -3100,6 +3100,11 @@ Options for each entry include:
   match a pre-existing account instead of failing. This could be used if
   switching from password logins to OIDC. Defaults to false.

+* `enable_registration`: set to 'false' to disable automatic registration of new
+  users. This allows the OIDC SSO flow to be limited to sign in only, rather than
+  automatically registering users that have a valid SSO login but do not have
+  a pre-registered account. Defaults to true.
+
 * `user_mapping_provider`: Configuration for how attributes returned from a OIDC
   provider are mapped onto a matrix user. This setting has the following
   sub-properties:

@@ -3216,6 +3221,7 @@ oidc_providers:
     userinfo_endpoint: "https://accounts.example.com/userinfo"
     jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
     skip_verification: true
+    enable_registration: true
     user_mapping_provider:
       config:
         subject_claim: "id"
@@ -247,6 +247,7 @@ information.
     ^/_matrix/client/(r0|v3|unstable)/register$
     ^/_matrix/client/(r0|v3|unstable)/register/available$
     ^/_matrix/client/v1/register/m.login.registration_token/validity$
+    ^/_matrix/client/(r0|v3|unstable)/password_policy$

     # Event sending requests
     ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/redact
poetry.lock (generated): 1648 lines changed (file diff suppressed because it is too large)
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"

 [tool.poetry]
 name = "matrix-synapse"
-version = "1.80.0"
+version = "1.81.0rc1"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"

@@ -153,15 +153,13 @@ python = "^3.7.1"
 # ----------------------
 # we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
 jsonschema = ">=3.0.0"
-# frozendict 2.1.2 is broken on Debian 10: https://github.com/Marco-Sulla/python-frozendict/issues/41
-# We cannot test our wheels against the 2.3.5 release in CI. Putting in an upper bound for this
-# because frozendict has been more trouble than it's worth; we would like to move to immutabledict.
-frozendict = ">=1,!=2.1.2,<2.3.5"
+# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
+# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
+immutabledict = ">=2.0"
 # We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
 unpaddedbase64 = ">=2.1.0"
-# We require 1.5.0 to work around an issue when running against the C implementation of
-# frozendict: https://github.com/matrix-org/python-canonicaljson/issues/36
-canonicaljson = "^1.5.0"
+# We require 2.0.0 for immutabledict support.
+canonicaljson = "^2.0.0"
 # we use the type definitions added in signedjson 1.1.
 signedjson = "^1.1.0"
 # validating SSL certs for IP addresses requires service_identity 18.1.
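For a quick sense of the replacement type (an illustration; see the immutabledict project for the actual API guarantees):

```python
from immutabledict import immutabledict

d = immutabledict({"event_id": "$abc", "depth": 1})
assert d["depth"] == 1

try:
    d["depth"] = 2  # type: ignore[index]
except TypeError:
    pass  # item assignment is rejected, unlike a plain dict
```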
@@ -313,7 +311,7 @@ all = [
 # We pin black so that our tests don't start failing on new releases.
 isort = ">=5.10.1"
 black = ">=22.3.0"
-ruff = "0.0.252"
+ruff = "0.0.259"

 # Typechecking
 mypy = "*"

@@ -352,6 +350,18 @@ towncrier = ">=18.6.0rc1"
 # Used for checking the Poetry lockfile
 tomli = ">=1.2.3"

+
+# Dependencies for building the development documentation
+[tool.poetry.group.dev-docs]
+optional = true
+
+[tool.poetry.group.dev-docs.dependencies]
+sphinx = {version = "^6.1", python = "^3.8"}
+sphinx-autodoc2 = {version = "^0.4.2", python = "^3.8"}
+myst-parser = {version = "^1.0.0", python = "^3.8"}
+furo = ">=2022.12.7,<2024.0.0"
+
+
 [build-system]
 # The upper bounds here are defensive, intended to prevent situations like
 # #13849 and #14079 where we see buildtime or runtime errors caused by build
requirements.txt: 569 lines changed
@ -32,10 +32,10 @@ bcrypt==4.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
|
||||
bleach==6.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414 \
|
||||
--hash=sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4
|
||||
canonicaljson==1.6.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:68dfc157b011e07d94bf74b5d4ccc01958584ed942d9dfd5fdd706609e81cd4b \
|
||||
--hash=sha256:806ea6f2cbb7405d20259e1c36dd1214ba5c242fa9165f5bd0bf2081f82c23fb
|
||||
certifi==2022.12.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
canonicaljson==2.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:c38a315de3b5a0532f1ec1f9153cd3d716abfc565a558d00a4835428a34fca5b \
|
||||
--hash=sha256:e2fdaef1d7fadc5d9cb59bd3d0d41b064ddda697809ac4325dced721d12f113f
|
||||
certifi==2022.12.7 ; python_full_version >= "3.7.1" and python_version < "4" \
|
||||
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
|
||||
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
|
||||
cffi==1.15.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
@ -103,54 +103,105 @@ cffi==1.15.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
|
||||
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
|
||||
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
|
||||
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
|
||||
charset-normalizer==2.0.12 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
|
||||
--hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
|
||||
charset-normalizer==3.1.0 ; python_full_version >= "3.7.1" and python_version < "4" \
|
||||
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
|
||||
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
|
||||
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
|
||||
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
|
||||
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
|
||||
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
|
||||
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
|
||||
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
|
||||
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
|
||||
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
|
||||
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
|
||||
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
|
||||
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
|
||||
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
|
||||
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
|
||||
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
|
||||
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
|
||||
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
|
||||
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
|
||||
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
|
||||
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
|
||||
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
|
||||
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
|
||||
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
|
||||
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
|
||||
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
|
||||
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
|
||||
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
|
||||
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
|
||||
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
|
||||
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
|
||||
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
|
||||
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
|
||||
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
|
||||
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
|
||||
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
|
||||
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
|
||||
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
|
||||
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
|
||||
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
|
||||
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
|
||||
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
|
||||
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
|
||||
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
|
||||
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
|
||||
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
|
||||
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
|
||||
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
|
||||
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
|
||||
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
|
||||
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
|
||||
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
|
||||
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
|
||||
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
|
||||
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
|
||||
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
|
||||
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
|
||||
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
|
||||
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
|
||||
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
|
||||
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
|
||||
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
|
||||
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
|
||||
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
|
||||
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
|
||||
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
|
||||
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
|
||||
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
|
||||
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
|
||||
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
|
||||
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
|
||||
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
|
||||
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
|
||||
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
|
||||
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
|
||||
constantly==15.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35 \
|
||||
--hash=sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d
|
||||
cryptography==39.0.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1 \
|
||||
--hash=sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7 \
|
||||
--hash=sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06 \
|
||||
--hash=sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84 \
|
||||
--hash=sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915 \
|
||||
--hash=sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074 \
|
||||
--hash=sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5 \
|
||||
--hash=sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3 \
|
||||
--hash=sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9 \
|
||||
--hash=sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3 \
|
||||
--hash=sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011 \
|
||||
--hash=sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536 \
|
||||
--hash=sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a \
|
||||
--hash=sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f \
|
||||
--hash=sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480 \
|
||||
--hash=sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac \
|
||||
--hash=sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0 \
|
||||
--hash=sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108 \
|
||||
--hash=sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828 \
|
||||
--hash=sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354 \
|
||||
--hash=sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612 \
|
||||
--hash=sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3 \
|
||||
--hash=sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97
|
||||
frozendict==2.3.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:15b4b18346259392b0d27598f240e9390fafbff882137a9c48a1e0104fb17f78 \
|
||||
--hash=sha256:25a6d2e8b7cf6b6e5677a1a4b53b4073e5d9ec640d1db30dc679627668d25e90 \
|
||||
--hash=sha256:389f395a74eb16992217ac1521e689c1dea2d70113bcb18714669ace1ed623b9 \
|
||||
--hash=sha256:3d8042b7dab5e992e30889c9b71b781d5feef19b372d47d735e4d7d45846fd4a \
|
||||
--hash=sha256:3e93aebc6e69a8ef329bbe9afb8342bd33c7b5c7a0c480cb9f7e60b0cbe48072 \
|
||||
--hash=sha256:3ec86ebf143dd685184215c27ec416c36e0ba1b80d81b1b9482f7d380c049b4e \
|
||||
--hash=sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc \
|
||||
--hash=sha256:5809e6ff6b7257043a486f7a3b73a7da71cf69a38980b4171e4741291d0d9eb3 \
|
||||
--hash=sha256:7c550ed7fdf1962984bec21630c584d722b3ee5d5f57a0ae2527a0121dc0414a \
|
||||
--hash=sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782 \
|
||||
--hash=sha256:95bac22f7f09d81f378f2b3f672b7a50a974ca180feae1507f5e21bc147e8bc8 \
|
||||
--hash=sha256:aca59108b77cadc13ba7dfea7e8f50811208c7652a13dc6c7f92d7782a24d299 \
|
||||
--hash=sha256:b98a0d65a59af6da03f794f90b0c3085a7ee14e7bf8f0ef36b079ee8aa992439 \
|
||||
--hash=sha256:ccb6450a416c9cc9acef7683e637e28356e3ceeabf83521f74cc2718883076b7 \
|
||||
--hash=sha256:d722f3d89db6ae35ef35ecc243c40c800eb344848c83dba4798353312cd37b15 \
|
||||
--hash=sha256:dae686722c144b333c4dbdc16323a5de11406d26b76d2be1cc175f90afacb5ba \
|
||||
--hash=sha256:dbbe1339ac2646523e0bb00d1896085d1f70de23780e4927ca82b36ab8a044d3
|
||||
cryptography==40.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405 \
|
||||
--hash=sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356 \
|
||||
--hash=sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472 \
|
||||
--hash=sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41 \
|
||||
--hash=sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a \
|
||||
--hash=sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88 \
|
||||
--hash=sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554 \
|
||||
--hash=sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db \
|
||||
--hash=sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778 \
|
||||
--hash=sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c \
|
||||
--hash=sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917 \
|
||||
--hash=sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797 \
|
||||
--hash=sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160 \
|
||||
--hash=sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c \
|
||||
--hash=sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c \
|
||||
    --hash=sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2 \
    --hash=sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4 \
    --hash=sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122 \
    --hash=sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94
hiredis==2.2.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:01e2e588392b5fdcc3a6aa0eb62a2eb2a142f829082fa4c3354228029d3aa1ce \
    --hash=sha256:02b9f928dc6cd43ed0f0ffc1c75fb209fb180f004b7e2e19994805f998d247aa \
@ -244,7 +295,7 @@ hiredis==2.2.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0
hyperlink==21.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \
    --hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4
idna==3.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
idna==3.4 ; python_full_version >= "3.7.1" and python_version < "4" \
    --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
    --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
ijson==3.2.0.post0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
@ -326,15 +377,18 @@ ijson==3.2.0.post0 ; python_full_version >= "3.7.1" and python_full_version < "4
    --hash=sha256:f6785ba0f65eb64b1ce3b7fcfec101085faf98f4e77b234f14287fd4138ffb25 \
    --hash=sha256:fd218b338ac68213c997d4c88437c0e726f16d301616bf837e1468901934042c \
    --hash=sha256:fe7f414edd69dd9199b0dfffa0ada22f23d8009e10fe2a719e0993b7dcc2e6e2
importlib-metadata==6.0.0 ; python_full_version >= "3.7.1" and python_version < "3.8" \
    --hash=sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad \
    --hash=sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d
importlib-resources==5.4.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
    --hash=sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45 \
    --hash=sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b
incremental==21.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57 \
    --hash=sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321
immutabledict==2.2.3 ; python_full_version >= "3.7.1" and python_version < "4.0" \
    --hash=sha256:0e1e8a3f2b3ff062daa19795f947e9ec7a58add269d44e34d3ab4319e1343853 \
    --hash=sha256:a7b078ebcc4a58ddc73b55f808b26e7c8c2d5183fad325615112689e1a63e714
importlib-metadata==6.1.0 ; python_full_version >= "3.7.1" and python_version < "3.8" \
    --hash=sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20 \
    --hash=sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09
importlib-resources==5.12.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
    --hash=sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6 \
    --hash=sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a
incremental==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0 \
    --hash=sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51
jinja2==3.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
@ -419,47 +473,57 @@ lxml==4.9.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9 \
    --hash=sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f \
    --hash=sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c
markupsafe==2.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3 \
    --hash=sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8 \
    --hash=sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759 \
    --hash=sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed \
    --hash=sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989 \
    --hash=sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3 \
    --hash=sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a \
    --hash=sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c \
    --hash=sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c \
    --hash=sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8 \
    --hash=sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454 \
    --hash=sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad \
    --hash=sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d \
    --hash=sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635 \
    --hash=sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61 \
    --hash=sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea \
    --hash=sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49 \
    --hash=sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce \
    --hash=sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e \
    --hash=sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f \
    --hash=sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f \
    --hash=sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f \
    --hash=sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7 \
    --hash=sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a \
    --hash=sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7 \
    --hash=sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076 \
    --hash=sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb \
    --hash=sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7 \
    --hash=sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7 \
    --hash=sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c \
    --hash=sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26 \
    --hash=sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c \
    --hash=sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8 \
    --hash=sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448 \
    --hash=sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956 \
    --hash=sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05 \
    --hash=sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1 \
    --hash=sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357 \
    --hash=sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea \
    --hash=sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730
markupsafe==2.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
    --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
    --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
    --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
    --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
    --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
    --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
    --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
    --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
    --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
    --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
    --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
    --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
    --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
    --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
    --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
    --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
    --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
    --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
    --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
    --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
    --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
    --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
    --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
    --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
    --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
    --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
    --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
    --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
    --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
    --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
    --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
    --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
    --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
    --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
    --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
    --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
    --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
    --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
    --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
    --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
    --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
    --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
    --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
    --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
    --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
    --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
    --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
    --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
    --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
matrix-common==1.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:524e2785b9b03be4d15f3a8a6b857c5b6af68791ffb1b9918f0ad299abc4db20 \
    --hash=sha256:62e121cccd9f243417b57ec37a76dc44aeb198a7a5c67afd6b8275992ff2abd1
@ -536,9 +600,9 @@ packaging==23.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.
parameterized==0.8.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c \
    --hash=sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9
phonenumbers==8.13.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:2e3fd1f3fde226b289489275517c76edf223eafd9f43a2c2c36498a44b73d4b0 \
    --hash=sha256:6eb2faf29c19f946baf10f1c977a1f856cab90819fe7735b8e141d5407420c4a
phonenumbers==8.13.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:253bb0e01250d21a11f2b42b3e6e161b7f6cb2ac440e2e2a95c1da71d221ee1a \
    --hash=sha256:d3e3555b38c89b121f5b2e917847003bdd07027569d758d5f40156c01aeac089
pillow==9.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33 \
    --hash=sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b \
@ -650,43 +714,43 @@ pyasn1==0.4.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pycparser==2.21 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
    --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.6 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62 \
    --hash=sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35 \
    --hash=sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832 \
    --hash=sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a \
    --hash=sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06 \
    --hash=sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4 \
    --hash=sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6 \
    --hash=sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7 \
    --hash=sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f \
    --hash=sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70 \
    --hash=sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186 \
    --hash=sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd \
    --hash=sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d \
    --hash=sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c \
    --hash=sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f \
    --hash=sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65 \
    --hash=sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc \
    --hash=sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2 \
    --hash=sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d \
    --hash=sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a \
    --hash=sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7 \
    --hash=sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb \
    --hash=sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb \
    --hash=sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77 \
    --hash=sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7 \
    --hash=sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160 \
    --hash=sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0 \
    --hash=sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4 \
    --hash=sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d \
    --hash=sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084 \
    --hash=sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b \
    --hash=sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd \
    --hash=sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d \
    --hash=sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2 \
    --hash=sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31 \
    --hash=sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083
pydantic==1.10.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e \
    --hash=sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6 \
    --hash=sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd \
    --hash=sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca \
    --hash=sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b \
    --hash=sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a \
    --hash=sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245 \
    --hash=sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d \
    --hash=sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee \
    --hash=sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1 \
    --hash=sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3 \
    --hash=sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d \
    --hash=sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5 \
    --hash=sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914 \
    --hash=sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd \
    --hash=sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1 \
    --hash=sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e \
    --hash=sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e \
    --hash=sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a \
    --hash=sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd \
    --hash=sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f \
    --hash=sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209 \
    --hash=sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d \
    --hash=sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a \
    --hash=sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143 \
    --hash=sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918 \
    --hash=sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52 \
    --hash=sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e \
    --hash=sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f \
    --hash=sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e \
    --hash=sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb \
    --hash=sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe \
    --hash=sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe \
    --hash=sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d \
    --hash=sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209 \
    --hash=sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af
pymacaroons==0.13.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8 \
    --hash=sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907
@ -701,31 +765,37 @@ pynacl==1.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
    --hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \
    --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \
    --hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543
pyopenssl==23.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:c1cc5f86bcacefc84dada7d31175cae1b1518d5f60d3d0bb595a67822a868a6f \
    --hash=sha256:df5fc28af899e74e19fccb5510df423581047e10ab6f1f4ba1763ff5fde844c0
pyrsistent==0.18.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c \
    --hash=sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc \
    --hash=sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e \
    --hash=sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26 \
    --hash=sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec \
    --hash=sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286 \
    --hash=sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045 \
    --hash=sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec \
    --hash=sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8 \
    --hash=sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c \
    --hash=sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca \
    --hash=sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22 \
    --hash=sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a \
    --hash=sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96 \
    --hash=sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc \
    --hash=sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1 \
    --hash=sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07 \
    --hash=sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6 \
    --hash=sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b \
    --hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \
    --hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6
pyopenssl==23.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:8cb78010a1eb2c8e24b851693b7b04dfe9b1dc0a5ab3843927b10a85b1dfbb2e \
    --hash=sha256:fb96e936866ad65662c22d0de84ca0fba58397893cdfe0f01334fa93382af23c
pyrsistent==0.19.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8 \
    --hash=sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440 \
    --hash=sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a \
    --hash=sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c \
    --hash=sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3 \
    --hash=sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393 \
    --hash=sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9 \
    --hash=sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da \
    --hash=sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf \
    --hash=sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64 \
    --hash=sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a \
    --hash=sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3 \
    --hash=sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98 \
    --hash=sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2 \
    --hash=sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8 \
    --hash=sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf \
    --hash=sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc \
    --hash=sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7 \
    --hash=sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28 \
    --hash=sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2 \
    --hash=sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b \
    --hash=sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a \
    --hash=sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64 \
    --hash=sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19 \
    --hash=sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1 \
    --hash=sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9 \
    --hash=sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c
pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
    --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
@ -767,9 +837,9 @@ pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
    --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
    --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
requests==2.27.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \
    --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d
requests==2.28.2 ; python_full_version >= "3.7.1" and python_version < "4" \
    --hash=sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa \
    --hash=sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf
semantic-version==2.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c \
    --hash=sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177
@ -779,74 +849,12 @@ service-identity==21.1.0 ; python_full_version >= "3.7.1" and python_full_versio
setuptools-rust==1.5.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206 \
    --hash=sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7
setuptools==65.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \
    --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f
setuptools==67.6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077 \
    --hash=sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2
signedjson==1.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228 \
    --hash=sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492
simplejson==3.17.6 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce \
    --hash=sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1 \
    --hash=sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0 \
    --hash=sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf \
    --hash=sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a \
    --hash=sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe \
    --hash=sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d \
    --hash=sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94 \
    --hash=sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad \
    --hash=sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a \
    --hash=sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2 \
    --hash=sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc \
    --hash=sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81 \
    --hash=sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7 \
    --hash=sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837 \
    --hash=sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566 \
    --hash=sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3 \
    --hash=sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f \
    --hash=sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802 \
    --hash=sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e \
    --hash=sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9 \
    --hash=sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7 \
    --hash=sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a \
    --hash=sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044 \
    --hash=sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494 \
    --hash=sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51 \
    --hash=sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d \
    --hash=sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33 \
    --hash=sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27 \
    --hash=sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f \
    --hash=sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198 \
    --hash=sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70 \
    --hash=sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c \
    --hash=sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab \
    --hash=sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045 \
    --hash=sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5 \
    --hash=sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7 \
    --hash=sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882 \
    --hash=sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40 \
    --hash=sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198 \
    --hash=sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180 \
    --hash=sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a \
    --hash=sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda \
    --hash=sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2 \
    --hash=sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851 \
    --hash=sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe \
    --hash=sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a \
    --hash=sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211 \
    --hash=sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1 \
    --hash=sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6 \
    --hash=sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb \
    --hash=sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9 \
    --hash=sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed \
    --hash=sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd \
    --hash=sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd \
    --hash=sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6 \
    --hash=sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a \
    --hash=sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366 \
    --hash=sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81 \
    --hash=sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a \
    --hash=sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba
six==1.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
    --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
@ -884,64 +892,43 @@ typing-extensions==4.5.0 ; python_full_version >= "3.7.1" and python_full_versio
unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \
    --hash=sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6 \
    --hash=sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005
urllib3==1.26.12 ; python_full_version >= "3.7.1" and python_version < "4" \
    --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
    --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
urllib3==1.26.15 ; python_full_version >= "3.7.1" and python_version < "4" \
    --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \
    --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42
webencodings==0.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
    --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
zipp==3.7.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
    --hash=sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d \
    --hash=sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375
zope-interface==5.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192 \
    --hash=sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702 \
    --hash=sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09 \
    --hash=sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4 \
    --hash=sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a \
    --hash=sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3 \
    --hash=sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf \
    --hash=sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c \
    --hash=sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d \
    --hash=sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78 \
    --hash=sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83 \
    --hash=sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531 \
    --hash=sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46 \
    --hash=sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021 \
    --hash=sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94 \
    --hash=sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc \
    --hash=sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63 \
    --hash=sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54 \
    --hash=sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117 \
    --hash=sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25 \
    --hash=sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05 \
    --hash=sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e \
    --hash=sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1 \
    --hash=sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004 \
    --hash=sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2 \
    --hash=sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e \
    --hash=sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f \
    --hash=sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f \
    --hash=sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120 \
    --hash=sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f \
    --hash=sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1 \
    --hash=sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9 \
    --hash=sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e \
    --hash=sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7 \
    --hash=sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8 \
    --hash=sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b \
    --hash=sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155 \
    --hash=sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7 \
    --hash=sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c \
    --hash=sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325 \
    --hash=sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d \
    --hash=sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb \
    --hash=sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e \
    --hash=sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959 \
    --hash=sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7 \
    --hash=sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920 \
    --hash=sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e \
    --hash=sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48 \
    --hash=sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8 \
    --hash=sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4 \
    --hash=sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263
zipp==3.15.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
    --hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \
    --hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556
zope-interface==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373 \
    --hash=sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb \
    --hash=sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446 \
    --hash=sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8 \
    --hash=sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c \
    --hash=sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8 \
    --hash=sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2 \
    --hash=sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f \
    --hash=sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f \
    --hash=sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5 \
    --hash=sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85 \
    --hash=sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc \
    --hash=sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788 \
    --hash=sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518 \
    --hash=sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410 \
    --hash=sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464 \
    --hash=sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5 \
    --hash=sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d \
    --hash=sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52 \
    --hash=sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca \
    --hash=sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8 \
    --hash=sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2 \
    --hash=sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f \
    --hash=sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58 \
    --hash=sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a \
    --hash=sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d \
    --hash=sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28 \
    --hash=sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990 \
    --hash=sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995 \
    --hash=sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30

@ -28,6 +28,7 @@ DISTS = (
    "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04)
    "ubuntu:kinetic",  # 22.10 (EOL 2023-07-20)
    "ubuntu:lunar",  # 23.04 (EOL 2024-01)
)

DESC = """\

@ -91,6 +91,7 @@ else
    "synapse" "docker" "tests"
    "scripts-dev"
    "contrib" "synmark" "stubs" ".ci"
    "dev-docs"
  )
  fi
fi

@ -280,7 +280,7 @@ def _prepare() -> None:
    )

    print("Opening the changelog in your browser...")
    print("Please ask others to give it a check.")
    print("Please ask #synapse-dev to give it a check.")
    click.launch(
        f"https://github.com/matrix-org/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md"
    )

@ -1,39 +0,0 @@
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Stub for frozendict.

from __future__ import annotations

from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload

_KT = TypeVar("_KT", bound=Hashable)  # Key type.
_VT = TypeVar("_VT")  # Value type.

class frozendict(Mapping[_KT, _VT]):
    @overload
    def __init__(self, **kwargs: _VT) -> None: ...
    @overload
    def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
    @overload
    def __init__(
        self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
    ) -> None: ...
    def __getitem__(self, key: _KT) -> _VT: ...
    def __contains__(self, key: Any) -> bool: ...
    def copy(self, **add_or_replace: Any) -> frozendict: ...
    def __iter__(self) -> Iterator[_KT]: ...
    def __len__(self) -> int: ...
    def __repr__(self) -> str: ...
    def __hash__(self) -> int: ...
@ -17,9 +17,9 @@
""" This is an implementation of a Matrix homeserver.
"""

import json
import os
import sys
from typing import Any, Dict

from synapse.util.rust import check_rust_lib_up_to_date
from synapse.util.stringutils import strtobool
@ -61,11 +61,20 @@ try:
except ImportError:
    pass

# Use the standard library json implementation instead of simplejson.
# Teach canonicaljson how to serialise immutabledicts.
try:
    from canonicaljson import set_json_library
    from canonicaljson import register_preserialisation_callback
    from immutabledict import immutabledict

    set_json_library(json)
    def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]:
        try:
            return d._dict
        except Exception:
            # Paranoia: fall back to a `dict()` call, in case a future version of
            # immutabledict removes `_dict` from the implementation.
            return dict(d)

    register_preserialisation_callback(immutabledict, _immutabledict_cb)
except ImportError:
    pass

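For context, the effect of the preserialisation hook above can be seen in isolation. A minimal sketch, assuming canonicaljson >= 2.0 (where register_preserialisation_callback was introduced); the assertion follows from canonical JSON's sorted-key, no-whitespace encoding:

    from canonicaljson import encode_canonical_json, register_preserialisation_callback
    from immutabledict import immutabledict

    # Register a callback equivalent to the hunk above, then encode a value.
    register_preserialisation_callback(immutabledict, dict)
    assert encode_canonical_json(immutabledict({"b": 2, "a": 1})) == b'{"a":1,"b":2}'
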
302
synapse/_scripts/generate_workers_map.py
Executable file
@ -0,0 +1,302 @@
#!/usr/bin/env python
# Copyright 2022-2023 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Dict, Iterable, Optional, Pattern, Set, Tuple

import yaml

from synapse.config.homeserver import HomeServerConfig
from synapse.federation.transport.server import (
    TransportLayerServer,
    register_servlets as register_federation_servlets,
)
from synapse.http.server import HttpServer, ServletCallback
from synapse.rest import ClientRestResource
from synapse.rest.key.v2 import RemoteKey
from synapse.server import HomeServer
from synapse.storage import DataStore

logger = logging.getLogger("generate_workers_map")


class MockHomeserver(HomeServer):
    DATASTORE_CLASS = DataStore  # type: ignore

    def __init__(self, config: HomeServerConfig, worker_app: Optional[str]) -> None:
        super().__init__(config.server.server_name, config=config)
        self.config.worker.worker_app = worker_app


GROUP_PATTERN = re.compile(r"\(\?P<[^>]+?>(.+?)\)")


@dataclass
class EndpointDescription:
    """
    Describes an endpoint and how it should be routed.
    """

    # The servlet class that handles this endpoint
    servlet_class: object

    # The category of this endpoint. Is read from the `CATEGORY` constant in the servlet
    # class.
    category: Optional[str]

    # TODO:
    #  - does it need to be routed based on a stream writer config?
    #  - does it benefit from any optimised, but optional, routing?
    #  - what 'opinionated synapse worker class' (event_creator, synchrotron, etc) does
    #    it go in?


class EnumerationResource(HttpServer):
    """
    Accepts servlet registrations for the purposes of building up a description of
    all endpoints.
    """

    def __init__(self, is_worker: bool) -> None:
        self.registrations: Dict[Tuple[str, str], EndpointDescription] = {}
        self._is_worker = is_worker

    def register_paths(
        self,
        method: str,
        path_patterns: Iterable[Pattern],
        callback: ServletCallback,
        servlet_classname: str,
    ) -> None:
        # federation servlet callbacks are wrapped, so unwrap them.
        callback = getattr(callback, "__wrapped__", callback)

        # fish out the servlet class
        servlet_class = callback.__self__.__class__  # type: ignore

        if self._is_worker and method in getattr(
            servlet_class, "WORKERS_DENIED_METHODS", ()
        ):
            # This endpoint would cause an error if called on a worker, so pretend it
            # was never registered!
            return

        sd = EndpointDescription(
            servlet_class=servlet_class,
            category=getattr(servlet_class, "CATEGORY", None),
        )

        for pat in path_patterns:
            self.registrations[(method, pat.pattern)] = sd


def get_registered_paths_for_hs(
    hs: HomeServer,
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Given a homeserver, get all registered endpoints and their descriptions.
    """

    enumerator = EnumerationResource(is_worker=hs.config.worker.worker_app is not None)
    ClientRestResource.register_servlets(enumerator, hs)
    federation_server = TransportLayerServer(hs)

    # we can't use `federation_server.register_servlets` but this line does the
    # same thing, only it uses this enumerator
    register_federation_servlets(
        federation_server.hs,
        resource=enumerator,
        ratelimiter=federation_server.ratelimiter,
        authenticator=federation_server.authenticator,
        servlet_groups=federation_server.servlet_groups,
    )

    # the key server endpoints are separate again
    RemoteKey(hs).register(enumerator)

    return enumerator.registrations


def get_registered_paths_for_default(
    worker_app: Optional[str], base_config: HomeServerConfig
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Given the name of a worker application and a base homeserver configuration,
    returns:

        Dict from (method, path) to EndpointDescription

    TODO Don't require passing in a config
    """

    hs = MockHomeserver(base_config, worker_app)
    # TODO We only do this to avoid an error, but don't need the database etc
    hs.setup()
    return get_registered_paths_for_hs(hs)


def elide_http_methods_if_unconflicting(
    registrations: Dict[Tuple[str, str], EndpointDescription],
    all_possible_registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Elides HTTP methods (by replacing them with `*`) if all possible registered methods
    can be handled by the worker whose registration map is `registrations`.

    i.e. the only endpoints left with methods (other than `*`) should be the ones where
    the worker can't handle all possible methods for that path.
    """

    def paths_to_methods_dict(
        methods_and_paths: Iterable[Tuple[str, str]]
    ) -> Dict[str, Set[str]]:
        """
        Given (method, path) pairs, produces a dict from path to set of methods
        available at that path.
        """
        result: Dict[str, Set[str]] = {}
        for method, path in methods_and_paths:
            result.setdefault(path, set()).add(method)
        return result

    all_possible_reg_methods = paths_to_methods_dict(all_possible_registrations)
    reg_methods = paths_to_methods_dict(registrations)

    output = {}

    for path, handleable_methods in reg_methods.items():
        if handleable_methods == all_possible_reg_methods[path]:
            any_method = next(iter(handleable_methods))
            # TODO This assumes that all methods have the same servlet.
            #      I suppose that's possibly dubious?
            output[("*", path)] = registrations[(any_method, path)]
        else:
            for method in handleable_methods:
                output[(method, path)] = registrations[(method, path)]

    return output
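
# A worked example of the elision above (hypothetical endpoint data; the dict
# values would normally be EndpointDescription instances, but any value shows
# the shape):
#
#     worker = {("GET", "/sync"): d1, ("GET", "/send"): d2}
#     everything = {**worker, ("PUT", "/send"): d3}
#     elide_http_methods_if_unconflicting(worker, everything)
#     # -> {("*", "/sync"): d1, ("GET", "/send"): d2}
#
# "/sync" collapses to "*" because the worker handles every registered method
# for that path; "/send" keeps "GET" explicit because "PUT" is only handled
# elsewhere.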


def simplify_path_regexes(
    registrations: Dict[Tuple[str, str], EndpointDescription]
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Simplify all the path regexes for the dict of endpoint descriptions,
    so that we don't use the Python-specific regex extensions
    (and also to remove needlessly specific detail).
    """

    def simplify_path_regex(path: str) -> str:
        """
        Given a regex pattern, replaces all named capturing groups (e.g. `(?P<blah>xyz)`)
        with a simpler version available in more common regex dialects (e.g. `.*`).
        """

        # TODO it's hard to choose between these two;
        #      `.*` is a vague simplification
        # return GROUP_PATTERN.sub(r"\1", path)
        return GROUP_PATTERN.sub(r".*", path)

    return {(m, simplify_path_regex(p)): v for (m, p), v in registrations.items()}


def main() -> None:
    parser = argparse.ArgumentParser(
        description=(
            "Prints out a map of which endpoints can be handled by"
            " generic workers."
        )
    )
    parser.add_argument("-v", action="store_true")
    parser.add_argument(
        "--config-path",
        type=argparse.FileType("r"),
        required=True,
        help="Synapse configuration file",
    )

    args = parser.parse_args()

    # TODO
    # logging.basicConfig(**logging_config)

    # Load, process and sanity-check the config.
    hs_config = yaml.safe_load(args.config_path)

    config = HomeServerConfig()
    config.parse_config_dict(hs_config, "", "")

    master_paths = get_registered_paths_for_default(None, config)
    worker_paths = get_registered_paths_for_default(
        "synapse.app.generic_worker", config
    )

    all_paths = {**master_paths, **worker_paths}

    elided_worker_paths = elide_http_methods_if_unconflicting(worker_paths, all_paths)
    elide_http_methods_if_unconflicting(master_paths, all_paths)

    # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT

    categories_to_methods_and_paths: Dict[
        Optional[str], Dict[Tuple[str, str], EndpointDescription]
    ] = defaultdict(dict)

    for (method, path), desc in elided_worker_paths.items():
        categories_to_methods_and_paths[desc.category][method, path] = desc

    for category, contents in categories_to_methods_and_paths.items():
        print_category(category, contents)


def print_category(
    category_name: Optional[str],
    elided_worker_paths: Dict[Tuple[str, str], EndpointDescription],
) -> None:
    """
    Prints out a category, in documentation page style.

    Example:
    ```
    # Category name
    /path/xyz

    GET /path/abc
    ```
    """

    if category_name:
        print(f"# {category_name}")
    else:
        print("# (Uncategorised requests)")

    for ln in sorted(
        p for m, p in simplify_path_regexes(elided_worker_paths) if m == "*"
    ):
        print(ln)
    print()
    for ln in sorted(
        f"{m:6} {p}" for m, p in simplify_path_regexes(elided_worker_paths) if m != "*"
    ):
        print(ln)
    print()


if __name__ == "__main__":
    main()

@ -18,6 +18,7 @@
import argparse
import curses
import logging
import os
import sys
import time
import traceback
@ -67,7 +68,10 @@ from synapse.storage.databases.main.media_repository import (
    MediaRepositoryBackgroundUpdateStore,
)
from synapse.storage.databases.main.presence import PresenceBackgroundUpdateStore
from synapse.storage.databases.main.pusher import PusherWorkerStore
from synapse.storage.databases.main.pusher import (
    PusherBackgroundUpdatesStore,
    PusherWorkerStore,
)
from synapse.storage.databases.main.receipts import ReceiptsBackgroundUpdateStore
from synapse.storage.databases.main.registration import (
    RegistrationBackgroundUpdateStore,
@ -225,6 +229,7 @@ class Store(
    AccountDataWorkerStore,
    PushRuleStore,
    PusherWorkerStore,
    PusherBackgroundUpdatesStore,
    PresenceBackgroundUpdateStore,
    ReceiptsBackgroundUpdateStore,
    RelationsWorkerStore,
@ -1326,6 +1331,13 @@ def main() -> None:
        filename="port-synapse.log" if args.curses else None,
    )

    if not os.path.isfile(args.sqlite_database):
        sys.stderr.write(
            "The sqlite database you specified does not exist, please check that you have the "
            "correct path."
        )
        sys.exit(1)

    sqlite_config = {
        "name": "sqlite3",
        "args": {

@ -27,7 +27,7 @@ from synapse.util import json_decoder

if typing.TYPE_CHECKING:
    from synapse.config.homeserver import HomeServerConfig
    from synapse.types import JsonDict
    from synapse.types import JsonDict, StrCollection

logger = logging.getLogger(__name__)

@ -682,18 +682,27 @@ class FederationPullAttemptBackoffError(RuntimeError):
    Attributes:
        event_ids: The event IDs which we are refusing to pull
        message: A custom error message that gives more context
        retry_after_ms: The remaining backoff interval, in milliseconds
    """

    def __init__(self, event_ids: List[str], message: Optional[str]):
        self.event_ids = event_ids
    def __init__(
        self, event_ids: "StrCollection", message: Optional[str], retry_after_ms: int
    ):
        event_ids = list(event_ids)

        if message:
            error_message = message
        else:
            error_message = f"Not attempting to pull event_ids={self.event_ids} because we already tried to pull them recently (backing off)."
            error_message = (
                f"Not attempting to pull event_ids={event_ids} because we already "
                "tried to pull them recently (backing off)."
            )

        super().__init__(error_message)

        self.event_ids = event_ids
        self.retry_after_ms = retry_after_ms

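As an aside, a minimal sketch of how a caller might consume the new `retry_after_ms` attribute; the event ID is made up, and this assumes a Synapse installation where the class is importable.

```
from synapse.api.errors import FederationPullAttemptBackoffError

try:
    raise FederationPullAttemptBackoffError(
        event_ids=["$abc:example.org"], message=None, retry_after_ms=30_000
    )
except FederationPullAttemptBackoffError as e:
    # The remaining backoff can now be surfaced to callers, e.g. as Retry-After.
    wait_seconds = e.retry_after_ms / 1000
```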
class HttpResponseException(CodeMessageException):
    """

@ -41,7 +41,12 @@ from typing_extensions import ParamSpec

import twisted
from twisted.internet import defer, error, reactor as _reactor
from twisted.internet.interfaces import IOpenSSLContextFactory, IReactorSSL, IReactorTCP
from twisted.internet.interfaces import (
    IOpenSSLContextFactory,
    IReactorSSL,
    IReactorTCP,
    IReactorUNIX,
)
from twisted.internet.protocol import ServerFactory
from twisted.internet.tcp import Port
from twisted.logger import LoggingFile, LogLevel
@ -56,7 +61,7 @@ from synapse.app.phone_stats_home import start_phone_stats_home
from synapse.config import ConfigError
from synapse.config._base import format_config_error
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import ListenerConfig, ManholeConfig
from synapse.config.server import ListenerConfig, ManholeConfig, TCPListenerConfig
from synapse.crypto import context_factory
from synapse.events.presence_router import load_legacy_presence_router
from synapse.events.spamcheck import load_legacy_spam_checkers
@ -351,6 +356,28 @@ def listen_tcp(
    return r  # type: ignore[return-value]


def listen_unix(
    path: str,
    mode: int,
    factory: ServerFactory,
    reactor: IReactorUNIX = reactor,
    backlog: int = 50,
) -> List[Port]:
    """
    Create a UNIX socket for a given path and 'mode' permission

    Returns:
        list of twisted.internet.tcp.Port listening for TCP connections
    """
    wantPID = True

    return [
        # IReactorUNIX returns an object implementing IListeningPort from listenUNIX,
        # but we know it will be a Port instance.
        cast(Port, reactor.listenUNIX(path, factory, backlog, mode, wantPID))
    ]

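A hypothetical invocation of `listen_unix`, for orientation only. The socket path and factory below are invented; in Synapse the factory is the `SynapseSite` built by `listen_http`.

```
from twisted.internet.protocol import Factory, Protocol

factory = Factory.forProtocol(Protocol)  # stand-in for Synapse's SynapseSite
ports = listen_unix("/run/synapse/client.sock", 0o666, factory)
print(ports[0].getHost().name)  # the socket path, as a byte string
```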
def listen_http(
    listener_config: ListenerConfig,
    root_resource: Resource,
@ -359,18 +386,13 @@ def listen_http(
    context_factory: Optional[IOpenSSLContextFactory],
    reactor: ISynapseReactor = reactor,
) -> List[Port]:
    port = listener_config.port
    bind_addresses = listener_config.bind_addresses
    tls = listener_config.tls

    assert listener_config.http_options is not None

    site_tag = listener_config.http_options.tag
    if site_tag is None:
        site_tag = str(port)
    site_tag = listener_config.get_site_tag()

    site = SynapseSite(
        "synapse.access.%s.%s" % ("https" if tls else "http", site_tag),
        "synapse.access.%s.%s"
        % ("https" if listener_config.is_tls() else "http", site_tag),
        site_tag,
        listener_config,
        root_resource,
@ -378,25 +400,41 @@
        max_request_body_size=max_request_body_size,
        reactor=reactor,
    )
    if tls:
        # refresh_certificate should have been called before this.
        assert context_factory is not None
        ports = listen_ssl(
            bind_addresses,
            port,
            site,
            context_factory,
            reactor=reactor,
        )
        logger.info("Synapse now listening on TCP port %d (TLS)", port)

    if isinstance(listener_config, TCPListenerConfig):
        if listener_config.is_tls():
            # refresh_certificate should have been called before this.
            assert context_factory is not None
            ports = listen_ssl(
                listener_config.bind_addresses,
                listener_config.port,
                site,
                context_factory,
                reactor=reactor,
            )
            logger.info(
                "Synapse now listening on TCP port %d (TLS)", listener_config.port
            )
        else:
            ports = listen_tcp(
                listener_config.bind_addresses,
                listener_config.port,
                site,
                reactor=reactor,
            )
            logger.info("Synapse now listening on TCP port %d", listener_config.port)

    else:
        ports = listen_tcp(
            bind_addresses,
            port,
            site,
            reactor=reactor,
        ports = listen_unix(
            listener_config.path, listener_config.mode, site, reactor=reactor
        )
        logger.info("Synapse now listening on TCP port %d", port)
        # getHost() returns a UNIXAddress which contains an instance variable of 'name'
        # encoded as a byte string. Decode as utf-8 so it prints nicely.
        logger.info(
            "Synapse now listening on Unix Socket at: "
            f"{ports[0].getHost().name.decode('utf-8')}"
        )

    return ports

@ -38,7 +38,7 @@ from synapse.app._base import (
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.config.server import ListenerConfig
from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.server import JsonResource, OptionsResource
from synapse.logging.context import LoggingContext
@ -236,12 +236,18 @@ class GenericWorkerServer(HomeServer):
            if listener.type == "http":
                self._listen_http(listener)
            elif listener.type == "manhole":
                _base.listen_manhole(
                    listener.bind_addresses,
                    listener.port,
                    manhole_settings=self.config.server.manhole_settings,
                    manhole_globals={"hs": self},
                )
                if isinstance(listener, TCPListenerConfig):
                    _base.listen_manhole(
                        listener.bind_addresses,
                        listener.port,
                        manhole_settings=self.config.server.manhole_settings,
                        manhole_globals={"hs": self},
                    )
                else:
                    raise ConfigError(
                        "Can not use a unix socket for manhole at this time."
                    )

            elif listener.type == "metrics":
                if not self.config.metrics.enable_metrics:
                    logger.warning(
@ -249,10 +255,16 @@ class GenericWorkerServer(HomeServer):
                        "enable_metrics is not True!"
                    )
                else:
                    _base.listen_metrics(
                        listener.bind_addresses,
                        listener.port,
                    )
                    if isinstance(listener, TCPListenerConfig):
                        _base.listen_metrics(
                            listener.bind_addresses,
                            listener.port,
                        )
                    else:
                        raise ConfigError(
                            "Can not use a unix socket for metrics at this time."
                        )

            else:
                logger.warning("Unsupported listener type: %s", listener.type)

@ -44,7 +44,7 @@ from synapse.app._base import (
)
from synapse.config._base import ConfigError, format_config_error
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import ListenerConfig
from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.additional_resource import AdditionalResource
from synapse.http.server import (
@ -78,14 +78,13 @@ class SynapseHomeServer(HomeServer):
    DATASTORE_CLASS = DataStore  # type: ignore

    def _listener_http(
        self, config: HomeServerConfig, listener_config: ListenerConfig
        self,
        config: HomeServerConfig,
        listener_config: ListenerConfig,
    ) -> Iterable[Port]:
        port = listener_config.port
        # Must exist since this is an HTTP listener.
        assert listener_config.http_options is not None
        site_tag = listener_config.http_options.tag
        if site_tag is None:
            site_tag = str(port)
        site_tag = listener_config.get_site_tag()

        # We always include a health resource.
        resources: Dict[str, Resource] = {"/health": HealthResource()}
@ -252,12 +251,17 @@ class SynapseHomeServer(HomeServer):
                    self._listener_http(self.config, listener)
                )
            elif listener.type == "manhole":
                _base.listen_manhole(
                    listener.bind_addresses,
                    listener.port,
                    manhole_settings=self.config.server.manhole_settings,
                    manhole_globals={"hs": self},
                )
                if isinstance(listener, TCPListenerConfig):
                    _base.listen_manhole(
                        listener.bind_addresses,
                        listener.port,
                        manhole_settings=self.config.server.manhole_settings,
                        manhole_globals={"hs": self},
                    )
                else:
                    raise ConfigError(
                        "Can not use a unix socket for manhole at this time."
                    )
            elif listener.type == "metrics":
                if not self.config.metrics.enable_metrics:
                    logger.warning(
@ -265,10 +269,16 @@ class SynapseHomeServer(HomeServer):
                        "enable_metrics is not True!"
                    )
                else:
                    _base.listen_metrics(
                        listener.bind_addresses,
                        listener.port,
                    )
                    if isinstance(listener, TCPListenerConfig):
                        _base.listen_metrics(
                            listener.bind_addresses,
                            listener.port,
                        )
                    else:
                        raise ConfigError(
                            "Can not use a unix socket for metrics at this time."
                        )

            else:
                # this shouldn't happen, as the listener type should have been checked
                # during parsing
@ -17,6 +17,8 @@ import urllib.parse
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Iterable,
    List,
@ -24,13 +26,14 @@ from typing import (
    Optional,
    Sequence,
    Tuple,
    TypeVar,
)

from prometheus_client import Counter
from typing_extensions import TypeGuard
from typing_extensions import Concatenate, ParamSpec, TypeGuard

from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind
from synapse.api.errors import CodeMessageException
from synapse.api.errors import CodeMessageException, HttpResponseException
from synapse.appservice import (
    ApplicationService,
    TransactionOneTimeKeysCount,
@ -38,7 +41,7 @@ from synapse.appservice import (
)
from synapse.events import EventBase
from synapse.events.utils import SerializeEventConfig, serialize_event
from synapse.http.client import SimpleHttpClient
from synapse.http.client import SimpleHttpClient, is_unknown_endpoint
from synapse.types import DeviceListUpdates, JsonDict, ThirdPartyInstanceID
from synapse.util.caches.response_cache import ResponseCache

@ -78,7 +81,11 @@ sent_todevice_counter = Counter(
HOUR_IN_MS = 60 * 60 * 1000


APP_SERVICE_PREFIX = "/_matrix/app/unstable"
APP_SERVICE_PREFIX = "/_matrix/app/v1"
APP_SERVICE_UNSTABLE_PREFIX = "/_matrix/app/unstable"

P = ParamSpec("P")
R = TypeVar("R")


def _is_valid_3pe_metadata(info: JsonDict) -> bool:
@ -121,6 +128,47 @@ class ApplicationServiceApi(SimpleHttpClient):
            hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
        )

    async def _send_with_fallbacks(
        self,
        service: "ApplicationService",
        prefixes: List[str],
        path: str,
        func: Callable[Concatenate[str, P], Awaitable[R]],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> R:
        """
        Attempt to call an application service with multiple paths, falling back
        until one succeeds.

        Args:
            service: The application service; this provides the base URL.
            prefixes: A list of path prefixes to try in order for the request.
            path: A suffix to append to each prefix.
            func: The function to call, the first argument will be the full
                endpoint to fetch. Other arguments are provided by args/kwargs.

        Returns:
            The return value of func.
        """
        for i, prefix in enumerate(prefixes, start=1):
            uri = f"{service.url}{prefix}{path}"
            try:
                return await func(uri, *args, **kwargs)
            except HttpResponseException as e:
                # If an error is received that is due to an unrecognised path,
                # fall back to the next path (if one exists). Otherwise, consider
                # it a legitimate error and raise.
                if i < len(prefixes) and is_unknown_endpoint(e):
                    continue
                raise
            except Exception:
                # Unexpected exceptions get sent to the caller.
                raise

        # The function should always exit via the return or raise above this.
        raise RuntimeError("Unexpected fallback behaviour. This should never be seen.")

    async def query_user(self, service: "ApplicationService", user_id: str) -> bool:
        if service.url is None:
            return False
@ -128,10 +176,12 @@ class ApplicationServiceApi(SimpleHttpClient):
        # This is required by the configuration.
        assert service.hs_token is not None

        uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
        try:
            response = await self.get_json(
                uri,
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/users/{urllib.parse.quote(user_id)}",
                self.get_json,
                {"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
@ -140,9 +190,9 @@ class ApplicationServiceApi(SimpleHttpClient):
        except CodeMessageException as e:
            if e.code == 404:
                return False
            logger.warning("query_user to %s received %s", uri, e.code)
            logger.warning("query_user to %s received %s", service.url, e.code)
        except Exception as ex:
            logger.warning("query_user to %s threw exception %s", uri, ex)
            logger.warning("query_user to %s threw exception %s", service.url, ex)
        return False

    async def query_alias(self, service: "ApplicationService", alias: str) -> bool:
@ -152,21 +202,23 @@ class ApplicationServiceApi(SimpleHttpClient):
        # This is required by the configuration.
        assert service.hs_token is not None

        uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
        try:
            response = await self.get_json(
                uri,
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/rooms/{urllib.parse.quote(alias)}",
                self.get_json,
                {"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
            if response is not None:  # just an empty json object
                return True
        except CodeMessageException as e:
            logger.warning("query_alias to %s received %s", uri, e.code)
            logger.warning("query_alias to %s received %s", service.url, e.code)
            if e.code == 404:
                return False
        except Exception as ex:
            logger.warning("query_alias to %s threw exception %s", uri, ex)
            logger.warning("query_alias to %s threw exception %s", service.url, ex)
        return False

    async def query_3pe(
@ -188,25 +240,24 @@ class ApplicationServiceApi(SimpleHttpClient):
        # This is required by the configuration.
        assert service.hs_token is not None

        uri = "%s%s/thirdparty/%s/%s" % (
            service.url,
            APP_SERVICE_PREFIX,
            kind,
            urllib.parse.quote(protocol),
        )
        try:
            args: Mapping[Any, Any] = {
                **fields,
                b"access_token": service.hs_token,
            }
            response = await self.get_json(
                uri,
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
                f"/thirdparty/{kind}/{urllib.parse.quote(protocol)}",
                self.get_json,
                args=args,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
            if not isinstance(response, list):
                logger.warning(
                    "query_3pe to %s returned an invalid response %r", uri, response
                    "query_3pe to %s returned an invalid response %r",
                    service.url,
                    response,
                )
                return []

@ -216,12 +267,12 @@ class ApplicationServiceApi(SimpleHttpClient):
                    ret.append(r)
                else:
                    logger.warning(
                        "query_3pe to %s returned an invalid result %r", uri, r
                        "query_3pe to %s returned an invalid result %r", service.url, r
                    )

            return ret
        except Exception as ex:
            logger.warning("query_3pe to %s threw exception %s", uri, ex)
            logger.warning("query_3pe to %s threw exception %s", service.url, ex)
            return []

    async def get_3pe_protocol(
@ -233,21 +284,20 @@ class ApplicationServiceApi(SimpleHttpClient):
        async def _get() -> Optional[JsonDict]:
            # This is required by the configuration.
            assert service.hs_token is not None
            uri = "%s%s/thirdparty/protocol/%s" % (
                service.url,
                APP_SERVICE_PREFIX,
                urllib.parse.quote(protocol),
            )
            try:
                info = await self.get_json(
                    uri,
                info = await self._send_with_fallbacks(
                    service,
                    [APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
                    f"/thirdparty/protocol/{urllib.parse.quote(protocol)}",
                    self.get_json,
                    {"access_token": service.hs_token},
                    headers={"Authorization": [f"Bearer {service.hs_token}"]},
                )

                if not _is_valid_3pe_metadata(info):
                    logger.warning(
                        "query_3pe_protocol to %s did not return a valid result", uri
                        "query_3pe_protocol to %s did not return a valid result",
                        service.url,
                    )
                    return None

@ -260,7 +310,9 @@ class ApplicationServiceApi(SimpleHttpClient):

                return info
            except Exception as ex:
                logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex)
                logger.warning(
                    "query_3pe_protocol to %s threw exception %s", service.url, ex
                )
                return None

        key = (service.id, protocol)
@ -274,7 +326,7 @@ class ApplicationServiceApi(SimpleHttpClient):
        assert service.hs_token is not None

        await self.post_json_get_json(
            uri=service.url + "/_matrix/app/unstable/fi.mau.msc2659/ping",
            uri=f"{service.url}{APP_SERVICE_UNSTABLE_PREFIX}/fi.mau.msc2659/ping",
            post_json={"transaction_id": txn_id},
            headers={"Authorization": [f"Bearer {service.hs_token}"]},
        )
@ -318,8 +370,6 @@ class ApplicationServiceApi(SimpleHttpClient):
            )
            txn_id = 0

        uri = service.url + ("/transactions/%s" % urllib.parse.quote(str(txn_id)))

        # Never send ephemeral events to appservices that do not support it
        body: JsonDict = {"events": serialized_events}
        if service.supports_ephemeral:
@ -351,8 +401,11 @@ class ApplicationServiceApi(SimpleHttpClient):
        }

        try:
            await self.put_json(
                uri=uri,
            await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/transactions/{urllib.parse.quote(str(txn_id))}",
                self.put_json,
                json_body=body,
                args={"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
@ -360,7 +413,7 @@ class ApplicationServiceApi(SimpleHttpClient):
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug(
                    "push_bulk to %s succeeded! events=%s",
                    uri,
                    service.url,
                    [event.get("event_id") for event in events],
                )
            sent_transactions_counter.labels(service.id).inc()
@ -371,7 +424,7 @@ class ApplicationServiceApi(SimpleHttpClient):
        except CodeMessageException as e:
            logger.warning(
                "push_bulk to %s received code=%s msg=%s",
                uri,
                service.url,
                e.code,
                e.msg,
                exc_info=logger.isEnabledFor(logging.DEBUG),
@ -379,7 +432,7 @@ class ApplicationServiceApi(SimpleHttpClient):
        except Exception as ex:
            logger.warning(
                "push_bulk to %s threw exception(%s) %s args=%s",
                uri,
                service.url,
                type(ex).__name__,
                ex,
                ex.args,
@ -388,6 +441,108 @@ class ApplicationServiceApi(SimpleHttpClient):
            failed_transactions_counter.labels(service.id).inc()
            return False

    async def claim_client_keys(
        self, service: "ApplicationService", query: List[Tuple[str, str, str]]
    ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]:
        """Claim one time keys from an application service.

        Note that any error (including a timeout) is treated as the application
        service having no information.

        Args:
            service: The application service to query.
            query: An iterable of tuples of (user ID, device ID, algorithm).

        Returns:
            A tuple of:
                A map of user ID -> a map of device ID -> a map of key ID -> JSON dict.

                A copy of the input which has not been fulfilled because the
                appservice doesn't support this endpoint or has not returned
                data for that tuple.
        """
        if service.url is None:
            return {}, query

        # This is required by the configuration.
        assert service.hs_token is not None

        # Create the expected payload shape.
        body: Dict[str, Dict[str, List[str]]] = {}
        for user_id, device, algorithm in query:
            body.setdefault(user_id, {}).setdefault(device, []).append(algorithm)

        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3983/keys/claim"
        try:
            response = await self.post_json_get_json(
                uri,
                body,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
        except HttpResponseException as e:
            # The appservice doesn't support this endpoint.
            if is_unknown_endpoint(e):
                return {}, query
            logger.warning("claim_keys to %s received %s", uri, e.code)
            return {}, query
        except Exception as ex:
            logger.warning("claim_keys to %s threw exception %s", uri, ex)
            return {}, query

        # Check if the appservice fulfilled all of the queried user/device/algorithms
        # or if some are still missing.
        #
        # TODO This places a lot of faith in the response shape being correct.
        missing = [
            (user_id, device, algorithm)
            for user_id, device, algorithm in query
            if algorithm not in response.get(user_id, {}).get(device, [])
        ]

        return response, missing

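To make the payload reshaping above concrete, a standalone illustration with invented user and device IDs; it mirrors the loop in `claim_client_keys` that builds the nested body MSC3983 expects from the flat query tuples.

```
query = [
    ("@alice:example.org", "DEV1", "signed_curve25519"),
    ("@alice:example.org", "DEV2", "signed_curve25519"),
]
body = {}
for user_id, device, algorithm in query:
    body.setdefault(user_id, {}).setdefault(device, []).append(algorithm)
assert body == {
    "@alice:example.org": {
        "DEV1": ["signed_curve25519"],
        "DEV2": ["signed_curve25519"],
    }
}
```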
    async def query_keys(
        self, service: "ApplicationService", query: Dict[str, List[str]]
    ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]:
        """Query the application service for keys.

        Note that any error (including a timeout) is treated as the application
        service having no information.

        Args:
            service: The application service to query.
            query: A map of user ID -> a list of device IDs.

        Returns:
            A map of device_keys/master_keys/self_signing_keys/user_signing_keys:

            device_keys is a map of user ID -> a map of device ID -> device info.
        """
        if service.url is None:
            return {}

        # This is required by the configuration.
        assert service.hs_token is not None

        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3984/keys/query"
        try:
            response = await self.post_json_get_json(
                uri,
                query,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
        except HttpResponseException as e:
            # The appservice doesn't support this endpoint.
            if is_unknown_endpoint(e):
                return {}
            logger.warning("query_keys to %s received %s", uri, e.code)
            return {}
        except Exception as ex:
            logger.warning("query_keys to %s threw exception %s", uri, ex)
            return {}

        return response

    def _serialize(
        self, service: "ApplicationService", events: Iterable[EventBase]
    ) -> List[JsonDict]:
@ -74,6 +74,16 @@ class ExperimentalConfig(Config):
            "msc3202_transaction_extensions", False
        )

        # MSC3983: Proxying OTK claim requests to exclusive ASes.
        self.msc3983_appservice_otk_claims: bool = experimental.get(
            "msc3983_appservice_otk_claims", False
        )

        # MSC3984: Proxying key queries to exclusive ASes.
        self.msc3984_appservice_key_query: bool = experimental.get(
            "msc3984_appservice_key_query", False
        )

        # MSC3706 (server-side support for partial state in /send_join responses)
        # Synapse will always serve partial state responses to requests using the stable
        # query parameter `omit_members`. If this flag is set, Synapse will also serve
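As an aside, a minimal sketch of how the two new flags above would be switched on. The `experimental` mapping stands in for the parsed `experimental_features` section of homeserver.yaml; the values are illustrative only.

```
experimental = {
    "msc3983_appservice_otk_claims": True,
    "msc3984_appservice_key_query": True,
}
msc3983_enabled = experimental.get("msc3983_appservice_otk_claims", False)
msc3984_enabled = experimental.get("msc3984_appservice_key_query", False)
assert msc3983_enabled and msc3984_enabled
```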
@ -136,6 +136,7 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
            "type": "array",
            "items": SsoAttributeRequirement.JSON_SCHEMA,
        },
        "enable_registration": {"type": "boolean"},
    },
}

@ -306,6 +307,7 @@ def _parse_oidc_config_dict(
        user_mapping_provider_class=user_mapping_provider_class,
        user_mapping_provider_config=user_mapping_provider_config,
        attribute_requirements=attribute_requirements,
        enable_registration=oidc_config.get("enable_registration", True),
    )


@ -405,3 +407,6 @@ class OidcProviderConfig:

    # attributes required in userinfo to allow login/registration
    attribute_requirements: List[SsoAttributeRequirement]

    # Whether automatic registrations are enabled in the OIDC flow. Defaults to True.
    enable_registration: bool
@ -214,17 +214,52 @@ class HttpListenerConfig:


@attr.s(slots=True, frozen=True, auto_attribs=True)
class ListenerConfig:
    """Object describing the configuration of a single listener."""
class TCPListenerConfig:
    """Object describing the configuration of a single TCP listener."""

    port: int = attr.ib(validator=attr.validators.instance_of(int))
    bind_addresses: List[str]
    bind_addresses: List[str] = attr.ib(validator=attr.validators.instance_of(List))
    type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
    tls: bool = False

    # http_options is only populated if type=http
    http_options: Optional[HttpListenerConfig] = None

    def get_site_tag(self) -> str:
        """Retrieves http_options.tag if it exists, otherwise the port number."""
        if self.http_options and self.http_options.tag is not None:
            return self.http_options.tag
        else:
            return str(self.port)

    def is_tls(self) -> bool:
        return self.tls


@attr.s(slots=True, frozen=True, auto_attribs=True)
class UnixListenerConfig:
    """Object describing the configuration of a single Unix socket listener."""

    # Note: unix sockets can not be tls encrypted, so they HAVE to be behind a
    # tls-handling reverse proxy.
    path: str = attr.ib()
    # A default (0o666) for this is set in parse_listener_def() below
    mode: int
    type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))

    # http_options is only populated if type=http
    http_options: Optional[HttpListenerConfig] = None

    def get_site_tag(self) -> str:
        return "unix"

    def is_tls(self) -> bool:
        """Unix sockets can't have TLS"""
        return False


ListenerConfig = Union[TCPListenerConfig, UnixListenerConfig]

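A minimal sketch of how call sites can branch on the new union type; the port, address, and path values below are invented, and this assumes the classes above are in scope.

```
listener = TCPListenerConfig(port=8008, bind_addresses=["::1"], type="http")
if isinstance(listener, TCPListenerConfig):
    where = f"{listener.bind_addresses[0]}:{listener.port}"  # TCP details exist
else:
    where = listener.path  # a UnixListenerConfig carries a filesystem path instead
```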
@attr.s(slots=True, frozen=True, auto_attribs=True)
class ManholeConfig:
@ -531,12 +566,12 @@ class ServerConfig(Config):

        self.listeners = [parse_listener_def(i, x) for i, x in enumerate(listeners)]

        # no_tls is not really supported any more, but let's grandfather it in
        # here.
        # no_tls is not really supported anymore, but let's grandfather it in here.
        if config.get("no_tls", False):
            l2 = []
            for listener in self.listeners:
                if listener.tls:
                if isinstance(listener, TCPListenerConfig) and listener.tls:
                    # Use isinstance() as the assertion that this *has* a listener.port
                    logger.info(
                        "Ignoring TLS-enabled listener on port %i due to no_tls",
                        listener.port,
@ -577,7 +612,7 @@ class ServerConfig(Config):
            )

            self.listeners.append(
                ListenerConfig(
                TCPListenerConfig(
                    port=bind_port,
                    bind_addresses=[bind_host],
                    tls=True,
@ -589,7 +624,7 @@ class ServerConfig(Config):
            unsecure_port = config.get("unsecure_port", bind_port - 400)
            if unsecure_port:
                self.listeners.append(
                    ListenerConfig(
                    TCPListenerConfig(
                        port=unsecure_port,
                        bind_addresses=[bind_host],
                        tls=False,
@ -601,7 +636,7 @@ class ServerConfig(Config):
        manhole = config.get("manhole")
        if manhole:
            self.listeners.append(
                ListenerConfig(
                TCPListenerConfig(
                    port=manhole,
                    bind_addresses=["127.0.0.1"],
                    type="manhole",
@ -648,7 +683,7 @@ class ServerConfig(Config):
            logger.warning(METRICS_PORT_WARNING)

            self.listeners.append(
                ListenerConfig(
                TCPListenerConfig(
                    port=metrics_port,
                    bind_addresses=[config.get("metrics_bind_host", "127.0.0.1")],
                    type="http",
@ -724,7 +759,7 @@ class ServerConfig(Config):
            self.delete_stale_devices_after = None

    def has_tls_listener(self) -> bool:
        return any(listener.tls for listener in self.listeners)
        return any(listener.is_tls() for listener in self.listeners)

    def generate_config_section(
        self,
@ -904,25 +939,25 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
        raise ConfigError(DIRECT_TCP_ERROR, ("listeners", str(num), "type"))

    port = listener.get("port")
    if type(port) is not int:
    socket_path = listener.get("path")
    # Either a port or a path should be declared at a minimum. Using both would be bad.
    if port is not None and not isinstance(port, int):
        raise ConfigError("Listener configuration is lacking a valid 'port' option")
    if socket_path is not None and not isinstance(socket_path, str):
        raise ConfigError("Listener configuration is lacking a valid 'path' option")
    if port and socket_path:
        raise ConfigError(
            "Can not have both a UNIX socket and an IP/port declared for the same "
            "resource!"
        )
    if port is None and socket_path is None:
        raise ConfigError(
            "Must have either a UNIX socket or an IP/port declared for a given "
            "resource!"
        )

    tls = listener.get("tls", False)

    bind_addresses = listener.get("bind_addresses", [])
    bind_address = listener.get("bind_address")
    # if bind_address was specified, add it to the list of addresses
    if bind_address:
        bind_addresses.append(bind_address)

    # if we still have an empty list of addresses, use the default list
    if not bind_addresses:
        if listener_type == "metrics":
            # the metrics listener doesn't support IPv6
            bind_addresses.append("0.0.0.0")
        else:
            bind_addresses.extend(DEFAULT_BIND_ADDRESSES)

    http_config = None
    if listener_type == "http":
        try:
@ -932,8 +967,12 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
        except ValueError as e:
            raise ConfigError("Unknown listener resource") from e

        # For a unix socket, default x_forwarded to True, as this is the only way of
        # getting a client IP.
        # Note: a reverse proxy is required anyway, as there is no way of exposing a
        # unix socket to the internet.
        http_config = HttpListenerConfig(
            x_forwarded=listener.get("x_forwarded", False),
            x_forwarded=listener.get("x_forwarded", (True if socket_path else False)),
            resources=resources,
            additional_resources=listener.get("additional_resources", {}),
            tag=listener.get("tag"),
@ -941,7 +980,30 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
            experimental_cors_msc3886=listener.get("experimental_cors_msc3886", False),
        )

    return ListenerConfig(port, bind_addresses, listener_type, tls, http_config)
    if socket_path:
        # TODO: Add in path validation, like if the directory exists and is writable?
        # Set a default for the permission, in case it's left out
        socket_mode = listener.get("mode", 0o666)

        return UnixListenerConfig(socket_path, socket_mode, listener_type, http_config)

    else:
        assert port is not None
        bind_addresses = listener.get("bind_addresses", [])
        bind_address = listener.get("bind_address")
        # if bind_address was specified, add it to the list of addresses
        if bind_address:
            bind_addresses.append(bind_address)

        # if we still have an empty list of addresses, use the default list
        if not bind_addresses:
            if listener_type == "metrics":
                # the metrics listener doesn't support IPv6
                bind_addresses.append("0.0.0.0")
            else:
                bind_addresses.extend(DEFAULT_BIND_ADDRESSES)

        return TCPListenerConfig(port, bind_addresses, listener_type, tls, http_config)
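A hedged sketch of the two listener shapes `parse_listener_def` now accepts. The dicts mirror homeserver.yaml listener entries, but the socket path and resource names are invented, and this assumes the full function (including its earlier type handling) is in scope.

```
tcp = parse_listener_def(
    0, {"port": 8008, "type": "http", "resources": [{"names": ["client"]}]}
)
unix = parse_listener_def(
    1, {"path": "/run/synapse/main.sock", "type": "http",
        "resources": [{"names": ["client"]}]}
)
assert isinstance(tcp, TCPListenerConfig) and tcp.get_site_tag() == "8008"
assert isinstance(unix, UnixListenerConfig) and unix.get_site_tag() == "unix"
```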
_MANHOLE_SETTINGS_SCHEMA = {

@ -19,15 +19,18 @@ from typing import Any, Dict, List, Union

import attr

from synapse.types import JsonDict

from ._base import (
from synapse.config._base import (
    Config,
    ConfigError,
    RoutableShardedWorkerHandlingConfig,
    ShardedWorkerHandlingConfig,
)
from .server import DIRECT_TCP_ERROR, ListenerConfig, parse_listener_def
from synapse.config.server import (
    DIRECT_TCP_ERROR,
    TCPListenerConfig,
    parse_listener_def,
)
from synapse.types import JsonDict

_DEPRECATED_WORKER_DUTY_OPTION_USED = """
The '%s' configuration option is deprecated and will be removed in a future
@ -161,7 +164,7 @@ class WorkerConfig(Config):
        manhole = config.get("worker_manhole")
        if manhole:
            self.worker_listeners.append(
                ListenerConfig(
                TCPListenerConfig(
                    port=manhole,
                    bind_addresses=["127.0.0.1"],
                    type="manhole",
|
@ -51,7 +51,7 @@ def check_event_content_hash(
|
||||
# some malformed events lack a 'hashes'. Protect against it being missing
|
||||
# or a weird type by basically treating it the same as an unhashed event.
|
||||
hashes = event.get("hashes")
|
||||
# nb it might be a frozendict or a dict
|
||||
# nb it might be a immutabledict or a dict
|
||||
if not isinstance(hashes, collections.abc.Mapping):
|
||||
raise SynapseError(
|
||||
400, "Malformed 'hashes': %s" % (type(hashes),), Codes.UNAUTHORIZED
|
||||
|
@ -462,7 +462,7 @@ class FrozenEvent(EventBase):
|
||||
# Signatures is a dict of dicts, and this is faster than doing a
|
||||
# copy.deepcopy
|
||||
signatures = {
|
||||
name: {sig_id: sig for sig_id, sig in sigs.items()}
|
||||
name: dict(sigs.items())
|
||||
for name, sigs in event_dict.pop("signatures", {}).items()
|
||||
}
|
||||
|
||||
@ -510,7 +510,7 @@ class FrozenEventV2(EventBase):
|
||||
# Signatures is a dict of dicts, and this is faster than doing a
|
||||
# copy.deepcopy
|
||||
signatures = {
|
||||
name: {sig_id: sig for sig_id, sig in sigs.items()}
|
||||
name: dict(sigs.items())
|
||||
for name, sigs in event_dict.pop("signatures", {}).items()
|
||||
}
|
||||
|
||||
|
@ -15,7 +15,7 @@ from abc import ABC, abstractmethod
|
||||
from typing import TYPE_CHECKING, List, Optional, Tuple
|
||||
|
||||
import attr
|
||||
from frozendict import frozendict
|
||||
from immutabledict import immutabledict
|
||||
|
||||
from synapse.appservice import ApplicationService
|
||||
from synapse.events import EventBase
|
||||
@ -489,4 +489,4 @@ def _decode_state_dict(
|
||||
if input is None:
|
||||
return None
|
||||
|
||||
return frozendict({(etype, state_key): v for etype, state_key, v in input})
|
||||
return immutabledict({(etype, state_key): v for etype, state_key, v in input})
|
||||
|
@ -355,7 +355,7 @@ def serialize_event(
|
||||
time_now_ms = int(time_now_ms)
|
||||
|
||||
# Should this strip out None's?
|
||||
d = {k: v for k, v in e.get_dict().items()}
|
||||
d = dict(e.get_dict().items())
|
||||
|
||||
d["event_id"] = e.event_id
|
||||
|
||||
@ -567,7 +567,7 @@ PowerLevelsContent = Mapping[str, Union[_PowerLevel, Mapping[str, _PowerLevel]]]
|
||||
def copy_and_fixup_power_levels_contents(
|
||||
old_power_levels: PowerLevelsContent,
|
||||
) -> Dict[str, Union[int, Dict[str, int]]]:
|
||||
"""Copy the content of a power_levels event, unfreezing frozendicts along the way.
|
||||
"""Copy the content of a power_levels event, unfreezing immutabledicts along the way.
|
||||
|
||||
We accept as input power level values which are strings, provided they represent an
|
||||
integer, e.g. `"`100"` instead of 100. Such strings are converted to integers
|
||||
|
@ -12,11 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import collections.abc
from typing import Iterable, Type, Union, cast
from typing import Iterable, List, Type, Union, cast

import jsonschema
from pydantic import Field, StrictBool, StrictStr

from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes, Membership
from synapse.api.constants import (
    MAX_ALIAS_LENGTH,
    EventContentFields,
    EventTypes,
    Membership,
)
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import EventFormatVersions
from synapse.config.homeserver import HomeServerConfig
@ -28,6 +34,8 @@ from synapse.events.utils import (
    validate_canonicaljson,
)
from synapse.federation.federation_server import server_matches_acl_event
from synapse.http.servlet import validate_json_object
from synapse.rest.models import RequestBodyModel
from synapse.types import EventID, JsonDict, RoomID, UserID


@ -94,27 +102,27 @@ class EventValidator:
                    Codes.INVALID_PARAM,
                )

        if event.type == EventTypes.Retention:
        elif event.type == EventTypes.Retention:
            self._validate_retention(event)

        if event.type == EventTypes.ServerACL:
        elif event.type == EventTypes.ServerACL:
            if not server_matches_acl_event(config.server.server_name, event):
                raise SynapseError(
                    400, "Can't create an ACL event that denies the local server"
                )

        if event.type == EventTypes.PowerLevels:
        elif event.type == EventTypes.PowerLevels:
            try:
                jsonschema.validate(
                    instance=event.content,
                    schema=POWER_LEVELS_SCHEMA,
                    cls=plValidator,
                    cls=POWER_LEVELS_VALIDATOR,
                )
            except jsonschema.ValidationError as e:
                if e.path:
                    # example: "users_default": '0' is not of type 'integer'
                    # cast safety: path entries can be integers, if we fail to validate
                    # items in an array. However the POWER_LEVELS_SCHEMA doesn't expect
                    # items in an array. However, the POWER_LEVELS_SCHEMA doesn't expect
                    # to see any arrays.
                    message = (
                        '"' + cast(str, e.path[-1]) + '": ' + e.message  # noqa: B306
@ -131,6 +139,15 @@ class EventValidator:
                    errcode=Codes.BAD_JSON,
                )

        # If the event contains a mentions key, validate it.
        if (
            EventContentFields.MSC3952_MENTIONS in event.content
            and config.experimental.msc3952_intentional_mentions
        ):
            validate_json_object(
                event.content[EventContentFields.MSC3952_MENTIONS], Mentions
            )

    def _validate_retention(self, event: EventBase) -> None:
        """Checks that an event that defines the retention policy for a room respects the
        format enforced by the spec.
@ -265,12 +282,17 @@ POWER_LEVELS_SCHEMA = {
}


class Mentions(RequestBodyModel):
    user_ids: List[StrictStr] = Field(default_factory=list)
    room: StrictBool = False

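A small sketch of the validation the block above performs, assuming pydantic v1 semantics for `RequestBodyModel`; the user ID is invented. The strict types reject non-string user IDs and a non-boolean `room` flag.

```
from synapse.events.validator import Mentions  # defined above

mentions = Mentions.parse_obj({"user_ids": ["@alice:example.org"], "room": False})
assert mentions.user_ids == ["@alice:example.org"] and mentions.room is False
```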
# This could return something newer than Draft 7, but that's the current "latest"
# validator.
def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]:
    validator = jsonschema.validators.validator_for(POWER_LEVELS_SCHEMA)
def _create_validator(schema: JsonDict) -> Type[jsonschema.Draft7Validator]:
    validator = jsonschema.validators.validator_for(schema)

    # by default jsonschema does not consider a frozendict to be an object so
    # by default jsonschema does not consider an immutabledict to be an object so
    # we need to use a custom type checker
    # https://python-jsonschema.readthedocs.io/en/stable/validate/?highlight=object#validating-with-additional-types
    type_checker = validator.TYPE_CHECKER.redefine(
@ -280,4 +302,4 @@ def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]:
    return jsonschema.validators.extend(validator, type_checker=type_checker)


plValidator = _create_power_level_validator()
POWER_LEVELS_VALIDATOR = _create_validator(POWER_LEVELS_SCHEMA)

@ -61,6 +61,7 @@ from synapse.federation.federation_base import (
    event_from_pdu_json,
)
from synapse.federation.transport.client import SendJoinResponse
from synapse.http.client import is_unknown_endpoint
from synapse.http.types import QueryParams
from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, tag_args, trace
from synapse.types import JsonDict, UserID, get_domain_from_id
@ -759,43 +760,6 @@ class FederationClient(FederationBase):

        return signed_auth

    def _is_unknown_endpoint(
        self, e: HttpResponseException, synapse_error: Optional[SynapseError] = None
    ) -> bool:
        """
        Returns true if the response was due to an endpoint being unimplemented.

        Args:
            e: The error response received from the remote server.
            synapse_error: The above error converted to a SynapseError. This is
                automatically generated if not provided.

        """
        if synapse_error is None:
            synapse_error = e.to_synapse_error()
        # MSC3743 specifies that servers should return a 404 or 405 with an errcode
        # of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
        # to an unknown method, respectively.
        #
        # Older versions of servers don't properly handle this. This needs to be
        # rather specific as some endpoints truly do return 404 errors.
        return (
            # 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
            (e.code == 404 or e.code == 405)
            and (
                # Older Dendrites returned a text or empty body.
                # Older Conduit returned an empty body.
                not e.response
                or e.response == b"404 page not found"
                # The proper response JSON with M_UNRECOGNIZED errcode.
                or synapse_error.errcode == Codes.UNRECOGNIZED
            )
        ) or (
            # Older Synapses returned a 400 error.
            e.code == 400
            and synapse_error.errcode == Codes.UNRECOGNIZED
        )

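The removed method lives on as `is_unknown_endpoint` in `synapse.http.client`. A self-contained sketch of the heuristic it encodes; the status codes and bodies below are invented test values, not Synapse's API.

```
def looks_like_unknown_endpoint(code: int, body: bytes, errcode: str) -> bool:
    # Mirrors the heuristic above: 404/405 with an empty or plain-text body or
    # an M_UNRECOGNIZED errcode, or a legacy 400 + M_UNRECOGNIZED from old Synapses.
    return (
        code in (404, 405)
        and (not body or body == b"404 page not found" or errcode == "M_UNRECOGNIZED")
    ) or (code == 400 and errcode == "M_UNRECOGNIZED")

assert looks_like_unknown_endpoint(404, b"", "M_UNKNOWN") is True
assert looks_like_unknown_endpoint(400, b'{"errcode":"M_UNRECOGNIZED"}', "M_UNRECOGNIZED") is True
assert looks_like_unknown_endpoint(403, b"", "M_FORBIDDEN") is False
```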
    async def _try_destination_list(
        self,
        description: str,
@ -887,7 +851,7 @@ class FederationClient(FederationBase):
            elif 400 <= e.code < 500 and synapse_error.errcode in failover_errcodes:
                failover = True

            elif failover_on_unknown_endpoint and self._is_unknown_endpoint(
            elif failover_on_unknown_endpoint and is_unknown_endpoint(
                e, synapse_error
            ):
                failover = True
@ -1223,7 +1187,7 @@ class FederationClient(FederationBase):
            # If an error is received that is due to an unrecognised endpoint,
            # fall back to the v1 endpoint. Otherwise, consider it a legitimate error
            # and raise.
            if not self._is_unknown_endpoint(e):
            if not is_unknown_endpoint(e):
                raise

        logger.debug("Couldn't send_join with the v2 API, falling back to the v1 API")
@ -1297,7 +1261,7 @@ class FederationClient(FederationBase):
            # fall back to the v1 endpoint if the room uses old-style event IDs.
            # Otherwise, consider it a legitimate error and raise.
            err = e.to_synapse_error()
            if self._is_unknown_endpoint(e, err):
            if is_unknown_endpoint(e, err):
                if room_version.event_format != EventFormatVersions.ROOM_V1_V2:
                    raise SynapseError(
                        400,
@ -1358,7 +1322,7 @@ class FederationClient(FederationBase):
            # If an error is received that is due to an unrecognised endpoint,
            # fall back to the v1 endpoint. Otherwise, consider it a legitimate error
            # and raise.
            if not self._is_unknown_endpoint(e):
            if not is_unknown_endpoint(e):
                raise

        logger.debug("Couldn't send_leave with the v2 API, falling back to the v1 API")
@ -1629,7 +1593,7 @@ class FederationClient(FederationBase):
            # If an error is received that is due to an unrecognised endpoint,
            # fall back to the unstable endpoint. Otherwise, consider it a
            # legitimate error and raise.
            if not self._is_unknown_endpoint(e):
            if not is_unknown_endpoint(e):
                raise

            logger.debug(
@ -86,7 +86,7 @@ from synapse.storage.databases.main.lock import Lock
from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary
from synapse.storage.roommember import MemberSummary
from synapse.types import JsonDict, StateMap, get_domain_from_id
from synapse.util import json_decoder, unwrapFirstError
from synapse.util import unwrapFirstError
from synapse.util.async_helpers import Linearizer, concurrently_execute, gather_results
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.stringutils import parse_server_name
@ -135,6 +135,7 @@ class FederationServer(FederationBase):
        self.state = hs.get_state_handler()
        self._event_auth_handler = hs.get_event_auth_handler()
        self._room_member_handler = hs.get_room_member_handler()
        self._e2e_keys_handler = hs.get_e2e_keys_handler()

        self._state_storage_controller = hs.get_storage_controllers().state

@ -1012,15 +1013,14 @@ class FederationServer(FederationBase):
                query.append((user_id, device_id, algorithm))

        log_kv({"message": "Claiming one time keys.", "user, device pairs": query})
        results = await self.store.claim_e2e_one_time_keys(query)
        results = await self._e2e_keys_handler.claim_local_one_time_keys(query)

        json_result: Dict[str, Dict[str, dict]] = {}
        for user_id, device_keys in results.items():
            for device_id, keys in device_keys.items():
                for key_id, json_str in keys.items():
                    json_result.setdefault(user_id, {})[device_id] = {
                        key_id: json_decoder.decode(json_str)
                    }
        json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
        for result in results:
            for user_id, device_keys in result.items():
                for device_id, keys in device_keys.items():
                    for key_id, key in keys.items():
                        json_result.setdefault(user_id, {})[device_id] = {key_id: key}

        logger.info(
            "Claimed one-time-keys: %s",

@ -244,7 +244,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):

        self.notifier.on_new_replication_data()

    def send_device_messages(self, destination: str, immediate: bool = False) -> None:
    def send_device_messages(self, destination: str, immediate: bool = True) -> None:
        """As per FederationSender"""
        # We don't need to replicate this as it gets sent down a different
        # stream.
@ -11,6 +11,119 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The Federation Sender is responsible for sending Persistent Data Units (PDUs)
and Ephemeral Data Units (EDUs) to other homeservers using
the `/send` Federation API.


## How do PDUs get sent?

The Federation Sender is made aware of new PDUs due to `FederationSender.notify_new_events`.
When the sender is notified about a newly-persisted PDU that originates from this homeserver
and is not an out-of-band event, we pass the PDU to the `_PerDestinationQueue` for each
remote homeserver that is in the room at that point in the DAG.


### Per-Destination Queues

There is one `PerDestinationQueue` per 'destination' homeserver.
The `PerDestinationQueue` maintains the following information about the destination:

- whether the destination is currently in [catch-up mode (see below)](#catch-up-mode);
- a queue of PDUs to be sent to the destination; and
- a queue of EDUs to be sent to the destination (not considered in this section).

Upon a new PDU being enqueued, `attempt_new_transaction` is called to start a new
transaction if there is not already one in progress.


### Transactions and the Transaction Transmission Loop

Each federation HTTP request to the `/send` endpoint is referred to as a 'transaction'.
The body of the HTTP request contains a list of PDUs and EDUs to send to the destination.

The *Transaction Transmission Loop* (`_transaction_transmission_loop`) is responsible
for emptying the queued PDUs (and EDUs) from a `PerDestinationQueue` by sending
them to the destination.

There can only be one transaction in flight for a given destination at any time.
(Other than preventing us from overloading the destination, this also makes it easier to
reason about because we process events sequentially for each destination.
This is useful for *Catch-Up Mode*, described later.)

The loop continues so long as there is anything to send. At each iteration of the loop, we:

- dequeue up to 50 PDUs (and up to 100 EDUs).
- make the `/send` request to the destination homeserver with the dequeued PDUs and EDUs.
- if successful, make note of the fact that we succeeded in transmitting PDUs up to
  the given `stream_ordering` of the latest PDU sent.
- if unsuccessful, back off from the remote homeserver for some time.
  If we have been unsuccessful for too long (when the backoff interval grows to exceed 1 hour),
  the in-memory queues are emptied and we enter [*Catch-Up Mode*, described below](#catch-up-mode).


### Catch-Up Mode

When the `PerDestinationQueue` has the catch-up flag set, the *Catch-Up Transmission Loop*
(`_catch_up_transmission_loop`) is used in lieu of the regular `_transaction_transmission_loop`.
(Only once the catch-up mode has been exited can the regular transaction transmission behaviour
be resumed.)

*Catch-Up Mode*, entered upon Synapse startup or once a homeserver has fallen behind due to
connection problems, is responsible for sending PDUs that have been missed by the destination
homeserver. (PDUs can be missed because the `PerDestinationQueue` is volatile — i.e. resets
on startup — and it does not hold PDUs forever if `/send` requests to the destination fail.)

The catch-up mechanism makes use of the `last_successful_stream_ordering` column in the
`destinations` table (which gives the `stream_ordering` of the most recent successfully
sent PDU) and the `stream_ordering` column in the `destination_rooms` table (which gives,
for each room, the `stream_ordering` of the most recent PDU that needs to be sent to this
destination).

Each iteration of the loop pulls out 50 `destination_rooms` entries with the oldest
`stream_ordering`s that are greater than the `last_successful_stream_ordering`.
In other words, from the set of latest PDUs in each room to be sent to the destination,
the 50 oldest such PDUs are pulled out.

These PDUs could, in principle, now be directly sent to the destination. However, as an
optimisation intended to prevent overloading destination homeservers, we instead attempt
to send the latest forward extremities so long as the destination homeserver is still
eligible to receive those.
This reduces load on the destination **in aggregate** because all Synapse homeservers
will behave according to this principle and therefore avoid sending lots of different PDUs
at different points in the DAG to a recovering homeserver.
*This optimisation is not currently valid in rooms which are partial-state on this homeserver,
since we are unable to determine whether the destination homeserver is eligible to receive
the latest forward extremities unless this homeserver sent those PDUs — in this case, we
just send the latest PDUs originating from this server and skip this optimisation.*

Whilst PDUs are sent through this mechanism, the position of `last_successful_stream_ordering`
is advanced as normal.
Once there are no longer any rooms containing outstanding PDUs to be sent to the destination
*that are not already in the `PerDestinationQueue` because they arrived since Catch-Up Mode
was enabled*, Catch-Up Mode is exited and we return to `_transaction_transmission_loop`.


#### A note on failures and back-offs

If a remote server is unreachable over federation, we back off from that server,
with an exponentially-increasing retry interval.
Whilst we don't automatically retry after the interval, we prevent making new attempts
until such time as the back-off has cleared.
Once the back-off is cleared and a new PDU or EDU arrives for transmission, the transmission
loop resumes and empties the queue by making federation requests.

If the backoff grows too large (> 1 hour), the in-memory queue is emptied (to prevent
unbounded growth) and Catch-Up Mode is entered.

It is worth noting that the back-off for a remote server is cleared once an inbound
request from that remote server is received (see `notify_remote_server_up`).
At this point, the transaction transmission loop is also started up, to proactively
send missed PDUs and EDUs to the destination (i.e. you don't need to wait for a new PDU
or EDU, destined for that destination, to be created in order to send out missed PDUs and
EDUs).
"""
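A toy, self-contained sketch of the exponentially-increasing back-off described above; the starting interval and multiplier are illustrative, not Synapse's actual retry-limiter values.

```
def next_retry_interval_ms(previous_ms: int, multiplier: int = 2,
                           cap_ms: int = 60 * 60 * 1000) -> int:
    # Double the interval on each failure; per the note above, once the
    # interval would exceed one hour, the queue is emptied and catch-up
    # mode takes over.
    return min(previous_ms * multiplier, cap_ms)

interval = 10_000  # hypothetical starting interval
for _ in range(12):
    interval = next_retry_interval_ms(interval)
assert interval == 60 * 60 * 1000  # capped at 1 hour => enter catch-up mode
```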

import abc
import logging
@ -783,7 +896,7 @@ class FederationSender(AbstractFederationSender):
        else:
            queue.send_edu(edu)

    def send_device_messages(self, destination: str, immediate: bool = False) -> None:
    def send_device_messages(self, destination: str, immediate: bool = True) -> None:
        if destination == self.server_name:
            logger.warning("Not sending device update to ourselves")
            return
@ -108,6 +108,7 @@ class PublicRoomList(BaseFederationServlet):
    """

    PATH = "/publicRooms"
    CATEGORY = "Federation requests"

    def __init__(
        self,

@ -212,6 +213,7 @@ class OpenIdUserInfo(BaseFederationServlet):
    """

    PATH = "/openid/userinfo"
    CATEGORY = "Federation requests"

    REQUIRE_AUTH = False
@ -70,6 +70,7 @@ class BaseFederationServerServlet(BaseFederationServlet):

class FederationSendServlet(BaseFederationServerServlet):
    PATH = "/send/(?P<transaction_id>[^/]*)/?"
    CATEGORY = "Inbound federation transaction request"

    # We ratelimit manually in the handler as we queue up the requests and we
    # don't want to fill up the ratelimiter with blocked requests.

@ -138,6 +139,7 @@ class FederationSendServlet(BaseFederationServerServlet):

class FederationEventServlet(BaseFederationServerServlet):
    PATH = "/event/(?P<event_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    # This is when someone asks for a data item for a given server data_id pair.
    async def on_GET(

@ -152,6 +154,7 @@ class FederationEventServlet(BaseFederationServerServlet):

class FederationStateV1Servlet(BaseFederationServerServlet):
    PATH = "/state/(?P<room_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    # This is when someone asks for all data for a given room.
    async def on_GET(

@ -170,6 +173,7 @@ class FederationStateV1Servlet(BaseFederationServerServlet):

class FederationStateIdsServlet(BaseFederationServerServlet):
    PATH = "/state_ids/(?P<room_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@ -187,6 +191,7 @@ class FederationStateIdsServlet(BaseFederationServerServlet):

class FederationBackfillServlet(BaseFederationServerServlet):
    PATH = "/backfill/(?P<room_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,
@ -225,6 +230,7 @@ class FederationTimestampLookupServlet(BaseFederationServerServlet):
    """

    PATH = "/timestamp_to_event/(?P<room_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@ -246,6 +252,7 @@ class FederationTimestampLookupServlet(BaseFederationServerServlet):

class FederationQueryServlet(BaseFederationServerServlet):
    PATH = "/query/(?P<query_type>[^/]*)"
    CATEGORY = "Federation requests"

    # This is when we receive a server-server Query
    async def on_GET(

@ -262,6 +269,7 @@ class FederationQueryServlet(BaseFederationServerServlet):

class FederationMakeJoinServlet(BaseFederationServerServlet):
    PATH = "/make_join/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@ -297,6 +305,7 @@ class FederationMakeJoinServlet(BaseFederationServerServlet):

class FederationMakeLeaveServlet(BaseFederationServerServlet):
    PATH = "/make_leave/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@ -312,6 +321,7 @@ class FederationMakeLeaveServlet(BaseFederationServerServlet):

class FederationV1SendLeaveServlet(BaseFederationServerServlet):
    PATH = "/send_leave/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@ -327,6 +337,7 @@ class FederationV1SendLeaveServlet(BaseFederationServerServlet):

class FederationV2SendLeaveServlet(BaseFederationServerServlet):
    PATH = "/send_leave/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    PREFIX = FEDERATION_V2_PREFIX

@ -344,6 +355,7 @@ class FederationV2SendLeaveServlet(BaseFederationServerServlet):

class FederationMakeKnockServlet(BaseFederationServerServlet):
    PATH = "/make_knock/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@ -366,6 +378,7 @@ class FederationMakeKnockServlet(BaseFederationServerServlet):

class FederationV1SendKnockServlet(BaseFederationServerServlet):
    PATH = "/send_knock/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@ -381,6 +394,7 @@ class FederationV1SendKnockServlet(BaseFederationServerServlet):

class FederationEventAuthServlet(BaseFederationServerServlet):
    PATH = "/event_auth/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@ -395,6 +409,7 @@ class FederationEventAuthServlet(BaseFederationServerServlet):

class FederationV1SendJoinServlet(BaseFederationServerServlet):
    PATH = "/send_join/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@ -412,6 +427,7 @@ class FederationV1SendJoinServlet(BaseFederationServerServlet):

class FederationV2SendJoinServlet(BaseFederationServerServlet):
    PATH = "/send_join/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    PREFIX = FEDERATION_V2_PREFIX

@ -455,6 +471,7 @@ class FederationV2SendJoinServlet(BaseFederationServerServlet):

class FederationV1InviteServlet(BaseFederationServerServlet):
    PATH = "/invite/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@ -479,6 +496,7 @@ class FederationV1InviteServlet(BaseFederationServerServlet):

class FederationV2InviteServlet(BaseFederationServerServlet):
    PATH = "/invite/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    PREFIX = FEDERATION_V2_PREFIX

@ -515,6 +533,7 @@ class FederationV2InviteServlet(BaseFederationServerServlet):

class FederationThirdPartyInviteExchangeServlet(BaseFederationServerServlet):
    PATH = "/exchange_third_party_invite/(?P<room_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@ -529,6 +548,7 @@ class FederationThirdPartyInviteExchangeServlet(BaseFederationServerServlet):

class FederationClientKeysQueryServlet(BaseFederationServerServlet):
    PATH = "/user/keys/query"
    CATEGORY = "Federation requests"

    async def on_POST(
        self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]

@ -538,6 +558,7 @@ class FederationClientKeysQueryServlet(BaseFederationServerServlet):

class FederationUserDevicesQueryServlet(BaseFederationServerServlet):
    PATH = "/user/devices/(?P<user_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@ -551,6 +572,7 @@ class FederationUserDevicesQueryServlet(BaseFederationServerServlet):

class FederationClientKeysClaimServlet(BaseFederationServerServlet):
    PATH = "/user/keys/claim"
    CATEGORY = "Federation requests"

    async def on_POST(
        self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]

@ -561,6 +583,7 @@ class FederationClientKeysClaimServlet(BaseFederationServerServlet):

class FederationGetMissingEventsServlet(BaseFederationServerServlet):
    PATH = "/get_missing_events/(?P<room_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_POST(
        self,

@ -586,6 +609,7 @@ class FederationGetMissingEventsServlet(BaseFederationServerServlet):

class On3pidBindServlet(BaseFederationServerServlet):
    PATH = "/3pid/onbind"
    CATEGORY = "Federation requests"

    REQUIRE_AUTH = False

@ -618,6 +642,7 @@ class On3pidBindServlet(BaseFederationServerServlet):

class FederationVersionServlet(BaseFederationServlet):
    PATH = "/version"
    CATEGORY = "Federation requests"

    REQUIRE_AUTH = False

@ -640,6 +665,7 @@ class FederationVersionServlet(BaseFederationServlet):

class FederationRoomHierarchyServlet(BaseFederationServlet):
    PATH = "/hierarchy/(?P<room_id>[^/]*)"
    CATEGORY = "Federation requests"

    def __init__(
        self,

@ -672,6 +698,7 @@ class RoomComplexityServlet(BaseFederationServlet):

    PATH = "/rooms/(?P<room_id>[^/]*)/complexity"
    PREFIX = FEDERATION_UNSTABLE_PREFIX
    CATEGORY = "Federation requests (unstable)"

    def __init__(
        self,
@ -12,7 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Collection, Dict, Iterable, List, Optional, Union
from typing import (
    TYPE_CHECKING,
    Collection,
    Dict,
    Iterable,
    List,
    Mapping,
    Optional,
    Tuple,
    Union,
)

from prometheus_client import Counter
@ -829,3 +839,126 @@ class ApplicationServicesHandler:
        if unknown_user:
            return await self.query_user_exists(user_id)
        return True

    async def claim_e2e_one_time_keys(
        self, query: Iterable[Tuple[str, str, str]]
    ) -> Tuple[
        Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]], List[Tuple[str, str, str]]
    ]:
        """Claim one time keys from application services.

        Users which are exclusively owned by an application service are sent a
        key claim request to check if the application service provides keys
        directly.

        Args:
            query: An iterable of tuples of (user ID, device ID, algorithm).

        Returns:
            A tuple of:
                An iterable of maps of user ID -> a map device ID -> a map of key ID -> JSON bytes.

                A copy of the input which has not been fulfilled (either because
                they are not appservice users or the appservice does not support
                providing OTKs).
        """
        services = self.store.get_app_services()

        # Partition the users by appservice.
        query_by_appservice: Dict[str, List[Tuple[str, str, str]]] = {}
        missing = []
        for user_id, device, algorithm in query:
            if not self.store.get_if_app_services_interested_in_user(user_id):
                missing.append((user_id, device, algorithm))
                continue

            # Find the associated appservice.
            for service in services:
                if service.is_exclusive_user(user_id):
                    query_by_appservice.setdefault(service.id, []).append(
                        (user_id, device, algorithm)
                    )
                    continue

        # Query each service in parallel.
        results = await make_deferred_yieldable(
            defer.DeferredList(
                [
                    run_in_background(
                        self.appservice_api.claim_client_keys,
                        # We know this must be an app service.
                        self.store.get_app_service_by_id(service_id),  # type: ignore[arg-type]
                        service_query,
                    )
                    for service_id, service_query in query_by_appservice.items()
                ],
                consumeErrors=True,
            )
        )

        # Patch together the results -- they are all independent (since they
        # require exclusive control over the users). They get returned as a list
        # and the caller combines them.
        claimed_keys: List[Dict[str, Dict[str, Dict[str, JsonDict]]]] = []
        for success, result in results:
            if success:
                claimed_keys.append(result[0])
                missing.extend(result[1])

        return claimed_keys, missing

    async def query_keys(
        self, query: Mapping[str, Optional[List[str]]]
    ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]:
        """Query application services for device keys.

        Users which are exclusively owned by an application service are queried
        for keys to check if the application service provides keys directly.

        Args:
            query: map from user_id to a list of devices to query

        Returns:
            A map from user_id -> device_id -> device details
        """
        services = self.store.get_app_services()

        # Partition the users by appservice.
        query_by_appservice: Dict[str, Dict[str, List[str]]] = {}
        for user_id, device_ids in query.items():
            if not self.store.get_if_app_services_interested_in_user(user_id):
                continue

            # Find the associated appservice.
            for service in services:
                if service.is_exclusive_user(user_id):
                    query_by_appservice.setdefault(service.id, {})[user_id] = (
                        device_ids or []
                    )
                    continue

        # Query each service in parallel.
        results = await make_deferred_yieldable(
            defer.DeferredList(
                [
                    run_in_background(
                        self.appservice_api.query_keys,
                        # We know this must be an app service.
                        self.store.get_app_service_by_id(service_id),  # type: ignore[arg-type]
                        service_query,
                    )
                    for service_id, service_query in query_by_appservice.items()
                ],
                consumeErrors=True,
            )
        )

        # Patch together the results -- they are all independent (since they
        # require exclusive control over the users). They get returned as a single
        # dictionary.
        key_queries: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
        for success, result in results:
            if success:
                key_queries.update(result)

        return key_queries
@ -1504,8 +1504,10 @@ class AuthHandler:
        )

        # delete pushers associated with this access token
        # XXX(quenting): This is only needed until the 'set_device_id_for_pushers'
        # background update completes.
        if token.token_id is not None:
            await self.hs.get_pusherpool().remove_pushers_by_access_token(
            await self.hs.get_pusherpool().remove_pushers_by_access_tokens(
                token.user_id, (token.token_id,)
            )

@ -1535,7 +1537,9 @@ class AuthHandler:
        )

        # delete pushers associated with the access tokens
        await self.hs.get_pusherpool().remove_pushers_by_access_token(
        # XXX(quenting): This is only needed until the 'set_device_id_for_pushers'
        # background update completes.
        await self.hs.get_pusherpool().remove_pushers_by_access_tokens(
            user_id, (token_id for _, token_id, _ in tokens_and_devices)
        )

@ -503,6 +503,8 @@ class DeviceHandler(DeviceWorkerHandler):
        else:
            raise

        await self.hs.get_pusherpool().remove_pushers_by_devices(user_id, device_ids)

        # Delete data specific to each device. Not optimised as it is not
        # considered as part of a critical path.
        for device_id in device_ids:
@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Mapping, Optional, Tuple

@ -53,6 +52,7 @@ class E2eKeysHandler:
        self.store = hs.get_datastores().main
        self.federation = hs.get_federation_client()
        self.device_handler = hs.get_device_handler()
        self._appservice_handler = hs.get_application_service_handler()
        self.is_mine = hs.is_mine
        self.clock = hs.get_clock()

@ -88,6 +88,13 @@ class E2eKeysHandler:
            max_count=10,
        )

        self._query_appservices_for_otks = (
            hs.config.experimental.msc3983_appservice_otk_claims
        )
        self._query_appservices_for_keys = (
            hs.config.experimental.msc3984_appservice_key_query
        )

    @trace
    @cancellable
    async def query_devices(

@ -493,6 +500,19 @@ class E2eKeysHandler:
                local_query, include_displaynames
            )

            # Check if the application services have any additional results.
            if self._query_appservices_for_keys:
                # Query the appservices for any keys.
                appservice_results = await self._appservice_handler.query_keys(query)

                # Merge results, overriding with what the appservice returned.
                for user_id, devices in appservice_results.get("device_keys", {}).items():
                    # Copy the appservice device info over the homeserver device info, but
                    # don't completely overwrite it.
                    results.setdefault(user_id, {}).update(devices)

                # TODO Handle cross-signing keys.

            # Build the result structure
            for user_id, device_keys in results.items():
                for device_id, device_info in device_keys.items():

@ -542,6 +562,42 @@ class E2eKeysHandler:

        return ret

    async def claim_local_one_time_keys(
        self, local_query: List[Tuple[str, str, str]]
    ) -> Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]]:
        """Claim one time keys for local users.

        1. Attempt to claim OTKs from the database.
        2. Ask application services if they provide OTKs.
        3. Attempt to fetch fallback keys from the database.

        Args:
            local_query: An iterable of tuples of (user ID, device ID, algorithm).

        Returns:
            An iterable of maps of user ID -> a map device ID -> a map of key ID -> JSON bytes.
        """

        otk_results, not_found = await self.store.claim_e2e_one_time_keys(local_query)

        # If the application services have not provided any keys via the C-S
        # API, query it directly for one-time keys.
        if self._query_appservices_for_otks:
            (
                appservice_results,
                not_found,
            ) = await self._appservice_handler.claim_e2e_one_time_keys(not_found)
        else:
            appservice_results = []

        # For each user that does not have a one-time keys available, see if
        # there is a fallback key.
        fallback_results = await self.store.claim_e2e_fallback_keys(not_found)

        # Return the results in order, each item from the input query should
        # only appear once in the combined list.
        return (otk_results, *appservice_results, fallback_results)

    @trace
    async def claim_one_time_keys(
        self, query: Dict[str, Dict[str, Dict[str, str]]], timeout: Optional[int]

@ -561,17 +617,18 @@ class E2eKeysHandler:
        set_tag("local_key_query", str(local_query))
        set_tag("remote_key_query", str(remote_queries))

        results = await self.store.claim_e2e_one_time_keys(local_query)
        results = await self.claim_local_one_time_keys(local_query)

        # A map of user ID -> device ID -> key ID -> key.
        json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
        for result in results:
            for user_id, device_keys in result.items():
                for device_id, keys in device_keys.items():
                    for key_id, key in keys.items():
                        json_result.setdefault(user_id, {})[device_id] = {key_id: key}

        # Remote failures.
        failures: Dict[str, JsonDict] = {}
        for user_id, device_keys in results.items():
            for device_id, keys in device_keys.items():
                for key_id, json_str in keys.items():
                    json_result.setdefault(user_id, {})[device_id] = {
                        key_id: json_decoder.decode(json_str)
                    }

        @trace
        async def claim_client_keys(destination: str) -> None:
@ -1949,27 +1949,25 @@ class FederationHandler:
            )
            for event in events:
                for attempt in itertools.count():
                    # We try a new destination on every iteration.
                    try:
                        await self._federation_event_handler.update_state_for_partial_state_event(
                            destination, event
                        )
                        break
                    except FederationPullAttemptBackoffError as exc:
                        # Log a warning about why we failed to process the event (the error message
                        # for `FederationPullAttemptBackoffError` is pretty good)
                        logger.warning("_sync_partial_state_room: %s", exc)
                        # We do not record a failed pull attempt when we backoff fetching a missing
                        # `prev_event` because not being able to fetch the `prev_events` just means
                        # we won't be able to de-outlier the pulled event. But we can still use an
                        # `outlier` in the state/auth chain for another event. So we shouldn't stop
                        # a downstream event from trying to pull it.
                        #
                        # This avoids a cascade of backoff for all events in the DAG downstream from
                        # one event backoff upstream.
                    except FederationError as e:
                        # TODO: We should `record_event_failed_pull_attempt` here,
                        # see https://github.com/matrix-org/synapse/issues/13700
                    while True:
                        try:
                            await self._federation_event_handler.update_state_for_partial_state_event(
                                destination, event
                            )
                            break
                        except FederationPullAttemptBackoffError as e:
                            # We are in the backoff period for one of the event's
                            # prev_events. Wait it out and try again after.
                            logger.warning(
                                "%s; waiting for %d ms...", e, e.retry_after_ms
                            )
                            await self.clock.sleep(e.retry_after_ms / 1000)

                    # Success, no need to try the rest of the destinations.
                    break
                except FederationError as e:
                    if attempt == len(destinations) - 1:
                        # We have tried every remote server for this event. Give up.
                        # TODO(faster_joins) giving up isn't the right thing to do

@ -1986,6 +1984,8 @@ class FederationHandler:
                        destination,
                        e,
                    )
                    # TODO: We should `record_event_failed_pull_attempt` here,
                    # see https://github.com/matrix-org/synapse/issues/13700
                    raise

                # Try the next remote server.
@ -140,6 +140,7 @@ class FederationEventHandler:
    """

    def __init__(self, hs: "HomeServer"):
        self._clock = hs.get_clock()
        self._store = hs.get_datastores().main
        self._storage_controllers = hs.get_storage_controllers()
        self._state_storage_controller = self._storage_controllers.state

@ -583,7 +584,7 @@ class FederationEventHandler:

        await self._check_event_auth(origin, event, context)
        if context.rejected:
            raise SynapseError(400, "Join event was rejected")
            raise SynapseError(403, "Join event was rejected")

        # the remote server is responsible for sending our join event to the rest
        # of the federation. Indeed, attempting to do so will result in problems

@ -1038,8 +1039,8 @@ class FederationEventHandler:

        Raises:
            FederationPullAttemptBackoffError if we are deliberately not attempting
                to pull the given event over federation because we've already done so
                recently and are backing off.
                to pull one of the given event's `prev_event`s over federation because
                we've already done so recently and are backing off.
            FederationError if we fail to get the state from the remote server after any
                missing `prev_event`s.
        """

@ -1053,13 +1054,22 @@ class FederationEventHandler:
        # If we've already recently attempted to pull this missing event, don't
        # try it again so soon. Since we have to fetch all of the prev_events, we can
        # bail early here if we find any to ignore.
        prevs_to_ignore = await self._store.get_event_ids_to_not_pull_from_backoff(
            room_id, missing_prevs
        prevs_with_pull_backoff = (
            await self._store.get_event_ids_to_not_pull_from_backoff(
                room_id, missing_prevs
            )
        )
        if len(prevs_to_ignore) > 0:
        if len(prevs_with_pull_backoff) > 0:
            raise FederationPullAttemptBackoffError(
                event_ids=prevs_to_ignore,
                message=f"While computing context for event={event_id}, not attempting to pull missing prev_event={prevs_to_ignore[0]} because we already tried to pull recently (backing off).",
                event_ids=prevs_with_pull_backoff.keys(),
                message=(
                    f"While computing context for event={event_id}, not attempting to "
                    f"pull missing prev_events={list(prevs_with_pull_backoff.keys())} "
                    "because we already tried to pull recently (backing off)."
                ),
                retry_after_ms=(
                    max(prevs_with_pull_backoff.values()) - self._clock.time_msec()
                ),
            )

        if not missing_prevs:
@ -1239,6 +1239,7 @@ class OidcProvider:
            grandfather_existing_users,
            extra_attributes,
            auth_provider_session_id=sid,
            registration_enabled=self._config.enable_registration,
        )

    def _remote_id_from_userinfo(self, userinfo: UserInfo) -> str:

@ -1023,11 +1023,11 @@ class RegistrationHandler:
        user_tuple = await self.store.get_user_by_access_token(token)
        # The token better still exist.
        assert user_tuple
        token_id = user_tuple.token_id
        device_id = user_tuple.device_id

        await self.pusher_pool.add_or_update_pusher(
            user_id=user_id,
            access_token=token_id,
            device_id=device_id,
            kind="email",
            app_id="m.email",
            app_display_name="Email Notifications",
@ -830,64 +830,69 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
        # `is_partial_state_room` also indicates whether `partial_state_before_join` is
        # partial.

        # TODO: Refactor into dictionary of explicitly allowed transitions
        # between old and new state, with specific error messages for some
        # transitions and generic otherwise
        old_state_id = partial_state_before_join.get(
            (EventTypes.Member, target.to_string())
        )
        if old_state_id:
            old_state = await self.store.get_event(old_state_id, allow_none=True)
            old_membership = old_state.content.get("membership") if old_state else None
            if action == "unban" and old_membership != "ban":
                raise SynapseError(
                    403,
                    "Cannot unban user who was not banned"
                    " (membership=%s)" % old_membership,
                    errcode=Codes.BAD_STATE,
                )
            if old_membership == "ban" and action not in ["ban", "unban", "leave"]:
                raise SynapseError(
                    403,
                    "Cannot %s user who was banned" % (action,),
                    errcode=Codes.BAD_STATE,
                )

            if old_state:
                same_content = content == old_state.content
                same_membership = old_membership == effective_membership_state
                same_sender = requester.user.to_string() == old_state.sender
                if same_sender and same_membership and same_content:
                    # duplicate event.
                    # we know it was persisted, so must have a stream ordering.
                    assert old_state.internal_metadata.stream_ordering
                    return (
                        old_state.event_id,
                        old_state.internal_metadata.stream_ordering,
                    )

            if old_membership in ["ban", "leave"] and action == "kick":
                raise AuthError(403, "The target user is not in the room")

            # we don't allow people to reject invites to the server notice
            # room, but they can leave it once they are joined.
            if (
                old_membership == Membership.INVITE
                and effective_membership_state == Membership.LEAVE
            ):
                is_blocked = await self.store.is_server_notice_room(room_id)
                if is_blocked:
                    raise SynapseError(
                        HTTPStatus.FORBIDDEN,
                        "You cannot reject this invite",
                        errcode=Codes.CANNOT_LEAVE_SERVER_NOTICE_ROOM,
                    )
        else:
            if action == "kick":
                raise AuthError(403, "The target user is not in the room")

        is_host_in_room = await self._is_host_in_room(partial_state_before_join)

        # if we are not in the room, we won't have the current state
        if is_host_in_room:
            # TODO: Refactor into dictionary of explicitly allowed transitions
            # between old and new state, with specific error messages for some
            # transitions and generic otherwise
            old_state_id = partial_state_before_join.get(
                (EventTypes.Member, target.to_string())
            )

            if old_state_id:
                old_state = await self.store.get_event(old_state_id, allow_none=True)
                old_membership = (
                    old_state.content.get("membership") if old_state else None
                )
                if action == "unban" and old_membership != "ban":
                    raise SynapseError(
                        403,
                        "Cannot unban user who was not banned"
                        " (membership=%s)" % old_membership,
                        errcode=Codes.BAD_STATE,
                    )
                if old_membership == "ban" and action not in ["ban", "unban", "leave"]:
                    raise SynapseError(
                        403,
                        "Cannot %s user who was banned" % (action,),
                        errcode=Codes.BAD_STATE,
                    )

                if old_state:
                    same_content = content == old_state.content
                    same_membership = old_membership == effective_membership_state
                    same_sender = requester.user.to_string() == old_state.sender
                    if same_sender and same_membership and same_content:
                        # duplicate event.
                        # we know it was persisted, so must have a stream ordering.
                        assert old_state.internal_metadata.stream_ordering
                        return (
                            old_state.event_id,
                            old_state.internal_metadata.stream_ordering,
                        )

                if old_membership in ["ban", "leave"] and action == "kick":
                    raise AuthError(403, "The target user is not in the room")

                # we don't allow people to reject invites to the server notice
                # room, but they can leave it once they are joined.
                if (
                    old_membership == Membership.INVITE
                    and effective_membership_state == Membership.LEAVE
                ):
                    is_blocked = await self.store.is_server_notice_room(room_id)
                    if is_blocked:
                        raise SynapseError(
                            HTTPStatus.FORBIDDEN,
                            "You cannot reject this invite",
                            errcode=Codes.CANNOT_LEAVE_SERVER_NOTICE_ROOM,
                        )
            else:
                if action == "kick":
                    raise AuthError(403, "The target user is not in the room")

        if effective_membership_state == Membership.JOIN:
            if requester.is_guest:
                guest_can_join = await self._can_guest_join(partial_state_before_join)
@ -383,6 +383,7 @@ class SsoHandler:
        grandfather_existing_users: Callable[[], Awaitable[Optional[str]]],
        extra_login_attributes: Optional[JsonDict] = None,
        auth_provider_session_id: Optional[str] = None,
        registration_enabled: bool = True,
    ) -> None:
        """
        Given an SSO ID, retrieve the user ID for it and possibly register the user.

@ -435,6 +436,10 @@ class SsoHandler:

            auth_provider_session_id: An optional session ID from the IdP.

            registration_enabled: An optional boolean to enable/disable automatic
                registrations of new users. If false and the user does not exist then the
                flow is aborted. Defaults to true.

        Raises:
            MappingException if there was a problem mapping the response to a user.
            RedirectException: if the mapping provider needs to redirect the user

@ -462,8 +467,16 @@ class SsoHandler:
                auth_provider_id, remote_user_id, user_id
            )

        # Otherwise, generate a new user.
        if not user_id:
        if not user_id and not registration_enabled:
            logger.info(
                "User does not exist and registration are disabled for IdP '%s' and remote_user_id '%s'",
                auth_provider_id,
                remote_user_id,
            )
            raise MappingException(
                "User does not exist and registrations are disabled"
            )
        elif not user_id:  # Otherwise, generate a new user.
            attributes = await self._call_attribute_mapper(sso_to_matrix_id_mapper)

            next_step_url = self._get_url_for_next_new_user_step(
@ -943,6 +943,8 @@ class SyncHandler:

        timeline_state = {}

        # Membership events to fetch that can be found in the room state, or in
        # the case of partial state rooms, the auth events of timeline events.
        members_to_fetch = set()
        first_event_by_sender_map = {}
        for event in batch.events:

@ -964,9 +966,19 @@ class SyncHandler:
            # (if we are) to fix https://github.com/vector-im/riot-web/issues/7209
            # We only need apply this on full state syncs given we disabled
            # LL for incr syncs in #3840.
            members_to_fetch.add(sync_config.user.to_string())

        state_filter = StateFilter.from_lazy_load_member_list(members_to_fetch)
            # We don't insert ourselves into `members_to_fetch`, because in some
            # rare cases (an empty event batch with a now_token after the user's
            # leave in a partial state room which another local user has
            # joined), the room state will be missing our membership and there
            # is no guarantee that our membership will be in the auth events of
            # timeline events when the room is partial stated.
            state_filter = StateFilter.from_lazy_load_member_list(
                members_to_fetch.union((sync_config.user.to_string(),))
            )
        else:
            state_filter = StateFilter.from_lazy_load_member_list(
                members_to_fetch
            )

        # We are happy to use partial state to compute the `/sync` response.
        # Since partial state may not include the lazy-loaded memberships we

@ -988,7 +1000,9 @@ class SyncHandler:
        # sync's timeline and the start of the current sync's timeline.
        # See the docstring above for details.
        state_ids: StateMap[str]

        # We need to know whether the state we fetch may be partial, so check
        # whether the room is partial stated *before* fetching it.
        is_partial_state_room = await self.store.is_partial_state_room(room_id)
        if full_state:
            if batch:
                state_at_timeline_end = (

@ -1119,7 +1133,7 @@ class SyncHandler:
        # If we only have partial state for the room, `state_ids` may be missing the
        # memberships we wanted. We attempt to find some by digging through the auth
        # events of timeline events.
        if lazy_load_members and await self.store.is_partial_state_room(room_id):
        if lazy_load_members and is_partial_state_room:
            assert members_to_fetch is not None
            assert first_event_by_sender_map is not None
@ -52,6 +52,11 @@ FEDERATION_TIMEOUT = 60 * 1000
FEDERATION_PING_INTERVAL = 40 * 1000


# How long to remember a typing notification happened in a room before
# forgetting about it.
FORGET_TIMEOUT = 10 * 60 * 1000


class FollowerTypingHandler:
    """A typing handler on a different process than the writer that is updated
    via replication.

@ -83,7 +88,10 @@ class FollowerTypingHandler:
        self.wheel_timer: WheelTimer[RoomMember] = WheelTimer(bucket_size=5000)
        self._latest_room_serial = 0

        self._rooms_updated: Set[str] = set()

        self.clock.looping_call(self._handle_timeouts, 5000)
        self.clock.looping_call(self._prune_old_typing, FORGET_TIMEOUT)

    def _reset(self) -> None:
        """Reset the typing handler's data caches."""

@ -92,6 +100,8 @@ class FollowerTypingHandler:
        # map room IDs to sets of users currently typing
        self._room_typing = {}

        self._rooms_updated = set()

        self._member_last_federation_poke = {}
        self.wheel_timer = WheelTimer(bucket_size=5000)

@ -178,6 +188,7 @@ class FollowerTypingHandler:
        prev_typing = self._room_typing.get(row.room_id, set())
        now_typing = set(row.user_ids)
        self._room_typing[row.room_id] = now_typing
        self._rooms_updated.add(row.room_id)

        if self.federation:
            run_as_background_process(

@ -209,6 +220,19 @@ class FollowerTypingHandler:
    def get_current_token(self) -> int:
        return self._latest_room_serial

    def _prune_old_typing(self) -> None:
        """Prune rooms that haven't seen typing updates since last time.

        This is safe to do as clients should time out old typing notifications.
        """
        stale_rooms = self._room_serials.keys() - self._rooms_updated

        for room_id in stale_rooms:
            self._room_serials.pop(room_id, None)
            self._room_typing.pop(room_id, None)

        self._rooms_updated = set()


class TypingWriterHandler(FollowerTypingHandler):
    def __init__(self, hs: "HomeServer"):

@ -388,6 +412,7 @@ class TypingWriterHandler(FollowerTypingHandler):
        self._typing_stream_change_cache.entity_has_changed(
            member.room_id, self._latest_room_serial
        )
        self._rooms_updated.add(member.room_id)

        self.notifier.on_new_event(
            StreamKeyType.TYPING, self._latest_room_serial, rooms=[member.room_id]
@ -966,3 +966,42 @@ class InsecureInterceptableContextFactory(ssl.ContextFactory):

    def creatorForNetloc(self, hostname: bytes, port: int) -> IOpenSSLContextFactory:
        return self


def is_unknown_endpoint(
    e: HttpResponseException, synapse_error: Optional[SynapseError] = None
) -> bool:
    """
    Returns true if the response was due to an endpoint being unimplemented.

    Args:
        e: The error response received from the remote server.
        synapse_error: The above error converted to a SynapseError. This is
            automatically generated if not provided.

    """
    if synapse_error is None:
        synapse_error = e.to_synapse_error()

    # Matrix v1.6 specifies that servers should return a 404 or 405 with an errcode
    # of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
    # to an unknown method, respectively.
    #
    # Older versions of servers don't return proper errors, so be graceful. But,
    # also handle that some endpoints truly do return 404 errors.
    return (
        # 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
        (e.code == 404 or e.code == 405)
        and (
            # Consider empty body or non-JSON bodies to be unrecognised (matches
            # older Dendrites & Conduits).
            not e.response
            or not e.response.startswith(b"{")
            # The proper response JSON with M_UNRECOGNIZED errcode.
            or synapse_error.errcode == Codes.UNRECOGNIZED
        )
    ) or (
        # Older Synapses returned a 400 error.
        e.code == 400
        and synapse_error.errcode == Codes.UNRECOGNIZED
    )
@ -778,17 +778,13 @@ def parse_json_object_from_request(
Model = TypeVar("Model", bound=BaseModel)


def parse_and_validate_json_object_from_request(
    request: Request, model_type: Type[Model]
) -> Model:
    """Parse a JSON object from the body of a twisted HTTP request, then deserialise and
    validate using the given pydantic model.
def validate_json_object(content: JsonDict, model_type: Type[Model]) -> Model:
    """Validate a deserialized JSON object using the given pydantic model.

    Raises:
        SynapseError if the request body couldn't be decoded as JSON or
            if it wasn't a JSON object.
    """
    content = parse_json_object_from_request(request, allow_empty_body=False)
    try:
        instance = model_type.parse_obj(content)
    except ValidationError as e:

@ -811,6 +807,20 @@ def parse_and_validate_json_object_from_request(
    return instance


def parse_and_validate_json_object_from_request(
    request: Request, model_type: Type[Model]
) -> Model:
    """Parse a JSON object from the body of a twisted HTTP request, then deserialise and
    validate using the given pydantic model.

    Raises:
        SynapseError if the request body couldn't be decoded as JSON or
            if it wasn't a JSON object.
    """
    content = parse_json_object_from_request(request, allow_empty_body=False)
    return validate_json_object(content, model_type)


def assert_params_in_dict(body: JsonDict, required: Iterable[str]) -> None:
    absent = []
    for k in required:
@ -19,6 +19,7 @@ from typing import TYPE_CHECKING, Any, Generator, Optional, Tuple, Union
import attr
from zope.interface import implementer

from twisted.internet.address import UNIXAddress
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IAddress, IReactorTime
from twisted.python.failure import Failure

@ -257,7 +258,7 @@ class SynapseRequest(Request):
            request_id,
            request=ContextRequest(
                request_id=request_id,
                ip_address=self.getClientAddress().host,
                ip_address=self.get_client_ip_if_available(),
                site_tag=self.synapse_site.site_tag,
                # The requester is going to be unknown at this point.
                requester=None,

@ -414,7 +415,7 @@ class SynapseRequest(Request):

        self.synapse_site.access_logger.debug(
            "%s - %s - Received request: %s %s",
            self.getClientAddress().host,
            self.get_client_ip_if_available(),
            self.synapse_site.site_tag,
            self.get_method(),
            self.get_redacted_uri(),

@ -462,7 +463,7 @@ class SynapseRequest(Request):
            "%s - %s - {%s}"
            " Processed request: %.3fsec/%.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)"
            ' %sB %s "%s %s %s" "%s" [%d dbevts]',
            self.getClientAddress().host,
            self.get_client_ip_if_available(),
            self.synapse_site.site_tag,
            requester,
            processing_time,

@ -500,6 +501,26 @@ class SynapseRequest(Request):

        return True

    def get_client_ip_if_available(self) -> str:
        """Logging helper. Return something useful when a client IP is not retrievable
        from a unix socket.

        In practice, this returns the socket file path on a SynapseRequest if using a
        unix socket and the normal IP address for TCP sockets.

        """
        # getClientAddress().host returns a proper IP address for a TCP socket. But
        # unix sockets have no concept of IP addresses or ports and return a
        # UNIXAddress containing a 'None' value. In order to get something usable for
        # logs (where this is used) get the unix socket file. getHost() returns a
        # UNIXAddress containing a value of the socket file and has an instance
        # variable of 'name' encoded as a byte string containing the path we want.
        # Decode to utf-8 so it looks nice.
        if isinstance(self.getClientAddress(), UNIXAddress):
            return self.getHost().name.decode("utf-8")
        else:
            return self.getClientAddress().host


class XForwardedForRequest(SynapseRequest):
    """Request object which honours proxy headers
@ -103,7 +103,7 @@ class PusherConfig:

    id: Optional[str]
    user_name: str
    access_token: Optional[int]

    profile_tag: str
    kind: str
    app_id: str

@ -119,6 +119,11 @@ class PusherConfig:
    enabled: bool
    device_id: Optional[str]

    # XXX(quenting): The access_token is not persisted anymore for new pushers, but we
    # keep it when reading from the database, so that we don't get stale pushers
    # while the "set_device_id_for_pushers" background update is running.
    access_token: Optional[int]

    def as_dict(self) -> Dict[str, Any]:
        """Information that can be retrieved about a pusher after creation."""
        return {

@ -149,7 +149,7 @@ class Mailer:
        await self.send_email(
            email_address,
            self.email_subjects.password_reset
            % {"server_name": self.hs.config.server.server_name},
            % {"server_name": self.hs.config.server.server_name, "app": self.app_name},
            template_vars,
        )
@ -25,7 +25,7 @@ from synapse.metrics.background_process_metrics import (
from synapse.push import Pusher, PusherConfig, PusherConfigException
from synapse.push.pusher import PusherFactory
from synapse.replication.http.push import ReplicationRemovePusherRestServlet
from synapse.types import JsonDict, RoomStreamToken
from synapse.types import JsonDict, RoomStreamToken, StrCollection
from synapse.util.async_helpers import concurrently_execute
from synapse.util.threepids import canonicalise_email

@ -97,7 +97,6 @@ class PusherPool:
    async def add_or_update_pusher(
        self,
        user_id: str,
        access_token: Optional[int],
        kind: str,
        app_id: str,
        app_display_name: str,

@ -128,6 +127,22 @@ class PusherPool:
        # stream ordering, so it will process pushes from this point onwards.
        last_stream_ordering = self.store.get_room_max_stream_ordering()

        # Before we actually persist the pusher, we check if the user already has one
        # for this app ID and pushkey. If so, we want to keep the access token and
        # device ID in place, since this could be one device modifying
        # (e.g. enabling/disabling) another device's pusher.
        # XXX(quenting): Even though we're not persisting the access_token_id for new
        # pushers anymore, we still need to copy existing access_token_ids over when
        # updating a pusher, in case the "set_device_id_for_pushers" background update
        # hasn't run yet.
        access_token_id = None
        existing_config = await self._get_pusher_config_for_user_by_app_id_and_pushkey(
            user_id, app_id, pushkey
        )
        if existing_config:
            device_id = existing_config.device_id
            access_token_id = existing_config.access_token

        # we try to create the pusher just to validate the config: it
        # will then get pulled out of the database,
        # recreated, added and started: this means we have only one

@ -136,7 +151,6 @@ class PusherPool:
            PusherConfig(
                id=None,
                user_name=user_id,
                access_token=access_token,
                profile_tag=profile_tag,
                kind=kind,
                app_id=app_id,

@ -151,23 +165,12 @@ class PusherPool:
                failing_since=None,
                enabled=enabled,
                device_id=device_id,
                access_token=access_token_id,
            )
        )

        # Before we actually persist the pusher, we check if the user already has one
        # this app ID and pushkey. If so, we want to keep the access token and device ID
        # in place, since this could be one device modifying (e.g. enabling/disabling)
        # another device's pusher.
        existing_config = await self._get_pusher_config_for_user_by_app_id_and_pushkey(
            user_id, app_id, pushkey
        )
        if existing_config:
            access_token = existing_config.access_token
            device_id = existing_config.device_id

        await self.store.add_pusher(
            user_id=user_id,
            access_token=access_token,
            kind=kind,
            app_id=app_id,
            app_display_name=app_display_name,

@ -180,6 +183,7 @@ class PusherPool:
            profile_tag=profile_tag,
            enabled=enabled,
            device_id=device_id,
            access_token_id=access_token_id,
        )
        pusher = await self.process_pusher_change_by_id(app_id, pushkey, user_id)

@ -199,7 +203,7 @@ class PusherPool:
        )
        await self.remove_pusher(p.app_id, p.pushkey, p.user_name)

    async def remove_pushers_by_access_token(
    async def remove_pushers_by_access_tokens(
        self, user_id: str, access_tokens: Iterable[int]
    ) -> None:
        """Remove the pushers for a given user corresponding to a set of

@ -209,6 +213,8 @@ class PusherPool:
            user_id: user to remove pushers for
            access_tokens: access token *ids* to remove pushers for
        """
        # XXX(quenting): This is only needed until the "set_device_id_for_pushers"
        # background update finishes
        tokens = set(access_tokens)
        for p in await self.store.get_pushers_by_user_id(user_id):
            if p.access_token in tokens:

@ -220,6 +226,26 @@ class PusherPool:
            )
            await self.remove_pusher(p.app_id, p.pushkey, p.user_name)

    async def remove_pushers_by_devices(
        self, user_id: str, devices: StrCollection
    ) -> None:
        """Remove the pushers for a given user corresponding to a set of devices

        Args:
            user_id: user to remove pushers for
            devices: device IDs to remove pushers for
        """
        device_ids = set(devices)
        for p in await self.store.get_pushers_by_user_id(user_id):
            if p.device_id in device_ids:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p.app_id,
                    p.pushkey,
                    p.user_name,
                )
                await self.remove_pusher(p.app_id, p.pushkey, p.user_name)

    def on_new_notifications(self, max_token: RoomStreamToken) -> None:
        if not self.pushers:
            # nothing to do here.
@ -345,7 +345,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
            _outgoing_request_counter.labels(cls.NAME, 200).inc()

            # Wait on any streams that the remote may have written to.
            for stream_name, position in result.get(
            for stream_name, position in result.pop(
                _STREAM_POSITION_KEY, {}
            ).items():
                await replication.wait_for_stream_position(
@ -14,36 +14,7 @@
"""This module contains the implementation of both the client and server
protocols.

The basic structure of the protocol is line based, where the initial word of
each line specifies the command. The rest of the line is parsed based on the
command. For example, the `RDATA` command is defined as::

    RDATA <stream_name> <token> <row_json>

(Note that `<row_json>` may contain spaces, but cannot contain newlines.)

Blank lines are ignored.

# Example

An example interaction is shown below. Each line is prefixed with '>' or '<' to
indicate which side is sending; these are *not* included on the wire::

    * connection established *
    > SERVER localhost:8823
    > PING 1490197665618
    < NAME synapse.app.appservice
    < PING 1490197665618
    < REPLICATE
    > POSITION events 1
    > POSITION backfill 1
    > POSITION caches 1
    > RDATA caches 2 ["get_user_by_id",["@01register-user:localhost:8823"],1490197670513]
    > RDATA events 14 ["ev", ["$149019767112vOHxz:localhost:8823",
      "!AFDCvgApUmpdfVjIXm:localhost:8823","m.room.guest_access","",null]]
    < PING 1490197675618
    > ERROR server stopping
    * connection closed by server *
An explanation of this protocol is available in docs/tcp_replication.md
"""
import fcntl
import logging
@ -152,8 +152,8 @@ class Stream:
        Returns:
            A triplet `(updates, new_last_token, limited)`, where `updates` is
            a list of `(token, row)` entries, `new_last_token` is the new
            position in stream, and `limited` is whether there are more updates
            to fetch.
            position in stream (ie the highest token returned in the updates),
            and `limited` is whether there are more updates to fetch.
        """
        current_token = self.current_token(self.local_instance_name)
        updates, current_token, limited = await self.get_updates_since(
@ -15,5 +15,5 @@
    </g>
  </g>
</svg>
<p>An open network for secure, decentralized communication.<br>© 2022 The Matrix.org Foundation C.I.C.</p>
</footer>
<p>An open network for secure, decentralized communication.<br>© 2023 The Matrix.org Foundation C.I.C.</p>
</footer>
@ -138,8 +138,7 @@ class ClientRestResource(JsonResource):
        capabilities.register_servlets(hs, client_resource)
        account_validity.register_servlets(hs, client_resource)
        relations.register_servlets(hs, client_resource)
        if is_main_process:
            password_policy.register_servlets(hs, client_resource)
        password_policy.register_servlets(hs, client_resource)
        knock.register_servlets(hs, client_resource)
        appservice_ping.register_servlets(hs, client_resource)
@ -425,7 +425,6 @@ class UserRestServletV2(RestServlet):
        ):
            await self.pusher_pool.add_or_update_pusher(
                user_id=user_id,
                access_token=None,
                kind="email",
                app_id="m.email",
                app_display_name="Email Notifications",
@ -43,19 +43,22 @@ def client_patterns(
    Returns:
        An iterable of patterns.
    """
    patterns = []
    versions = []

    if unstable:
        unstable_prefix = CLIENT_API_PREFIX + "/unstable"
        patterns.append(re.compile("^" + unstable_prefix + path_regex))
    if v1:
        v1_prefix = CLIENT_API_PREFIX + "/api/v1"
        patterns.append(re.compile("^" + v1_prefix + path_regex))
    for release in releases:
        new_prefix = CLIENT_API_PREFIX + f"/{release}"
        patterns.append(re.compile("^" + new_prefix + path_regex))
        versions.append("api/v1")
    versions.extend(releases)
    if unstable:
        versions.append("unstable")

    return patterns
    if len(versions) == 1:
        versions_str = versions[0]
    elif len(versions) > 1:
        versions_str = "(" + "|".join(versions) + ")"
    else:
        raise RuntimeError("Must have at least one version for a URL")

    return [re.compile("^" + CLIENT_API_PREFIX + "/" + versions_str + path_regex)]


def set_timeline_upper_limit(filter_json: JsonDict, filter_timeline_limit: int) -> None:
@ -576,6 +576,9 @@ class AddThreepidMsisdnSubmitTokenServlet(RestServlet):

class ThreepidRestServlet(RestServlet):
    PATTERNS = client_patterns("/account/3pid$")
    # This is used as a proxy for all the 3pid endpoints.

    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -834,6 +837,7 @@ def assert_valid_next_link(hs: "HomeServer", next_link: str) -> None:

class WhoamiRestServlet(RestServlet):
    PATTERNS = client_patterns("/account/whoami$")
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -38,6 +38,7 @@ class AccountDataServlet(RestServlet):
    PATTERNS = client_patterns(
        "/user/(?P<user_id>[^/]*)/account_data/(?P<account_data_type>[^/]*)"
    )
    CATEGORY = "Account data requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -136,6 +137,7 @@ class RoomAccountDataServlet(RestServlet):
        "/rooms/(?P<room_id>[^/]*)"
        "/account_data/(?P<account_data_type>[^/]*)"
    )
    CATEGORY = "Account data requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -40,6 +40,7 @@ logger = logging.getLogger(__name__)

class DevicesRestServlet(RestServlet):
    PATTERNS = client_patterns("/devices$")
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -123,6 +124,7 @@ class DeleteDevicesRestServlet(RestServlet):

class DeviceRestServlet(RestServlet):
    PATTERNS = client_patterns("/devices/(?P<device_id>[^/]*)$")
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -33,6 +33,7 @@ logger = logging.getLogger(__name__)

class EventStreamRestServlet(RestServlet):
    PATTERNS = client_patterns("/events$", v1=True)
    CATEGORY = "Sync requests"

    DEFAULT_LONGPOLL_TIME_MS = 30000

@ -76,6 +77,7 @@ class EventStreamRestServlet(RestServlet):

class EventRestServlet(RestServlet):
    PATTERNS = client_patterns("/events/(?P<event_id>[^/]*)$", v1=True)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -31,6 +31,7 @@ logger = logging.getLogger(__name__)

class GetFilterRestServlet(RestServlet):
    PATTERNS = client_patterns("/user/(?P<user_id>[^/]*)/filter/(?P<filter_id>[^/]*)")
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -69,6 +70,7 @@ class GetFilterRestServlet(RestServlet):

class CreateFilterRestServlet(RestServlet):
    PATTERNS = client_patterns("/user/(?P<user_id>[^/]*)/filter")
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -28,6 +28,7 @@ if TYPE_CHECKING:
|
||||
# TODO: Needs unit testing
|
||||
class InitialSyncRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns("/initialSync$", v1=True)
|
||||
CATEGORY = "Sync requests"
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
|
@ -89,6 +89,7 @@ class KeyUploadServlet(RestServlet):
    """

    PATTERNS = client_patterns("/keys/upload(/(?P<device_id>[^/]+))?$")
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -182,6 +183,7 @@ class KeyQueryServlet(RestServlet):
    """

    PATTERNS = client_patterns("/keys/query$")
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -225,6 +227,7 @@ class KeyChangesServlet(RestServlet):
    """

    PATTERNS = client_patterns("/keys/changes$")
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -274,6 +277,7 @@ class OneTimeKeyServlet(RestServlet):
    """

    PATTERNS = client_patterns("/keys/claim$")
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -40,6 +40,7 @@ class KnockRoomAliasServlet(RestServlet):
    """

    PATTERNS = client_patterns("/knock/(?P<room_identifier>[^/]*)")
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -72,6 +72,8 @@ class LoginResponse(TypedDict, total=False):

class LoginRestServlet(RestServlet):
    PATTERNS = client_patterns("/login$", v1=True)
    CATEGORY = "Registration/login requests"

    CAS_TYPE = "m.login.cas"
    SSO_TYPE = "m.login.sso"
    TOKEN_TYPE = "m.login.token"

@ -537,6 +539,7 @@ def _get_auth_flow_dict_for_idp(idp: SsoIdentityProvider) -> JsonDict:

class RefreshTokenServlet(RestServlet):
    PATTERNS = client_patterns("/refresh$")
    CATEGORY = "Registration/login requests"

    def __init__(self, hs: "HomeServer"):
        self._auth_handler = hs.get_auth_handler()

@ -590,6 +593,7 @@ class SsoRedirectServlet(RestServlet):
            + "/(r0|v3)/login/sso/redirect/(?P<idp_id>[A-Za-z0-9_.~-]+)$"
        )
    ]
    CATEGORY = "SSO requests needed for all SSO providers"

    def __init__(self, hs: "HomeServer"):
        # make sure that the relevant handlers are instantiated, so that they

@ -31,6 +31,7 @@ logger = logging.getLogger(__name__)

class PasswordPolicyServlet(RestServlet):
    PATTERNS = client_patterns("/password_policy$")
    CATEGORY = "Registration/login requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -33,6 +33,7 @@ logger = logging.getLogger(__name__)

class PresenceStatusRestServlet(RestServlet):
    PATTERNS = client_patterns("/presence/(?P<user_id>[^/]*)/status", v1=True)
    CATEGORY = "Presence requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -29,6 +29,7 @@ if TYPE_CHECKING:

class ProfileDisplaynameRestServlet(RestServlet):
    PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)/displayname", v1=True)
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -86,6 +87,7 @@ class ProfileDisplaynameRestServlet(RestServlet):

class ProfileAvatarURLRestServlet(RestServlet):
    PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)/avatar_url", v1=True)
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -142,6 +144,7 @@ class ProfileAvatarURLRestServlet(RestServlet):

class ProfileRestServlet(RestServlet):
    PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)", v1=True)
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -44,6 +44,9 @@ class PushRuleRestServlet(RestServlet):
        "Unrecognised request: You probably wanted a trailing slash"
    )

    WORKERS_DENIED_METHODS = ["PUT", "DELETE"]
    CATEGORY = "Push rule requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
        self.auth = hs.get_auth()
@ -126,7 +126,6 @@ class PushersSetRestServlet(RestServlet):
        try:
            await self.pusher_pool.add_or_update_pusher(
                user_id=user.to_string(),
                access_token=requester.access_token_id,
                kind=content["kind"],
                app_id=content["app_id"],
                app_display_name=content["app_display_name"],
@ -31,6 +31,7 @@ logger = logging.getLogger(__name__)

class ReadMarkerRestServlet(RestServlet):
    PATTERNS = client_patterns("/rooms/(?P<room_id>[^/]*)/read_markers$")
    CATEGORY = "Receipts requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -36,6 +36,7 @@ class ReceiptRestServlet(RestServlet):
        "/receipt/(?P<receipt_type>[^/]*)"
        "/(?P<event_id>[^/]*)$"
    )
    CATEGORY = "Receipts requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -367,6 +367,7 @@ class RegistrationTokenValidityRestServlet(RestServlet):
        f"/register/{LoginType.REGISTRATION_TOKEN}/validity",
        releases=("v1",),
    )
    CATEGORY = "Registration/login requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -395,6 +396,7 @@ class RegistrationTokenValidityRestServlet(RestServlet):

class RegisterRestServlet(RestServlet):
    PATTERNS = client_patterns("/register$")
    CATEGORY = "Registration/login requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -42,6 +42,7 @@ class RelationPaginationServlet(RestServlet):
        "(/(?P<relation_type>[^/]*)(/(?P<event_type>[^/]*))?)?$",
        releases=("v1",),
    )
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -84,6 +85,7 @@ class RelationPaginationServlet(RestServlet):

class ThreadsServlet(RestServlet):
    PATTERNS = (re.compile("^/_matrix/client/v1/rooms/(?P<room_id>[^/]*)/threads"),)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -140,7 +140,7 @@ class TransactionRestServlet(RestServlet):


class RoomCreateRestServlet(TransactionRestServlet):
    # No PATTERN; we have custom dispatch rules here
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)

@ -180,6 +180,8 @@ class RoomCreateRestServlet(TransactionRestServlet):

# TODO: Needs unit testing for generic events
class RoomStateEventRestServlet(RestServlet):
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
        self.event_creation_handler = hs.get_event_creation_handler()

@ -323,6 +325,8 @@ class RoomStateEventRestServlet(RestServlet):

# TODO: Needs unit testing for generic events + feedback
class RoomSendEventRestServlet(TransactionRestServlet):
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.event_creation_handler = hs.get_event_creation_handler()

@ -402,6 +406,8 @@ class RoomSendEventRestServlet(TransactionRestServlet):

# TODO: Needs unit testing for room ID + alias joins
class JoinRoomAliasServlet(ResolveRoomIdMixin, TransactionRestServlet):
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        super(ResolveRoomIdMixin, self).__init__(hs)  # ensure the Mixin is set up

@ -464,6 +470,7 @@ class JoinRoomAliasServlet(ResolveRoomIdMixin, TransactionRestServlet):
# TODO: Needs unit testing
class PublicRoomListRestServlet(RestServlet):
    PATTERNS = client_patterns("/publicRooms$", v1=True)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -582,6 +589,7 @@ class PublicRoomListRestServlet(RestServlet):
# TODO: Needs unit testing
class RoomMemberListRestServlet(RestServlet):
    PATTERNS = client_patterns("/rooms/(?P<room_id>[^/]*)/members$", v1=True)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -637,6 +645,7 @@ class RoomMemberListRestServlet(RestServlet):
# except it does custom AS logic and has a simpler return format
class JoinedRoomMemberListRestServlet(RestServlet):
    PATTERNS = client_patterns("/rooms/(?P<room_id>[^/]*)/joined_members$", v1=True)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -658,6 +667,10 @@ class JoinedRoomMemberListRestServlet(RestServlet):
# TODO: Needs better unit testing
class RoomMessageListRestServlet(RestServlet):
    PATTERNS = client_patterns("/rooms/(?P<room_id>[^/]*)/messages$", v1=True)
    # TODO The routing information should be exposed programmatically.
    # I want to do this but for now I felt bad about leaving this without
    # at least a visible warning on it.
    CATEGORY = "Client API requests (ALL FOR SAME ROOM MUST GO TO SAME WORKER)"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -724,6 +737,7 @@ class RoomMessageListRestServlet(RestServlet):
# TODO: Needs unit testing
class RoomStateRestServlet(RestServlet):
    PATTERNS = client_patterns("/rooms/(?P<room_id>[^/]*)/state$", v1=True)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -746,6 +760,7 @@
# TODO: Needs unit testing
class RoomInitialSyncRestServlet(RestServlet):
    PATTERNS = client_patterns("/rooms/(?P<room_id>[^/]*)/initialSync$", v1=True)
    CATEGORY = "Sync requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -770,6 +785,7 @@ class RoomEventServlet(RestServlet):
    PATTERNS = client_patterns(
        "/rooms/(?P<room_id>[^/]*)/event/(?P<event_id>[^/]*)$", v1=True
    )
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -862,6 +878,7 @@ class RoomEventContextServlet(RestServlet):
    PATTERNS = client_patterns(
        "/rooms/(?P<room_id>[^/]*)/context/(?P<event_id>[^/]*)$", v1=True
    )
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
@ -962,6 +979,8 @@ class RoomForgetRestServlet(TransactionRestServlet):

# TODO: Needs unit testing
class RoomMembershipRestServlet(TransactionRestServlet):
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.room_member_handler = hs.get_room_member_handler()

@ -1075,6 +1094,8 @@ class RoomMembershipRestServlet(TransactionRestServlet):


class RoomRedactEventRestServlet(TransactionRestServlet):
    CATEGORY = "Event sending requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.event_creation_handler = hs.get_event_creation_handler()

@ -1168,6 +1189,7 @@ class RoomTypingRestServlet(RestServlet):
    PATTERNS = client_patterns(
        "/rooms/(?P<room_id>[^/]*)/typing/(?P<user_id>[^/]*)$", v1=True
    )
    CATEGORY = "The typing stream"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -1199,7 +1221,7 @@ class RoomTypingRestServlet(RestServlet):
        # Limit timeout to stop people from setting silly typing timeouts.
        timeout = min(content.get("timeout", 30000), 120000)

        # Defer getting the typing handler since it will raise on workers.
        # Defer getting the typing handler since it will raise on WORKER_PATTERNS.
        typing_handler = self.hs.get_typing_writer_handler()

        try:

@ -1228,6 +1250,7 @@ class RoomAliasListServlet(RestServlet):
            r"/rooms/(?P<room_id>[^/]*)/aliases"
        ),
    ] + list(client_patterns("/rooms/(?P<room_id>[^/]*)/aliases$", unstable=False))
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -1248,6 +1271,7 @@ class RoomAliasListServlet(RestServlet):

class SearchRestServlet(RestServlet):
    PATTERNS = client_patterns("/search$", v1=True)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -1267,6 +1291,7 @@ class SearchRestServlet(RestServlet):

class JoinedRoomsRestServlet(RestServlet):
    PATTERNS = client_patterns("/joined_rooms$", v1=True)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -1338,6 +1363,7 @@ class TimestampLookupRestServlet(RestServlet):
    PATTERNS = (
        re.compile("^/_matrix/client/v1/rooms/(?P<room_id>[^/]*)/timestamp_to_event$"),
    )
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -1369,6 +1395,8 @@ class TimestampLookupRestServlet(RestServlet):

class RoomHierarchyRestServlet(RestServlet):
    PATTERNS = (re.compile("^/_matrix/client/v1/rooms/(?P<room_id>[^/]*)/hierarchy$"),)
    WORKERS = PATTERNS
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -1409,6 +1437,7 @@ class RoomSummaryRestServlet(ResolveRoomIdMixin, RestServlet):
            "/rooms/(?P<room_identifier>[^/]*)/summary$"
        ),
    )
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
@ -70,6 +70,7 @@ class RoomBatchSendEventRestServlet(RestServlet):
            "/rooms/(?P<room_id>[^/]*)/batch_send$"
        ),
    )
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -37,6 +37,7 @@ class RoomKeysServlet(RestServlet):
    PATTERNS = client_patterns(
        "/room_keys/keys(/(?P<room_id>[^/]+))?(/(?P<session_id>[^/]+))?$"
    )
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -253,6 +254,7 @@ class RoomKeysServlet(RestServlet):

class RoomKeysNewVersionServlet(RestServlet):
    PATTERNS = client_patterns("/room_keys/version$")
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -328,6 +330,7 @@ class RoomKeysNewVersionServlet(RestServlet):

class RoomKeysVersionServlet(RestServlet):
    PATTERNS = client_patterns("/room_keys/version/(?P<version>[^/]+)$")
    CATEGORY = "Encryption requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -35,6 +35,7 @@ class SendToDeviceRestServlet(servlet.RestServlet):
    PATTERNS = client_patterns(
        "/sendToDevice/(?P<message_type>[^/]*)/(?P<txn_id>[^/]*)$"
    )
    CATEGORY = "The to_device stream"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -87,6 +87,7 @@ class SyncRestServlet(RestServlet):

    PATTERNS = client_patterns("/sync$")
    ALLOWED_PRESENCE = {"online", "offline", "unavailable"}
    CATEGORY = "Sync requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -37,6 +37,7 @@ class TagListServlet(RestServlet):
    PATTERNS = client_patterns(
        "/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags$"
    )
    CATEGORY = "Account data requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -64,6 +65,7 @@ class TagServlet(RestServlet):
    PATTERNS = client_patterns(
        "/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags/(?P<tag>[^/]*)"
    )
    CATEGORY = "Account data requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -31,6 +31,7 @@ logger = logging.getLogger(__name__)

class UserDirectorySearchRestServlet(RestServlet):
    PATTERNS = client_patterns("/user_directory/search$")
    CATEGORY = "User directory search requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -34,6 +34,7 @@ logger = logging.getLogger(__name__)

class VersionsRestServlet(RestServlet):
    PATTERNS = [re.compile("^/_matrix/client/versions$")]
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()

@ -29,6 +29,7 @@ if TYPE_CHECKING:

class VoipRestServlet(RestServlet):
    PATTERNS = client_patterns("/voip/turnServer$", v1=True)
    CATEGORY = "Client API requests"

    def __init__(self, hs: "HomeServer"):
        super().__init__()
Some files were not shown because too many files have changed in this diff.