Compare commits


15 Commits

Author SHA1 Message Date
Tulir Asokan
1e91ea1f39 Disable event creation concurrency within rooms 2023-02-15 23:26:02 +02:00
Tulir Asokan
393cdd4c97 Remove unnecessary pusher URL validation 2023-02-15 23:26:02 +02:00
Tulir Asokan
3c48ee761d Allow specific users to use timestamp massaging without being appservices 2023-02-15 23:26:02 +02:00
Tulir Asokan
de7223afeb Allow custom content in read receipts 2023-02-15 23:26:02 +02:00
Tulir Asokan
de58f07338 Add support for batch sending new events 2023-02-15 23:26:02 +02:00
Tulir Asokan
bad102a762 Allow appservices to batch send as any local user 2023-02-15 23:26:02 +02:00
Tulir Asokan
9d230e1f59 Allow unhiding events that the C-S API filters away by default 2023-02-15 23:26:02 +02:00
Tulir Asokan
0febcc4664 Allow bypassing unnecessary validation in C-S API 2023-02-15 23:26:02 +02:00
Tulir Asokan
14fe6acb3b Set immutable cache-control header for media downloads 2023-02-15 23:26:02 +02:00
Tulir Asokan
ca4efe51d4 Thumbnail webp images as webp to avoid losing transparency 2023-02-15 23:26:02 +02:00
Tulir Asokan
bdc4beffdb Allow registering invalid user IDs with admin API 2023-02-15 23:26:02 +02:00
Tulir Asokan
a0d9756c6f Allow specifying room ID when creating room 2023-02-15 23:26:02 +02:00
Tulir Asokan
d0a50be89e Fix default power level for room creator 2023-02-15 23:26:02 +02:00
Tulir Asokan
bbbd39b46d Add meow readme and config extension 2023-02-15 23:26:02 +02:00
Tulir Asokan
3aa77541b6 Add meow dockerfile
N.B. requires requirements.txt to be generated in repo root beforehand
2023-02-15 23:26:02 +02:00
344 changed files with 4475 additions and 8421 deletions

View File

@ -109,26 +109,11 @@ sytest_tests = [
"postgres": "multi-postgres",
"workers": "workers",
},
{
"sytest-tag": "focal",
"postgres": "multi-postgres",
"workers": "workers",
"reactor": "asyncio",
},
]
if not IS_PR:
sytest_tests.extend(
[
{
"sytest-tag": "focal",
"reactor": "asyncio",
},
{
"sytest-tag": "focal",
"postgres": "postgres",
"reactor": "asyncio",
},
{
"sytest-tag": "testing",
"postgres": "postgres",

View File

@ -21,8 +21,4 @@ aff1eb7c671b0a3813407321d2702ec46c71fa56
0a00b7ff14890987f09112a2ae696c61001e6cf1
# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
# Update black to 23.1.0 (#15103)
9bb2eac71962970d02842bca441f4bcdbbf93a11
c4268e3da64f1abb5b31deaeb5769adb6510c0a7

View File

@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@5e780fc7bbd0cac69fc73271ed86edf5dcb72d67 # v2.26.0
uses: dawidd6/action-download-artifact@bd10f381a96414ce2b13a11bfa89902ba7cea07f # v2.24.3
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}

View File

@ -12,7 +12,7 @@ jobs:
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@ -39,7 +39,7 @@ jobs:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0

View File

@ -27,7 +27,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: stable
- uses: Swatinem/rust-cache@v2
@ -61,7 +61,7 @@ jobs:
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: stable
- uses: Swatinem/rust-cache@v2
@ -134,7 +134,7 @@ jobs:
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: stable
- uses: Swatinem/rust-cache@v2

View File

@ -1,24 +0,0 @@
on:
push:
branches: ["develop", "release-*"]
paths:
- poetry.lock
pull_request:
paths:
- poetry.lock
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
check-sdists:
name: "Check locked dependencies have sdists"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: pip install tomli
- run: ./scripts-dev/check_locked_deps_have_sdists.py

View File

@ -48,7 +48,7 @@ jobs:
with:
ref: master
- name: Login to registry
uses: docker/login-action@v2
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}

View File

@ -112,7 +112,7 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: 1.58.1
components: clippy
@ -134,7 +134,7 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: nightly-2022-12-01
components: clippy
@ -154,10 +154,9 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
# We use nightly so that it correctly groups together imports
toolchain: nightly-2022-12-01
toolchain: 1.58.1
components: rustfmt
- uses: Swatinem/rust-cache@v2
@ -222,7 +221,7 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: 1.58.1
- uses: Swatinem/rust-cache@v2
@ -267,7 +266,7 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: 1.58.1
- uses: Swatinem/rust-cache@v2
@ -369,7 +368,6 @@ jobs:
SYTEST_BRANCH: ${{ github.head_ref }}
POSTGRES: ${{ matrix.job.postgres && 1}}
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }}
WORKERS: ${{ matrix.job.workers && 1 }}
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
TOP: ${{ github.workspace }}
@ -388,7 +386,7 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: 1.58.1
- uses: Swatinem/rust-cache@v2
@ -533,7 +531,7 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: 1.58.1
- uses: Swatinem/rust-cache@v2
@ -564,7 +562,7 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: 1.58.1
- uses: Swatinem/rust-cache@v2
@ -587,7 +585,7 @@ jobs:
# There don't seem to be versioned releases of this action per se: for each rust
# version there is a branch which gets constantly rebased on top of master.
# We pin to a specific commit for paranoia's sake.
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: nightly-2022-12-01
- uses: Swatinem/rust-cache@v2

View File

@ -6,7 +6,7 @@ on:
jobs:
triage:
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
with:
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
content_id: ${{ github.event.issue.node_id }}

View File

@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: stable
- uses: Swatinem/rust-cache@v2
@ -43,7 +43,7 @@ jobs:
- run: sudo apt-get -qq install xmlsec1
- name: Install Rust
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: stable
- uses: Swatinem/rust-cache@v2
@ -82,7 +82,7 @@ jobs:
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
uses: dtolnay/rust-toolchain@9cd00a88a73addc8617065438eff914dd08d0955
with:
toolchain: stable
- uses: Swatinem/rust-cache@v2

View File

@ -1,197 +1,3 @@
Synapse 1.79.0rc1 (2023-03-07)
==============================
Features
--------
- Add two new Third Party Rules module API callbacks: [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier) and [`on_remove_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_remove_user_third_party_identifier). ([\#15044](https://github.com/matrix-org/synapse/issues/15044))
- Experimental support for [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967) to not require UIA for setting up cross-signing on first use. ([\#15077](https://github.com/matrix-org/synapse/issues/15077))
- Add media information to the command line [user data export tool](https://matrix-org.github.io/synapse/v1.79/usage/administration/admin_faq.html#how-can-i-export-user-data). ([\#15107](https://github.com/matrix-org/synapse/issues/15107))
- Add an [admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html) to delete a [specific event report](https://spec.matrix.org/v1.6/client-server-api/#reporting-content). ([\#15116](https://github.com/matrix-org/synapse/issues/15116))
- Add support for knocking to workers. ([\#15133](https://github.com/matrix-org/synapse/issues/15133))
- Allow use of the `/filter` Client-Server APIs on workers. ([\#15134](https://github.com/matrix-org/synapse/issues/15134))
- Update support for [MSC2677](https://github.com/matrix-org/matrix-spec-proposals/pull/2677): remove support for server-side aggregation of reactions. ([\#15172](https://github.com/matrix-org/synapse/issues/15172))
- Stabilise support for [MSC3758](https://github.com/matrix-org/matrix-spec-proposals/pull/3758): `event_property_is` push condition. ([\#15185](https://github.com/matrix-org/synapse/issues/15185))
Bugfixes
--------
- Fix a bug introduced in Synapse 1.75 that caused experimental support for deleting account data to raise an internal server error while using an account data writer worker. ([\#14869](https://github.com/matrix-org/synapse/issues/14869))
- Fix a long-standing bug where Synapse handled an unspecced field on push rules. ([\#15088](https://github.com/matrix-org/synapse/issues/15088))
- Fix a long-standing bug where a URL preview would break if the discovered oEmbed failed to download. ([\#15092](https://github.com/matrix-org/synapse/issues/15092))
- Fix a long-standing bug where an initial sync would not respond to changes to the list of ignored users if there was an initial sync cached. ([\#15163](https://github.com/matrix-org/synapse/issues/15163))
- Add the `transaction_id` in the events included in many endpoints' responses. ([\#15174](https://github.com/matrix-org/synapse/issues/15174))
- Fix a bug introduced in Synapse 1.78.0 where requests to claim dehydrated devices would fail with a `405` error. ([\#15180](https://github.com/matrix-org/synapse/issues/15180))
- Stop applying edits when bundling aggregations, per [MSC3925](https://github.com/matrix-org/matrix-spec-proposals/pull/3925). ([\#15193](https://github.com/matrix-org/synapse/issues/15193))
- Fix a long-standing bug where the user directory search was not case-insensitive for accented characters. ([\#15143](https://github.com/matrix-org/synapse/issues/15143))
Updates to the Docker image
---------------------------
- Improve startup logging in the with-workers Docker image. ([\#15186](https://github.com/matrix-org/synapse/issues/15186))
Improved Documentation
----------------------
- Document how to use caches in a module. ([\#14026](https://github.com/matrix-org/synapse/issues/14026))
- Clarify which worker processes the ThirdPartyRules' [`on_new_event`](https://matrix-org.github.io/synapse/v1.78/modules/third_party_rules_callbacks.html#on_new_event) module API callback runs on. ([\#15071](https://github.com/matrix-org/synapse/issues/15071))
- Document using [Shibboleth](https://www.shibboleth.net/) as an OpenID Provider. ([\#15112](https://github.com/matrix-org/synapse/issues/15112))
- Correct reference to `federation_verify_certificates` in configuration documentation. ([\#15139](https://github.com/matrix-org/synapse/issues/15139))
- Correct small documentation errors in some `MatrixFederationHttpClient` methods. ([\#15148](https://github.com/matrix-org/synapse/issues/15148))
- Correct the description of the behavior of `registration_shared_secret_path` on startup. ([\#15168](https://github.com/matrix-org/synapse/issues/15168))
Deprecations and Removals
-------------------------
- Deprecate the `on_threepid_bind` module callback, to be replaced by [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier). See [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.79/docs/upgrade.md#upgrading-to-v1790). ([\#15044](https://github.com/matrix-org/synapse/issues/15044))
- Remove the unspecced `room_alias` field from the [`/createRoom`](https://spec.matrix.org/v1.6/client-server-api/#post_matrixclientv3createroom) response. ([\#15093](https://github.com/matrix-org/synapse/issues/15093))
- Remove the unspecced `PUT` on the `/knock/{roomIdOrAlias}` endpoint. ([\#15189](https://github.com/matrix-org/synapse/issues/15189))
- Remove the undocumented and unspecced `type` parameter to the `/thumbnail` endpoint. ([\#15137](https://github.com/matrix-org/synapse/issues/15137))
- Remove unspecced and buggy `PUT` method on the unstable `/rooms/<room_id>/batch_send` endpoint. ([\#15199](https://github.com/matrix-org/synapse/issues/15199))
Internal Changes
----------------
- Run the integration test suites with the asyncio reactor enabled in CI. ([\#14101](https://github.com/matrix-org/synapse/issues/14101))
- Batch up storing state groups when creating a new room. ([\#14918](https://github.com/matrix-org/synapse/issues/14918))
- Update [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952) support based on changes to the MSC. ([\#15051](https://github.com/matrix-org/synapse/issues/15051))
- Refactor writing json data in `FileExfiltrationWriter`. ([\#15095](https://github.com/matrix-org/synapse/issues/15095))
- Tighten the login ratelimit defaults. ([\#15135](https://github.com/matrix-org/synapse/issues/15135))
- Fix a typo in an experimental config setting. ([\#15138](https://github.com/matrix-org/synapse/issues/15138))
- Refactor the media modules. ([\#15146](https://github.com/matrix-org/synapse/issues/15146), [\#15175](https://github.com/matrix-org/synapse/issues/15175))
- Improve type hints. ([\#15164](https://github.com/matrix-org/synapse/issues/15164))
- Move `get_event_report` and `get_event_reports_paginate` from `RoomStore` to `RoomWorkerStore`. ([\#15165](https://github.com/matrix-org/synapse/issues/15165))
- Remove dangling reference to being a reference implementation in docstring. ([\#15167](https://github.com/matrix-org/synapse/issues/15167))
- Add an option to force a rebuild of the "editable" complement image. ([\#15184](https://github.com/matrix-org/synapse/issues/15184))
- Use nightly rustfmt in CI. ([\#15188](https://github.com/matrix-org/synapse/issues/15188))
- Add a `get_next_txn` method to `StreamIdGenerator` to match `MultiWriterIdGenerator`. ([\#15191](https://github.com/matrix-org/synapse/issues/15191))
- Combine `AbstractStreamIdTracker` and `AbstractStreamIdGenerator`. ([\#15192](https://github.com/matrix-org/synapse/issues/15192))
- Automatically fix errors with `ruff`. ([\#15194](https://github.com/matrix-org/synapse/issues/15194))
- Refactor database transaction for query users' devices to reduce database pool contention. ([\#15215](https://github.com/matrix-org/synapse/issues/15215))
- Correct `test_icu_word_boundary_punctuation` so that it passes with the ICU versions available in Alpine and macOS. ([\#15177](https://github.com/matrix-org/synapse/issues/15177))
<details><summary>Locked dependency updates</summary>
- Bump actions/checkout from 2 to 3. ([\#15155](https://github.com/matrix-org/synapse/issues/15155))
- Bump black from 22.12.0 to 23.1.0. ([\#15103](https://github.com/matrix-org/synapse/issues/15103))
- Bump dawidd6/action-download-artifact from 2.25.0 to 2.26.0. ([\#15152](https://github.com/matrix-org/synapse/issues/15152))
- Bump docker/login-action from 1 to 2. ([\#15154](https://github.com/matrix-org/synapse/issues/15154))
- Bump matrix-org/backend-meta from 1 to 2. ([\#15156](https://github.com/matrix-org/synapse/issues/15156))
- Bump ruff from 0.0.237 to 0.0.252. ([\#15159](https://github.com/matrix-org/synapse/issues/15159))
- Bump serde_json from 1.0.93 to 1.0.94. ([\#15214](https://github.com/matrix-org/synapse/issues/15214))
- Bump types-commonmark from 0.9.2.1 to 0.9.2.2. ([\#15209](https://github.com/matrix-org/synapse/issues/15209))
- Bump types-opentracing from 2.4.10.1 to 2.4.10.3. ([\#15158](https://github.com/matrix-org/synapse/issues/15158))
- Bump types-pillow from 9.4.0.13 to 9.4.0.17. ([\#15211](https://github.com/matrix-org/synapse/issues/15211))
- Bump types-psycopg2 from 2.9.21.4 to 2.9.21.8. ([\#15210](https://github.com/matrix-org/synapse/issues/15210))
- Bump types-pyopenssl from 22.1.0.2 to 23.0.0.4. ([\#15213](https://github.com/matrix-org/synapse/issues/15213))
- Bump types-setuptools from 67.3.0.1 to 67.4.0.3. ([\#15160](https://github.com/matrix-org/synapse/issues/15160))
- Bump types-setuptools from 67.4.0.3 to 67.5.0.0. ([\#15212](https://github.com/matrix-org/synapse/issues/15212))
- Bump typing-extensions from 4.4.0 to 4.5.0. ([\#15157](https://github.com/matrix-org/synapse/issues/15157))
</details>
Synapse 1.78.0 (2023-02-28)
===========================
Bugfixes
--------
- Fix a bug introduced in Synapse 1.76 where 5s delays would occasionally occur in deployments using workers. ([\#15150](https://github.com/matrix-org/synapse/issues/15150))
Synapse 1.78.0rc1 (2023-02-21)
==============================
Features
--------
- Implement the experimental `exact_event_match` push rule condition from [MSC3758](https://github.com/matrix-org/matrix-spec-proposals/pull/3758). ([\#14964](https://github.com/matrix-org/synapse/issues/14964))
- Add account data to the command line [user data export tool](https://matrix-org.github.io/synapse/v1.78/usage/administration/admin_faq.html#how-can-i-export-user-data). ([\#14969](https://github.com/matrix-org/synapse/issues/14969))
- Implement [MSC3873](https://github.com/matrix-org/matrix-spec-proposals/pull/3873) to disambiguate push rule keys with dots in them. ([\#15004](https://github.com/matrix-org/synapse/issues/15004))
- Allow Synapse to use a specific Redis [logical database](https://redis.io/commands/select/) in worker-mode deployments. ([\#15034](https://github.com/matrix-org/synapse/issues/15034))
- Tag opentracing spans for federation requests with the name of the worker serving the request. ([\#15042](https://github.com/matrix-org/synapse/issues/15042))
- Implement the experimental `exact_event_property_contains` push rule condition from [MSC3966](https://github.com/matrix-org/matrix-spec-proposals/pull/3966). ([\#15045](https://github.com/matrix-org/synapse/issues/15045))
- Remove spurious `dont_notify` action from the defaults for the `.m.rule.reaction` pushrule. ([\#15073](https://github.com/matrix-org/synapse/issues/15073))
- Update the error code returned when user sends a duplicate annotation. ([\#15075](https://github.com/matrix-org/synapse/issues/15075))
Bugfixes
--------
- Prevent clients from reporting nonexistent events. ([\#13779](https://github.com/matrix-org/synapse/issues/13779))
- Return spec-compliant JSON errors when unknown endpoints are requested. ([\#14605](https://github.com/matrix-org/synapse/issues/14605))
- Fix a long-standing bug where the room aliases returned could be corrupted. ([\#15038](https://github.com/matrix-org/synapse/issues/15038))
- Fix a bug introduced in Synapse 1.76.0 where partially-joined rooms could not be deleted using the [purge room API](https://matrix-org.github.io/synapse/latest/admin_api/rooms.html#delete-room-api). ([\#15068](https://github.com/matrix-org/synapse/issues/15068))
- Fix a long-standing bug where federated joins would fail if the first server in the list of servers to try is not in the room. ([\#15074](https://github.com/matrix-org/synapse/issues/15074))
- Fix a bug introduced in Synapse v1.74.0 where searching with colons when using ICU for search term tokenisation would fail with an error. ([\#15079](https://github.com/matrix-org/synapse/issues/15079))
- Reduce the likelihood of a rare race condition where rejoining a restricted room over federation would fail. ([\#15080](https://github.com/matrix-org/synapse/issues/15080))
- Fix a bug introduced in Synapse 1.76 where workers would fail to start if the `health` listener was configured. ([\#15096](https://github.com/matrix-org/synapse/issues/15096))
- Fix a bug introduced in Synapse 1.75 where the [portdb script](https://matrix-org.github.io/synapse/release-v1.78/postgres.html#porting-from-sqlite) would fail to run after a room had been faster-joined. ([\#15108](https://github.com/matrix-org/synapse/issues/15108))
Improved Documentation
----------------------
- Document how to start Synapse with Poetry. Contributed by @thezaidbintariq. ([\#14892](https://github.com/matrix-org/synapse/issues/14892), [\#15022](https://github.com/matrix-org/synapse/issues/15022))
- Update delegation documentation to clarify that SRV DNS delegation does not eliminate all needs to serve files from .well-known locations. Contributed by @williamkray. ([\#14959](https://github.com/matrix-org/synapse/issues/14959))
- Fix a mistake in registration_shared_secret_path docs. ([\#15078](https://github.com/matrix-org/synapse/issues/15078))
- Refer to a more recent blog post on the [Database Maintenance Tools](https://matrix-org.github.io/synapse/latest/usage/administration/database_maintenance_tools.html) page. Contributed by @jahway603. ([\#15083](https://github.com/matrix-org/synapse/issues/15083))
Internal Changes
----------------
- Re-type hint some collections as read-only. ([\#13755](https://github.com/matrix-org/synapse/issues/13755))
- Faster joins: don't stall when another user joins during a partial-state room resync. ([\#14606](https://github.com/matrix-org/synapse/issues/14606))
- Add a class `UnpersistedEventContext` to allow for the batching up of storing state groups. ([\#14675](https://github.com/matrix-org/synapse/issues/14675))
- Add a check to ensure that locked dependencies have source distributions available. ([\#14742](https://github.com/matrix-org/synapse/issues/14742))
- Tweak comment on `_is_local_room_accessible` as part of room visibility in `/hierarchy` to clarify the condition for a room being visible. ([\#14834](https://github.com/matrix-org/synapse/issues/14834))
- Prevent `WARNING: there is already a transaction in progress` lines appearing in PostgreSQL's logs on some occasions. ([\#14840](https://github.com/matrix-org/synapse/issues/14840))
- Use `StrCollection` to avoid potential bugs with `Collection[str]`. ([\#14929](https://github.com/matrix-org/synapse/issues/14929))
- Improve performance of `/sync` in a few situations. ([\#14973](https://github.com/matrix-org/synapse/issues/14973))
- Limit concurrent event creation for a room to avoid state resolution when sending bursts of events to a local room. ([\#14977](https://github.com/matrix-org/synapse/issues/14977))
- Skip calculating unread push actions in /sync when enable_push is false. ([\#14980](https://github.com/matrix-org/synapse/issues/14980))
- Add a schema dump symlinks inside `contrib`, to make it easier for IDEs to interrogate Synapse's database schema. ([\#14982](https://github.com/matrix-org/synapse/issues/14982))
- Improve type hints. ([\#15008](https://github.com/matrix-org/synapse/issues/15008), [\#15026](https://github.com/matrix-org/synapse/issues/15026), [\#15027](https://github.com/matrix-org/synapse/issues/15027), [\#15028](https://github.com/matrix-org/synapse/issues/15028), [\#15031](https://github.com/matrix-org/synapse/issues/15031), [\#15035](https://github.com/matrix-org/synapse/issues/15035), [\#15052](https://github.com/matrix-org/synapse/issues/15052), [\#15072](https://github.com/matrix-org/synapse/issues/15072), [\#15084](https://github.com/matrix-org/synapse/issues/15084))
- Update [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952) support based on changes to the MSC. ([\#15037](https://github.com/matrix-org/synapse/issues/15037))
- Avoid mutating a cached value in `get_user_devices_from_cache`. ([\#15040](https://github.com/matrix-org/synapse/issues/15040))
- Fix a rare exception in logs on start up. ([\#15041](https://github.com/matrix-org/synapse/issues/15041))
- Update pyo3-log to v0.8.1. ([\#15043](https://github.com/matrix-org/synapse/issues/15043))
- Avoid mutating cached values in `_generate_sync_entry_for_account_data`. ([\#15047](https://github.com/matrix-org/synapse/issues/15047))
- Refactor arguments of `try_unbind_threepid` and `_try_unbind_threepid_with_id_server` to not use dictionaries. ([\#15053](https://github.com/matrix-org/synapse/issues/15053))
- Merge debug logging from the hotfixes branch. ([\#15054](https://github.com/matrix-org/synapse/issues/15054))
- Faster joins: omit device list updates originating from partial state rooms in /sync responses without lazy loading of members enabled. ([\#15069](https://github.com/matrix-org/synapse/issues/15069))
- Fix clashing database transaction name. ([\#15070](https://github.com/matrix-org/synapse/issues/15070))
- Upper-bound frozendict dependency. This works around us being unable to test installing our wheels against Python 3.11 in CI. ([\#15114](https://github.com/matrix-org/synapse/issues/15114))
- Tweak logging for when a worker waits for its view of a replication stream to catch up. ([\#15120](https://github.com/matrix-org/synapse/issues/15120))
<details><summary>Locked dependency updates</summary>
- Bump bleach from 5.0.1 to 6.0.0. ([\#15059](https://github.com/matrix-org/synapse/issues/15059))
- Bump cryptography from 38.0.4 to 39.0.1. ([\#15020](https://github.com/matrix-org/synapse/issues/15020))
- Bump ruff version from 0.0.230 to 0.0.237. ([\#15033](https://github.com/matrix-org/synapse/issues/15033))
- Bump dtolnay/rust-toolchain from 9cd00a88a73addc8617065438eff914dd08d0955 to 25dc93b901a87e864900a8aec6c12e9aa794c0c3. ([\#15060](https://github.com/matrix-org/synapse/issues/15060))
- Bump systemd-python from 234 to 235. ([\#15061](https://github.com/matrix-org/synapse/issues/15061))
- Bump serde_json from 1.0.92 to 1.0.93. ([\#15062](https://github.com/matrix-org/synapse/issues/15062))
- Bump types-requests from 2.28.11.8 to 2.28.11.12. ([\#15063](https://github.com/matrix-org/synapse/issues/15063))
- Bump types-pillow from 9.4.0.5 to 9.4.0.10. ([\#15064](https://github.com/matrix-org/synapse/issues/15064))
- Bump sentry-sdk from 1.13.0 to 1.15.0. ([\#15065](https://github.com/matrix-org/synapse/issues/15065))
- Bump types-jsonschema from 4.17.0.3 to 4.17.0.5. ([\#15099](https://github.com/matrix-org/synapse/issues/15099))
- Bump types-bleach from 5.0.3.1 to 6.0.0.0. ([\#15100](https://github.com/matrix-org/synapse/issues/15100))
- Bump dtolnay/rust-toolchain from 25dc93b901a87e864900a8aec6c12e9aa794c0c3 to e12eda571dc9a5ee5d58eecf4738ec291c66f295. ([\#15101](https://github.com/matrix-org/synapse/issues/15101))
- Bump dawidd6/action-download-artifact from 2.24.3 to 2.25.0. ([\#15102](https://github.com/matrix-org/synapse/issues/15102))
- Bump types-pillow from 9.4.0.10 to 9.4.0.13. ([\#15104](https://github.com/matrix-org/synapse/issues/15104))
- Bump types-setuptools from 67.1.0.0 to 67.3.0.1. ([\#15105](https://github.com/matrix-org/synapse/issues/15105))
</details>
Synapse 1.77.0 (2023-02-14)
===========================
@ -257,7 +63,7 @@ Internal Changes
- Preparatory work for adding a denormalised event stream ordering column in the future. Contributed by Nick @ Beeper (@fizzadar). ([\#14979](https://github.com/matrix-org/synapse/issues/14979), [9cd7610](https://github.com/matrix-org/synapse/commit/9cd7610f86ab5051c9365dd38d1eec405a5f8ca6), [f10caa7](https://github.com/matrix-org/synapse/commit/f10caa73eee0caa91cf373966104d1ededae2aee); see [\#15014](https://github.com/matrix-org/synapse/issues/15014))
- Add tests for `_flatten_dict`. ([\#14981](https://github.com/matrix-org/synapse/issues/14981), [\#15002](https://github.com/matrix-org/synapse/issues/15002))
<details><summary>Locked dependency updates</summary>
<details><summary>Dependabot updates</summary>
- Bump dtolnay/rust-toolchain from e645b0cf01249a964ec099494d38d2da0f0b349f to 9cd00a88a73addc8617065438eff914dd08d0955. ([\#14968](https://github.com/matrix-org/synapse/issues/14968))
- Bump docker/build-push-action from 3 to 4. ([\#14952](https://github.com/matrix-org/synapse/issues/14952))

Cargo.lock (generated): 8 lines changed
View File

@ -232,9 +232,9 @@ dependencies = [
[[package]]
name = "pyo3-log"
version = "0.8.1"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9c8b57fe71fb5dcf38970ebedc2b1531cf1c14b1b9b4c560a182a57e115575c"
checksum = "e5695ccff5060c13ca1751cf8c857a12da9b0bf0378cb071c5e0326f7c7e4c1b"
dependencies = [
"arc-swap",
"log",
@ -343,9 +343,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.94"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea"
checksum = "7434af0dc1cbd59268aa98b4c22c131c0584d2232f6fb166efb993e2832e896a"
dependencies = [
"itoa",
"ryu",

View File

@ -42,10 +42,8 @@ after the full release.
* Allowed sending custom data with read receipts.
You can view the full list of changes on the [meow-patchset] branch.
Additionally, historical patch sets are saved as `meow-patchset-vX` [tags].
[meow-patchset]: https://mau.dev/maunium/synapse/-/compare/patchset-base...meow-patchset
[tags]: https://mau.dev/maunium/synapse/-/tags?search=meow-patchset&sort=updated_desc
## Configuration
Generating a new config will include the `meow` section, but this is here for

View File

@ -1,28 +0,0 @@
# Schema symlinks
This directory contains symlinks to the latest dump of the postgres full schema. This is useful to have, as it allows IDEs to understand our schema and provide autocomplete, linters, inspections, etc.
In particular, the DataGrip functionality in IntelliJ's products seems to only consider files called `*.sql` when defining a schema from DDL; `*.sql.postgres` will be ignored. To get around this we symlink those files to ones ending in `.sql`. We've chosen to ignore the `.sql.sqlite` schema dumps here, as they're not intended for production use (and are much quicker to test against).
## Example
![](datagrip-aware-of-schema.png)
## Caveats
- Doesn't include temporary tables created ad-hoc by Synapse.
- Postgres only. IDEs will likely be confused by SQLite-specific queries.
- Will not include migrations created after the latest schema dump.
- Symlinks might confuse checkouts on Windows systems.
## Instructions
### Jetbrains IDEs with DataGrip plugin
- View -> Tool Windows -> Database
- `+` Icon -> DDL Data Source
- Pick a name, e.g. `Synapse schema dump`
- Under sources, click `+`.
- Add an entry with Path pointing to this directory, and dialect set to PostgreSQL.
- OK, and OK.
- IDE should now be aware of the schema.
- Try control-clicking on a table name in a bit of SQL e.g. in `_get_forgotten_rooms_for_user_txn`.

View File

@ -1 +0,0 @@
../../synapse/storage/schema/common/full_schemas/72/full.sql.postgres

Binary file not shown (image removed; previously 13 KiB).

View File

@ -1 +0,0 @@
../../synapse/storage/schema/main/full_schemas/72/full.sql.postgres

View File

@ -1 +0,0 @@
../../synapse/storage/schema/common/schema_version.sql

View File

@ -1 +0,0 @@
../../synapse/storage/schema/state/full_schemas/72/full.sql.postgres

View File

@ -68,7 +68,6 @@ redis:
enabled: true
host: redis
port: 6379
# dbid: <redis_logical_db_id>
# password: <secret_password>
```

debian/changelog (vendored): 19 lines changed
View File

@ -1,22 +1,3 @@
matrix-synapse-py3 (1.79.0~rc1) stable; urgency=medium
* New Synapse release 1.79.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Mar 2023 12:03:49 +0000
matrix-synapse-py3 (1.78.0) stable; urgency=medium
* New Synapse release 1.78.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Feb 2023 08:56:03 -0800
matrix-synapse-py3 (1.78.0~rc1) stable; urgency=medium
* Add `matrix-org-archive-keyring` package as recommended.
* New Synapse release 1.78.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Feb 2023 14:29:19 +0000
matrix-synapse-py3 (1.77.0) stable; urgency=medium
* New Synapse release 1.77.0.

debian/control (vendored): 1 line changed
View File

@ -37,7 +37,6 @@ Depends:
# so we put perl:Depends in Suggests rather than Depends.
Recommends:
${shlibs1:Recommends},
matrix-org-archive-keyring,
Suggests:
sqlite3,
${perl:Depends},

View File

@ -142,7 +142,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases",
"^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/search",
"^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
@ -205,7 +204,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/send",
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
"^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
"^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
"^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
],
@ -676,21 +674,17 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
if not os.path.exists(config_path):
log("Generating base homeserver config")
generate_base_homeserver_config()
else:
log("Base homeserver config exists—not regenerating")
# This script may be run multiple times (mostly by Complement, see note at top of file).
# Don't re-configure workers in this instance.
mark_filepath = "/conf/workers_have_been_configured"
if not os.path.exists(mark_filepath):
# Always regenerate all other config files
log("Generating worker config files")
generate_worker_files(environ, config_path, data_dir)
# Mark workers as being configured
with open(mark_filepath, "w") as f:
f.write("")
else:
log("Worker config exists—not regenerating")
# Lifted right out of start.py
jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)

View File

@ -169,17 +169,3 @@ The following fields are returned in the JSON response body:
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
have a canonical alias set.
* `event_json`: object - Details of the original event that was reported.
# Delete a specific event report
This API deletes a specific event report. If the request is successful, the response body
will be an empty JSON object.
The API is:
```
DELETE /_synapse/admin/v1/event_reports/<report_id>
```
**URL parameters:**
* `report_id`: string - The ID of the event report.
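For illustration only, here is a minimal sketch of calling this endpoint from Python with the `requests` library (assumed installed). The endpoint was added in Synapse 1.79.0, so it is not present on older bases; the homeserver URL, admin access token, and report ID below are placeholders:
```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_admin_token"            # placeholder: access token of a server admin
REPORT_ID = "6"                            # placeholder: ID of the report to delete

# A successful call returns an empty JSON object ({}).
resp = requests.delete(
    f"{HOMESERVER}/_synapse/admin/v1/event_reports/{REPORT_ID}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
resp.raise_for_status()
print(resp.json())
```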

View File

@ -235,14 +235,6 @@ The following fields are returned in the JSON response body:
Request:
```
POST /_synapse/admin/v1/media/delete?before_ts=<before_ts>
{}
```
*Deprecated in Synapse v1.78.0:* This API is available at the deprecated endpoint:
```
POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts>
@ -251,7 +243,7 @@ POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts>
URL Parameters
* `server_name`: string - The name of your local server (e.g `matrix.org`). *Deprecated in Synapse v1.78.0.*
* `server_name`: string - The name of your local server (e.g `matrix.org`).
* `before_ts`: string representing a positive integer - Unix timestamp in milliseconds.
Files that were last used before this timestamp will be deleted. It is the timestamp of
last access, not the timestamp when the file was created.
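As a hedged sketch (not part of this changeset), the same request can be made from Python with `requests`. The homeserver URL, server name, and token are placeholders, and `before_ts` here keeps any media accessed within the last 30 days:
```python
import time
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
SERVER_NAME = "matrix.example.com"         # placeholder: your local server_name
ADMIN_TOKEN = "syt_admin_token"            # placeholder admin access token

# Unix timestamp in milliseconds: delete media last accessed more than 30 days ago.
before_ts = int((time.time() - 30 * 24 * 3600) * 1000)

# On Synapse 1.78+ the same request can instead be sent to
# /_synapse/admin/v1/media/delete, without the server_name path component.
resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/media/{SERVER_NAME}/delete",
    params={"before_ts": before_ts},
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={},  # empty JSON body, as shown in the docs above
)
resp.raise_for_status()
print(resp.json())
```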

View File

@ -73,15 +73,6 @@ It is also possible to do delegation using a SRV DNS record. However, that is ge
not recommended, as it can be difficult to configure the TLS certificates correctly in
this case, and it offers little advantage over `.well-known` delegation.
Please keep in mind that server delegation is a function of server-server communication,
and as such using SRV DNS records will not cover use cases involving client-server comms.
This means setting global client settings (such as a Jitsi endpoint, or disabling
creating new rooms as encrypted by default, etc) will still require that you serve a file
from the `https://<server_name>/.well-known/` endpoints defined in the spec! If you are
considering using SRV DNS delegation to avoid serving files from this endpoint, consider
the impact that you will not be able to change those client-based default values globally,
and will be relegated to the featureset of the configuration of each individual client.
However, if you really need it, you can find some documentation on what such a
record should look like and how Synapse will use it in [the Matrix
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names).

View File

@ -78,19 +78,6 @@ poetry install --extras all
This will install the runtime and developer dependencies for the project.
## Running Synapse via poetry
To start a local instance of Synapse in the locked poetry environment, create a config file:
```sh
cp docs/sample_config.yaml homeserver.yaml
```
Now edit homeserver.yaml, and run Synapse with:
```sh
poetry run python -m synapse.app.homeserver -c homeserver.yaml
```
# 5. Get in touch.

View File

@ -307,8 +307,8 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a
```python
async def check_media_file_for_spam(
file_wrapper: "synapse.media.media_storage.ReadableFileWrapper",
file_info: "synapse.media._base.FileInfo",
file_wrapper: "synapse.rest.media.v1.media_storage.ReadableFileWrapper",
file_info: "synapse.rest.media.v1._base.FileInfo",
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
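A minimal, hypothetical module wiring up this callback, assuming the pre-1.79 import paths used here and that `FileInfo` exposes a `server_name` attribute (`None` for local uploads); the blocked server name is a placeholder, not anything from this changeset:
```python
from typing import Any, Union

from synapse.module_api import NOT_SPAM, ModuleApi, errors

BLOCKED_MEDIA_ORIGINS = {"spam.example.org"}  # placeholder blocklist


class MediaOriginBlocker:
    def __init__(self, config: Any, api: ModuleApi):
        self.api = api
        # Register the callback via the spam-checker registration API.
        api.register_spam_checker_callbacks(
            check_media_file_for_spam=self.check_media_file_for_spam,
        )

    async def check_media_file_for_spam(
        self,
        file_wrapper: "synapse.rest.media.v1.media_storage.ReadableFileWrapper",
        file_info: "synapse.rest.media.v1._base.FileInfo",
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]:
        # Reject media that came from a blocked remote origin; allow everything else.
        # (Assumes FileInfo.server_name as in recent Synapse releases.)
        if file_info.server_name in BLOCKED_MEDIA_ORIGINS:
            return errors.Codes.FORBIDDEN
        return NOT_SPAM
```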

View File

@ -146,9 +146,6 @@ Note that this callback is called when the event has already been processed and
into the room, which means this callback cannot be used to deny persisting the event. To
deny an incoming event, see [`check_event_for_spam`](spam_checker_callbacks.md#check_event_for_spam) instead.
For any given event, this callback will be called on every worker process, even if that worker will not end up
acting on that event. This callback will not be called for events that are marked as rejected.
If multiple modules implement this callback, Synapse runs them all in order.
### `check_can_shutdown_room`
@ -254,11 +251,6 @@ If multiple modules implement this callback, Synapse runs them all in order.
_First introduced in Synapse v1.56.0_
**<span style="color:red">
This callback is deprecated in favour of the `on_add_user_third_party_identifier` callback, which
features the same functionality. The only difference is in name.
</span>**
```python
async def on_threepid_bind(user_id: str, medium: str, address: str) -> None:
```
@ -273,44 +265,6 @@ server_.
If multiple modules implement this callback, Synapse runs them all in order.
### `on_add_user_third_party_identifier`
_First introduced in Synapse v1.79.0_
```python
async def on_add_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
```
Called after successfully creating an association between a user and a third-party identifier
(email address, phone number). The module is given the Matrix ID of the user the
association is for, as well as the medium (`email` or `msisdn`) and address of the
third-party identifier (i.e. an email address).
Note that this callback is _not_ called if a user attempts to bind their third-party identifier
to an identity server (via a call to [`POST
/_matrix/client/v3/account/3pid/bind`](https://spec.matrix.org/v1.5/client-server-api/#post_matrixclientv3account3pidbind)).
If multiple modules implement this callback, Synapse runs them all in order.
### `on_remove_user_third_party_identifier`
_First introduced in Synapse v1.79.0_
```python
async def on_remove_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
```
Called after successfully removing an association between a user and a third-party identifier
(email address, phone number). The module is given the Matrix ID of the user the
association is for, as well as the medium (`email` or `msisdn`) and address of the
third-party identifier (i.e. an email address).
Note that this callback is _not_ called if a user attempts to unbind their third-party
identifier from an identity server (via a call to [`POST
/_matrix/client/v3/account/3pid/unbind`](https://spec.matrix.org/v1.5/client-server-api/#post_matrixclientv3account3pidunbind)).
If multiple modules implement this callback, Synapse runs them all in order.
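Before the fuller example below, here is a minimal sketch of a module registering both callbacks, assuming a Synapse version that provides them (v1.79.0 or later); the logging is purely illustrative:
```python
import logging
from typing import Any

from synapse.module_api import ModuleApi

logger = logging.getLogger(__name__)


class ThreepidAuditLogger:
    def __init__(self, config: Any, api: ModuleApi):
        self.api = api
        api.register_third_party_rules_callbacks(
            on_add_user_third_party_identifier=self.on_add_threepid,
            on_remove_user_third_party_identifier=self.on_remove_threepid,
        )

    async def on_add_threepid(self, user_id: str, medium: str, address: str) -> None:
        # Called after a third-party identifier is associated with a local user.
        logger.info("User %s added %s %s", user_id, medium, address)

    async def on_remove_threepid(self, user_id: str, medium: str, address: str) -> None:
        # Called after a third-party identifier is removed from a local user.
        logger.info("User %s removed %s %s", user_id, medium, address)
```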
## Example
The example below is a module that implements the third-party rules callback
@ -343,4 +297,4 @@ class EventCensorer:
)
event_dict["content"] = new_event_content
return event_dict
```
```

View File

@ -83,59 +83,3 @@ the callback name as the argument name and the function as its value. A
Callbacks for each category can be found on their respective page of the
[Synapse documentation website](https://matrix-org.github.io/synapse).
## Caching
_Added in Synapse 1.74.0._
Modules can leverage Synapse's caching tools to manage their own cached functions. This
can be helpful for modules that need to repeatedly request the same data from the database
or a remote service.
Functions that need to be wrapped with a cache need to be decorated with a `@cached()`
decorator (which can be imported from `synapse.module_api`) and registered with the
[`ModuleApi.register_cached_function`](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L888)
API when initialising the module. If the module needs to invalidate an entry in a cache,
it needs to use the [`ModuleApi.invalidate_cache`](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L904)
API, with the function to invalidate the cache of and the key(s) of the entry to
invalidate.
Below is an example of a simple module using a cached function:
```python
from typing import Any
from synapse.module_api import cached, ModuleApi
class MyModule:
def __init__(self, config: Any, api: ModuleApi):
self.api = api
# Register the cached function so Synapse knows how to correctly invalidate
# entries for it.
self.api.register_cached_function(self.get_department_for_user)
@cached()
async def get_department_for_user(self, user_id: str) -> str:
"""A function with a cache."""
# Request a department from an external service.
return await self.http_client.get_json(
"https://int.example.com/users", {"user_id": user_id)
)["department"]
async def do_something_with_users(self) -> None:
"""Calls the cached function and then invalidates an entry in its cache."""
user_id = "@alice:example.com"
# Get the user. Since get_department_for_user is wrapped with a cache,
# the return value for this user_id will be cached.
department = await self.get_department_for_user(user_id)
# Do something with `department`...
# Let's say something has changed with our user, and the entry we have for
# them in the cache is out of date, so we want to invalidate it.
await self.api.invalidate_cache(self.get_department_for_user, (user_id,))
```
See the [`cached` docstring](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L190) for more details.

View File

@ -590,47 +590,6 @@ oidc_providers:
Note that the fields `client_id` and `client_secret` are taken from the CURL response above.
### Shibboleth with OIDC Plugin
[Shibboleth](https://www.shibboleth.net/) is an open Standard IdP solution widely used by Universities.
1. Shibboleth needs the [OIDC Plugin](https://shibboleth.atlassian.net/wiki/spaces/IDPPLUGINS/pages/1376878976/OIDC+OP) installed and working correctly.
2. Create a new config on the IdP Side, ensure that the `client_id` and `client_secret`
are randomly generated data.
```json
{
"client_id": "SOME-CLIENT-ID",
"client_secret": "SOME-SUPER-SECRET-SECRET",
"response_types": ["code"],
"grant_types": ["authorization_code"],
"scope": "openid profile email",
"redirect_uris": ["https://[synapse public baseurl]/_synapse/client/oidc/callback"]
}
```
Synapse config:
```yaml
oidc_providers:
# Shibboleth IDP
#
- idp_id: shibboleth
idp_name: "Shibboleth Login"
discover: true
issuer: "https://YOUR-IDP-URL.TLD"
client_id: "YOUR_CLIENT_ID"
client_secret: "YOUR-CLIENT-SECRECT-FROM-YOUR-IDP"
scopes: ["openid", "profile", "email"]
allow_existing_users: true
user_profile_method: "userinfo_endpoint"
user_mapping_provider:
config:
subject_claim: "sub"
localpart_template: "{{ user.sub.split('@')[0] }}"
display_name_template: "{{ user.name }}"
email_template: "{{ user.email }}"
```
### Twitch
1. Setup a developer account on [Twitch](https://dev.twitch.tv/)

View File

@ -88,39 +88,6 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```
# Upgrading to v1.79.0
## The `on_threepid_bind` module callback method has been deprecated
Synapse v1.79.0 deprecates the
[`on_threepid_bind`](modules/third_party_rules_callbacks.md#on_threepid_bind)
"third-party rules" Synapse module callback method in favour of a new module method,
[`on_add_user_third_party_identifier`](modules/third_party_rules_callbacks.md#on_add_user_third_party_identifier).
`on_threepid_bind` will be removed in a future version of Synapse. You should check whether any Synapse
modules in use in your deployment are making use of `on_threepid_bind`, and update them where possible.
The arguments and functionality of the new method are the same.
The justification behind the name change is that the old method's name, `on_threepid_bind`, was
misleading. A user is considered to "bind" their third-party ID to their Matrix ID only if they
do so via an [identity server](https://spec.matrix.org/latest/identity-service-api/)
(so that users on other homeservers may find them). But this method was not called in that case -
it was only called when a user added a third-party identifier on the local homeserver.
Module developers may also be interested in the related
[`on_remove_user_third_party_identifier`](modules/third_party_rules_callbacks.md#on_remove_user_third_party_identifier)
module callback method that was also added in Synapse v1.79.0. This new method is called when a
user removes a third-party identifier from their account.
# Upgrading to v1.78.0
## Deprecate the `/_synapse/admin/v1/media/<server_name>/delete` admin API
Synapse 1.78.0 replaces the `/_synapse/admin/v1/media/<server_name>/delete`
admin API with an identical endpoint at `/_synapse/admin/v1/media/delete`. Please
update your tooling to use the new endpoint. The deprecated version will be removed
in a future release.
# Upgrading to v1.76.0
## Faster joins are enabled by default
@ -170,7 +137,6 @@ and then do `pip install matrix-synapse[user-search]` for a PyPI install.
Docker images and Debian packages need nothing specific as they already
include or specify ICU as an explicit dependency.
# Upgrading to v1.73.0
## Legacy Prometheus metric names have now been removed

View File

@ -70,55 +70,10 @@ output-directory
│ ├───state
│ ├───invite_state
│ └───knock_state
├───user_data
│ ├───account_data
│ │ ├───global
│ │ └───<room_id>
│ ├───connections
│ ├───devices
│ └───profile
└───media_ids
└───<media_id>
```
The `media_ids` folder contains only the metadata of the media uploaded by the user.
It does not contain the media itself.
Furthermore, only the `media_ids` that Synapse manages itself are exported.
If another media repository (e.g. [matrix-media-repo](https://github.com/turt2live/matrix-media-repo))
is used, the data must be exported separately.
With the `media_ids` the media files can be downloaded.
Media that have been sent in encrypted rooms are only retrieved in encrypted form.
The following script can help with downloading the media files:
```bash
#!/usr/bin/env bash
# Parameters
#
# source_directory: Directory which contains the export with the media_ids.
# target_directory: Directory into which all files are to be downloaded.
# repository_url: Address of the media repository or media worker.
# serverName: Name of the server (`server_name` from homeserver.yaml).
#
# Example:
# ./download_media.sh /tmp/export_data/media_ids/ /tmp/export_data/media_files/ http://localhost:8008 matrix.example.com
source_directory=$1
target_directory=$2
repository_url=$3
serverName=$4
mkdir -p $target_directory
for file in $source_directory/*; do
filename=$(basename ${file})
url=$repository_url/_matrix/media/v3/download/$serverName/$filename
echo "Downloading $filename - $url"
if ! wget -o /dev/null -P $target_directory $url; then
echo "Could not download $filename"
fi
done
└───user_data
├───connections
├───devices
└───profile
```
Manually resetting passwords
@ -129,7 +84,7 @@ can reset a user's password using the [admin API](../../admin_api/user_admin_api
I have a problem with my server. Can I just delete my database and start again?
---
Deleting your database is unlikely to make anything better.
Deleting your database is unlikely to make anything better.
It's easy to make the mistake of thinking that you can start again from a clean
slate by dropping your database, but things don't work like that in a federated
@ -144,7 +99,7 @@ Come and seek help in https://matrix.to/#/#synapse:matrix.org.
There are two exceptions when it might be sensible to delete your database and start again:
* You have *never* joined any rooms which are federated with other servers. For
instance, a local deployment which the outside world can't talk to.
instance, a local deployment which the outside world can't talk to.
* You are changing the `server_name` in the homeserver configuration. In effect
this makes your server a completely new one from the point of view of the network,
so in this case it makes sense to start with a clean database.
@ -157,7 +112,7 @@ Using the following curl command:
curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
```
`<access-token>` - can be obtained in riot by looking in the riot settings, down the bottom is:
Access Token:\<click to reveal\>
Access Token:\<click to reveal\>
`<room-alias>` - the room alias, eg. #my_room:matrix.org this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org
@ -194,13 +149,13 @@ What are the biggest rooms on my server?
---
```sql
SELECT s.canonical_alias, g.room_id, count(*) AS num_rows
FROM
state_groups_state AS g,
room_stats_state AS s
WHERE g.room_id = s.room_id
SELECT s.canonical_alias, g.room_id, count(*) AS num_rows
FROM
state_groups_state AS g,
room_stats_state AS s
WHERE g.room_id = s.room_id
GROUP BY s.canonical_alias, g.room_id
ORDER BY num_rows desc
ORDER BY num_rows desc
LIMIT 10;
```

View File

@ -1,4 +1,4 @@
_This [blog post by Jackson Chen](https://jacksonchen666.com/posts/2022-12-03/14-33-00/) (Dec 2022) explains how to use many of the tools listed on this page. There is also an [earlier blog by Victor Berger](https://levans.fr/shrink-synapse-database.html) (June 2020), though this may be outdated in places._
This blog post by Victor Berger explains how to use many of the tools listed on this page: https://levans.fr/shrink-synapse-database.html
# List of useful tools and scripts for maintaining a Synapse database:
@ -15,4 +15,4 @@ The purge history API allows server admins to purge historic events from their d
Tool for compressing (deduplicating) `state_groups_state` table.
## [SQL for analyzing Synapse PostgreSQL database stats](useful_sql_for_admins.md)
Some easy SQL that reports useful stats about your Synapse database.
Some easy SQL that reports useful stats about your Synapse database.

View File

@ -1105,7 +1105,7 @@ This setting should only be used in very specific cases, such as
federation over Tor hidden services and similar. For private networks
of homeservers, you likely want to use a private CA instead.
Only effective if `federation_verify_certificates` is `true`.
Only effective if `federation_verify_certicates` is `true`.
Example configuration:
```yaml
@ -1518,11 +1518,11 @@ rc_registration_token_validity:
This option specifies several limits for login:
* `address` ratelimits login requests based on the client's IP
address. Defaults to `per_second: 0.003`, `burst_count: 5`.
address. Defaults to `per_second: 0.17`, `burst_count: 3`.
* `account` ratelimits login requests based on the account the
client is attempting to log into. Defaults to `per_second: 0.03`,
`burst_count: 5`.
client is attempting to log into. Defaults to `per_second: 0.17`,
`burst_count: 3`.
* `failed_attempts` ratelimits login requests based on the account the
client is attempting to log into, based on the amount of failed login
@ -2227,12 +2227,12 @@ allows the shared secret to be specified in an external file.
The file should be a plain text file, containing only the shared secret.
If this file does not exist, Synapse will create a new shared
secret on startup and store it in this file.
If this file does not exist, Synapse will create a new signing
key on startup and store it in this file.
Example configuration:
```yaml
registration_shared_secret_path: /path/to/secrets/file
registration_shared_secret_file: /path/to/secrets/file
```
_Added in Synapse 1.67.0._
@ -3927,9 +3927,6 @@ This setting has the following sub-options:
* `host` and `port`: Optional host and port to use to connect to redis. Defaults to
localhost and 6379
* `password`: Optional password if configured on the Redis instance.
* `dbid`: Optional redis dbid if needs to connect to specific redis logical db.
_Added in Synapse 1.78.0._
Example configuration:
```yaml
@ -3938,7 +3935,6 @@ redis:
host: localhost
port: 6379
password: <secret_password>
dbid: <dbid>
```
---
## Individual worker configuration

View File

@ -160,18 +160,7 @@ recommend the use of `systemd` where available: for information on setting up
[Systemd with Workers](systemd-with-workers/). To use `synctl`, see
[Using synctl with Workers](synctl_workers.md).
## Start Synapse with Poetry
The following applies to Synapse installations that have been installed from source using `poetry`.
You can start the main Synapse process with Poetry by running the following command:
```console
poetry run synapse_homeserver -c [your homeserver.yaml]
```
For worker setups, you can run the following command:
```console
poetry run synapse_worker -c [your worker.yaml]
```
## Available worker applications
### `synapse.app.generic_worker`
@ -232,7 +221,6 @@ information.
^/_matrix/client/(api/v1|r0|v3|unstable)/joined_rooms$
^/_matrix/client/v1/rooms/.*/timestamp_to_event$
^/_matrix/client/(api/v1|r0|v3|unstable)/search$
^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)
# Encryption requests
^/_matrix/client/(r0|v3|unstable)/keys/query$
@ -252,7 +240,6 @@ information.
^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state/
^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$
^/_matrix/client/(api/v1|r0|v3|unstable)/join/
^/_matrix/client/(api/v1|r0|v3|unstable)/knock/
^/_matrix/client/(api/v1|r0|v3|unstable)/profile/
# Account data requests
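As a hedged sketch (not taken from this diff): a `generic_worker` that serves client and federation endpoints such as the ones listed above is typically declared with a worker config along these lines; the worker name and port are purely illustrative:

```yaml
# Illustrative worker config; the name and port are made up for this sketch.
worker_app: synapse.app.generic_worker
worker_name: generic_worker1

worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]
```

Which requests actually reach such a worker is decided by the reverse proxy in front of Synapse; the regular expressions above exist so they can be matched and routed there.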

View File

@ -31,11 +31,18 @@ exclude = (?x)
|synapse/storage/databases/__init__.py
|synapse/storage/databases/main/cache.py
|synapse/storage/schema/
|tests/module_api/test_api.py
|tests/rest/media/v1/test_media_storage.py
|tests/server.py
)$
[mypy-synapse.federation.transport.client]
disallow_untyped_defs = False
[mypy-synapse.http.client]
disallow_untyped_defs = False
[mypy-synapse.http.matrixfederationclient]
disallow_untyped_defs = False
@ -48,12 +55,87 @@ warn_unused_ignores = False
[mypy-synapse.util.caches.treecache]
disallow_untyped_defs = False
[mypy-synapse.server]
disallow_untyped_defs = False
[mypy-synapse.storage.database]
disallow_untyped_defs = False
[mypy-tests.*]
disallow_untyped_defs = False
[mypy-tests.api.*]
disallow_untyped_defs = True
[mypy-tests.app.*]
disallow_untyped_defs = True
[mypy-tests.appservice.*]
disallow_untyped_defs = True
[mypy-tests.config.*]
disallow_untyped_defs = True
[mypy-tests.crypto.*]
disallow_untyped_defs = True
[mypy-tests.events.*]
disallow_untyped_defs = True
[mypy-tests.federation.*]
disallow_untyped_defs = True
[mypy-tests.handlers.*]
disallow_untyped_defs = True
[mypy-tests.http.*]
disallow_untyped_defs = True
[mypy-tests.logging.*]
disallow_untyped_defs = True
[mypy-tests.metrics.*]
disallow_untyped_defs = True
[mypy-tests.push.*]
disallow_untyped_defs = True
[mypy-tests.replication.*]
disallow_untyped_defs = True
[mypy-tests.rest.*]
disallow_untyped_defs = True
[mypy-tests.state.test_profile]
disallow_untyped_defs = True
[mypy-tests.storage.*]
disallow_untyped_defs = True
[mypy-tests.test_server]
disallow_untyped_defs = True
[mypy-tests.test_state]
disallow_untyped_defs = True
[mypy-tests.test_terms_auth]
disallow_untyped_defs = True
[mypy-tests.types.*]
disallow_untyped_defs = True
[mypy-tests.util.caches.*]
disallow_untyped_defs = True
[mypy-tests.util.caches.test_descriptors]
disallow_untyped_defs = False
[mypy-tests.util.*]
disallow_untyped_defs = True
[mypy-tests.utils]
disallow_untyped_defs = True
;; Dependencies without annotations
;; Before ignoring a module, check to see if type stubs are available.
;; The `typeshed` project maintains stubs here:

342
poetry.lock generated
View File

@ -90,46 +90,32 @@ typecheck = ["mypy"]
[[package]]
name = "black"
version = "23.1.0"
version = "22.12.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
{file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
{file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
{file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
{file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
{file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
{file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
{file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
{file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
{file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
{file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
{file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
{file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
{file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
{file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
{file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
{file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
{file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
{file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
{file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
{file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
{file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
{file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
{file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
{file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
{file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
]
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
@ -141,14 +127,14 @@ uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "bleach"
version = "6.0.0"
version = "5.0.1"
description = "An easy safelist-based HTML-sanitizing tool."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"},
{file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"},
{file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"},
{file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"},
]
[package.dependencies]
@ -157,17 +143,18 @@ webencodings = "*"
[package.extras]
css = ["tinycss2 (>=1.1.0,<1.2)"]
dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"]
[[package]]
name = "canonicaljson"
version = "1.6.5"
version = "1.6.4"
description = "Canonical JSON"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "canonicaljson-1.6.5-py3-none-any.whl", hash = "sha256:806ea6f2cbb7405d20259e1c36dd1214ba5c242fa9165f5bd0bf2081f82c23fb"},
{file = "canonicaljson-1.6.5.tar.gz", hash = "sha256:68dfc157b011e07d94bf74b5d4ccc01958584ed942d9dfd5fdd706609e81cd4b"},
{file = "canonicaljson-1.6.4-py3-none-any.whl", hash = "sha256:55d282853b4245dbcd953fe54c39b91571813d7c44e1dbf66e3c4f97ff134a48"},
{file = "canonicaljson-1.6.4.tar.gz", hash = "sha256:6c09b2119511f30eb1126cfcd973a10824e20f1cfd25039cde3d1218dd9c8d8f"},
]
[package.dependencies]
@ -352,49 +339,50 @@ files = [
[[package]]
name = "cryptography"
version = "39.0.1"
version = "38.0.4"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"},
{file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"},
{file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"},
{file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"},
{file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"},
{file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"},
{file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"},
{file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef"},
{file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885"},
{file = "cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"},
{file = "cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"},
{file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70"},
{file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb"},
{file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d"},
{file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1"},
{file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8"},
{file = "cryptography-38.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db"},
{file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b"},
{file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c"},
{file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00"},
{file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0"},
{file = "cryptography-38.0.4-cp36-abi3-win32.whl", hash = "sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744"},
{file = "cryptography-38.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d"},
{file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353"},
{file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7"},
{file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd"},
{file = "cryptography-38.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2"},
{file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b"},
{file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6"},
{file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876"},
{file = "cryptography-38.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b"},
{file = "cryptography-38.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285"},
{file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b"},
{file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083"},
{file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee"},
{file = "cryptography-38.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9"},
{file = "cryptography-38.0.4.tar.gz", hash = "sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290"},
]
[package.dependencies]
cffi = ">=1.12"
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"]
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
sdist = ["setuptools-rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"]
test-randomorder = ["pytest-randomly"]
tox = ["tox"]
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]
[[package]]
name = "defusedxml"
@ -1160,38 +1148,36 @@ files = [
[[package]]
name = "mypy"
version = "1.0.0"
version = "0.981"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "mypy-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af"},
{file = "mypy-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c"},
{file = "mypy-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a"},
{file = "mypy-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593"},
{file = "mypy-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7"},
{file = "mypy-1.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52"},
{file = "mypy-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d"},
{file = "mypy-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5"},
{file = "mypy-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd"},
{file = "mypy-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2"},
{file = "mypy-1.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c"},
{file = "mypy-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88"},
{file = "mypy-1.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805"},
{file = "mypy-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21"},
{file = "mypy-1.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964"},
{file = "mypy-1.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36"},
{file = "mypy-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1"},
{file = "mypy-1.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43"},
{file = "mypy-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb"},
{file = "mypy-1.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af"},
{file = "mypy-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072"},
{file = "mypy-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457"},
{file = "mypy-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74"},
{file = "mypy-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d"},
{file = "mypy-1.0.0-py3-none-any.whl", hash = "sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f"},
{file = "mypy-1.0.0.tar.gz", hash = "sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf"},
{file = "mypy-0.981-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4bc460e43b7785f78862dab78674e62ec3cd523485baecfdf81a555ed29ecfa0"},
{file = "mypy-0.981-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:756fad8b263b3ba39e4e204ee53042671b660c36c9017412b43af210ddee7b08"},
{file = "mypy-0.981-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a16a0145d6d7d00fbede2da3a3096dcc9ecea091adfa8da48fa6a7b75d35562d"},
{file = "mypy-0.981-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce65f70b14a21fdac84c294cde75e6dbdabbcff22975335e20827b3b94bdbf49"},
{file = "mypy-0.981-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e35d764784b42c3e256848fb8ed1d4292c9fc0098413adb28d84974c095b279"},
{file = "mypy-0.981-cp310-cp310-win_amd64.whl", hash = "sha256:e53773073c864d5f5cec7f3fc72fbbcef65410cde8cc18d4f7242dea60dac52e"},
{file = "mypy-0.981-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ee196b1d10b8b215e835f438e06965d7a480f6fe016eddbc285f13955cca659"},
{file = "mypy-0.981-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad21d4c9d3673726cf986ea1d0c9fb66905258709550ddf7944c8f885f208be"},
{file = "mypy-0.981-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d1debb09043e1f5ee845fa1e96d180e89115b30e47c5d3ce53bc967bab53f62d"},
{file = "mypy-0.981-cp37-cp37m-win_amd64.whl", hash = "sha256:9f362470a3480165c4c6151786b5379351b790d56952005be18bdbdd4c7ce0ae"},
{file = "mypy-0.981-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c9e0efb95ed6ca1654951bd5ec2f3fa91b295d78bf6527e026529d4aaa1e0c30"},
{file = "mypy-0.981-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e178eaffc3c5cd211a87965c8c0df6da91ed7d258b5fc72b8e047c3771317ddb"},
{file = "mypy-0.981-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:06e1eac8d99bd404ed8dd34ca29673c4346e76dd8e612ea507763dccd7e13c7a"},
{file = "mypy-0.981-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa38f82f53e1e7beb45557ff167c177802ba7b387ad017eab1663d567017c8ee"},
{file = "mypy-0.981-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:64e1f6af81c003f85f0dfed52db632817dabb51b65c0318ffbf5ff51995bbb08"},
{file = "mypy-0.981-cp38-cp38-win_amd64.whl", hash = "sha256:e1acf62a8c4f7c092462c738aa2c2489e275ed386320c10b2e9bff31f6f7e8d6"},
{file = "mypy-0.981-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b6ede64e52257931315826fdbfc6ea878d89a965580d1a65638ef77cb551f56d"},
{file = "mypy-0.981-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eb3978b191b9fa0488524bb4ffedf2c573340e8c2b4206fc191d44c7093abfb7"},
{file = "mypy-0.981-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f8fcf7b4b3cc0c74fb33ae54a4cd00bb854d65645c48beccf65fa10b17882c"},
{file = "mypy-0.981-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64d2ce043a209a297df322eb4054dfbaa9de9e8738291706eaafda81ab2b362"},
{file = "mypy-0.981-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2ee3dbc53d4df7e6e3b1c68ac6a971d3a4fb2852bf10a05fda228721dd44fae1"},
{file = "mypy-0.981-cp39-cp39-win_amd64.whl", hash = "sha256:8e8e49aa9cc23aa4c926dc200ce32959d3501c4905147a66ce032f05cb5ecb92"},
{file = "mypy-0.981-py3-none-any.whl", hash = "sha256:794f385653e2b749387a42afb1e14c2135e18daeb027e0d97162e4b7031210f8"},
{file = "mypy-0.981.tar.gz", hash = "sha256:ad77c13037d3402fbeffda07d51e3f228ba078d1c7096a73759c9419ea031bf4"},
]
[package.dependencies]
@ -1202,7 +1188,6 @@ typing-extensions = ">=3.10"
[package.extras]
dmypy = ["psutil (>=4.0)"]
install-types = ["pip"]
python2 = ["typed-ast (>=1.4.0,<2)"]
reports = ["lxml"]
@ -1220,18 +1205,18 @@ files = [
[[package]]
name = "mypy-zope"
version = "0.9.0"
version = "0.3.11"
description = "Plugin for mypy to support zope interfaces"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "mypy-zope-0.9.0.tar.gz", hash = "sha256:88bf6cd056e38b338e6956055958a7805b4ff84404ccd99e29883a3647a1aeb3"},
{file = "mypy_zope-0.9.0-py3-none-any.whl", hash = "sha256:e1bb4b57084f76ff8a154a3e07880a1af2ac6536c491dad4b143d529f72c5d15"},
{file = "mypy-zope-0.3.11.tar.gz", hash = "sha256:d4255f9f04d48c79083bbd4e2fea06513a6ac7b8de06f8c4ce563fd85142ca05"},
{file = "mypy_zope-0.3.11-py3-none-any.whl", hash = "sha256:ec080a6508d1f7805c8d2054f9fdd13c849742ce96803519e1fdfa3d3cab7140"},
]
[package.dependencies]
mypy = "1.0.0"
mypy = "0.981"
"zope.interface" = "*"
"zope.schema" = "*"
@ -1985,29 +1970,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
[[package]]
name = "ruff"
version = "0.0.252"
version = "0.0.230"
description = "An extremely fast Python linter, written in Rust."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.0.252-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:349367a227c4db7abbc3a9993efea8a608b5bea4bb4a1e5fc6f0d56819524f92"},
{file = "ruff-0.0.252-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:ce77f9106d96b4faf7865860fb5155b9deaf6f699d9c279118c5ad947739ecaf"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edadb0b050293b4e60dab979ba6a4e734d9c899cbe316a0ee5b65e3cdd39c750"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4efdae98937d1e4d23ab0b7fc7e8e6b6836cc7d2d42238ceeacbc793ef780542"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8546d879f7d3f669379a03e7b103d90e11901976ab508aeda59c03dfd8a359e"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83fdc7169b6c1fb5fe8d1cdf345697f558c1b433ef97df9ca11defa2a8f3ee9e"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84ed9be1a17e2a556a571a5b959398633dd10910abd8dcf8b098061e746e892d"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f5e77bd9ba4438cf2ee32154e2673afe22f538ef29f5d65ca47e3dc46c42cf8"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5179b94b45c0f8512eaff3ab304c14714a46df2e9ca72a9d96084adc376b71"},
{file = "ruff-0.0.252-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:92efd8a71157595df5bc46aaaa0613d8a2fbc5cddc53ae7b749c16025c324732"},
{file = "ruff-0.0.252-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd350fc10832cfd28e681d829a8aa83ea3e653326e0ea9d98637dfb8d46177d2"},
{file = "ruff-0.0.252-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f119240c9631216e846166e06023b1d878e25fbac93bf20da50069e91cfbfaee"},
{file = "ruff-0.0.252-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5c5a49f89f5ede93d16eddfeeadd7e5739ec703e8f63ac95eac30236b9e49da3"},
{file = "ruff-0.0.252-py3-none-win32.whl", hash = "sha256:89a897dc743f2fe063483ea666097e72e848f4bbe40493fe0533e61799959f6e"},
{file = "ruff-0.0.252-py3-none-win_amd64.whl", hash = "sha256:cdc89ad6ff88519b1fb1816ac82a9ad910762c90ff5fd64dda7691b72d36aff7"},
{file = "ruff-0.0.252-py3-none-win_arm64.whl", hash = "sha256:4b594a17cf53077165429486650658a0e1b2ac6ab88954f5afd50d2b1b5657a9"},
{file = "ruff-0.0.252.tar.gz", hash = "sha256:6992611ab7bdbe7204e4831c95ddd3febfeece2e6f5e44bbed044454c7db0f63"},
{file = "ruff-0.0.230-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:fcc31d02cebda0a85e2e13a44642aea7f84362cb4f589e2f6b864e3928e4a7db"},
{file = "ruff-0.0.230-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:45a7f2c7155d520b8ca255a01235763d5c25fd5e7af055e50a78c6d91ece0ced"},
{file = "ruff-0.0.230-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4eca8b185ab56cac67acc23287c3c8c62a0c0ffadc0787a3bef3a6e77eaed82f"},
{file = "ruff-0.0.230-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec2bcdb5040efd8082a3a98369eec4bdc5fd05f53cc6714cb2b725d557d4abe8"},
{file = "ruff-0.0.230-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26571aee2b93b60e47e44478f72a9787b387f752e85b85f176739bd91b27cfd1"},
{file = "ruff-0.0.230-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4b69c9883c3e264f8bb2d52bdabb88b8d9672750ea05f33e0ff52532824bd5c5"},
{file = "ruff-0.0.230-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b3dc88b83f200378a9b9c91036989f0285a10759514c42235ce02e5824ac8d0"},
{file = "ruff-0.0.230-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:767716f008dd3a40ec2318396f648fda437c6968087a4526cde5879e382cf477"},
{file = "ruff-0.0.230-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac27a0f9b96d9923cef7d911790a21a19b51aec0f08375ccc47ad735b1054d78"},
{file = "ruff-0.0.230-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:729dfc7b7ad4f7d8761dc60c58f15372d6f5c2dd9b6c5952524f2bc3aec7de6a"},
{file = "ruff-0.0.230-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ad086cf2e5fef274687121f673f0f9b60c8981ec07c2bb0448c459cbaef81bcb"},
{file = "ruff-0.0.230-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4feaed0978c24687133cd11c7380de20aa841f893e24430c735cc6c3faba4837"},
{file = "ruff-0.0.230-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1d1046d0d43a0f24b2e9e61d76bb201b486ad02e9787d3432af43bd7d16f2c2e"},
{file = "ruff-0.0.230-py3-none-win32.whl", hash = "sha256:4d627911c9ba57bcd2f2776f1c09a10d334db163cb5be8c892e7ec7b59ccf58c"},
{file = "ruff-0.0.230-py3-none-win_amd64.whl", hash = "sha256:27fd4891a1d0642f5b2038ebf86f8169bc3d466964bdfaa0ce2a65149bc7cced"},
{file = "ruff-0.0.230.tar.gz", hash = "sha256:a049f93af1057ac450e8c09559d44e371eda1c151b1b863c0013a1066fefddb0"},
]
[[package]]
@ -2044,14 +2028,14 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
name = "sentry-sdk"
version = "1.15.0"
version = "1.13.0"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = true
python-versions = "*"
files = [
{file = "sentry-sdk-1.15.0.tar.gz", hash = "sha256:69ecbb2e1ff4db02a06c4f20f6f69cb5dfe3ebfbc06d023e40d77cf78e9c37e7"},
{file = "sentry_sdk-1.15.0-py2.py3-none-any.whl", hash = "sha256:7ad4d37dd093f4a7cb5ad804c6efe9e8fab8873f7ffc06042dc3f3fd700a93ec"},
{file = "sentry-sdk-1.13.0.tar.gz", hash = "sha256:72da0766c3069a3941eadbdfa0996f83f5a33e55902a19ba399557cfee1dddcc"},
{file = "sentry_sdk-1.13.0-py2.py3-none-any.whl", hash = "sha256:b7ff6318183e551145b5c4766eb65b59ad5b63ff234dffddc5fb50340cad6729"},
]
[package.dependencies]
@ -2069,8 +2053,7 @@ falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
opentelemetry = ["opentelemetry-distro (>=0.350b0)"]
pure-eval = ["asttokens", "executing", "pure-eval"]
pymongo = ["pymongo (>=3.1)"]
pyspark = ["pyspark (>=2.4.4)"]
@ -2272,13 +2255,13 @@ files = [
[[package]]
name = "systemd-python"
version = "235"
version = "234"
description = "Python interface for libsystemd"
category = "main"
optional = true
python-versions = "*"
files = [
{file = "systemd-python-235.tar.gz", hash = "sha256:4e57f39797fd5d9e2d22b8806a252d7c0106c936039d1e71c8c6b8008e695c0a"},
{file = "systemd-python-234.tar.gz", hash = "sha256:fd0e44bf70eadae45aadc292cb0a7eb5b0b6372cd1b391228047d33895db83e7"},
]
[[package]]
@ -2563,90 +2546,142 @@ files = [
[[package]]
name = "types-bleach"
version = "6.0.0.0"
version = "5.0.3.1"
description = "Typing stubs for bleach"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-bleach-6.0.0.0.tar.gz", hash = "sha256:770ce9c7ea6173743ef1a4a70f2619bb1819bf53c7cd0336d939af93f488fbe2"},
{file = "types_bleach-6.0.0.0-py3-none-any.whl", hash = "sha256:75f55f035837c5fce2cd0bd5162a2a90057680a89c9275588a5c12f5f597a14a"},
{file = "types-bleach-5.0.3.1.tar.gz", hash = "sha256:ce8772ea5126dab1883851b41e3aeff229aa5213ced36096990344e632e92373"},
{file = "types_bleach-5.0.3.1-py3-none-any.whl", hash = "sha256:af5f1b3a54ff279f54c29eccb2e6988ebb6718bc4061469588a5fd4880a79287"},
]
[[package]]
name = "types-commonmark"
version = "0.9.2.2"
version = "0.9.2.1"
description = "Typing stubs for commonmark"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-commonmark-0.9.2.2.tar.gz", hash = "sha256:f3259350634c2ce68ae503398430482f7cf44e5cae3d344995e916fbf453b4be"},
{file = "types_commonmark-0.9.2.2-py3-none-any.whl", hash = "sha256:d3d878692615e7fbe47bf19ba67497837b135812d665012a3d42219c1f2c3a61"},
{file = "types-commonmark-0.9.2.1.tar.gz", hash = "sha256:db8277e6aeb83429265eccece98a24954a9a502dde7bc7cf840a8741abd96b86"},
{file = "types_commonmark-0.9.2.1-py3-none-any.whl", hash = "sha256:9d5f500cb7eced801bde728137b0a10667bd853d328db641d03141f189e3aab4"},
]
[[package]]
name = "types-cryptography"
version = "3.3.15"
description = "Typing stubs for cryptography"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-cryptography-3.3.15.tar.gz", hash = "sha256:a7983a75a7b88a18f88832008f0ef140b8d1097888ec1a0824ec8fb7e105273b"},
{file = "types_cryptography-3.3.15-py3-none-any.whl", hash = "sha256:d9b0dd5465d7898d400850e7f35e5518aa93a7e23d3e11757cd81b4777089046"},
]
[package.dependencies]
types-enum34 = "*"
types-ipaddress = "*"
[[package]]
name = "types-docutils"
version = "0.19.1.1"
description = "Typing stubs for docutils"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-docutils-0.19.1.1.tar.gz", hash = "sha256:be0a51ba1c7dd215d9d2df66d6845e63c1009b4bbf4c5beb87a0d9745cdba962"},
{file = "types_docutils-0.19.1.1-py3-none-any.whl", hash = "sha256:a024cada35f0c13cc45eb0b68a102719018a634013690b7fef723bcbfadbd1f1"},
]
[[package]]
name = "types-enum34"
version = "1.1.8"
description = "Typing stubs for enum34"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-enum34-1.1.8.tar.gz", hash = "sha256:6f9c769641d06d73a55e11c14d38ac76fcd37eb545ce79cebb6eec9d50a64110"},
{file = "types_enum34-1.1.8-py3-none-any.whl", hash = "sha256:05058c7a495f6bfaaca0be4aeac3cce5cdd80a2bad2aab01fd49a20bf4a0209d"},
]
[[package]]
name = "types-ipaddress"
version = "1.0.8"
description = "Typing stubs for ipaddress"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-ipaddress-1.0.8.tar.gz", hash = "sha256:a03df3be5935e50ba03fa843daabff539a041a28e73e0fce2c5705bee54d3841"},
{file = "types_ipaddress-1.0.8-py3-none-any.whl", hash = "sha256:4933b74da157ba877b1a705d64f6fa7742745e9ffd65e51011f370c11ebedb55"},
]
[[package]]
name = "types-jsonschema"
version = "4.17.0.5"
version = "4.17.0.3"
description = "Typing stubs for jsonschema"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-jsonschema-4.17.0.5.tar.gz", hash = "sha256:7adc7bfca4afe291de0c93eca9367aa72a4fbe8ce87fe15642c600ad97d45dd6"},
{file = "types_jsonschema-4.17.0.5-py3-none-any.whl", hash = "sha256:79ac8a7763fe728947af90a24168b91621edf7e8425bf3670abd4ea0d4758fba"},
{file = "types-jsonschema-4.17.0.3.tar.gz", hash = "sha256:746aa466ffed9a1acc7bdbd0ac0b5e068f00be2ee008c1d1e14b0944a8c8b24b"},
{file = "types_jsonschema-4.17.0.3-py3-none-any.whl", hash = "sha256:c8d5b26b7c8da6a48d7fb1ce029b97e0ff6e74db3727efb968c69f39ad013685"},
]
[[package]]
name = "types-opentracing"
version = "2.4.10.3"
version = "2.4.10.1"
description = "Typing stubs for opentracing"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-opentracing-2.4.10.3.tar.gz", hash = "sha256:b277f114265b41216714f9c77dffcab57038f1730fd141e2c55c5c9f6f2caa87"},
{file = "types_opentracing-2.4.10.3-py3-none-any.whl", hash = "sha256:60244d718fcd9de7043645ecaf597222d550432507098ab2e6268f7b589a7fa7"},
{file = "types-opentracing-2.4.10.1.tar.gz", hash = "sha256:49e7e52b8b6e221865a9201fc8c2df0bcda8e7098d4ebb35903dbfa4b4d29195"},
{file = "types_opentracing-2.4.10.1-py3-none-any.whl", hash = "sha256:eb63394acd793e7d9e327956242349fee14580a87c025408dc268d4dd883cc24"},
]
[[package]]
name = "types-pillow"
version = "9.4.0.17"
version = "9.4.0.5"
description = "Typing stubs for Pillow"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-Pillow-9.4.0.17.tar.gz", hash = "sha256:7f0e871d2d46fbb6bc7deca3e02dc552cf9c1e8b49deb9595509551be3954e49"},
{file = "types_Pillow-9.4.0.17-py3-none-any.whl", hash = "sha256:f8b848a05f17cb4d53d245c59bf560372b9778d4cfaf9705f6245009bf9f65f3"},
{file = "types-Pillow-9.4.0.5.tar.gz", hash = "sha256:941cefaac2f5297d7d2a9989633c95b4063112690dc21c965d46bd5a7fff3c76"},
{file = "types_Pillow-9.4.0.5-py3-none-any.whl", hash = "sha256:a1d2b3e070b4d852af04f76f018d12bd51abb4abca3b725d91b35e01cda7a2de"},
]
[[package]]
name = "types-psycopg2"
version = "2.9.21.8"
version = "2.9.21.4"
description = "Typing stubs for psycopg2"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-psycopg2-2.9.21.8.tar.gz", hash = "sha256:b629440ffcfdebd742fab07f777ff69aefdd19394a138c18e921a1964c3cf5f6"},
{file = "types_psycopg2-2.9.21.8-py3-none-any.whl", hash = "sha256:e747fbec6e0e2502b625bc7686d13cc62fc170e8ae920e5ba27fac946778eeb9"},
{file = "types-psycopg2-2.9.21.4.tar.gz", hash = "sha256:d43dda166a70d073ddac40718e06539836b5844c99b58ef8d4489a8df2edf5c0"},
{file = "types_psycopg2-2.9.21.4-py3-none-any.whl", hash = "sha256:6a05dca0856996aa37d7abe436751803bf47ec006cabbefea092e057f23bc95d"},
]
[[package]]
name = "types-pyopenssl"
version = "23.0.0.4"
version = "22.1.0.2"
description = "Typing stubs for pyOpenSSL"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-pyOpenSSL-23.0.0.4.tar.gz", hash = "sha256:8b3550b6e19d51ce78aabd724b0d8ebd962081a5fce95e7f85a592dfcdbc16bf"},
{file = "types_pyOpenSSL-23.0.0.4-py3-none-any.whl", hash = "sha256:ad49e15bb8bb2f251b8fc24776f414d877629e44b1b049240063ab013b5a6a7d"},
{file = "types-pyOpenSSL-22.1.0.2.tar.gz", hash = "sha256:7a350e29e55bc3ee4571f996b4b1c18c4e4098947db45f7485b016eaa35b44bc"},
{file = "types_pyOpenSSL-22.1.0.2-py3-none-any.whl", hash = "sha256:54606a6afb203eb261e0fca9b7f75fa6c24d5ff71e13903c162ffb951c2c64c6"},
]
[package.dependencies]
cryptography = ">=35.0.0"
types-cryptography = "*"
[[package]]
name = "types-pyyaml"
@ -2662,14 +2697,14 @@ files = [
[[package]]
name = "types-requests"
version = "2.28.11.12"
version = "2.28.11.8"
description = "Typing stubs for requests"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-requests-2.28.11.12.tar.gz", hash = "sha256:fd530aab3fc4f05ee36406af168f0836e6f00f1ee51a0b96b7311f82cb675230"},
{file = "types_requests-2.28.11.12-py3-none-any.whl", hash = "sha256:dbc2933635860e553ffc59f5e264264981358baffe6342b925e3eb8261f866ee"},
{file = "types-requests-2.28.11.8.tar.gz", hash = "sha256:e67424525f84adfbeab7268a159d3c633862dafae15c5b19547ce1b55954f0a3"},
{file = "types_requests-2.28.11.8-py3-none-any.whl", hash = "sha256:61960554baca0008ae7e2db2bd3b322ca9a144d3e80ce270f5fb640817e40994"},
]
[package.dependencies]
@ -2677,16 +2712,19 @@ types-urllib3 = "<1.27"
[[package]]
name = "types-setuptools"
version = "67.5.0.0"
version = "67.1.0.0"
description = "Typing stubs for setuptools"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-setuptools-67.5.0.0.tar.gz", hash = "sha256:fa6f231eeb27e86b1d6e8260f73de300e91f99c205b9a5e21debd49f3726a849"},
{file = "types_setuptools-67.5.0.0-py3-none-any.whl", hash = "sha256:f7f4bf4ab777e88631d3a387bbfdd4d480a2a4693ca896130f8ef738370377b8"},
{file = "types-setuptools-67.1.0.0.tar.gz", hash = "sha256:162a39d22e3a5eb802197c84f16b19e798101bbd33d9437837fbb45627da5627"},
{file = "types_setuptools-67.1.0.0-py3-none-any.whl", hash = "sha256:5bd7a10d93e468bfcb10d24cb8ea5e12ac4f4ac91267293959001f1448cf0619"},
]
[package.dependencies]
types-docutils = "*"
[[package]]
name = "types-urllib3"
version = "1.26.10"
@ -2701,14 +2739,14 @@ files = [
[[package]]
name = "typing-extensions"
version = "4.5.0"
version = "4.4.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
]
[[package]]
@ -2990,4 +3028,4 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
python-versions = "^3.7.1"
content-hash = "7bcffef7b6e6d4b1113222e2ca152b3798c997872789c8a1ea01238f199d56fe"
content-hash = "2673ef0530a42dae1df998bacfcaf88a563529b39461003a980743a97f02996f"

View File

@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"
[tool.poetry]
name = "matrix-synapse"
version = "1.79.0rc1"
version = "1.77.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@ -154,9 +154,7 @@ python = "^3.7.1"
# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
jsonschema = ">=3.0.0"
# frozendict 2.1.2 is broken on Debian 10: https://github.com/Marco-Sulla/python-frozendict/issues/41
# We cannot test our wheels against the 2.3.5 release in CI. Putting in an upper bound for this
# because frozendict has been more trouble than it's worth; we would like to move to immutabledict.
frozendict = ">=1,!=2.1.2,<2.3.5"
frozendict = ">=1,!=2.1.2"
# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
unpaddedbase64 = ">=2.1.0"
# We require 1.5.0 to work around an issue when running against the C implementation of
@ -313,7 +311,7 @@ all = [
# We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
ruff = "0.0.252"
ruff = "0.0.230"
# Typechecking
mypy = "*"
@ -348,9 +346,6 @@ twine = "*"
# Towncrier min version comes from #3425. Rationale unclear.
towncrier = ">=18.6.0rc1"
# Used for checking the Poetry lockfile
tomli = ">=1.2.3"
[build-system]
# The upper bounds here are defensive, intended to prevent situations like
# #13849 and #14079 where we see buildtime or runtime errors caused by build

View File

@ -1,936 +0,0 @@
attrs==22.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \
--hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99
authlib==1.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a \
--hash=sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d
automat==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180 \
--hash=sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e
bcrypt==4.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535 \
--hash=sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0 \
--hash=sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410 \
--hash=sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd \
--hash=sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665 \
--hash=sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab \
--hash=sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71 \
--hash=sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215 \
--hash=sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b \
--hash=sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda \
--hash=sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9 \
--hash=sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a \
--hash=sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344 \
--hash=sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f \
--hash=sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d \
--hash=sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c \
--hash=sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c \
--hash=sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2 \
--hash=sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d \
--hash=sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e \
--hash=sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3
bleach==6.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414 \
--hash=sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4
canonicaljson==1.6.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:68dfc157b011e07d94bf74b5d4ccc01958584ed942d9dfd5fdd706609e81cd4b \
--hash=sha256:806ea6f2cbb7405d20259e1c36dd1214ba5c242fa9165f5bd0bf2081f82c23fb
certifi==2022.12.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
cffi==1.15.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==2.0.12 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
--hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
constantly==15.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35 \
--hash=sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d
cryptography==39.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \
--hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \
--hash=sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885 \
--hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \
--hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \
--hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \
--hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \
--hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \
--hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \
--hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \
--hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \
--hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \
--hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \
--hash=sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef \
--hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \
--hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \
--hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \
--hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \
--hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \
--hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \
--hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \
--hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \
--hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8
frozendict==2.3.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:15b4b18346259392b0d27598f240e9390fafbff882137a9c48a1e0104fb17f78 \
--hash=sha256:25a6d2e8b7cf6b6e5677a1a4b53b4073e5d9ec640d1db30dc679627668d25e90 \
--hash=sha256:389f395a74eb16992217ac1521e689c1dea2d70113bcb18714669ace1ed623b9 \
--hash=sha256:3d8042b7dab5e992e30889c9b71b781d5feef19b372d47d735e4d7d45846fd4a \
--hash=sha256:3e93aebc6e69a8ef329bbe9afb8342bd33c7b5c7a0c480cb9f7e60b0cbe48072 \
--hash=sha256:3ec86ebf143dd685184215c27ec416c36e0ba1b80d81b1b9482f7d380c049b4e \
--hash=sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc \
--hash=sha256:5809e6ff6b7257043a486f7a3b73a7da71cf69a38980b4171e4741291d0d9eb3 \
--hash=sha256:7c550ed7fdf1962984bec21630c584d722b3ee5d5f57a0ae2527a0121dc0414a \
--hash=sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782 \
--hash=sha256:95bac22f7f09d81f378f2b3f672b7a50a974ca180feae1507f5e21bc147e8bc8 \
--hash=sha256:aca59108b77cadc13ba7dfea7e8f50811208c7652a13dc6c7f92d7782a24d299 \
--hash=sha256:b98a0d65a59af6da03f794f90b0c3085a7ee14e7bf8f0ef36b079ee8aa992439 \
--hash=sha256:ccb6450a416c9cc9acef7683e637e28356e3ceeabf83521f74cc2718883076b7 \
--hash=sha256:d722f3d89db6ae35ef35ecc243c40c800eb344848c83dba4798353312cd37b15 \
--hash=sha256:dae686722c144b333c4dbdc16323a5de11406d26b76d2be1cc175f90afacb5ba \
--hash=sha256:dbbe1339ac2646523e0bb00d1896085d1f70de23780e4927ca82b36ab8a044d3
hiredis==2.2.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:02118dc8545e2371448b9983a0041f12124eea907eb61858f2be8e7c1dfa1e43 \
--hash=sha256:03c6a1f6bf2f64f40d076c997cdfcb8b3d1c9557dda6cb7bbad2c5c839921726 \
--hash=sha256:0474ab858f5dd15be6b467d89ec14b4c287f53b55ca5455369c3a1a787ef3a24 \
--hash=sha256:04c972593f26f4769e2be7058b7928179337593bcfc6a8b6bda87eea807b7cbf \
--hash=sha256:0a9493bbc477436a3725e99cfcba768f416ab70ab92956e373d1a3b480b1e204 \
--hash=sha256:0e199868fe78c2d175bbb7b88f5daf2eae4a643a62f03f8d6736f9832f04f88b \
--hash=sha256:0f2607e08dcb1c5d1e925c451facbfc357927acaa336a004552c32a6dd68e050 \
--hash=sha256:0f4b92df1e69dc48411045d2117d1d27ec6b5f0dd2b6501759cea2f6c68d5618 \
--hash=sha256:103bde304d558061c4ba1d7ff94351e761da753c28883fd68964f25080152dac \
--hash=sha256:14f67987e1d55b197e46729d1497019228ad8c94427bb63500e6f217aa586ca5 \
--hash=sha256:1523ec56d711bee863aaaf4325cef4430da3143ec388e60465f47e28818016cd \
--hash=sha256:1776db8af168b22588ec10c3df674897b20cc6d25f093cd2724b8b26d7dac057 \
--hash=sha256:17e9f363db56a8edb4eff936354cfa273197465bcd970922f3d292032eca87b0 \
--hash=sha256:18135ecf28fc6577e71c0f8d8eb2f31e4783020a7d455571e7e5d2793374ce20 \
--hash=sha256:19666eb154b7155d043bf941e50d1640125f92d3294e2746df87639cc44a10e6 \
--hash=sha256:209b94fa473b39e174b665186cad73206ca849cf6e822900b761e83080f67b06 \
--hash=sha256:220b6ac9d3fce60d14ccc34f9790e20a50dc56b6fb747fc357600963c0cf6aca \
--hash=sha256:231e5836579fc75b25c6f9bb6213950ea3d39aadcfeb7f880211ca55df968342 \
--hash=sha256:2bb682785a37145b209f44f5d5290b0f9f4b56205542fc592d0f1b3d5ffdfcf0 \
--hash=sha256:2ed6c948648798b440a9da74db65cdd2ad22f38cf4687f5212df369031394591 \
--hash=sha256:2f6e80fb7cd4cc61af95ab2875801e4c36941a956c183297c3273cbfbbefa9d3 \
--hash=sha256:33624903dfb629d6f7c17ed353b4b415211c29fd447f31e6bf03361865b97e68 \
--hash=sha256:341952a311654c39433c1e0d8d31c2a0c5864b2675ed159ed264ecaa5cfb225b \
--hash=sha256:38270042f40ed9e576966c603d06c984c80364b0d9ec86962a31551dae27b0cd \
--hash=sha256:3af3071d33432960cba88ce4e4932b508ab3e13ce41431c2a1b2dc9a988f7627 \
--hash=sha256:3afc76a012b907895e679d1e6bcc6394845d0cc91b75264711f8caf53d7b0f37 \
--hash=sha256:42504e4058246536a9f477f450ab21275126fc5f094be5d5e5290c6de9d855f9 \
--hash=sha256:497a8837984ddfbf6f5a4c034c0107f2c5aaaebeebf34e2c6ab591acffce5f12 \
--hash=sha256:49a518b456403602775218062a4dd06bed42b26854ff1ff6784cfee2ef6fa347 \
--hash=sha256:4e3b8f0eba6d88c2aec63e6d1e38960f8a25c01f9796d32993ffa1cfcf48744c \
--hash=sha256:58e51d83b42fdcc29780897641b1dcb30c0e4d3c4f6d9d71d79b2cfec99b8eb7 \
--hash=sha256:595474e6c25f1c3c8ec67d587188e7dd47c492829b2c7c5ba1b17ee9e7e9a9ea \
--hash=sha256:5cd590dd7858d0107c37b438aa27bbcaa0ba77c5b8eda6ebab7acff0aa89f7d7 \
--hash=sha256:5da26970c41084a2ac337a4f075301a78cffb0e0f3df5e98c3049fc95e10725c \
--hash=sha256:63f941e77c024be2a1451089e2fdbd5ff450ff0965f49948bbeb383aef1799ea \
--hash=sha256:69c32d54ac1f6708145c77d79af12f7448ca1025a0bf912700ad1f0be511026a \
--hash=sha256:6afbddc82bbb2c4c405d9a49a056ffe6541f8ad3160df49a80573b399f94ba3a \
--hash=sha256:6ba9f425739a55e1409fda5dafad7fdda91c6dcd2b111ba93bb7b53d90737506 \
--hash=sha256:6f5f469ba5ae613e4c652cdedfc723aa802329fcc2d65df1e9ab0ac0de34ad9e \
--hash=sha256:6fbb1a56d455602bd6c276d5c316ae245111b2dc8158355112f2d905e7471c85 \
--hash=sha256:706995fb1173fab7f12110fbad00bb95dd0453336f7f0b341b4ca7b1b9ff0bc7 \
--hash=sha256:70db8f514ebcb6f884497c4eee21d0350bbc4102e63502411f8e100cf3b7921e \
--hash=sha256:724aed63871bc386d6f28b5f4d15490d84934709f093e021c4abb785e72db5db \
--hash=sha256:78f2a53149b116e0088f6eda720574f723fbc75189195aab8a7a2a591ca89cab \
--hash=sha256:796b616478a5c1cac83e9e10fcd803e746e5a02461bfa7767aebae8b304e2124 \
--hash=sha256:7a5dac3ae05bc64b233f950edf37dce9c904aedbc7e18cfc2adfb98edb85da46 \
--hash=sha256:812e27a9b20db967f942306267bcd8b1369d7c171831b6f45d22d75576cd01cd \
--hash=sha256:82ad46d1140c5779cd9dfdafc35f47dd09dadff7654d8001c50bb283da82e7c9 \
--hash=sha256:8a11250dd0521e9f729325b19ce9121df4cbb80ad3468cc21e56803e8380bc4b \
--hash=sha256:8ad00a7621de8ef9ae1616cf24a53d48ad1a699b96668637559a8982d109a800 \
--hash=sha256:8fe289556264cb1a2efbcd3d6b3c55e059394ad01b6afa88151264137f85c352 \
--hash=sha256:943631a49d7746cd413acaf0b712d030a15f02671af94c54759ba3144351f97a \
--hash=sha256:954abb363ed1d18dfb7510dbd89402cb7c21106307e04e2ee7bccf35a134f4dd \
--hash=sha256:96745c4cdca261a50bd70c01f14c6c352a48c4d6a78e2d422040fba7919eadef \
--hash=sha256:96b079c53b6acd355edb6fe615270613f3f7ddc4159d69837ce15ec518925c40 \
--hash=sha256:998ab35070dc81806a23be5de837466a51b25e739fb1a0d5313474d5bb29c829 \
--hash=sha256:99b5bcadd5e029234f89d244b86bc8d21093be7ac26111068bebd92a4a95dc73 \
--hash=sha256:a0e98106a28fabb672bb014f6c4506cc67491e4cf9ac56d189cbb1e81a9a3e68 \
--hash=sha256:a16d81115128e6a9fc6904de051475be195f6c460c9515583dccfd407b16ff78 \
--hash=sha256:a386f00800b1b043b091b93850e02814a8b398952438a9d4895bd70f5c80a821 \
--hash=sha256:a54d2b3328a2305e0dfb257a4545053fdc64df0c64e0635982e191c846cc0456 \
--hash=sha256:a57a4a33a78e94618d026fc68e853d3f71fa4a1d4da7a6e828e927819b001f2d \
--hash=sha256:a5e5e51faa7cd02444d4ee1eb59e316c08e974bcfa3a959cb790bc4e9bb616c5 \
--hash=sha256:a99c0d50d1a31be285c83301eff4b911dca16aac1c3fe1875c7d6f517a1e9fc4 \
--hash=sha256:ae6b356ed166a0ec663a46b547c988815d2b0e5f2d0af31ef34a16cf3ce705d0 \
--hash=sha256:b2b847ea3f9af99e02c4c58b7cc6714e105c8d73705e5ff1132e9a249391f688 \
--hash=sha256:b90dd0adb1d659f8c94b32556198af1e61e38edd27fc7434d4b3b68ad4e51d37 \
--hash=sha256:bad6e9a0e31678ee15ac3ef72e77c08177c86df05c37d2423ff3cded95131e51 \
--hash=sha256:bde0178e7e6c49e408b8d3a8c0ec8e69a23e8dc2ae29f87af2d74b21025385dc \
--hash=sha256:c233199b9f4dd43e2297577e32ba5fcd0378871a47207bc424d5e5344d030a3e \
--hash=sha256:c604919bba041e4c4708ecb0fe6c7c8a92a7f1e886b0ae8d2c13c3e4abfc5eda \
--hash=sha256:c65f38418e35970d44f9b5a59533f0f60f14b9f91b712dba51092d2c74d4dcd1 \
--hash=sha256:c702dd28d52656bb86f7a2a76ea9341ac434810871b51fcd6cd28c6d7490fbdf \
--hash=sha256:c7fd6394779c9a3b324b65394deadb949311662f3770bd34f904b8c04328082c \
--hash=sha256:cb59a7535e0b8373f694ce87576c573f533438c5fbee450193333a22118f4a98 \
--hash=sha256:cbb3f56d371b560bf39fe45d29c24e3d819ae2399733e2c86394a34e76adab38 \
--hash=sha256:d4f6bbe599d255a504ef789c19e23118c654d256343c1ecdf7042fb4b4d0f7fa \
--hash=sha256:d77dbc13d55c1d45d6a203da910002fffd13fa310af5e9c5994959587a192789 \
--hash=sha256:d87f90064106dfd7d2cc7baeb007a8ca289ee985f4bf64bb627c50cdc34208ed \
--hash=sha256:d8849bc74473778c10377f82cf9a534e240734e2f9a92c181ef6d51b4e3d3eb2 \
--hash=sha256:d9fbef7f9070055a7cc012ac965e3dbabbf2400b395649ea8d6016dc82a7d13a \
--hash=sha256:dcad9c9239845b29f149a895e7e99b8307889cecbfc37b69924c2dad1f4ae4e8 \
--hash=sha256:df6325aade17b1f86c8b87f6a1d9549a4184fda00e27e2fca0e5d2a987130365 \
--hash=sha256:e57dfcd72f036cce9eab77bc533a932444459f7e54d96a555d25acf2501048be \
--hash=sha256:e5945ef29a76ab792973bef1ffa2970d81dd22edb94dfa5d6cba48beb9f51962 \
--hash=sha256:ea011b3bfa37f2746737860c1e5ba198b63c9b4764e40b042aac7bd2c258938f \
--hash=sha256:ed79f65098c4643cb6ec4530b337535f00b58ea02e25180e3df15e9cc9da58dc \
--hash=sha256:f0ccf6fc116795d76bca72aa301a33874c507f9e77402e857d298c73419b5ea3 \
--hash=sha256:fec7465caac7b0a36551abb37066221cabf59f776d78fdd58ff17669942b4b41
hyperlink==21.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \
--hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4
idna==3.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
ijson==3.2.0.post0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:00594ed3ef2218fee8c652d9e7f862fb39f8251b67c6379ef12f7e044bf6bbf3 \
--hash=sha256:03dfd4c8ed19e704d04b0ad4f34f598dc569fd3f73089f80eed698e7f6069233 \
--hash=sha256:09fe3a53e00c59de33b825ba8d6d39f544a7d7180983cd3d6bd2c3794ae35442 \
--hash=sha256:0eb838b4e4360e65c00aa13c78b35afc2477759d423b602b60335af5bed3de5b \
--hash=sha256:11bb84a53c37e227e733c6dffad2037391cf0b3474bff78596dc4373b02008a0 \
--hash=sha256:11dfd64633fe1382c4237477ac3836f682ca17e25e0d0799e84737795b0611df \
--hash=sha256:1302dc6490da7d44c3a76a5f0b87d8bec9f918454c6d6e6bf4ed922e47da58bb \
--hash=sha256:13f2939db983327dd0492f6c1c0e77be3f2cbf9b620c92c7547d1d2cd6ef0486 \
--hash=sha256:158494bfe89ccb32618d0e53b471364080ceb975462ec464d9f9f37d9832b653 \
--hash=sha256:183841b8d033ca95457f61fb0719185dc7f51a616070bdf1dcaf03473bed05b2 \
--hash=sha256:1a75cfb34217b41136b714985be645f12269e4345da35d7b48aabd317c82fd10 \
--hash=sha256:1d64ffaab1d006a4fa9584a4c723e95cc9609bf6c3365478e250cd0bffaaadf3 \
--hash=sha256:25919b444426f58dcc62f763d1c6be6297f309da85ecab55f51da6ca86fc9fdf \
--hash=sha256:26b57838e712b8852c40ec6d74c6de8bb226446440e1af1354c077a6f81b9142 \
--hash=sha256:27409ba44cfd006901971063d37699f72e092b5efaa1586288b5067d80c6b5bd \
--hash=sha256:2d50b2ad9c6c51ca160aa60de7f4dacd1357c38d0e503f51aed95c1c1945ff53 \
--hash=sha256:2f204f6d4cedeb28326c230a0b046968b5263c234c65a5b18cee22865800fff7 \
--hash=sha256:2f9d449f86f8971c24609e319811f7f3b6b734f0218c4a0e799debe19300d15b \
--hash=sha256:3b21b1ecd20ed2f918f6f99cdfa68284a416c0f015ffa64b68fa933df1b24d40 \
--hash=sha256:3ccc4d4b947549f9c431651c02b95ef571412c78f88ded198612a41d5c5701a0 \
--hash=sha256:41e955e173f77f54337fecaaa58a35c464b75e232b1f939b282497134a4d4f0e \
--hash=sha256:424232c2bf3e8181f1b572db92c179c2376b57eba9fc8931453fba975f48cb80 \
--hash=sha256:434e57e7ec5c334ccb0e67bb4d9e60c264dcb2a3843713dbeb12cb19fe42a668 \
--hash=sha256:47a56e3628c227081a2aa58569cbf2af378bad8af648aa904080e87cd6644cfb \
--hash=sha256:4d4e143908f47307042c9678803d27706e0e2099d0a6c1988c6cae1da07760bf \
--hash=sha256:4e7c4fdc7d24747c8cc7d528c145afda4de23210bf4054bd98cd63bf07e4882d \
--hash=sha256:51c1db80d7791fb761ad9a6c70f521acd2c4b0e5afa2fe0d813beb2140d16c37 \
--hash=sha256:5242cb2313ba3ece307b426efa56424ac13cc291c36f292b501d412a98ad0703 \
--hash=sha256:535665a77408b6bea56eb828806fae125846dff2e2e0ed4cb2e0a8e36244d753 \
--hash=sha256:535a59d61b9aef6fc2a3d01564c1151e38e5a44b92cd6583cb4e8ccf0f58043f \
--hash=sha256:53f1a13eb99ab514c562869513172135d4b55a914b344e6518ba09ad3ef1e503 \
--hash=sha256:5418066666b25b05f2b8ae2698408daa0afa68f07b0b217f2ab24465b7e9cbd9 \
--hash=sha256:56500dac8f52989ef7c0075257a8b471cbea8ef77f1044822742b3cbf2246e8b \
--hash=sha256:5809752045ef74c26adf159ed03df7fb7e7a8d656992fd7562663ed47d6d39d9 \
--hash=sha256:5c93ae4d49d8cf8accfedc8a8e7815851f56ceb6e399b0c186754a68fed22844 \
--hash=sha256:5d365df54d18076f1d5f2ffb1eef2ac7f0d067789838f13d393b5586fbb77b02 \
--hash=sha256:6def9ac8d73b76cb02e9e9837763f27f71e5e67ec0afae5f1f4cf8f61c39b1ac \
--hash=sha256:6ee9537e8a8aa15dd2d0912737aeb6265e781e74f7f7cad8165048fcb5f39230 \
--hash=sha256:6eed1ddd3147de49226db4f213851cf7860493a7b6c7bd5e62516941c007094c \
--hash=sha256:6fd55f7a46429de95383fc0d0158c1bfb798e976d59d52830337343c2d9bda5c \
--hash=sha256:775444a3b647350158d0b3c6c39c88b4a0995643a076cb104bf25042c9aedcf8 \
--hash=sha256:79b94662c2e9d366ab362c2c5858097eae0da100dea0dfd340db09ab28c8d5e8 \
--hash=sha256:7e0d1713a9074a7677eb8e43f424b731589d1c689d4676e2f57a5ce59d089e89 \
--hash=sha256:80a5bd7e9923cab200701f67ad2372104328b99ddf249dbbe8834102c852d316 \
--hash=sha256:830de03f391f7e72b8587bb178c22d534da31153e9ee4234d54ef82cde5ace5e \
--hash=sha256:84eed88177f6c243c52b280cb094f751de600d98d2221e0dec331920894889ec \
--hash=sha256:8f20072376e338af0e51ccecb02335b4e242d55a9218a640f545be7fc64cca99 \
--hash=sha256:93aaec00cbde65c192f15c21f3ee44d2ab0c11eb1a35020b5c4c2676f7fe01d0 \
--hash=sha256:9829a17f6f78d7f4d0aeff28c126926a1e5f86828ebb60d6a0acfa0d08457f9f \
--hash=sha256:986a0347fe19e5117a5241276b72add570839e5bcdc7a6dac4b538c5928eeff5 \
--hash=sha256:992e9e68003df32e2aa0f31eb82c0a94f21286203ab2f2b2c666410e17b59d2f \
--hash=sha256:9ecbf85a6d73fc72f6534c38f7d92ed15d212e29e0dbe9810a465d61c8a66d23 \
--hash=sha256:a340413a9bf307fafd99254a4dd4ac6c567b91a205bf896dde18888315fd7fcd \
--hash=sha256:a4465c90b25ca7903410fabe4145e7b45493295cc3b84ec1216653fbe9021276 \
--hash=sha256:a7698bc480df76073067017f73ba4139dbaae20f7a6c9a0c7855b9c5e9a62124 \
--hash=sha256:a8af68fe579f6f0b9a8b3f033d10caacfed6a4b89b8c7a1d9478a8f5d8aba4a1 \
--hash=sha256:a8c84dff2d60ae06d5280ec87cd63050bbd74a90c02bfc7c390c803cfc8ac8fc \
--hash=sha256:b3456cd5b16ec9db3ef23dd27f37bf5a14f765e8272e9af3e3de9ee9a4cba867 \
--hash=sha256:b3bdd2e12d9b9a18713dd6f3c5ef3734fdab25b79b177054ba9e35ecc746cb6e \
--hash=sha256:b3c6cf18b61b94db9590f86af0dd60edbccb36e151643152b8688066f677fbc9 \
--hash=sha256:b3e8d46c1004afcf2bf513a8fb575ee2ec3d8009a2668566b5926a2dcf7f1a45 \
--hash=sha256:bced6cd5b09d4d002dda9f37292dd58d26eb1c4d0d179b820d3708d776300bb4 \
--hash=sha256:bed8dcb7dbfdb98e647ad47676045e0891f610d38095dcfdae468e1e1efb2766 \
--hash=sha256:c85892d68895ba7a0b16a0e6b7d9f9a0e30e86f2b1e0f6986243473ba8735432 \
--hash=sha256:c8646eb81eec559d7d8b1e51a5087299d06ecab3bc7da54c01f7df94350df135 \
--hash=sha256:cd0450e76b9c629b7f86e7d5b91b7cc9c281dd719630160a992b19a856f7bdbd \
--hash=sha256:ce4be2beece2629bd24bcab147741d1532bd5ed40fb52f2b4fcde5c5bf606df0 \
--hash=sha256:d3e255ef05b434f20fc9d4b18ea15733d1038bec3e4960d772b06216fa79e82d \
--hash=sha256:dcec67fc15e5978ad286e8cc2a3f9347076e28e0e01673b5ace18c73da64e3ff \
--hash=sha256:e97e6e07851cefe7baa41f1ebf5c0899d2d00d94bfef59825752e4c784bebbe8 \
--hash=sha256:eb167ee21d9c413d6b0ab65ec12f3e7ea0122879da8b3569fa1063526f9f03a8 \
--hash=sha256:efee1e9b4f691e1086730f3010e31c55625bc2e0f7db292a38a2cdf2774c2e13 \
--hash=sha256:f349bee14d0a4a72ba41e1b1cce52af324ebf704f5066c09e3dd04cfa6f545f0 \
--hash=sha256:f470f3d750e00df86e03254fdcb422d2f726f4fb3a0d8eeee35e81343985e58a \
--hash=sha256:f6464242f7895268d3086d7829ef031b05c77870dad1e13e51ef79d0a9cfe029 \
--hash=sha256:f6785ba0f65eb64b1ce3b7fcfec101085faf98f4e77b234f14287fd4138ffb25 \
--hash=sha256:fd218b338ac68213c997d4c88437c0e726f16d301616bf837e1468901934042c \
--hash=sha256:fe7f414edd69dd9199b0dfffa0ada22f23d8009e10fe2a719e0993b7dcc2e6e2
importlib-metadata==6.0.0 ; python_full_version >= "3.7.1" and python_version < "3.8" \
--hash=sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad \
--hash=sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d
importlib-resources==5.4.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
--hash=sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45 \
--hash=sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b
incremental==21.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57 \
--hash=sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321
jinja2==3.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
--hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
jsonschema==4.17.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \
--hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6
lxml==4.9.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7 \
--hash=sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726 \
--hash=sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03 \
--hash=sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140 \
--hash=sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a \
--hash=sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05 \
--hash=sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03 \
--hash=sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419 \
--hash=sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4 \
--hash=sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e \
--hash=sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67 \
--hash=sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50 \
--hash=sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894 \
--hash=sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf \
--hash=sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947 \
--hash=sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1 \
--hash=sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd \
--hash=sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3 \
--hash=sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92 \
--hash=sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3 \
--hash=sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457 \
--hash=sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74 \
--hash=sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf \
--hash=sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1 \
--hash=sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4 \
--hash=sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975 \
--hash=sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5 \
--hash=sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe \
--hash=sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7 \
--hash=sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1 \
--hash=sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2 \
--hash=sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409 \
--hash=sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f \
--hash=sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f \
--hash=sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5 \
--hash=sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24 \
--hash=sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e \
--hash=sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4 \
--hash=sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a \
--hash=sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c \
--hash=sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de \
--hash=sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f \
--hash=sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b \
--hash=sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5 \
--hash=sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7 \
--hash=sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a \
--hash=sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c \
--hash=sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9 \
--hash=sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e \
--hash=sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab \
--hash=sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941 \
--hash=sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5 \
--hash=sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45 \
--hash=sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7 \
--hash=sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892 \
--hash=sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746 \
--hash=sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c \
--hash=sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53 \
--hash=sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe \
--hash=sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184 \
--hash=sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38 \
--hash=sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df \
--hash=sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9 \
--hash=sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b \
--hash=sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2 \
--hash=sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0 \
--hash=sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda \
--hash=sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b \
--hash=sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5 \
--hash=sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380 \
--hash=sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33 \
--hash=sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8 \
--hash=sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1 \
--hash=sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889 \
--hash=sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9 \
--hash=sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f \
--hash=sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c
markupsafe==2.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3 \
--hash=sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8 \
--hash=sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759 \
--hash=sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed \
--hash=sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989 \
--hash=sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3 \
--hash=sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a \
--hash=sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c \
--hash=sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c \
--hash=sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8 \
--hash=sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454 \
--hash=sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad \
--hash=sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d \
--hash=sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635 \
--hash=sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61 \
--hash=sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea \
--hash=sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49 \
--hash=sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce \
--hash=sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e \
--hash=sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f \
--hash=sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f \
--hash=sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f \
--hash=sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7 \
--hash=sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a \
--hash=sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7 \
--hash=sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076 \
--hash=sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb \
--hash=sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7 \
--hash=sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7 \
--hash=sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c \
--hash=sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26 \
--hash=sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c \
--hash=sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8 \
--hash=sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448 \
--hash=sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956 \
--hash=sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05 \
--hash=sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1 \
--hash=sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357 \
--hash=sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea \
--hash=sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730
matrix-common==1.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:524e2785b9b03be4d15f3a8a6b857c5b6af68791ffb1b9918f0ad299abc4db20 \
--hash=sha256:62e121cccd9f243417b57ec37a76dc44aeb198a7a5c67afd6b8275992ff2abd1
msgpack==1.0.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467 \
--hash=sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae \
--hash=sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92 \
--hash=sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef \
--hash=sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624 \
--hash=sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227 \
--hash=sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88 \
--hash=sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9 \
--hash=sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8 \
--hash=sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd \
--hash=sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6 \
--hash=sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55 \
--hash=sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e \
--hash=sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2 \
--hash=sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44 \
--hash=sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6 \
--hash=sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9 \
--hash=sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab \
--hash=sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae \
--hash=sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa \
--hash=sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9 \
--hash=sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e \
--hash=sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250 \
--hash=sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce \
--hash=sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075 \
--hash=sha256:545e3cf0cf74f3e48b470f68ed19551ae6f9722814ea969305794645da091236 \
--hash=sha256:63e29d6e8c9ca22b21846234913c3466b7e4ee6e422f205a2988083de3b08cae \
--hash=sha256:6916c78f33602ecf0509cc40379271ba0f9ab572b066bd4bdafd7434dee4bc6e \
--hash=sha256:6a4192b1ab40f8dca3f2877b70e63799d95c62c068c84dc028b40a6cb03ccd0f \
--hash=sha256:6c9566f2c39ccced0a38d37c26cc3570983b97833c365a6044edef3574a00c08 \
--hash=sha256:76ee788122de3a68a02ed6f3a16bbcd97bc7c2e39bd4d94be2f1821e7c4a64e6 \
--hash=sha256:7760f85956c415578c17edb39eed99f9181a48375b0d4a94076d84148cf67b2d \
--hash=sha256:77ccd2af37f3db0ea59fb280fa2165bf1b096510ba9fe0cc2bf8fa92a22fdb43 \
--hash=sha256:81fc7ba725464651190b196f3cd848e8553d4d510114a954681fd0b9c479d7e1 \
--hash=sha256:85f279d88d8e833ec015650fd15ae5eddce0791e1e8a59165318f371158efec6 \
--hash=sha256:9667bdfdf523c40d2511f0e98a6c9d3603be6b371ae9a238b7ef2dc4e7a427b0 \
--hash=sha256:a75dfb03f8b06f4ab093dafe3ddcc2d633259e6c3f74bb1b01996f5d8aa5868c \
--hash=sha256:ac5bd7901487c4a1dd51a8c58f2632b15d838d07ceedaa5e4c080f7190925bff \
--hash=sha256:aca0f1644d6b5a73eb3e74d4d64d5d8c6c3d577e753a04c9e9c87d07692c58db \
--hash=sha256:b17be2478b622939e39b816e0aa8242611cc8d3583d1cd8ec31b249f04623243 \
--hash=sha256:c1683841cd4fa45ac427c18854c3ec3cd9b681694caf5bff04edb9387602d661 \
--hash=sha256:c23080fdeec4716aede32b4e0ef7e213c7b1093eede9ee010949f2a418ced6ba \
--hash=sha256:d5b5b962221fa2c5d3a7f8133f9abffc114fe218eb4365e40f17732ade576c8e \
--hash=sha256:d603de2b8d2ea3f3bcb2efe286849aa7a81531abc52d8454da12f46235092bcb \
--hash=sha256:e83f80a7fec1a62cf4e6c9a660e39c7f878f603737a0cdac8c13131d11d97f52 \
--hash=sha256:eb514ad14edf07a1dbe63761fd30f89ae79b42625731e1ccf5e1f1092950eaa6 \
--hash=sha256:eba96145051ccec0ec86611fe9cf693ce55f2a3ce89c06ed307de0e085730ec1 \
--hash=sha256:ed6f7b854a823ea44cf94919ba3f727e230da29feb4a99711433f25800cf747f \
--hash=sha256:f0029245c51fd9473dc1aede1160b0a29f4a912e6b1dd353fa6d317085b219da \
--hash=sha256:f5d869c18f030202eb412f08b28d2afeea553d6613aee89e200d7aca7ef01f5f \
--hash=sha256:fb62ea4b62bfcb0b380d5680f9a4b3f9a2d166d9394e9bbd9666c0ee09a3645c \
--hash=sha256:fcb8a47f43acc113e24e910399376f7277cf8508b27e5b88499f053de6b115a8
netaddr==0.8.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac \
--hash=sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243
packaging==23.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \
--hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97
parameterized==0.8.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c \
--hash=sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9
phonenumbers==8.13.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:2e3fd1f3fde226b289489275517c76edf223eafd9f43a2c2c36498a44b73d4b0 \
--hash=sha256:6eb2faf29c19f946baf10f1c977a1f856cab90819fe7735b8e141d5407420c4a
pillow==9.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33 \
--hash=sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b \
--hash=sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e \
--hash=sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35 \
--hash=sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153 \
--hash=sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9 \
--hash=sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569 \
--hash=sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57 \
--hash=sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8 \
--hash=sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1 \
--hash=sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264 \
--hash=sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157 \
--hash=sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9 \
--hash=sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133 \
--hash=sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9 \
--hash=sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab \
--hash=sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6 \
--hash=sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5 \
--hash=sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df \
--hash=sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503 \
--hash=sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b \
--hash=sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa \
--hash=sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327 \
--hash=sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493 \
--hash=sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d \
--hash=sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4 \
--hash=sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4 \
--hash=sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35 \
--hash=sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2 \
--hash=sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c \
--hash=sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011 \
--hash=sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a \
--hash=sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e \
--hash=sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f \
--hash=sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848 \
--hash=sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57 \
--hash=sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f \
--hash=sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c \
--hash=sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9 \
--hash=sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5 \
--hash=sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9 \
--hash=sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d \
--hash=sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0 \
--hash=sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1 \
--hash=sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e \
--hash=sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815 \
--hash=sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0 \
--hash=sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b \
--hash=sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd \
--hash=sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c \
--hash=sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3 \
--hash=sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab \
--hash=sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858 \
--hash=sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5 \
--hash=sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee \
--hash=sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343 \
--hash=sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb \
--hash=sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47 \
--hash=sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed \
--hash=sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837 \
--hash=sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286 \
--hash=sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28 \
--hash=sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628 \
--hash=sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df \
--hash=sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d \
--hash=sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d \
--hash=sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a \
--hash=sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6 \
--hash=sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336 \
--hash=sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132 \
--hash=sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070 \
--hash=sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe \
--hash=sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a \
--hash=sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd \
--hash=sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391 \
--hash=sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a \
--hash=sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12
pkgutil-resolve-name==1.3.10 ; python_full_version >= "3.7.1" and python_version < "3.9" \
--hash=sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174 \
--hash=sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e
prometheus-client==0.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab \
--hash=sha256:a03e35b359f14dd1630898543e2120addfdeacd1a6069c1367ae90fd93ad3f48
psycopg2==2.9.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955 \
--hash=sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa \
--hash=sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e \
--hash=sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a \
--hash=sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5 \
--hash=sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee \
--hash=sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad \
--hash=sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0 \
--hash=sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a \
--hash=sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d \
--hash=sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f \
--hash=sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2 \
--hash=sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5
psycopg2cffi-compat==1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
--hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05
psycopg2cffi==2.9.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
--hash=sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52
pyasn1-modules==0.2.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
--hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74
pyasn1==0.4.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
--hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
pycparser==2.21 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:05a81b006be15655b2a1bae5faa4280cf7c81d0e09fcb49b342ebf826abe5a72 \
--hash=sha256:0b53e1d41e97063d51a02821b80538053ee4608b9a181c1005441f1673c55423 \
--hash=sha256:2b3ce5f16deb45c472dde1a0ee05619298c864a20cded09c4edd820e1454129f \
--hash=sha256:2e82a6d37a95e0b1b42b82ab340ada3963aea1317fd7f888bb6b9dfbf4fff57c \
--hash=sha256:301d626a59edbe5dfb48fcae245896379a450d04baeed50ef40d8199f2733b06 \
--hash=sha256:39f4a73e5342b25c2959529f07f026ef58147249f9b7431e1ba8414a36761f53 \
--hash=sha256:4948f264678c703f3877d1c8877c4e3b2e12e549c57795107f08cf70c6ec7774 \
--hash=sha256:4b05697738e7d2040696b0a66d9f0a10bec0efa1883ca75ee9e55baf511909d6 \
--hash=sha256:51bdeb10d2db0f288e71d49c9cefa609bca271720ecd0c58009bd7504a0c464c \
--hash=sha256:55b1625899acd33229c4352ce0ae54038529b412bd51c4915349b49ca575258f \
--hash=sha256:572066051eeac73d23f95ba9a71349c42a3e05999d0ee1572b7860235b850cc6 \
--hash=sha256:6a05a9db1ef5be0fe63e988f9617ca2551013f55000289c671f71ec16f4985e3 \
--hash=sha256:6dc1cc241440ed7ca9ab59d9929075445da6b7c94ced281b3dd4cfe6c8cff817 \
--hash=sha256:6e7124d6855b2780611d9f5e1e145e86667eaa3bd9459192c8dc1a097f5e9903 \
--hash=sha256:75d52162fe6b2b55964fbb0af2ee58e99791a3138588c482572bb6087953113a \
--hash=sha256:78cec42b95dbb500a1f7120bdf95c401f6abb616bbe8785ef09887306792e66e \
--hash=sha256:7feb6a2d401f4d6863050f58325b8d99c1e56f4512d98b11ac64ad1751dc647d \
--hash=sha256:8775d4ef5e7299a2f4699501077a0defdaac5b6c4321173bcb0f3c496fbadf85 \
--hash=sha256:887ca463c3bc47103c123bc06919c86720e80e1214aab79e9b779cda0ff92a00 \
--hash=sha256:9193d4f4ee8feca58bc56c8306bcb820f5c7905fd919e0750acdeeeef0615b28 \
--hash=sha256:983e720704431a6573d626b00662eb78a07148c9115129f9b4351091ec95ecc3 \
--hash=sha256:990406d226dea0e8f25f643b370224771878142155b879784ce89f633541a024 \
--hash=sha256:9cbdc268a62d9a98c56e2452d6c41c0263d64a2009aac69246486f01b4f594c4 \
--hash=sha256:a48f1953c4a1d9bd0b5167ac50da9a79f6072c63c4cef4cf2a3736994903583e \
--hash=sha256:a9a6747cac06c2beb466064dda999a13176b23535e4c496c9d48e6406f92d42d \
--hash=sha256:a9f2de23bec87ff306aef658384b02aa7c32389766af3c5dee9ce33e80222dfa \
--hash=sha256:b5635de53e6686fe7a44b5cf25fcc419a0d5e5c1a1efe73d49d48fe7586db854 \
--hash=sha256:b6f9d649892a6f54a39ed56b8dfd5e08b5f3be5f893da430bed76975f3735d15 \
--hash=sha256:b9a3859f24eb4e097502a3be1fb4b2abb79b6103dd9e2e0edb70613a4459a648 \
--hash=sha256:cd8702c5142afda03dc2b1ee6bc358b62b3735b2cce53fc77b31ca9f728e4bc8 \
--hash=sha256:d7b5a3821225f5c43496c324b0d6875fde910a1c2933d726a743ce328fbb2a8c \
--hash=sha256:d88c4c0e5c5dfd05092a4b271282ef0588e5f4aaf345778056fc5259ba098857 \
--hash=sha256:eb992a1ef739cc7b543576337bebfc62c0e6567434e522e97291b251a41dad7f \
--hash=sha256:f2f7eb6273dd12472d7f218e1fef6f7c7c2f00ac2e1ecde4db8824c457300416 \
--hash=sha256:fdf88ab63c3ee282c76d652fc86518aacb737ff35796023fae56a65ced1a5978 \
--hash=sha256:fdf8d759ef326962b4678d89e275ffc55b7ce59d917d9f72233762061fd04a2d
pymacaroons==0.13.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8 \
--hash=sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907
pynacl==1.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 \
--hash=sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d \
--hash=sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93 \
--hash=sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1 \
--hash=sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92 \
--hash=sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff \
--hash=sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba \
--hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \
--hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \
--hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543
pyopenssl==23.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:c1cc5f86bcacefc84dada7d31175cae1b1518d5f60d3d0bb595a67822a868a6f \
--hash=sha256:df5fc28af899e74e19fccb5510df423581047e10ab6f1f4ba1763ff5fde844c0
pyrsistent==0.18.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c \
--hash=sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc \
--hash=sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e \
--hash=sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26 \
--hash=sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec \
--hash=sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286 \
--hash=sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045 \
--hash=sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec \
--hash=sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8 \
--hash=sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c \
--hash=sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca \
--hash=sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22 \
--hash=sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a \
--hash=sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96 \
--hash=sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc \
--hash=sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1 \
--hash=sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07 \
--hash=sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6 \
--hash=sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b \
--hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \
--hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6
pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
requests==2.27.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \
--hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d
semantic-version==2.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c \
--hash=sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177
service-identity==21.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34 \
--hash=sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db
setuptools-rust==1.5.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206 \
--hash=sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7
setuptools==65.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \
--hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f
signedjson==1.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228 \
--hash=sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492
simplejson==3.17.6 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce \
--hash=sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1 \
--hash=sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0 \
--hash=sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf \
--hash=sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a \
--hash=sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe \
--hash=sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d \
--hash=sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94 \
--hash=sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad \
--hash=sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a \
--hash=sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2 \
--hash=sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc \
--hash=sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81 \
--hash=sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7 \
--hash=sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837 \
--hash=sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566 \
--hash=sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3 \
--hash=sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f \
--hash=sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802 \
--hash=sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e \
--hash=sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9 \
--hash=sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7 \
--hash=sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a \
--hash=sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044 \
--hash=sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494 \
--hash=sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51 \
--hash=sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d \
--hash=sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33 \
--hash=sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27 \
--hash=sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f \
--hash=sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198 \
--hash=sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70 \
--hash=sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c \
--hash=sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab \
--hash=sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045 \
--hash=sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5 \
--hash=sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7 \
--hash=sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882 \
--hash=sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40 \
--hash=sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198 \
--hash=sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180 \
--hash=sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a \
--hash=sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda \
--hash=sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2 \
--hash=sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851 \
--hash=sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe \
--hash=sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a \
--hash=sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211 \
--hash=sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1 \
--hash=sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6 \
--hash=sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb \
--hash=sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9 \
--hash=sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed \
--hash=sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd \
--hash=sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd \
--hash=sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6 \
--hash=sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a \
--hash=sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366 \
--hash=sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81 \
--hash=sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a \
--hash=sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba
six==1.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
sortedcontainers==2.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \
--hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0
treq==22.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:27d95b07c5c14be3e7b280416139b036087617ad5595be913b1f9b3ce981b9b2 \
--hash=sha256:df757e3f141fc782ede076a604521194ffcb40fa2645cf48e5a37060307f52ec
twisted-iocpsupport==1.0.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_system == "Windows" \
--hash=sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41 \
--hash=sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d \
--hash=sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9 \
--hash=sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf \
--hash=sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323 \
--hash=sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32 \
--hash=sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4 \
--hash=sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f \
--hash=sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546 \
--hash=sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878 \
--hash=sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565 \
--hash=sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415
twisted==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31 \
--hash=sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0
twisted[tls]==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31 \
--hash=sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0
txredisapi==1.4.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:34c9eba8d34f452d30661f073b67b8cd42b695e3d31678ec1bbf628a65a0f059 \
--hash=sha256:e6cc43f51e35d608abdca8f8c7d20e148fe1d82679f6e584baea613ebec812bb
typing-extensions==4.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \
--hash=sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6 \
--hash=sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005
urllib3==1.26.12 ; python_full_version >= "3.7.1" and python_version < "4" \
--hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
--hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
webencodings==0.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
--hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
zipp==3.7.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
--hash=sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d \
--hash=sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375
zope-interface==5.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192 \
--hash=sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702 \
--hash=sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09 \
--hash=sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4 \
--hash=sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a \
--hash=sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3 \
--hash=sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf \
--hash=sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c \
--hash=sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d \
--hash=sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78 \
--hash=sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83 \
--hash=sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531 \
--hash=sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46 \
--hash=sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021 \
--hash=sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94 \
--hash=sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc \
--hash=sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63 \
--hash=sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54 \
--hash=sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117 \
--hash=sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25 \
--hash=sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05 \
--hash=sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e \
--hash=sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1 \
--hash=sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004 \
--hash=sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2 \
--hash=sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e \
--hash=sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f \
--hash=sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f \
--hash=sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120 \
--hash=sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f \
--hash=sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1 \
--hash=sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9 \
--hash=sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e \
--hash=sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7 \
--hash=sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8 \
--hash=sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b \
--hash=sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155 \
--hash=sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7 \
--hash=sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c \
--hash=sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325 \
--hash=sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d \
--hash=sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb \
--hash=sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e \
--hash=sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959 \
--hash=sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7 \
--hash=sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920 \
--hash=sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e \
--hash=sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48 \
--hash=sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8 \
--hash=sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4 \
--hash=sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263

View File

@ -24,7 +24,7 @@ anyhow = "1.0.63"
lazy_static = "1.4.0"
log = "0.4.17"
pyo3 = { version = "0.17.1", features = ["macros", "anyhow", "abi3", "abi3-py37"] }
pyo3-log = "0.8.1"
pyo3-log = "0.7.0"
pythonize = "0.17.0"
regex = "1.6.0"
serde = { version = "1.0.144", features = ["derive"] }

View File

@ -14,10 +14,8 @@
#![feature(test)]
use std::collections::BTreeSet;
use synapse::push::{
evaluator::PushRuleEvaluator, Condition, EventMatchCondition, FilteredPushRules, JsonValue,
PushRules, SimpleJsonValue,
evaluator::PushRuleEvaluator, Condition, EventMatchCondition, FilteredPushRules, PushRules,
};
use test::Bencher;
@ -26,18 +24,9 @@ extern crate test;
#[bench]
fn bench_match_exact(b: &mut Bencher) {
let flattened_keys = [
(
"type".to_string(),
JsonValue::Value(SimpleJsonValue::Str("m.text".to_string())),
),
(
"room_id".to_string(),
JsonValue::Value(SimpleJsonValue::Str("!room:server".to_string())),
),
(
"content.body".to_string(),
JsonValue::Value(SimpleJsonValue::Str("test message".to_string())),
),
("type".to_string(), "m.text".to_string()),
("room_id".to_string(), "!room:server".to_string()),
("content.body".to_string(), "test message".to_string()),
]
.into_iter()
.collect();
@ -45,6 +34,8 @@ fn bench_match_exact(b: &mut Bencher) {
let eval = PushRuleEvaluator::py_new(
flattened_keys,
false,
BTreeSet::new(),
false,
10,
Some(0),
Default::default(),
@ -52,14 +43,14 @@ fn bench_match_exact(b: &mut Bencher) {
true,
vec![],
false,
false,
)
.unwrap();
let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
EventMatchCondition {
key: "room_id".into(),
pattern: "!room:server".into(),
pattern: Some("!room:server".into()),
pattern_type: None,
},
));
@ -72,18 +63,9 @@ fn bench_match_exact(b: &mut Bencher) {
#[bench]
fn bench_match_word(b: &mut Bencher) {
let flattened_keys = [
(
"type".to_string(),
JsonValue::Value(SimpleJsonValue::Str("m.text".to_string())),
),
(
"room_id".to_string(),
JsonValue::Value(SimpleJsonValue::Str("!room:server".to_string())),
),
(
"content.body".to_string(),
JsonValue::Value(SimpleJsonValue::Str("test message".to_string())),
),
("type".to_string(), "m.text".to_string()),
("room_id".to_string(), "!room:server".to_string()),
("content.body".to_string(), "test message".to_string()),
]
.into_iter()
.collect();
@ -91,6 +73,8 @@ fn bench_match_word(b: &mut Bencher) {
let eval = PushRuleEvaluator::py_new(
flattened_keys,
false,
BTreeSet::new(),
false,
10,
Some(0),
Default::default(),
@ -98,14 +82,14 @@ fn bench_match_word(b: &mut Bencher) {
true,
vec![],
false,
false,
)
.unwrap();
let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
EventMatchCondition {
key: "content.body".into(),
pattern: "test".into(),
pattern: Some("test".into()),
pattern_type: None,
},
));
@ -118,18 +102,9 @@ fn bench_match_word(b: &mut Bencher) {
#[bench]
fn bench_match_word_miss(b: &mut Bencher) {
let flattened_keys = [
(
"type".to_string(),
JsonValue::Value(SimpleJsonValue::Str("m.text".to_string())),
),
(
"room_id".to_string(),
JsonValue::Value(SimpleJsonValue::Str("!room:server".to_string())),
),
(
"content.body".to_string(),
JsonValue::Value(SimpleJsonValue::Str("test message".to_string())),
),
("type".to_string(), "m.text".to_string()),
("room_id".to_string(), "!room:server".to_string()),
("content.body".to_string(), "test message".to_string()),
]
.into_iter()
.collect();
@ -137,6 +112,8 @@ fn bench_match_word_miss(b: &mut Bencher) {
let eval = PushRuleEvaluator::py_new(
flattened_keys,
false,
BTreeSet::new(),
false,
10,
Some(0),
Default::default(),
@ -144,14 +121,14 @@ fn bench_match_word_miss(b: &mut Bencher) {
true,
vec![],
false,
false,
)
.unwrap();
let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
EventMatchCondition {
key: "content.body".into(),
pattern: "foobar".into(),
pattern: Some("foobar".into()),
pattern_type: None,
},
));
@ -164,18 +141,9 @@ fn bench_match_word_miss(b: &mut Bencher) {
#[bench]
fn bench_eval_message(b: &mut Bencher) {
let flattened_keys = [
(
"type".to_string(),
JsonValue::Value(SimpleJsonValue::Str("m.text".to_string())),
),
(
"room_id".to_string(),
JsonValue::Value(SimpleJsonValue::Str("!room:server".to_string())),
),
(
"content.body".to_string(),
JsonValue::Value(SimpleJsonValue::Str("test message".to_string())),
),
("type".to_string(), "m.text".to_string()),
("room_id".to_string(), "!room:server".to_string()),
("content.body".to_string(), "test message".to_string()),
]
.into_iter()
.collect();
@ -183,6 +151,8 @@ fn bench_eval_message(b: &mut Bencher) {
let eval = PushRuleEvaluator::py_new(
flattened_keys,
false,
BTreeSet::new(),
false,
10,
Some(0),
Default::default(),
@ -190,7 +160,6 @@ fn bench_eval_message(b: &mut Bencher) {
true,
vec![],
false,
false,
)
.unwrap();
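As a side note on the data these benchmarks construct: the evaluator is fed a map of "flattened" keys such as "type", "room_id" and "content.body". The following is a rough, self-contained sketch of how a nested event could be flattened into such dotted keys. It is not Synapse's actual flattening helper (the `flatten` function and its behaviour here are illustrative only) and it assumes the serde_json crate.

// Hypothetical sketch of flattening a nested JSON event into dotted keys,
// mirroring the shape of `flattened_keys` used in the benchmarks above.
// Assumes the serde_json crate; this is not Synapse's real flattening code.
use std::collections::BTreeMap;
use serde_json::{json, Value};

fn flatten(prefix: &str, value: &Value, out: &mut BTreeMap<String, String>) {
    match value {
        Value::Object(map) => {
            for (k, v) in map {
                let key = if prefix.is_empty() {
                    k.clone()
                } else {
                    format!("{prefix}.{k}")
                };
                flatten(&key, v, out);
            }
        }
        // Only string leaves are kept in this simplified version.
        Value::String(s) => {
            out.insert(prefix.to_string(), s.clone());
        }
        _ => {}
    }
}

fn main() {
    let event = json!({
        "type": "m.room.message",
        "room_id": "!room:server",
        "content": { "msgtype": "m.text", "body": "test message" }
    });
    let mut flattened = BTreeMap::new();
    flatten("", &event, &mut flattened);
    assert_eq!(flattened["content.body"], "test message");
}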

View File

@ -21,13 +21,13 @@ use lazy_static::lazy_static;
use serde_json::Value;
use super::KnownCondition;
use crate::push::RelatedEventMatchTypeCondition;
use crate::push::Action;
use crate::push::Condition;
use crate::push::EventMatchCondition;
use crate::push::PushRule;
use crate::push::RelatedEventMatchCondition;
use crate::push::SetTweak;
use crate::push::TweakValue;
use crate::push::{Action, EventPropertyIsCondition, SimpleJsonValue};
use crate::push::{Condition, EventMatchTypeCondition};
use crate::push::{EventMatchCondition, EventMatchPatternType};
use crate::push::{EventPropertyIsTypeCondition, PushRule};
const HIGHLIGHT_ACTION: Action = Action::SetTweak(SetTweak {
set_tweak: Cow::Borrowed("highlight"),
@ -72,7 +72,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("content.m.relates_to.rel_type"),
pattern: Cow::Borrowed("m.replace"),
pattern: Some(Cow::Borrowed("m.replace")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[]),
@ -85,7 +86,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("content.msgtype"),
pattern: Cow::Borrowed("m.notice"),
pattern: Some(Cow::Borrowed("m.notice")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::DontNotify]),
@ -98,15 +100,18 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.member"),
pattern: Some(Cow::Borrowed("m.room.member")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("content.membership"),
pattern: Cow::Borrowed("invite"),
pattern: Some(Cow::Borrowed("invite")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatchType(EventMatchTypeCondition {
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("state_key"),
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserId),
pattern: None,
pattern_type: Some(Cow::Borrowed("user_id")),
})),
]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION, SOUND_ACTION]),
@ -119,7 +124,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.member"),
pattern: Some(Cow::Borrowed("m.room.member")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::DontNotify]),
@ -129,10 +135,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
PushRule {
rule_id: Cow::Borrowed("global/override/.im.nheko.msc3664.reply"),
priority_class: 5,
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatchType(
RelatedEventMatchTypeCondition {
key: Cow::Borrowed("sender"),
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserId),
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatch(
RelatedEventMatchCondition {
key: Some(Cow::Borrowed("sender")),
pattern: None,
pattern_type: Some(Cow::Borrowed("user_id")),
rel_type: Cow::Borrowed("m.in_reply_to"),
include_fallbacks: None,
},
@ -144,12 +151,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
PushRule {
rule_id: Cow::Borrowed(".org.matrix.msc3952.is_user_mention"),
priority_class: 5,
conditions: Cow::Borrowed(&[Condition::Known(
KnownCondition::ExactEventPropertyContainsType(EventPropertyIsTypeCondition {
key: Cow::Borrowed("content.org.matrix.msc3952.mentions.user_ids"),
value_type: Cow::Borrowed(&EventMatchPatternType::UserId),
}),
)]),
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::IsUserMention)]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
default: true,
default_enabled: true,
@ -166,10 +168,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
rule_id: Cow::Borrowed(".org.matrix.msc3952.is_room_mention"),
priority_class: 5,
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventPropertyIs(EventPropertyIsCondition {
key: Cow::Borrowed("content.org.matrix.msc3952.mentions.room"),
value: Cow::Borrowed(&SimpleJsonValue::Bool(true)),
})),
Condition::Known(KnownCondition::IsRoomMention),
Condition::Known(KnownCondition::SenderNotificationPermission {
key: Cow::Borrowed("room"),
}),
@ -187,7 +186,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
}),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("content.body"),
pattern: Cow::Borrowed("@room"),
pattern: Some(Cow::Borrowed("@room")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
@ -200,11 +200,13 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.tombstone"),
pattern: Some(Cow::Borrowed("m.room.tombstone")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("state_key"),
pattern: Cow::Borrowed(""),
pattern: Some(Cow::Borrowed("")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
@ -217,10 +219,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.reaction"),
pattern: Some(Cow::Borrowed("m.reaction")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[]),
actions: Cow::Borrowed(&[Action::DontNotify]),
default: true,
default_enabled: true,
},
@ -230,11 +233,13 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.server_acl"),
pattern: Some(Cow::Borrowed("m.room.server_acl")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("state_key"),
pattern: Cow::Borrowed(""),
pattern: Some(Cow::Borrowed("")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[]),
@ -247,7 +252,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.response"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.response")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[]),
@ -259,10 +265,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
rule_id: Cow::Borrowed("global/content/.m.rule.contains_user_name"),
priority_class: 4,
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatchType(
EventMatchTypeCondition {
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("content.body"),
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserLocalpart),
pattern: None,
pattern_type: Some(Cow::Borrowed("user_localpart")),
},
))]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
@ -277,7 +284,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.call.invite"),
pattern: Some(Cow::Borrowed("m.call.invite")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify, RING_ACTION, HIGHLIGHT_FALSE_ACTION]),
@ -290,7 +298,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.message"),
pattern: Some(Cow::Borrowed("m.room.message")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@ -306,7 +315,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.encrypted"),
pattern: Some(Cow::Borrowed("m.room.encrypted")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@ -325,7 +335,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.encrypted"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.encrypted")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@ -349,7 +360,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.message"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.message")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@ -373,7 +385,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.file"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.file")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@ -397,7 +410,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.image"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.image")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@ -421,7 +435,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.video"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.video")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@ -445,7 +460,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.audio"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.audio")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@ -466,7 +482,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.message"),
pattern: Some(Cow::Borrowed("m.room.message")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
@ -479,7 +496,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.encrypted"),
pattern: Some(Cow::Borrowed("m.room.encrypted")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
@ -493,7 +511,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.encrypted"),
pattern: Some(Cow::Borrowed("m.encrypted")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@ -512,7 +531,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.message"),
pattern: Some(Cow::Borrowed("m.message")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@ -531,7 +551,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.file"),
pattern: Some(Cow::Borrowed("m.file")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@ -550,7 +571,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.image"),
pattern: Some(Cow::Borrowed("m.image")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@ -569,7 +591,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.video"),
pattern: Some(Cow::Borrowed("m.video")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@ -588,7 +611,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.audio"),
pattern: Some(Cow::Borrowed("m.audio")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@ -606,15 +630,18 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("im.vector.modular.widgets"),
pattern: Some(Cow::Borrowed("im.vector.modular.widgets")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("content.type"),
pattern: Cow::Borrowed("jitsi"),
pattern: Some(Cow::Borrowed("jitsi")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("state_key"),
pattern: Cow::Borrowed("*"),
pattern: Some(Cow::Borrowed("*")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
@ -630,7 +657,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
}),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.start"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.start")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
@ -643,7 +671,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.start"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.start")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify]),
@ -659,7 +688,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
}),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.end"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.end")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
@ -672,7 +702,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.end"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.end")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify]),
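All of the conditions above ultimately match a glob pattern ("@room", "jitsi", "*", ...) against a flattened event value, with "content.body" matched word-by-word and other keys matched against the whole value. The sketch below is a simplified, hypothetical approximation of that matching, assuming the regex crate; the real logic lives in get_glob_matcher / GlobMatchType in the evaluator further down, and the `glob_to_regex` helper here is invented for illustration.

// Rough sketch of glob matching as used by push rule conditions: `*` matches any
// run of characters, `?` a single character; "content.body" is matched per word,
// everything else against the whole value. Assumes the regex crate; this is a
// simplified stand-in for Synapse's get_glob_matcher, not the real implementation.
use regex::Regex;

fn glob_to_regex(glob: &str, whole_value: bool) -> Regex {
    let mut body = String::new();
    for c in glob.chars() {
        match c {
            '*' => body.push_str(".*"),
            '?' => body.push('.'),
            other => body.push_str(&regex::escape(&other.to_string())),
        }
    }
    let anchored = if whole_value {
        // The entire value must match the pattern.
        format!("^(?:{body})$")
    } else {
        // content.body style: the pattern must match a whole word inside the value.
        format!(r"(^|\W)(?:{body})($|\W)")
    };
    Regex::new(&anchored).expect("valid regex")
}

fn main() {
    // content.body is matched on word boundaries...
    assert!(glob_to_regex("@room", false).is_match("hey @room, meeting at 5"));
    // ...while other keys, e.g. state_key, must match the whole value.
    assert!(glob_to_regex("*", true).is_match("anything at all"));
    assert!(!glob_to_regex("m.room.member", true).is_match("m.room.message"));
}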

View File

@ -12,8 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::{BTreeMap, BTreeSet};
use anyhow::{Context, Error};
use lazy_static::lazy_static;
@ -23,10 +22,9 @@ use regex::Regex;
use super::{
utils::{get_glob_matcher, get_localpart_from_id, GlobMatchType},
Action, Condition, EventPropertyIsCondition, FilteredPushRules, KnownCondition,
SimpleJsonValue,
Action, Condition, EventMatchCondition, FilteredPushRules, KnownCondition,
RelatedEventMatchCondition,
};
use crate::push::{EventMatchPatternType, JsonValue};
lazy_static! {
/// Used to parse the `is` clause in the room member count condition.
@ -63,15 +61,19 @@ impl RoomVersionFeatures {
/// Allows running a set of push rules against a particular event.
#[pyclass]
pub struct PushRuleEvaluator {
/// A mapping of "flattened" keys to simple JSON values in the event, e.g.
/// A mapping of "flattened" keys to string values in the event, e.g.
/// includes things like "type" and "content.msgtype".
flattened_keys: BTreeMap<String, JsonValue>,
flattened_keys: BTreeMap<String, String>,
/// The "content.body", if any.
body: String,
/// True if the event has a mentions property and MSC3952 support is enabled.
has_mentions: bool,
/// The user mentions that were part of the message.
user_mentions: BTreeSet<String>,
/// True if the message contains a room mention.
room_mention: bool,
/// The number of users in the room.
room_member_count: u64,
@ -85,7 +87,7 @@ pub struct PushRuleEvaluator {
/// The related events, indexed by relation type. Flattened in the same manner as
/// `flattened_keys`.
related_events_flattened: BTreeMap<String, BTreeMap<String, JsonValue>>,
related_events_flattened: BTreeMap<String, BTreeMap<String, String>>,
/// If msc3664, push rules for related events, is enabled.
related_event_match_enabled: bool,
@ -96,9 +98,6 @@ pub struct PushRuleEvaluator {
/// If MSC3931 (room version feature flags) is enabled. Usually controlled by the same
/// flag as MSC1767 (extensible events core).
msc3931_enabled: bool,
/// If MSC3966 (exact_event_property_contains push rule condition) is enabled.
msc3966_exact_event_property_contains: bool,
}
#[pymethods]
@ -107,26 +106,29 @@ impl PushRuleEvaluator {
#[allow(clippy::too_many_arguments)]
#[new]
pub fn py_new(
flattened_keys: BTreeMap<String, JsonValue>,
flattened_keys: BTreeMap<String, String>,
has_mentions: bool,
user_mentions: BTreeSet<String>,
room_mention: bool,
room_member_count: u64,
sender_power_level: Option<i64>,
notification_power_levels: BTreeMap<String, i64>,
related_events_flattened: BTreeMap<String, BTreeMap<String, JsonValue>>,
related_events_flattened: BTreeMap<String, BTreeMap<String, String>>,
related_event_match_enabled: bool,
room_version_feature_flags: Vec<String>,
msc3931_enabled: bool,
msc3966_exact_event_property_contains: bool,
) -> Result<Self, Error> {
let body = match flattened_keys.get("content.body") {
Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone(),
_ => String::new(),
};
let body = flattened_keys
.get("content.body")
.cloned()
.unwrap_or_default();
Ok(PushRuleEvaluator {
flattened_keys,
body,
has_mentions,
user_mentions,
room_mention,
room_member_count,
notification_power_levels,
sender_power_level,
@ -134,7 +136,6 @@ impl PushRuleEvaluator {
related_event_match_enabled,
room_version_feature_flags,
msc3931_enabled,
msc3966_exact_event_property_contains,
})
}
@ -248,84 +249,20 @@ impl PushRuleEvaluator {
};
let result = match known_condition {
KnownCondition::EventMatch(event_match) => self.match_event_match(
&self.flattened_keys,
&event_match.key,
&event_match.pattern,
)?,
KnownCondition::EventMatchType(event_match) => {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
KnownCondition::EventMatch(event_match) => {
self.match_event_match(event_match, user_id)?
}
KnownCondition::RelatedEventMatch(event_match) => {
self.match_related_event_match(event_match, user_id)?
}
KnownCondition::IsUserMention => {
if let Some(uid) = user_id {
self.user_mentions.contains(uid)
} else {
return Ok(false);
};
let pattern = match &*event_match.pattern_type {
EventMatchPatternType::UserId => user_id,
EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
};
self.match_event_match(&self.flattened_keys, &event_match.key, pattern)?
}
KnownCondition::EventPropertyIs(event_property_is) => {
self.match_event_property_is(event_property_is)?
}
KnownCondition::RelatedEventMatch(event_match) => self.match_related_event_match(
&event_match.rel_type.clone(),
event_match.include_fallbacks,
event_match.key.clone(),
event_match.pattern.clone(),
)?,
KnownCondition::RelatedEventMatchType(event_match) => {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
let pattern = match &*event_match.pattern_type {
EventMatchPatternType::UserId => user_id,
EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
};
self.match_related_event_match(
&event_match.rel_type.clone(),
event_match.include_fallbacks,
Some(event_match.key.clone()),
Some(Cow::Borrowed(pattern)),
)?
}
KnownCondition::ExactEventPropertyContains(event_property_is) => self
.match_exact_event_property_contains(
event_property_is.key.clone(),
event_property_is.value.clone(),
)?,
KnownCondition::ExactEventPropertyContainsType(exact_event_match) => {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
let pattern = match &*exact_event_match.value_type {
EventMatchPatternType::UserId => user_id,
EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
};
self.match_exact_event_property_contains(
exact_event_match.key.clone(),
Cow::Borrowed(&SimpleJsonValue::Str(pattern.to_string())),
)?
false
}
}
KnownCondition::IsRoomMention => self.room_mention,
KnownCondition::ContainsDisplayName => {
if let Some(dn) = display_name {
if !dn.is_empty() {
@ -376,13 +313,104 @@ impl PushRuleEvaluator {
/// Evaluates an `event_match` condition.
fn match_event_match(
&self,
flattened_event: &BTreeMap<String, JsonValue>,
key: &str,
pattern: &str,
event_match: &EventMatchCondition,
user_id: Option<&str>,
) -> Result<bool, Error> {
let haystack = if let Some(JsonValue::Value(SimpleJsonValue::Str(haystack))) =
flattened_event.get(key)
let pattern = if let Some(pattern) = &event_match.pattern {
pattern
} else if let Some(pattern_type) = &event_match.pattern_type {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
match &**pattern_type {
"user_id" => user_id,
"user_localpart" => get_localpart_from_id(user_id)?,
_ => return Ok(false),
}
} else {
return Ok(false);
};
let haystack = if let Some(haystack) = self.flattened_keys.get(&*event_match.key) {
haystack
} else {
return Ok(false);
};
// For the content.body we match against "words", but for everything
// else we match against the entire value.
let match_type = if event_match.key == "content.body" {
GlobMatchType::Word
} else {
GlobMatchType::Whole
};
let mut compiled_pattern = get_glob_matcher(pattern, match_type)?;
compiled_pattern.is_match(haystack)
}
/// Evaluates a `related_event_match` condition. (MSC3664)
fn match_related_event_match(
&self,
event_match: &RelatedEventMatchCondition,
user_id: Option<&str>,
) -> Result<bool, Error> {
// First check if related event matching is enabled...
if !self.related_event_match_enabled {
return Ok(false);
}
// get the related event, fail if there is none.
let event = if let Some(event) = self.related_events_flattened.get(&*event_match.rel_type) {
event
} else {
return Ok(false);
};
// If we are not matching fallbacks, don't match when the special key marking this as a
// fallback relation is present.
if !event_match.include_fallbacks.unwrap_or(false)
&& event.contains_key("im.vector.is_falling_back")
{
return Ok(false);
}
// If there is no key, the mere existence of the related event is enough: accept it as
// matching without checking any fields.
let key = if let Some(key) = &event_match.key {
key
} else {
return Ok(true);
};
let pattern = if let Some(pattern) = &event_match.pattern {
pattern
} else if let Some(pattern_type) = &event_match.pattern_type {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
match &**pattern_type {
"user_id" => user_id,
"user_localpart" => get_localpart_from_id(user_id)?,
_ => return Ok(false),
}
} else {
return Ok(false);
};
let haystack = if let Some(haystack) = event.get(&**key) {
haystack
} else {
return Ok(false);
@ -400,80 +428,6 @@ impl PushRuleEvaluator {
compiled_pattern.is_match(haystack)
}
/// Evaluates an `event_property_is` condition.
fn match_event_property_is(
&self,
event_property_is: &EventPropertyIsCondition,
) -> Result<bool, Error> {
let value = &event_property_is.value;
let haystack = if let Some(JsonValue::Value(haystack)) =
self.flattened_keys.get(&*event_property_is.key)
{
haystack
} else {
return Ok(false);
};
Ok(haystack == &**value)
}
/// Evaluates a `related_event_match` condition. (MSC3664)
fn match_related_event_match(
&self,
rel_type: &str,
include_fallbacks: Option<bool>,
key: Option<Cow<str>>,
pattern: Option<Cow<str>>,
) -> Result<bool, Error> {
// First check if related event matching is enabled...
if !self.related_event_match_enabled {
return Ok(false);
}
// get the related event, fail if there is none.
let event = if let Some(event) = self.related_events_flattened.get(rel_type) {
event
} else {
return Ok(false);
};
// If we are not matching fallbacks, don't match when the special key marking this as a
// fallback relation is present.
if !include_fallbacks.unwrap_or(false) && event.contains_key("im.vector.is_falling_back") {
return Ok(false);
}
match (key, pattern) {
// if we have no key, accept the event as matching.
(None, _) => Ok(true),
// There was a key, so we *must* have a pattern to go with it.
(Some(_), None) => Ok(false),
// If there is a key & pattern, check if they're in the flattened event (given by rel_type).
(Some(key), Some(pattern)) => self.match_event_match(event, &key, &pattern),
}
}
/// Evaluates an `exact_event_property_contains` condition. (MSC3966)
fn match_exact_event_property_contains(
&self,
key: Cow<str>,
value: Cow<SimpleJsonValue>,
) -> Result<bool, Error> {
// First check if the feature is enabled.
if !self.msc3966_exact_event_property_contains {
return Ok(false);
}
let haystack = if let Some(JsonValue::Array(haystack)) = self.flattened_keys.get(&*key) {
haystack
} else {
return Ok(false);
};
Ok(haystack.contains(&value))
}
/// Match the member count against an 'is' condition
/// The `is` condition can be things like '>2', '==3' or even just '4'.
fn match_member_count(&self, is: &str) -> Result<bool, Error> {
@ -501,13 +455,12 @@ impl PushRuleEvaluator {
#[test]
fn push_rule_evaluator() {
let mut flattened_keys = BTreeMap::new();
flattened_keys.insert(
"content.body".to_string(),
JsonValue::Value(SimpleJsonValue::Str("foo bar bob hello".to_string())),
);
flattened_keys.insert("content.body".to_string(), "foo bar bob hello".to_string());
let evaluator = PushRuleEvaluator::py_new(
flattened_keys,
false,
BTreeSet::new(),
false,
10,
Some(0),
BTreeMap::new(),
@ -515,7 +468,6 @@ fn push_rule_evaluator() {
true,
vec![],
true,
true,
)
.unwrap();
@ -530,14 +482,13 @@ fn test_requires_room_version_supports_condition() {
use crate::push::{PushRule, PushRules};
let mut flattened_keys = BTreeMap::new();
flattened_keys.insert(
"content.body".to_string(),
JsonValue::Value(SimpleJsonValue::Str("foo bar bob hello".to_string())),
);
flattened_keys.insert("content.body".to_string(), "foo bar bob hello".to_string());
let flags = vec![RoomVersionFeatures::ExtensibleEvents.as_str().to_string()];
let evaluator = PushRuleEvaluator::py_new(
flattened_keys,
false,
BTreeSet::new(),
false,
10,
Some(0),
BTreeMap::new(),
@ -545,7 +496,6 @@ fn test_requires_room_version_supports_condition() {
false,
flags,
true,
true,
)
.unwrap();
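Both sides of this diff resolve the effective pattern either from a literal `pattern` or from a `pattern_type` ("user_id" / "user_localpart") combined with the acting user's Matrix ID. The following is a self-contained sketch of just that resolution step, with hypothetical helper names (`resolve_pattern`, `localpart`); the real logic is the match_event_match code shown above.

// Hypothetical, simplified sketch of resolving an event_match pattern from either
// a literal `pattern` or a `pattern_type` plus the user's Matrix ID, mirroring the
// logic in match_event_match above. Not Synapse's actual code.
fn localpart(user_id: &str) -> Option<&str> {
    // "@alice:example.org" -> "alice"
    user_id.strip_prefix('@')?.split(':').next()
}

fn resolve_pattern<'a>(
    pattern: Option<&'a str>,
    pattern_type: Option<&str>,
    user_id: Option<&'a str>,
) -> Option<&'a str> {
    if let Some(p) = pattern {
        return Some(p);
    }
    // Without a user_id, a pattern_type condition can never match.
    let user_id = user_id?;
    match pattern_type? {
        "user_id" => Some(user_id),
        "user_localpart" => localpart(user_id),
        _ => None,
    }
}

fn main() {
    assert_eq!(resolve_pattern(Some("coffee"), None, None), Some("coffee"));
    assert_eq!(
        resolve_pattern(None, Some("user_id"), Some("@alice:example.org")),
        Some("@alice:example.org")
    );
    assert_eq!(
        resolve_pattern(None, Some("user_localpart"), Some("@alice:example.org")),
        Some("alice")
    );
    assert_eq!(resolve_pattern(None, Some("user_id"), None), None);
}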

View File

@ -56,9 +56,7 @@ use std::collections::{BTreeMap, HashMap, HashSet};
use anyhow::{Context, Error};
use log::warn;
use pyo3::exceptions::PyTypeError;
use pyo3::prelude::*;
use pyo3::types::{PyBool, PyList, PyLong, PyString};
use pythonize::{depythonize, pythonize};
use serde::de::Error as _;
use serde::{Deserialize, Serialize};
@ -250,65 +248,6 @@ impl<'de> Deserialize<'de> for Action {
}
}
/// A simple JSON value (string, int, boolean, or null).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(untagged)]
pub enum SimpleJsonValue {
Str(String),
Int(i64),
Bool(bool),
Null,
}
impl<'source> FromPyObject<'source> for SimpleJsonValue {
fn extract(ob: &'source PyAny) -> PyResult<Self> {
if let Ok(s) = <PyString as pyo3::PyTryFrom>::try_from(ob) {
Ok(SimpleJsonValue::Str(s.to_string()))
// A bool *is* an int, ensure we try bool first.
} else if let Ok(b) = <PyBool as pyo3::PyTryFrom>::try_from(ob) {
Ok(SimpleJsonValue::Bool(b.extract()?))
} else if let Ok(i) = <PyLong as pyo3::PyTryFrom>::try_from(ob) {
Ok(SimpleJsonValue::Int(i.extract()?))
} else if ob.is_none() {
Ok(SimpleJsonValue::Null)
} else {
Err(PyTypeError::new_err(format!(
"Can't convert from {} to SimpleJsonValue",
ob.get_type().name()?
)))
}
}
}
/// A JSON value (list, string, int, boolean, or null).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(untagged)]
pub enum JsonValue {
Array(Vec<SimpleJsonValue>),
Value(SimpleJsonValue),
}
impl<'source> FromPyObject<'source> for JsonValue {
fn extract(ob: &'source PyAny) -> PyResult<Self> {
if let Ok(l) = <PyList as pyo3::PyTryFrom>::try_from(ob) {
match l.iter().map(SimpleJsonValue::extract).collect() {
Ok(a) => Ok(JsonValue::Array(a)),
Err(e) => Err(PyTypeError::new_err(format!(
"Can't convert to JsonValue::Array: {}",
e
))),
}
} else if let Ok(v) = SimpleJsonValue::extract(ob) {
Ok(JsonValue::Value(v))
} else {
Err(PyTypeError::new_err(format!(
"Can't convert from {} to JsonValue",
ob.get_type().name()?
)))
}
}
}
/// A condition used in push rules to match against an event.
///
/// We need this split as `serde` doesn't give us the ability to have a
@ -328,23 +267,12 @@ pub enum Condition {
#[serde(tag = "kind")]
pub enum KnownCondition {
EventMatch(EventMatchCondition),
// Identical to event_match but gives predefined patterns. Cannot be added by users.
#[serde(skip_deserializing, rename = "event_match")]
EventMatchType(EventMatchTypeCondition),
EventPropertyIs(EventPropertyIsCondition),
#[serde(rename = "im.nheko.msc3664.related_event_match")]
RelatedEventMatch(RelatedEventMatchCondition),
// Identical to related_event_match but gives predefined patterns. Cannot be added by users.
#[serde(skip_deserializing, rename = "im.nheko.msc3664.related_event_match")]
RelatedEventMatchType(RelatedEventMatchTypeCondition),
#[serde(rename = "org.matrix.msc3966.exact_event_property_contains")]
ExactEventPropertyContains(EventPropertyIsCondition),
// Identical to exact_event_property_contains but gives predefined patterns. Cannot be added by users.
#[serde(
skip_deserializing,
rename = "org.matrix.msc3966.exact_event_property_contains"
)]
ExactEventPropertyContainsType(EventPropertyIsTypeCondition),
#[serde(rename = "org.matrix.msc3952.is_user_mention")]
IsUserMention,
#[serde(rename = "org.matrix.msc3952.is_room_mention")]
IsRoomMention,
ContainsDisplayName,
RoomMemberCount {
#[serde(skip_serializing_if = "Option::is_none")]
@ -371,43 +299,14 @@ impl<'source> FromPyObject<'source> for Condition {
}
}
/// The body of a [`Condition::EventMatch`] with a pattern.
/// The body of a [`Condition::EventMatch`]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct EventMatchCondition {
pub key: Cow<'static, str>,
pub pattern: Cow<'static, str>,
}
#[derive(Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum EventMatchPatternType {
UserId,
UserLocalpart,
}
/// The body of a [`Condition::EventMatch`] that uses user_id or user_localpart as a pattern.
#[derive(Serialize, Debug, Clone)]
pub struct EventMatchTypeCondition {
pub key: Cow<'static, str>,
// During serialization, the pattern_type property gets replaced with a
// pattern property of the correct value in synapse.push.clientformat.format_push_rules_for_user.
pub pattern_type: Cow<'static, EventMatchPatternType>,
}
/// The body of a [`Condition::EventPropertyIs`]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct EventPropertyIsCondition {
pub key: Cow<'static, str>,
pub value: Cow<'static, SimpleJsonValue>,
}
/// The body of a [`Condition::EventPropertyIs`] that uses user_id or user_localpart as a pattern.
#[derive(Serialize, Debug, Clone)]
pub struct EventPropertyIsTypeCondition {
pub key: Cow<'static, str>,
// During serialization, the pattern_type property gets replaced with a
// pattern property of the correct value in synapse.push.clientformat.format_push_rules_for_user.
pub value_type: Cow<'static, EventMatchPatternType>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pattern: Option<Cow<'static, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pattern_type: Option<Cow<'static, str>>,
}
/// The body of a [`Condition::RelatedEventMatch`]
@ -417,18 +316,8 @@ pub struct RelatedEventMatchCondition {
pub key: Option<Cow<'static, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pattern: Option<Cow<'static, str>>,
pub rel_type: Cow<'static, str>,
#[serde(skip_serializing_if = "Option::is_none")]
pub include_fallbacks: Option<bool>,
}
/// The body of a [`Condition::RelatedEventMatch`] that uses user_id or user_localpart as a pattern.
#[derive(Serialize, Debug, Clone)]
pub struct RelatedEventMatchTypeCondition {
// This is only used if pattern_type exists (and thus key must exist), so is
// a bit simpler than RelatedEventMatchCondition.
pub key: Cow<'static, str>,
pub pattern_type: Cow<'static, EventMatchPatternType>,
pub pattern_type: Option<Cow<'static, str>>,
pub rel_type: Cow<'static, str>,
#[serde(skip_serializing_if = "Option::is_none")]
pub include_fallbacks: Option<bool>,
@ -612,7 +501,8 @@ impl FilteredPushRules {
fn test_serialize_condition() {
let condition = Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: "content.body".into(),
pattern: "coffee".into(),
pattern: Some("coffee".into()),
pattern_type: None,
}));
let json = serde_json::to_string(&condition).unwrap();
@ -626,33 +516,7 @@ fn test_serialize_condition() {
fn test_deserialize_condition() {
let json = r#"{"kind":"event_match","key":"content.body","pattern":"coffee"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(
condition,
Condition::Known(KnownCondition::EventMatch(_))
));
}
#[test]
fn test_serialize_event_match_condition_with_pattern_type() {
let condition = Condition::Known(KnownCondition::EventMatchType(EventMatchTypeCondition {
key: "content.body".into(),
pattern_type: Cow::Owned(EventMatchPatternType::UserId),
}));
let json = serde_json::to_string(&condition).unwrap();
assert_eq!(
json,
r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#
)
}
#[test]
fn test_cannot_deserialize_event_match_condition_with_pattern_type() {
let json = r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(condition, Condition::Unknown(_)));
let _: Condition = serde_json::from_str(json).unwrap();
}
#[test]
@ -666,37 +530,6 @@ fn test_deserialize_unstable_msc3664_condition() {
));
}
#[test]
fn test_serialize_unstable_msc3664_condition_with_pattern_type() {
let condition = Condition::Known(KnownCondition::RelatedEventMatchType(
RelatedEventMatchTypeCondition {
key: "content.body".into(),
pattern_type: Cow::Owned(EventMatchPatternType::UserId),
rel_type: "m.in_reply_to".into(),
include_fallbacks: Some(true),
},
));
let json = serde_json::to_string(&condition).unwrap();
assert_eq!(
json,
r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern_type":"user_id","rel_type":"m.in_reply_to","include_fallbacks":true}"#
)
}
#[test]
fn test_cannot_deserialize_unstable_msc3664_condition_with_pattern_type() {
let json = r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern_type":"user_id","rel_type":"m.in_reply_to"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
// Since pattern is optional on RelatedEventMatch, this deserializes to that variant
// instead of to RelatedEventMatchType.
assert!(matches!(
condition,
Condition::Known(KnownCondition::RelatedEventMatch(_))
));
}
#[test]
fn test_deserialize_unstable_msc3931_condition() {
let json =
@ -710,41 +543,24 @@ fn test_deserialize_unstable_msc3931_condition() {
}
#[test]
fn test_deserialize_event_property_is_condition() {
// A string condition should work.
let json = r#"{"kind":"event_property_is","key":"content.value","value":"foo"}"#;
fn test_deserialize_unstable_msc3952_user_condition() {
let json = r#"{"kind":"org.matrix.msc3952.is_user_mention"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(
condition,
Condition::Known(KnownCondition::EventPropertyIs(_))
Condition::Known(KnownCondition::IsUserMention)
));
}
// A boolean condition should work.
let json = r#"{"kind":"event_property_is","key":"content.value","value":true}"#;
#[test]
fn test_deserialize_unstable_msc3952_room_condition() {
let json = r#"{"kind":"org.matrix.msc3952.is_room_mention"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(
condition,
Condition::Known(KnownCondition::EventPropertyIs(_))
));
// An integer condition should work.
let json = r#"{"kind":"event_property_is","key":"content.value","value":1}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(
condition,
Condition::Known(KnownCondition::EventPropertyIs(_))
));
// A null condition should work
let json = r#"{"kind":"event_property_is","key":"content.value","value":null}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(
condition,
Condition::Known(KnownCondition::EventPropertyIs(_))
Condition::Known(KnownCondition::IsRoomMention)
));
}
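The Condition / KnownCondition split tested above relies on serde's tagging features: `#[serde(tag = "kind")]` dispatches known conditions on their `kind` field, while `#[serde(untagged)]` lets anything unrecognised fall back to an Unknown catch-all. Below is a stripped-down, hypothetical sketch of that pattern, assuming serde with the derive feature plus serde_json; these are not the real type definitions, just the shape of the mechanism.

// Stripped-down sketch of the Condition/KnownCondition deserialization pattern used
// above: known kinds are dispatched on the "kind" tag, anything else falls back to
// an untagged Unknown variant. Assumes serde + serde_json; not Synapse's real types.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum Condition {
    Known(KnownCondition),
    Unknown(serde_json::Value),
}

#[derive(Deserialize, Debug)]
#[serde(tag = "kind", rename_all = "snake_case")]
enum KnownCondition {
    EventMatch { key: String, pattern: Option<String> },
    ContainsDisplayName,
}

fn main() {
    let known: Condition =
        serde_json::from_str(r#"{"kind":"event_match","key":"content.body","pattern":"coffee"}"#)
            .unwrap();
    assert!(matches!(known, Condition::Known(KnownCondition::EventMatch { .. })));

    // An unrecognised kind still deserializes, just as Unknown.
    let unknown: Condition =
        serde_json::from_str(r#"{"kind":"com.example.custom","foo":1}"#).unwrap();
    assert!(matches!(unknown, Condition::Unknown(_)));
}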

View File

@ -1,58 +0,0 @@
#! /usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from pathlib import Path
from typing import Dict, List
import tomli
def main() -> None:
lockfile_path = Path(__file__).parent.parent.joinpath("poetry.lock")
with open(lockfile_path, "rb") as lockfile:
lockfile_content = tomli.load(lockfile)
# Poetry 1.3+ lockfile format:
# There's a `files` inline table in each [[package]]
packages_to_assets: Dict[str, List[Dict[str, str]]] = {
package["name"]: package["files"] for package in lockfile_content["package"]
}
success = True
for package_name, assets in packages_to_assets.items():
has_sdist = any(asset["file"].endswith(".tar.gz") for asset in assets)
if not has_sdist:
success = False
print(
f"Locked package {package_name!r} does not have a source distribution!",
file=sys.stderr,
)
if not success:
print(
"\nThere were some problems with the Poetry lockfile (poetry.lock).",
file=sys.stderr,
)
sys.exit(1)
print(
f"Poetry lockfile OK. {len(packages_to_assets)} locked packages checked.",
file=sys.stderr,
)
if __name__ == "__main__":
main()

View File

@ -59,11 +59,6 @@ Run the complement test suite on Synapse.
is important.
Not suitable for use in CI in case the editable environment is impure.
--rebuild-editable
Force a rebuild of the editable build of Synapse.
This is occasionally useful if the built-in rebuild detection with
--editable fails, e.g. when changing configure_workers_and_start.py.
For help on arguments to 'go test', run 'go help testflag'.
EOF
}
@ -87,9 +82,6 @@ while [ $# -ge 1 ]; do
"-e"|"--editable")
use_editable_synapse=1
;;
"--rebuild-editable")
rebuild_editable_synapse=1
;;
*)
# unknown arg: presumably an argument to gotest. break the loop.
break
@ -124,9 +116,7 @@ if [ -n "$use_editable_synapse" ]; then
fi
editable_mount="$(realpath .):/editable-src:z"
if [ -n "$rebuild_editable_synapse" ]; then
unset skip_docker_build
elif docker inspect complement-synapse-editable &>/dev/null; then
if docker inspect complement-synapse-editable &>/dev/null; then
# complement-synapse-editable already exists: see if we can still use it:
# - The Rust module must still be importable; it will fail to import if the Rust source has changed.
# - The Poetry lock file must be the same (otherwise we assume dependencies have changed)

View File

@ -112,7 +112,7 @@ python3 -m black "${files[@]}"
# Catch any common programming mistakes in Python code.
# --quiet suppresses the update check.
ruff --quiet --fix "${files[@]}"
ruff --quiet "${files[@]}"
# Catch any common programming mistakes in Rust code.
#

View File

@ -19,8 +19,7 @@ usage() {
echo "-c"
echo " CI mode. Prints every command that the script runs."
echo "-o <path>"
echo " Directory to output full schema files to. You probably want to use"
echo " '-o synapse/storage/schema'"
echo " Directory to output full schema files to."
echo "-n <schema number>"
echo " Schema number for the new snapshot. Used to set the location of files within "
echo " the output directory, mimicking that of synapse/storage/schemas."
@ -28,11 +27,6 @@ usage() {
echo "-h"
echo " Display this help text."
echo ""
echo ""
echo "You probably want to invoke this with something like"
echo " docker run --rm -e POSTGRES_PASSWORD=postgres -e POSTGRES_USER=postgres -e POSTGRES_DB=synapse -p 5432:5432 postgres:11-alpine"
echo " echo postgres | scripts-dev/make_full_schema.sh -p postgres -n MY_SCHEMA_NUMBER -o synapse/storage/schema"
echo ""
echo " NB: make sure to run this against the *oldest* supported version of postgres,"
echo " or else pg_dump might output non-backwards-compatible syntax."
}
@ -195,7 +189,7 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
echo "Running db background jobs..."
poetry run python synapse/_scripts/update_synapse_database.py --database-config "$SQLITE_CONFIG" --run-background-updates
synapse/_scripts/update_synapse_database.py --database-config "$SQLITE_CONFIG" --run-background-updates
# Create the PostgreSQL database.
echo "Creating postgres databases..."
@ -204,7 +198,7 @@ createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_MAIN_DB_NAM
createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_STATE_DB_NAME"
echo "Running db background jobs..."
poetry run python synapse/_scripts/update_synapse_database.py --database-config "$POSTGRES_CONFIG" --run-background-updates
synapse/_scripts/update_synapse_database.py --database-config "$POSTGRES_CONFIG" --run-background-updates
echo "Dropping unwanted db tables..."
@ -299,12 +293,4 @@ pg_dump --format=plain --data-only --inserts --no-tablespaces --no-acl --no-owne
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner "$POSTGRES_STATE_DB_NAME" | cleanup_pg_schema > "$OUTPUT_DIR/state/full_schemas/$SCHEMA_NUMBER/full.sql.postgres"
pg_dump --format=plain --data-only --inserts --no-tablespaces --no-acl --no-owner "$POSTGRES_STATE_DB_NAME" | cleanup_pg_schema >> "$OUTPUT_DIR/state/full_schemas/$SCHEMA_NUMBER/full.sql.postgres"
if [[ "$OUTPUT_DIR" == *synapse/storage/schema ]]; then
echo "Updating contrib/datagrip symlinks..."
ln -sf "../../synapse/storage/schema/common/full_schemas/$SCHEMA_NUMBER/full.sql.postgres" "contrib/datagrip/common.sql"
ln -sf "../../synapse/storage/schema/main/full_schemas/$SCHEMA_NUMBER/full.sql.postgres" "contrib/datagrip/main.sql"
ln -sf "../../synapse/storage/schema/state/full_schemas/$SCHEMA_NUMBER/full.sql.postgres" "contrib/datagrip/state.sql"
else
echo "Not updating contrib/datagrip symlinks (unknown output directory)"
fi
echo "Done! Files dumped to: $OUTPUT_DIR"

View File

@ -29,6 +29,7 @@ _Repr = Callable[[], str]
def recursive_repr(fillvalue: str = ...) -> Callable[[_Repr], _Repr]: ...
class SortedList(MutableSequence[_T]):
DEFAULT_LOAD_FACTOR: int = ...
def __init__(
self,

View File

@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Tuple, Union
from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Set, Tuple, Union
from synapse.types import JsonDict, JsonValue
from synapse.types import JsonDict
class PushRule:
@property
@ -56,16 +56,17 @@ def get_base_rule_ids() -> Collection[str]: ...
class PushRuleEvaluator:
def __init__(
self,
flattened_keys: Mapping[str, JsonValue],
flattened_keys: Mapping[str, str],
has_mentions: bool,
user_mentions: Set[str],
room_mention: bool,
room_member_count: int,
sender_power_level: Optional[int],
notification_power_levels: Mapping[str, int],
related_events_flattened: Mapping[str, Mapping[str, JsonValue]],
related_events_flattened: Mapping[str, Mapping[str, str]],
related_event_match_enabled: bool,
room_version_feature_flags: Tuple[str, ...],
msc3931_enabled: bool,
msc3966_exact_event_property_contains: bool,
): ...
def run(
self,

View File

@ -1,6 +1,5 @@
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2023 The Matrix.org Foundation C.I.C.
# Copyright 2018-9 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -14,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
""" This is an implementation of a Matrix homeserver.
""" This is a reference implementation of a Matrix homeserver.
"""
import json

View File

@ -37,7 +37,7 @@ import os
import shutil
import sys
from synapse.media.filepath import MediaFilePaths
from synapse.rest.media.v1.filepath import MediaFilePaths
logger = logging.getLogger()

View File

@ -47,6 +47,7 @@ def request_registration(
_print: Callable[[str], None] = print,
exit: Callable[[int], None] = sys.exit,
) -> None:
url = "%s/_synapse/admin/v1/register" % (server_location.rstrip("/"),)
# Get the nonce
@ -153,6 +154,7 @@ def register_new_user(
def main() -> None:
logging.captureWarnings(True)
parser = argparse.ArgumentParser(

View File

@ -94,80 +94,61 @@ reactor = cast(ISynapseReactor, reactor_)
logger = logging.getLogger("synapse_port_db")
# SQLite doesn't have a dedicated boolean type (it stores True/False as 1/0). This means
# portdb will read sqlite bools as integers, then try to insert them into postgres
# boolean columns---which fails. Lacking some Python-parseable metaschema, we must
# specify which integer columns should be inserted as booleans into postgres.
BOOLEAN_COLUMNS = {
"access_tokens": ["used"],
"account_validity": ["email_sent"],
"device_lists_changes_in_room": ["converted_to_destinations"],
"device_lists_outbound_pokes": ["sent"],
"devices": ["hidden"],
"e2e_fallback_keys_json": ["used"],
"e2e_room_keys": ["is_verified"],
"event_edges": ["is_state"],
"events": ["processed", "outlier", "contains_url"],
"local_media_repository": ["safe_from_quarantine"],
"rooms": ["is_public", "has_auth_chain_index"],
"event_edges": ["is_state"],
"presence_list": ["accepted"],
"presence_stream": ["currently_active"],
"public_room_list_stream": ["visibility"],
"pushers": ["enabled"],
"devices": ["hidden"],
"device_lists_outbound_pokes": ["sent"],
"users_who_share_rooms": ["share_private"],
"e2e_room_keys": ["is_verified"],
"account_validity": ["email_sent"],
"redactions": ["have_censored"],
"room_stats_state": ["is_federatable"],
"rooms": ["is_public", "has_auth_chain_index"],
"local_media_repository": ["safe_from_quarantine"],
"users": ["shadow_banned", "approved"],
"un_partial_stated_event_stream": ["rejection_status_changed"],
"users_who_share_rooms": ["share_private"],
"e2e_fallback_keys_json": ["used"],
"access_tokens": ["used"],
"device_lists_changes_in_room": ["converted_to_destinations"],
"pushers": ["enabled"],
}
# These tables are never deleted from in normal operation [*], so we can resume porting
# over rows from a previous attempt rather than starting from scratch.
#
# [*]: We do delete from many of these tables when purging a room, and
# presumably when purging old events. So we might e.g.
#
# 1. Run portdb and port half of some table.
# 2. Stop portdb.
# 3. Purge something, deleting some of the rows we've ported over.
# 4. Restart portdb. The rows deleted from sqlite are still present in postgres.
#
# But this isn't the end of the world: we should be able to repeat the purge
# on the postgres DB when porting completes.
APPEND_ONLY_TABLES = [
"cache_invalidation_stream_by_instance",
"event_auth",
"event_edges",
"event_json",
"event_reference_hashes",
"event_search",
"event_to_state_groups",
"events",
"ex_outlier_stream",
"local_media_repository",
"local_media_repository_thumbnails",
"presence_stream",
"public_room_list_stream",
"push_rules_stream",
"received_transactions",
"redactions",
"rejections",
"remote_media_cache",
"remote_media_cache_thumbnails",
"event_json",
"state_events",
"room_memberships",
"topics",
"room_names",
"rooms",
"local_media_repository",
"local_media_repository_thumbnails",
"remote_media_cache",
"remote_media_cache_thumbnails",
"redactions",
"event_edges",
"event_auth",
"received_transactions",
"sent_transactions",
"state_events",
"state_group_edges",
"transaction_id_to_pdu",
"users",
"state_groups",
"state_groups_state",
"event_to_state_groups",
"rejections",
"event_search",
"presence_stream",
"push_rules_stream",
"ex_outlier_stream",
"cache_invalidation_stream_by_instance",
"public_room_list_stream",
"state_group_edges",
"stream_ordering_to_exterm",
"topics",
"transaction_id_to_pdu",
"un_partial_stated_event_stream",
"users",
]
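
As the comment above explains, BOOLEAN_COLUMNS exists because SQLite stores booleans as 0/1 and PostgreSQL rejects those integers in boolean columns. A toy illustration (not the actual port logic) of coercing the listed columns before insertion, using a trimmed copy of the map:

# Illustration only: coerce the columns named in BOOLEAN_COLUMNS to real
# booleans so the INSERT into the matching PostgreSQL boolean columns succeeds.
def coerce_booleans(table: str, row: dict) -> dict:
    bool_cols = {
        "events": ["processed", "outlier", "contains_url"],
        "devices": ["hidden"],
    }  # trimmed copy of the map above, for the example
    converted = dict(row)
    for col in bool_cols.get(table, []):
        if converted.get(col) is not None:
            converted[col] = bool(converted[col])
    return converted

print(coerce_booleans("events", {"event_id": "$abc", "processed": 1, "outlier": 0}))
# {'event_id': '$abc', 'processed': True, 'outlier': False}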
@ -1205,6 +1186,7 @@ class CursesProgress(Progress):
if self.finished:
status = "Time spent: %s (Done!)" % (duration_str,)
else:
if self.total_processed > 0:
left = float(self.total_remaining) / self.total_processed

View File

@ -167,6 +167,7 @@ Worker = collections.namedtuple(
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument(

View File

@ -32,6 +32,7 @@ from synapse.appservice import ApplicationService
from synapse.http import get_request_user_agent
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import (
SynapseTags,
active_span,
force_tracing,
start_active_span,
@ -161,6 +162,12 @@ class Auth:
parent_span.set_tag(
"authenticated_entity", requester.authenticated_entity
)
# We tag the Synapse instance name so that it's an easy jumping
# off point into the logs. Can also be used to filter for an
# instance that is under load.
parent_span.set_tag(
SynapseTags.INSTANCE_NAME, self.hs.get_instance_name()
)
parent_span.set_tag("user_id", requester.user.to_string())
if requester.device_id is not None:
parent_span.set_tag("device_id", requester.device_id)

View File

@ -108,10 +108,6 @@ class Codes(str, Enum):
USER_AWAITING_APPROVAL = "ORG.MATRIX.MSC3866_USER_AWAITING_APPROVAL"
# Attempt to send a second annotation with the same event type & annotation key
# MSC2677
DUPLICATE_ANNOTATION = "M_DUPLICATE_ANNOTATION"
class CodeMessageException(RuntimeError):
"""An exception with integer code and message string attributes.
@ -755,25 +751,3 @@ class ModuleFailedException(Exception):
Raised when a module API callback fails, for example because it raised an
exception.
"""
class PartialStateConflictError(SynapseError):
"""An internal error raised when attempting to persist an event with partial state
after the room containing the event has been un-partial stated.
This error should be handled by recomputing the event context and trying again.
This error has an HTTP status code so that it can be transported over replication.
It should not be exposed to clients.
"""
@staticmethod
def message() -> str:
return "Cannot persist partial state event in un-partial stated room"
def __init__(self) -> None:
super().__init__(
HTTPStatus.CONFLICT,
msg=PartialStateConflictError.message(),
errcode=Codes.UNKNOWN,
)

View File

@ -219,13 +219,9 @@ class FilterCollection:
self._room_timeline_filter = Filter(hs, room_filter_json.get("timeline", {}))
self._room_state_filter = Filter(hs, room_filter_json.get("state", {}))
self._room_ephemeral_filter = Filter(hs, room_filter_json.get("ephemeral", {}))
self._room_account_data_filter = Filter(
hs, room_filter_json.get("account_data", {})
)
self._room_account_data = Filter(hs, room_filter_json.get("account_data", {}))
self._presence_filter = Filter(hs, filter_json.get("presence", {}))
self._global_account_data_filter = Filter(
hs, filter_json.get("account_data", {})
)
self._account_data = Filter(hs, filter_json.get("account_data", {}))
self.include_leave = filter_json.get("room", {}).get("include_leave", False)
self.event_fields = filter_json.get("event_fields", [])
@ -260,10 +256,8 @@ class FilterCollection:
) -> List[UserPresenceState]:
return await self._presence_filter.filter(presence_states)
async def filter_global_account_data(
self, events: Iterable[JsonDict]
) -> List[JsonDict]:
return await self._global_account_data_filter.filter(events)
async def filter_account_data(self, events: Iterable[JsonDict]) -> List[JsonDict]:
return await self._account_data.filter(events)
async def filter_room_state(self, events: Iterable[EventBase]) -> List[EventBase]:
return await self._room_state_filter.filter(
@ -285,7 +279,7 @@ class FilterCollection:
async def filter_room_account_data(
self, events: Iterable[JsonDict]
) -> List[JsonDict]:
return await self._room_account_data_filter.filter(
return await self._room_account_data.filter(
await self._room_filter.filter(events)
)
@ -298,13 +292,6 @@ class FilterCollection:
or self._presence_filter.filters_all_senders()
)
def blocks_all_global_account_data(self) -> bool:
"""True if all global acount data will be filtered out."""
return (
self._global_account_data_filter.filters_all_types()
or self._global_account_data_filter.filters_all_senders()
)
def blocks_all_room_ephemeral(self) -> bool:
return (
self._room_ephemeral_filter.filters_all_types()
@ -312,13 +299,6 @@ class FilterCollection:
or self._room_ephemeral_filter.filters_all_rooms()
)
def blocks_all_room_account_data(self) -> bool:
return (
self._room_account_data_filter.filters_all_types()
or self._room_account_data_filter.filters_all_senders()
or self._room_account_data_filter.filters_all_rooms()
)
def blocks_all_room_timeline(self) -> bool:
return (
self._room_timeline_filter.filters_all_types()

View File

@ -213,7 +213,7 @@ def handle_startup_exception(e: Exception) -> NoReturn:
def redirect_stdio_to_logs() -> None:
streams = [("stdout", LogLevel.info), ("stderr", LogLevel.error)]
for stream, level in streams:
for (stream, level) in streams:
oldStream = getattr(sys, stream)
loggingFile = LoggingFile(
logger=twisted.logger.Logger(namespace=stream),

View File

@ -17,7 +17,7 @@ import logging
import os
import sys
import tempfile
from typing import List, Mapping, Optional
from typing import List, Optional
from twisted.internet import defer, task
@ -44,7 +44,6 @@ from synapse.storage.databases.main.event_push_actions import (
)
from synapse.storage.databases.main.events_worker import EventsWorkerStore
from synapse.storage.databases.main.filtering import FilteringWorkerStore
from synapse.storage.databases.main.media_repository import MediaRepositoryStore
from synapse.storage.databases.main.profile import ProfileWorkerStore
from synapse.storage.databases.main.push_rule import PushRulesWorkerStore
from synapse.storage.databases.main.receipts import ReceiptsWorkerStore
@ -87,7 +86,6 @@ class AdminCmdSlavedStore(
RegistrationWorkerStore,
RoomWorkerStore,
ProfileWorkerStore,
MediaRepositoryStore,
):
def __init__(
self,
@ -151,7 +149,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
with open(events_file, "a") as f:
for event in events:
json.dump(event.get_pdu_json(), fp=f)
print(json.dumps(event.get_pdu_json()), file=f)
def write_state(
self, room_id: str, event_id: str, state: StateMap[EventBase]
@ -164,7 +162,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
with open(event_file, "a") as f:
for event in state.values():
json.dump(event.get_pdu_json(), fp=f)
print(json.dumps(event.get_pdu_json()), file=f)
def write_invite(
self, room_id: str, event: EventBase, state: StateMap[EventBase]
@ -180,7 +178,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
with open(invite_state, "a") as f:
for event in state.values():
json.dump(event, fp=f)
print(json.dumps(event), file=f)
def write_knock(
self, room_id: str, event: EventBase, state: StateMap[EventBase]
@ -196,7 +194,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
with open(knock_state, "a") as f:
for event in state.values():
json.dump(event, fp=f)
print(json.dumps(event), file=f)
def write_profile(self, profile: JsonDict) -> None:
user_directory = os.path.join(self.base_directory, "user_data")
@ -204,7 +202,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
profile_file = os.path.join(user_directory, "profile")
with open(profile_file, "a") as f:
json.dump(profile, fp=f)
print(json.dumps(profile), file=f)
def write_devices(self, devices: List[JsonDict]) -> None:
user_directory = os.path.join(self.base_directory, "user_data")
@ -213,7 +211,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
for device in devices:
with open(device_file, "a") as f:
json.dump(device, fp=f)
print(json.dumps(device), file=f)
def write_connections(self, connections: List[JsonDict]) -> None:
user_directory = os.path.join(self.base_directory, "user_data")
@ -222,28 +220,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
for connection in connections:
with open(connection_file, "a") as f:
json.dump(connection, fp=f)
def write_account_data(
self, file_name: str, account_data: Mapping[str, JsonDict]
) -> None:
account_data_directory = os.path.join(
self.base_directory, "user_data", "account_data"
)
os.makedirs(account_data_directory, exist_ok=True)
account_data_file = os.path.join(account_data_directory, file_name)
with open(account_data_file, "a") as f:
json.dump(account_data, fp=f)
def write_media_id(self, media_id: str, media_metadata: JsonDict) -> None:
file_directory = os.path.join(self.base_directory, "media_ids")
os.makedirs(file_directory, exist_ok=True)
media_id_file = os.path.join(file_directory, media_id)
with open(media_id_file, "w") as f:
json.dump(media_metadata, fp=f)
print(json.dumps(connection), file=f)
def finished(self) -> str:
return self.base_directory
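
Files written with print(json.dumps(...), file=f) end up as one JSON document per line, so an export produced that way can be read back line by line. A small sketch of doing so, assuming the rooms/<room_id>/events layout used by the writer above:

import json
from pathlib import Path

def read_exported_events(base_directory: str):
    # Yield each event PDU from a JSON-lines style export directory.
    for events_file in Path(base_directory, "rooms").glob("*/events"):
        with open(events_file) as f:
            for line in f:
                line = line.strip()
                if line:
                    yield json.loads(line)

# e.g. for pdu in read_exported_events("/tmp/export"): print(pdu["type"])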

View File

@ -219,7 +219,7 @@ def main() -> None:
# memory space and don't need to repeat the work of loading the code!
# Instead of using fork() directly, we use the multiprocessing library,
# which uses fork() on Unix platforms.
for func, worker_args in zip(worker_functions, args_by_worker):
for (func, worker_args) in zip(worker_functions, args_by_worker):
process = multiprocessing.Process(
target=_worker_entrypoint, args=(func, proxy_reactor, worker_args)
)

View File

@ -157,6 +157,7 @@ class GenericWorkerServer(HomeServer):
DATASTORE_CLASS = GenericWorkerSlavedStore # type: ignore
def _listen_http(self, listener_config: ListenerConfig) -> None:
assert listener_config.http_options is not None
# We always include a health resource.

View File

@ -321,6 +321,7 @@ def setup(config_options: List[str]) -> SynapseHomeServer:
and not config.registration.registrations_require_3pid
and not config.registration.registration_requires_token
):
raise ConfigError(
"You have enabled open registration without any verification. This is a known vector for "
"spam and abuse. If you would like to allow public registration, please consider adding email, "

View File

@ -15,7 +15,7 @@ import logging
import math
import resource
import sys
from typing import TYPE_CHECKING, List, Mapping, Sized, Tuple
from typing import TYPE_CHECKING, List, Sized, Tuple
from prometheus_client import Gauge
@ -194,7 +194,7 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
@wrap_as_background_process("generate_monthly_active_users")
async def generate_monthly_active_users() -> None:
current_mau_count = 0
current_mau_count_by_service: Mapping[str, int] = {}
current_mau_count_by_service = {}
reserved_users: Sized = ()
store = hs.get_datastores().main
if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:

View File

@ -22,6 +22,7 @@ from ._base import Config
class ConsentConfig(Config):
section = "consent"
def __init__(self, *args: Any):

View File

@ -154,6 +154,7 @@ class DatabaseConfig(Config):
logger.warning(NON_SQLITE_DATABASE_PATH_WARNING)
def set_databasepath(self, database_path: str) -> None:
if database_path != ":memory:":
database_path = self.abspath(database_path)

View File

@ -166,31 +166,15 @@ class ExperimentalConfig(Config):
# MSC3391: Removing account data.
self.msc3391_enabled = experimental.get("msc3391_enabled", False)
# MSC3873: Disambiguate event_match keys.
self.msc3873_escape_event_match_key = experimental.get(
"msc3873_escape_event_match_key", False
)
# MSC3925: do not replace events with their edits
self.msc3925_inhibit_edit = experimental.get("msc3925_inhibit_edit", False)
# MSC3966: exact_event_property_contains push rule condition.
self.msc3966_exact_event_property_contains = experimental.get(
"msc3966_exact_event_property_contains", False
)
# MSC3952: Intentional mentions, this depends on MSC3966.
self.msc3952_intentional_mentions = (
experimental.get("msc3952_intentional_mentions", False)
and self.msc3966_exact_event_property_contains
# MSC3952: Intentional mentions
self.msc3952_intentional_mentions = experimental.get(
"msc3952_intentional_mentions", False
)
# MSC3958: Do not generate notifications for edits.
self.msc3958_supress_edit_notifs = experimental.get(
"msc3958_supress_edit_notifs", False
)
# MSC3966: exact_event_property_contains push rule condition.
self.msc3966_exact_event_property_contains = experimental.get(
"msc3966_exact_event_property_contains", False
)
# MSC3967: Do not require UIA when first uploading cross signing keys
self.msc3967_enabled = experimental.get("msc3967_enabled", False)

View File

@ -57,6 +57,7 @@ from .workers import WorkerConfig
class HomeServerConfig(RootConfig):
config_classes = [
MeowConfig,
ModulesConfig,

View File

@ -46,6 +46,7 @@ class RatelimitConfig(Config):
section = "ratelimiting"
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
# Load the new-style messages config if it exists. Otherwise fall back
# to the old method.
if "rc_message" in config:
@ -86,18 +87,9 @@ class RatelimitConfig(Config):
defaults={"per_second": 0.1, "burst_count": 5},
)
# It is reasonable to login with a bunch of devices at once (i.e. when
# setting up an account), but it is *not* valid to continually be
# logging into new devices.
rc_login_config = config.get("rc_login", {})
self.rc_login_address = RatelimitSettings(
rc_login_config.get("address", {}),
defaults={"per_second": 0.003, "burst_count": 5},
)
self.rc_login_account = RatelimitSettings(
rc_login_config.get("account", {}),
defaults={"per_second": 0.003, "burst_count": 5},
)
self.rc_login_address = RatelimitSettings(rc_login_config.get("address", {}))
self.rc_login_account = RatelimitSettings(rc_login_config.get("account", {}))
self.rc_login_failed_attempts = RatelimitSettings(
rc_login_config.get("failed_attempts", {})
)
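
The per_second/burst_count pairs above describe a sustained rate plus a burst allowance. A minimal token-bucket sketch (not Synapse's actual Ratelimiter) showing how the login defaults behave:

import time

class TokenBucket:
    """Minimal illustration of a per_second/burst_count pair."""

    def __init__(self, per_second: float, burst_count: float):
        self.rate = per_second          # tokens added per second
        self.capacity = burst_count     # maximum tokens that can accumulate
        self.tokens = burst_count
        self.last = time.monotonic()

    def allow(self) -> bool:
        now = time.monotonic()
        self.tokens = min(self.capacity, self.tokens + (now - self.last) * self.rate)
        self.last = now
        if self.tokens >= 1:
            self.tokens -= 1
            return True
        return False

# With the login defaults above (0.003/sec, burst 5), five rapid attempts pass
# and the sixth is rejected until roughly 1/0.003 ≈ 333 seconds have elapsed.
bucket = TokenBucket(per_second=0.003, burst_count=5)
print([bucket.allow() for _ in range(6)])  # [True, True, True, True, True, False]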

View File

@ -33,5 +33,4 @@ class RedisConfig(Config):
self.redis_host = redis_config.get("host", "localhost")
self.redis_port = redis_config.get("port", 6379)
self.redis_dbid = redis_config.get("dbid", None)
self.redis_password = redis_config.get("password")

View File

@ -118,6 +118,7 @@ class ContentRepositoryConfig(Config):
section = "media"
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
# Only enable the media repo if either the media repo is enabled or the
# current worker app is the media repo.
if (
@ -180,13 +181,11 @@ class ContentRepositoryConfig(Config):
for i, provider_config in enumerate(storage_providers):
# We special case the module "file_system" so as not to need to
# expose FileStorageProviderBackend
if (
provider_config["module"] == "file_system"
or provider_config["module"] == "synapse.rest.media.v1.storage_provider"
):
provider_config[
"module"
] = "synapse.media.storage_provider.FileStorageProviderBackend"
if provider_config["module"] == "file_system":
provider_config["module"] = (
"synapse.rest.media.v1.storage_provider"
".FileStorageProviderBackend"
)
provider_class, parsed_config = load_module(
provider_config, ("media_storage_providers", "<item %i>" % i)

View File

@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Collection
from typing import Any, List
from matrix_common.regex import glob_to_regex
@ -70,7 +70,7 @@ class RoomDirectoryConfig(Config):
return False
def is_publishing_room_allowed(
self, user_id: str, room_id: str, aliases: Collection[str]
self, user_id: str, room_id: str, aliases: List[str]
) -> bool:
"""Checks if the given user is allowed to publish the room
@ -122,7 +122,7 @@ class _RoomDirectoryRule:
except Exception as e:
raise ConfigError("Failed to parse glob into regex") from e
def matches(self, user_id: str, room_id: str, aliases: Collection[str]) -> bool:
def matches(self, user_id: str, room_id: str, aliases: List[str]) -> bool:
"""Tests if this rule matches the given user_id, room_id and aliases.
Args:

View File

@ -177,7 +177,6 @@ KNOWN_RESOURCES = {
"client",
"consent",
"federation",
"health",
"keys",
"media",
"metrics",
@ -735,6 +734,7 @@ class ServerConfig(Config):
listeners: Optional[List[dict]],
**kwargs: Any,
) -> str:
_, bind_port = parse_and_validate_server_name(server_name)
if bind_port is not None:
unsecure_port = bind_port - 400

View File

@ -30,6 +30,7 @@ class TlsConfig(Config):
section = "tls"
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))

View File

@ -399,7 +399,7 @@ class Keyring:
# We now convert the returned list of results into a map from server
# name to key ID to FetchKeyResult, to return.
to_return: Dict[str, Dict[str, FetchKeyResult]] = {}
for request, results in zip(deduped_requests, results_per_request):
for (request, results) in zip(deduped_requests, results_per_request):
to_return_by_server = to_return.setdefault(request.server_name, {})
for key_id, key_result in results.items():
existing = to_return_by_server.get(key_id)

View File

@ -16,7 +16,18 @@
import collections.abc
import logging
import typing
from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Tuple, Union
from typing import (
Any,
Collection,
Dict,
Iterable,
List,
Mapping,
Optional,
Set,
Tuple,
Union,
)
from canonicaljson import encode_canonical_json
from signedjson.key import decode_verify_key_bytes
@ -45,13 +56,7 @@ from synapse.api.room_versions import (
RoomVersions,
)
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
MutableStateMap,
StateMap,
StrCollection,
UserID,
get_domain_from_id,
)
from synapse.types import MutableStateMap, StateMap, UserID, get_domain_from_id
if typing.TYPE_CHECKING:
# conditional imports to avoid import cycle
@ -64,7 +69,7 @@ logger = logging.getLogger(__name__)
class _EventSourceStore(Protocol):
async def get_events(
self,
event_ids: StrCollection,
event_ids: Collection[str],
redact_behaviour: EventRedactBehaviour,
get_prev_content: bool = False,
allow_rejected: bool = False,

View File

@ -39,7 +39,7 @@ from unpaddedbase64 import encode_base64
from synapse.api.constants import RelationTypes
from synapse.api.room_versions import EventFormatVersions, RoomVersion, RoomVersions
from synapse.types import JsonDict, RoomStreamToken, StrCollection
from synapse.types import JsonDict, RoomStreamToken
from synapse.util.caches import intern_dict
from synapse.util.frozenutils import freeze
from synapse.util.stringutils import strtobool
@ -413,7 +413,7 @@ class EventBase(metaclass=abc.ABCMeta):
"""
return [e for e, _ in self._dict["prev_events"]]
def auth_event_ids(self) -> StrCollection:
def auth_event_ids(self) -> Sequence[str]:
"""Returns the list of auth event IDs. The order matches the order
specified in the event, though there is no meaning to it.
@ -558,7 +558,7 @@ class FrozenEventV2(EventBase):
"""
return self._dict["prev_events"]
def auth_event_ids(self) -> StrCollection:
def auth_event_ids(self) -> Sequence[str]:
"""Returns the list of auth event IDs. The order matches the order
specified in the event, though there is no meaning to it.

View File

@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Any, Collection, Dict, List, Optional, Tuple, Union
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
import attr
from signedjson.types import SigningKey
@ -103,7 +103,7 @@ class EventBuilder:
async def build(
self,
prev_event_ids: Collection[str],
prev_event_ids: List[str],
auth_event_ids: Optional[List[str]],
depth: Optional[int] = None,
) -> EventBase:
@ -136,7 +136,7 @@ class EventBuilder:
format_version = self.room_version.event_format
# The types of auth/prev events changes between event versions.
prev_events: Union[Collection[str], List[Tuple[str, Dict[str, str]]]]
prev_events: Union[List[str], List[Tuple[str, Dict[str, str]]]]
auth_events: Union[List[str], List[Tuple[str, Dict[str, str]]]]
if format_version == EventFormatVersions.ROOM_V1_V2:
auth_events = await self._store.add_event_hashes(auth_event_ids)
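
The branch above exists because room versions 1 and 2 reference prev/auth events as (event_id, hashes) pairs, while later room versions use bare event IDs. An illustrative helper (not part of Synapse) that normalises either shape to a plain list of IDs:

from typing import Dict, List, Tuple, Union

EventRefs = Union[List[str], List[Tuple[str, Dict[str, str]]]]

def event_ids_from_refs(refs: EventRefs) -> List[str]:
    # Room v1/v2 events carry (event_id, hashes) pairs; v3+ carry plain IDs.
    return [ref[0] if isinstance(ref, (list, tuple)) else ref for ref in refs]

print(event_ids_from_refs(["$abc", "$def"]))
print(event_ids_from_refs([("$abc", {"sha256": "..."}), ("$def", {"sha256": "..."})]))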

View File

@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, List, Optional, Tuple
import attr
@ -23,56 +22,12 @@ from synapse.types import JsonDict, StateMap
if TYPE_CHECKING:
from synapse.storage.controllers import StorageControllers
from synapse.storage.databases import StateGroupDataStore
from synapse.storage.databases.main import DataStore
from synapse.types.state import StateFilter
class UnpersistedEventContextBase(ABC):
"""
This is a base class for EventContext and UnpersistedEventContext, objects which
hold information relevant to storing an associated event. Note that an
UnpersistedEventContext must be converted into an EventContext before it is
suitable to send to the db with its associated event.
Attributes:
_storage: storage controllers for interfacing with the database
app_service: If the associated event is being sent by a (local) application service, that
app service.
"""
def __init__(self, storage_controller: "StorageControllers"):
self._storage: "StorageControllers" = storage_controller
self.app_service: Optional[ApplicationService] = None
@abstractmethod
async def persist(
self,
event: EventBase,
) -> "EventContext":
"""
A method to convert an UnpersistedEventContext to an EventContext, suitable for
sending to the database with the associated event.
"""
pass
@abstractmethod
async def get_prev_state_ids(
self, state_filter: Optional["StateFilter"] = None
) -> StateMap[str]:
"""
Gets the room state at the event (ie not including the event if the event is a
state event).
Args:
state_filter: specifies the type of state event to fetch from DB, example:
EventTypes.JoinRules
"""
pass
@attr.s(slots=True, auto_attribs=True)
class EventContext(UnpersistedEventContextBase):
class EventContext:
"""
Holds information relevant to persisting an event
@ -122,6 +77,9 @@ class EventContext(UnpersistedEventContextBase):
delta_ids: If ``prev_group`` is not None, the state delta between ``prev_group``
and ``state_group``.
app_service: If this event is being sent by a (local) application service, that
app service.
partial_state: if True, we may be storing this event with a temporary,
incomplete state.
"""
@ -164,9 +122,6 @@ class EventContext(UnpersistedEventContextBase):
"""Return an EventContext instance suitable for persisting an outlier event"""
return EventContext(storage=storage)
async def persist(self, event: EventBase) -> "EventContext":
return self
async def serialize(self, event: EventBase, store: "DataStore") -> JsonDict:
"""Converts self to a type that can be serialized as JSON, and then
deserialized by `deserialize`
@ -299,176 +254,6 @@ class EventContext(UnpersistedEventContextBase):
)
@attr.s(slots=True, auto_attribs=True)
class UnpersistedEventContext(UnpersistedEventContextBase):
"""
The event context holds information about the state groups for an event. It is important
to remember that an event technically has two state groups: the state group before the
event, and the state group after the event. If the event is not a state event, the state
group will not change (ie the state group before the event will be the same as the state
group after the event), but if it is a state event the state group before the event
will differ from the state group after the event.
This is a version of an EventContext before the new state group (if any) has been
computed and stored. It contains information about the state before the event (which
also may be the information after the event, if the event is not a state event). The
UnpersistedEventContext must be converted into an EventContext by calling the method
'persist' on it before it is suitable to be sent to the DB for processing.
state_group_after_event:
The state group after the event. This will always be None until it is persisted.
If the event is not a state event, this will be the same as
state_group_before_event.
state_group_before_event:
The ID of the state group representing the state of the room before this event.
state_delta_due_to_event:
If the event is a state event, then this is the delta of the state between
`state_group` and `state_group_before_event`
prev_group_for_state_group_before_event:
If it is known, ``state_group_before_event``'s previous state group.
delta_ids_to_state_group_before_event:
If ``prev_group_for_state_group_before_event`` is not None, the state delta
between ``prev_group_for_state_group_before_event`` and ``state_group_before_event``.
partial_state:
Whether the event has partial state.
state_map_before_event:
A map of the state before the event, i.e. the state at `state_group_before_event`
"""
_storage: "StorageControllers"
state_group_before_event: Optional[int]
state_group_after_event: Optional[int]
state_delta_due_to_event: Optional[dict]
prev_group_for_state_group_before_event: Optional[int]
delta_ids_to_state_group_before_event: Optional[StateMap[str]]
partial_state: bool
state_map_before_event: Optional[StateMap[str]] = None
@classmethod
async def batch_persist_unpersisted_contexts(
cls,
events_and_context: List[Tuple[EventBase, "UnpersistedEventContextBase"]],
room_id: str,
last_known_state_group: int,
datastore: "StateGroupDataStore",
) -> List[Tuple[EventBase, EventContext]]:
"""
Takes a list of events and their associated unpersisted contexts and persists
the unpersisted contexts, returning a list of events and persisted contexts.
Note that all the events must be in a linear chain (ie a <- b <- c).
Args:
events_and_context: A list of events and their unpersisted contexts
room_id: the room_id for the events
last_known_state_group: the last persisted state group
datastore: a state datastore
"""
amended_events_and_context = await datastore.store_state_deltas_for_batched(
events_and_context, room_id, last_known_state_group
)
events_and_persisted_context = []
for event, unpersisted_context in amended_events_and_context:
if event.is_state():
context = EventContext(
storage=unpersisted_context._storage,
state_group=unpersisted_context.state_group_after_event,
state_group_before_event=unpersisted_context.state_group_before_event,
state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
partial_state=unpersisted_context.partial_state,
prev_group=unpersisted_context.state_group_before_event,
delta_ids=unpersisted_context.state_delta_due_to_event,
)
else:
context = EventContext(
storage=unpersisted_context._storage,
state_group=unpersisted_context.state_group_after_event,
state_group_before_event=unpersisted_context.state_group_before_event,
state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
partial_state=unpersisted_context.partial_state,
prev_group=unpersisted_context.prev_group_for_state_group_before_event,
delta_ids=unpersisted_context.delta_ids_to_state_group_before_event,
)
events_and_persisted_context.append((event, context))
return events_and_persisted_context
async def get_prev_state_ids(
self, state_filter: Optional["StateFilter"] = None
) -> StateMap[str]:
"""
Gets the room state map, excluding this event.
Args:
state_filter: specifies the type of state event to fetch from DB
Returns:
Maps a (type, state_key) to the event ID of the state event matching
this tuple.
"""
if self.state_map_before_event:
return self.state_map_before_event
assert self.state_group_before_event is not None
return await self._storage.state.get_state_ids_for_group(
self.state_group_before_event, state_filter
)
async def persist(self, event: EventBase) -> EventContext:
"""
Creates a full `EventContext` for the event, persisting any referenced state that
has not yet been persisted.
Args:
event: event that the EventContext is associated with.
Returns: An EventContext suitable for sending to the database with the event
for persisting
"""
assert self.partial_state is not None
# If we have a full set of state for before the event but don't have a state
# group for that state, we need to get one
if self.state_group_before_event is None:
assert self.state_map_before_event
state_group_before_event = await self._storage.state.store_state_group(
event.event_id,
event.room_id,
prev_group=self.prev_group_for_state_group_before_event,
delta_ids=self.delta_ids_to_state_group_before_event,
current_state_ids=self.state_map_before_event,
)
self.state_group_before_event = state_group_before_event
# if the event isn't a state event the state group doesn't change
if not self.state_delta_due_to_event:
state_group_after_event = self.state_group_before_event
# otherwise if it is a state event we need to get a state group for it
else:
state_group_after_event = await self._storage.state.store_state_group(
event.event_id,
event.room_id,
prev_group=self.state_group_before_event,
delta_ids=self.state_delta_due_to_event,
current_state_ids=None,
)
return EventContext.with_state(
storage=self._storage,
state_group=state_group_after_event,
state_group_before_event=self.state_group_before_event,
state_delta_due_to_event=self.state_delta_due_to_event,
partial_state=self.partial_state,
prev_group=self.state_group_before_event,
delta_ids=self.state_delta_due_to_event,
)
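
The central idea above is that only a state event moves the room to a new state group; for any other event the group before and after the event is the same. A toy model of that persist step (integers for state groups, flat dicts for state, nothing like the real storage layer):

from typing import Dict, Optional, Tuple

# Toy state-group store: group ID -> flat (event type, state key) -> event ID map.
state_groups: Dict[int, Dict[Tuple[str, str], str]] = {1: {("m.room.name", ""): "$name1"}}
_next_group = 2

def persist_toy_event(group_before: int, state_key: Optional[Tuple[str, str]],
                      event_id: str) -> int:
    """Return the state group after the event (a new group only for state events)."""
    global _next_group
    if state_key is None:
        return group_before                       # non-state event: group unchanged
    new_state = dict(state_groups[group_before])  # state event: copy + apply delta
    new_state[state_key] = event_id
    state_groups[_next_group] = new_state
    _next_group += 1
    return _next_group - 1

print(persist_toy_event(1, None, "$msg"))                    # 1: same group
print(persist_toy_event(1, ("m.room.topic", ""), "$topic"))  # 2: new group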
def _encode_state_dict(
state_dict: Optional[StateMap[str]],
) -> Optional[List[Tuple[str, str, str]]]:

View File

@ -33,8 +33,8 @@ from typing_extensions import Literal
import synapse
from synapse.api.errors import Codes
from synapse.logging.opentracing import trace
from synapse.media._base import FileInfo
from synapse.media.media_storage import ReadableFileWrapper
from synapse.rest.media.v1._base import FileInfo
from synapse.rest.media.v1.media_storage import ReadableFileWrapper
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.types import JsonDict, RoomAlias, UserProfile
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable

View File

@ -18,7 +18,7 @@ from twisted.internet.defer import CancelledError
from synapse.api.errors import ModuleFailedException, SynapseError
from synapse.events import EventBase
from synapse.events.snapshot import UnpersistedEventContextBase
from synapse.events.snapshot import EventContext
from synapse.storage.roommember import ProfileInfo
from synapse.types import Requester, StateMap
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
@ -45,8 +45,6 @@ CHECK_CAN_DEACTIVATE_USER_CALLBACK = Callable[[str, bool], Awaitable[bool]]
ON_PROFILE_UPDATE_CALLBACK = Callable[[str, ProfileInfo, bool, bool], Awaitable]
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK = Callable[[str, bool, bool], Awaitable]
ON_THREEPID_BIND_CALLBACK = Callable[[str, str, str], Awaitable]
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
@ -80,6 +78,7 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
# correctly, we need to await its result. Therefore it doesn't make a lot of
# sense to make it go through the run() wrapper.
if f.__name__ == "check_event_allowed":
# We need to wrap check_event_allowed because its old form would return either
# a boolean or a dict, but now we want to return the dict separately from the
# boolean.
@ -101,6 +100,7 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
return wrap_check_event_allowed
if f.__name__ == "on_create_room":
# We need to wrap on_create_room because its old form would return a boolean
# if the room creation is denied, but now we just want it to raise an
# exception.
@ -174,12 +174,6 @@ class ThirdPartyEventRules:
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
] = []
self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = []
self._on_add_user_third_party_identifier_callbacks: List[
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = []
self._on_remove_user_third_party_identifier_callbacks: List[
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = []
def register_third_party_rules_callbacks(
self,
@ -199,12 +193,6 @@ class ThirdPartyEventRules:
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
] = None,
on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None,
on_add_user_third_party_identifier: Optional[
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = None,
on_remove_user_third_party_identifier: Optional[
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = None,
) -> None:
"""Register callbacks from modules for each hook."""
if check_event_allowed is not None:
@ -242,15 +230,8 @@ class ThirdPartyEventRules:
if on_threepid_bind is not None:
self._on_threepid_bind_callbacks.append(on_threepid_bind)
if on_add_user_third_party_identifier is not None:
self._on_add_user_third_party_identifier_callbacks.append(
on_add_user_third_party_identifier
)
async def check_event_allowed(
self,
event: EventBase,
context: UnpersistedEventContextBase,
self, event: EventBase, context: EventContext
) -> Tuple[bool, Optional[dict]]:
"""Check if a provided event should be allowed in the given context.
@ -530,9 +511,6 @@ class ThirdPartyEventRules:
local homeserver, not when it's created on an identity server (and then kept track
of so that it can be unbound on the same IS later on).
THIS MODULE CALLBACK METHOD HAS BEEN DEPRECATED. Please use the
`on_add_user_third_party_identifier` callback method instead.
Args:
user_id: the user being associated with the threepid.
medium: the threepid's medium.
@ -545,44 +523,3 @@ class ThirdPartyEventRules:
logger.exception(
"Failed to run module API callback %s: %s", callback, e
)
async def on_add_user_third_party_identifier(
self, user_id: str, medium: str, address: str
) -> None:
"""Called when an association between a user's Matrix ID and a third-party ID
(email, phone number) has successfully been registered on the homeserver.
Args:
user_id: The User ID included in the association.
medium: The medium of the third-party ID (email, msisdn).
address: The address of the third-party ID (i.e. an email address).
"""
for callback in self._on_add_user_third_party_identifier_callbacks:
try:
await callback(user_id, medium, address)
except Exception as e:
logger.exception(
"Failed to run module API callback %s: %s", callback, e
)
async def on_remove_user_third_party_identifier(
self, user_id: str, medium: str, address: str
) -> None:
"""Called when an association between a user's Matrix ID and a third-party ID
(email, phone number) has been successfully removed on the homeserver.
This is called *after* any known bindings on identity servers for this
association have been removed.
Args:
user_id: The User ID included in the removed association.
medium: The medium of the third-party ID (email, msisdn).
address: The address of the third-party ID (i.e. an email address).
"""
for callback in self._on_remove_user_third_party_identifier_callbacks:
try:
await callback(user_id, medium, address)
except Exception as e:
logger.exception(
"Failed to run module API callback %s: %s", callback, e
)

View File

@ -38,7 +38,8 @@ from synapse.api.constants import (
)
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import RoomVersion
from synapse.types import JsonDict, Requester
from synapse.types import JsonDict
from synapse.util.frozenutils import unfreeze
from . import EventBase
@ -316,9 +317,8 @@ class SerializeEventConfig:
as_client_event: bool = True
# Function to convert from federation format to client format
event_format: Callable[[JsonDict], JsonDict] = format_event_for_client_v1
# The entity that requested the event. This is used to determine whether to include
# the transaction_id in the unsigned section of the event.
requester: Optional[Requester] = None
# ID of the user's auth token - used for namespacing of transaction IDs
token_id: Optional[int] = None
# List of event fields to include. If empty, all fields will be returned.
only_event_fields: Optional[List[str]] = None
# Some events can have stripped room state stored in the `unsigned` field.
@ -368,24 +368,11 @@ def serialize_event(
e.unsigned["redacted_because"], time_now_ms, config=config
)
# If we have a txn_id saved in the internal_metadata, we should include it in the
# unsigned section of the event if it was sent by the same session as the one
# requesting the event.
# There is a special case for guests, because they only have one access token
# without associated access_token_id, so we always include the txn_id for events
# they sent.
txn_id = getattr(e.internal_metadata, "txn_id", None)
if txn_id is not None and config.requester is not None:
event_token_id = getattr(e.internal_metadata, "token_id", None)
if config.requester.user.to_string() == e.sender and (
(
event_token_id is not None
and config.requester.access_token_id is not None
and event_token_id == config.requester.access_token_id
)
or config.requester.is_guest
):
d["unsigned"]["transaction_id"] = txn_id
if config.token_id is not None:
if config.token_id == getattr(e.internal_metadata, "token_id", None):
txn_id = getattr(e.internal_metadata, "txn_id", None)
if txn_id is not None:
d["unsigned"]["transaction_id"] = txn_id
# invite_room_state and knock_room_state are a list of stripped room state events
# that are meant to provide metadata about a room to an invitee/knocker. They are
@ -416,6 +403,14 @@ class EventClientSerializer:
clients.
"""
def __init__(self, inhibit_replacement_via_edits: bool = False):
"""
Args:
inhibit_replacement_via_edits: If this is set to True, then events are
never replaced by their edits.
"""
self._inhibit_replacement_via_edits = inhibit_replacement_via_edits
def serialize_event(
self,
event: Union[JsonDict, EventBase],
@ -423,6 +418,7 @@ class EventClientSerializer:
*,
config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG,
bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None,
apply_edits: bool = True,
) -> JsonDict:
"""Serializes a single event.
@ -432,7 +428,10 @@ class EventClientSerializer:
config: Event serialization config
bundle_aggregations: A map from event_id to the aggregations to be bundled
into the event.
apply_edits: Whether the content of the event should be modified to reflect
any replacement in `bundle_aggregations[<event_id>].replace`.
See also the `inhibit_replacement_via_edits` constructor arg: if that is
set to True, then this argument is ignored.
Returns:
The serialized event
"""
@ -451,10 +450,38 @@ class EventClientSerializer:
config,
bundle_aggregations,
serialized_event,
apply_edits=apply_edits,
)
return serialized_event
def _apply_edit(
self, orig_event: EventBase, serialized_event: JsonDict, edit: EventBase
) -> None:
"""Replace the content, preserving existing relations of the serialized event.
Args:
orig_event: The original event.
serialized_event: The original event, serialized. This is modified.
edit: The event which edits the above.
"""
# Ensure we take copies of the edit content, otherwise we risk modifying
# the original event.
edit_content = edit.content.copy()
# Unfreeze the event content if necessary, so that we may modify it below
edit_content = unfreeze(edit_content)
serialized_event["content"] = edit_content.get("m.new_content", {})
# Check for existing relations
relates_to = orig_event.content.get("m.relates_to")
if relates_to:
# Keep the relations, ensuring we use a dict copy of the original
serialized_event["content"]["m.relates_to"] = relates_to.copy()
else:
serialized_event["content"].pop("m.relates_to", None)
def _inject_bundled_aggregations(
self,
event: EventBase,
@ -462,6 +489,7 @@ class EventClientSerializer:
config: SerializeEventConfig,
bundled_aggregations: Dict[str, "BundledAggregations"],
serialized_event: JsonDict,
apply_edits: bool,
) -> None:
"""Potentially injects bundled aggregations into the unsigned portion of the serialized event.
@ -476,6 +504,9 @@ class EventClientSerializer:
While serializing the bundled aggregations this map may be searched
again for additional events in a recursive manner.
serialized_event: The serialized event which may be modified.
apply_edits: Whether the content of the event should be modified to reflect
any replacement in `aggregations.replace` (subject to the
`inhibit_replacement_via_edits` constructor arg).
"""
# We have already checked that aggregations exist for this event.
@ -485,12 +516,22 @@ class EventClientSerializer:
# being serialized.
serialized_aggregations = {}
if event_aggregations.annotations:
serialized_aggregations[
RelationTypes.ANNOTATION
] = event_aggregations.annotations
if event_aggregations.references:
serialized_aggregations[
RelationTypes.REFERENCE
] = event_aggregations.references
if event_aggregations.replace:
# If there is an edit, optionally apply it to the event.
edit = event_aggregations.replace
if apply_edits and not self._inhibit_replacement_via_edits:
self._apply_edit(event, serialized_event, edit)
# Include information about it in the relations dict.
#
# Matrix spec v1.5 (https://spec.matrix.org/v1.5/client-server-api/#server-side-aggregation-of-mreplace-relationships)
@ -498,7 +539,10 @@ class EventClientSerializer:
# `sender` of the edit; however MSC3925 proposes extending it to the whole
# of the edit, which is what we do here.
serialized_aggregations[RelationTypes.REPLACE] = self.serialize_event(
event_aggregations.replace, time_now, config=config
edit,
time_now,
config=config,
apply_edits=False,
)
# Include any threaded replies to this event.
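
Applying an m.replace edit means swapping in the edit's m.new_content while keeping the original event's m.relates_to. A standalone sketch of that replacement on plain dicts, mirroring the behaviour described above:

import copy

def apply_edit_sketch(original: dict, edit: dict) -> dict:
    """Toy version of the edit application described above (plain dicts only)."""
    serialized = copy.deepcopy(original)
    new_content = copy.deepcopy(edit["content"].get("m.new_content", {}))
    relates_to = original["content"].get("m.relates_to")
    serialized["content"] = new_content
    if relates_to:
        serialized["content"]["m.relates_to"] = copy.deepcopy(relates_to)
    else:
        serialized["content"].pop("m.relates_to", None)
    return serialized

original = {"content": {"body": "helo", "m.relates_to": {"rel_type": "m.thread", "event_id": "$root"}}}
edit = {"content": {"body": "* hello", "m.new_content": {"body": "hello"}}}
print(apply_edit_sketch(original, edit)["content"])
# {'body': 'hello', 'm.relates_to': {'rel_type': 'm.thread', 'event_id': '$root'}}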

View File

@ -884,7 +884,7 @@ class FederationClient(FederationBase):
if 500 <= e.code < 600:
failover = True
elif 400 <= e.code < 500 and synapse_error.errcode in failover_errcodes:
elif e.code == 400 and synapse_error.errcode in failover_errcodes:
failover = True
elif failover_on_unknown_endpoint and self._is_unknown_endpoint(
@ -999,13 +999,14 @@ class FederationClient(FederationBase):
return destination, ev, room_version
failover_errcodes = {Codes.NOT_FOUND}
# MSC3083 defines additional error codes for room joins. Unfortunately
# we do not yet know the room version, assume these will only be returned
# by valid room versions.
if membership == Membership.JOIN:
failover_errcodes.add(Codes.UNABLE_AUTHORISE_JOIN)
failover_errcodes.add(Codes.UNABLE_TO_GRANT_JOIN)
failover_errcodes = (
(Codes.UNABLE_AUTHORISE_JOIN, Codes.UNABLE_TO_GRANT_JOIN)
if membership == Membership.JOIN
else None
)
return await self._try_destination_list(
"make_" + membership,

View File

@ -23,7 +23,6 @@ from typing import (
Collection,
Dict,
List,
Mapping,
Optional,
Tuple,
Union,
@ -48,7 +47,6 @@ from synapse.api.errors import (
FederationError,
IncompatibleRoomVersionError,
NotFoundError,
PartialStateConflictError,
SynapseError,
UnsupportedRoomVersionError,
)
@ -82,6 +80,7 @@ from synapse.replication.http.federation import (
ReplicationFederationSendEduRestServlet,
ReplicationGetQueryRestServlet,
)
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.lock import Lock
from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary
from synapse.storage.roommember import MemberSummary
@ -1513,7 +1512,7 @@ class FederationHandlerRegistry:
def _get_event_ids_for_partial_state_join(
join_event: EventBase,
prev_state_ids: StateMap[str],
summary: Mapping[str, MemberSummary],
summary: Dict[str, MemberSummary],
) -> Collection[str]:
"""Calculate state to be returned in a partial_state send_join

View File

@ -314,7 +314,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
# stream position.
keyed_edus = {v: k for k, v in self.keyed_edu_changed.items()[i:j]}
for (destination, edu_key), pos in keyed_edus.items():
for ((destination, edu_key), pos) in keyed_edus.items():
rows.append(
(
pos,
@ -329,7 +329,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
j = self.edus.bisect_right(to_token) + 1
edus = self.edus.items()[i:j]
for pos, edu in edus:
for (pos, edu) in edus:
rows.append((pos, EduRow(edu)))
# Sort rows based on pos

View File

@ -155,6 +155,9 @@ class AccountDataHandler:
max_stream_id = await self._store.remove_account_data_for_room(
user_id, room_id, account_data_type
)
if max_stream_id is None:
# The referenced account data did not exist, so no delete occurred.
return None
self._notifier.on_new_event(
StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]
@ -227,6 +230,9 @@ class AccountDataHandler:
max_stream_id = await self._store.remove_account_data_for_user(
user_id, account_data_type
)
if max_stream_id is None:
# The referenced account data did not exist, so no delete occurred.
return None
self._notifier.on_new_event(
StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]
@ -242,6 +248,7 @@ class AccountDataHandler:
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
account_data_type=account_data_type,
content={},
)
return response["max_stream_id"]
@ -336,12 +343,10 @@ class AccountDataEventSource(EventSource[int, JsonDict]):
}
)
account_data = await self.store.get_updated_global_account_data_for_user(
user_id, last_stream_id
)
room_account_data = await self.store.get_updated_room_account_data_for_user(
user_id, last_stream_id
)
(
account_data,
room_account_data,
) = await self.store.get_updated_account_data_for_user(user_id, last_stream_id)
for account_data_type, content in account_data.items():
results.append({"type": account_data_type, "content": content})

View File

@ -14,7 +14,7 @@
import abc
import logging
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Set
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set
from synapse.api.constants import Direction, Membership
from synapse.events import EventBase
@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)
class AdminHandler:
def __init__(self, hs: "HomeServer"):
self._store = hs.get_datastores().main
self.store = hs.get_datastores().main
self._device_handler = hs.get_device_handler()
self._storage_controllers = hs.get_storage_controllers()
self._state_storage_controller = self._storage_controllers.state
@ -38,7 +38,7 @@ class AdminHandler:
async def get_whois(self, user: UserID) -> JsonDict:
connections = []
sessions = await self._store.get_user_ip_and_agents(user)
sessions = await self.store.get_user_ip_and_agents(user)
for session in sessions:
connections.append(
{
@ -57,7 +57,7 @@ class AdminHandler:
async def get_user(self, user: UserID) -> Optional[JsonDict]:
"""Function to get user details"""
user_info_dict = await self._store.get_user_by_id(user.to_string())
user_info_dict = await self.store.get_user_by_id(user.to_string())
if user_info_dict is None:
return None
@ -89,11 +89,11 @@ class AdminHandler:
}
# Add additional user metadata
profile = await self._store.get_profileinfo(user.localpart)
threepids = await self._store.user_get_threepids(user.to_string())
profile = await self.store.get_profileinfo(user.localpart)
threepids = await self.store.user_get_threepids(user.to_string())
external_ids = [
({"auth_provider": auth_provider, "external_id": external_id})
for auth_provider, external_id in await self._store.get_external_ids_by_user(
for auth_provider, external_id in await self.store.get_external_ids_by_user(
user.to_string()
)
]
@ -101,7 +101,7 @@ class AdminHandler:
user_info_dict["avatar_url"] = profile.avatar_url
user_info_dict["threepids"] = threepids
user_info_dict["external_ids"] = external_ids
user_info_dict["erased"] = await self._store.is_user_erased(user.to_string())
user_info_dict["erased"] = await self.store.is_user_erased(user.to_string())
return user_info_dict
@ -117,7 +117,7 @@ class AdminHandler:
The returned value is that returned by `writer.finished()`.
"""
# Get all rooms the user is in or has been in
rooms = await self._store.get_rooms_for_local_user_where_membership_is(
rooms = await self.store.get_rooms_for_local_user_where_membership_is(
user_id,
membership_list=(
Membership.JOIN,
@ -131,7 +131,7 @@ class AdminHandler:
# We only try and fetch events for rooms the user has been in. If
# they've been e.g. invited to a room without joining then we handle
# those separately.
rooms_user_has_been_in = await self._store.get_rooms_user_has_been_in(user_id)
rooms_user_has_been_in = await self.store.get_rooms_user_has_been_in(user_id)
for index, room in enumerate(rooms):
room_id = room.room_id
@ -140,7 +140,7 @@ class AdminHandler:
"[%s] Handling room %s, %d/%d", user_id, room_id, index + 1, len(rooms)
)
forgotten = await self._store.did_forget(user_id, room_id)
forgotten = await self.store.did_forget(user_id, room_id)
if forgotten:
logger.info("[%s] User forgot room %d, ignoring", user_id, room_id)
continue
@ -152,14 +152,14 @@ class AdminHandler:
if room.membership == Membership.INVITE:
event_id = room.event_id
invite = await self._store.get_event(event_id, allow_none=True)
invite = await self.store.get_event(event_id, allow_none=True)
if invite:
invited_state = invite.unsigned["invite_room_state"]
writer.write_invite(room_id, invite, invited_state)
if room.membership == Membership.KNOCK:
event_id = room.event_id
knock = await self._store.get_event(event_id, allow_none=True)
knock = await self.store.get_event(event_id, allow_none=True)
if knock:
knock_state = knock.unsigned["knock_room_state"]
writer.write_knock(room_id, knock, knock_state)
@ -170,7 +170,7 @@ class AdminHandler:
# were joined. We estimate that point by looking at the
# stream_ordering of the last membership if it wasn't a join.
if room.membership == Membership.JOIN:
stream_ordering = self._store.get_room_max_stream_ordering()
stream_ordering = self.store.get_room_max_stream_ordering()
else:
stream_ordering = room.stream_ordering
@ -197,7 +197,7 @@ class AdminHandler:
# events that we have and then filtering, this isn't the most
# efficient method perhaps but it does guarantee we get everything.
while True:
events, _ = await self._store.paginate_room_events(
events, _ = await self.store.paginate_room_events(
room_id, from_key, to_key, limit=100, direction=Direction.FORWARDS
)
if not events:
@ -252,49 +252,16 @@ class AdminHandler:
profile = await self.get_user(UserID.from_string(user_id))
if profile is not None:
writer.write_profile(profile)
logger.info("[%s] Written profile", user_id)
# Get all devices the user has
devices = await self._device_handler.get_devices_by_user(user_id)
writer.write_devices(devices)
logger.info("[%s] Written %s devices", user_id, len(devices))
# Get all connections the user has
connections = await self.get_whois(UserID.from_string(user_id))
writer.write_connections(
connections["devices"][""]["sessions"][0]["connections"]
)
logger.info("[%s] Written %s connections", user_id, len(connections))
# Get all account data the user has global and in rooms
global_data = await self._store.get_global_account_data_for_user(user_id)
by_room_data = await self._store.get_room_account_data_for_user(user_id)
writer.write_account_data("global", global_data)
for room_id in by_room_data:
writer.write_account_data(room_id, by_room_data[room_id])
logger.info(
"[%s] Written account data for %s rooms", user_id, len(by_room_data)
)
# Get all media ids the user has
limit = 100
start = 0
while True:
media_ids, total = await self._store.get_local_media_by_user_paginate(
start, limit, user_id
)
for media in media_ids:
writer.write_media_id(media["media_id"], media)
logger.info(
"[%s] Written %d media_ids of %s",
user_id,
(start + len(media_ids)),
total,
)
if (start + limit) >= total:
break
start += limit
return writer.finished()
@ -373,30 +340,6 @@ class ExfiltrationWriter(metaclass=abc.ABCMeta):
"""
raise NotImplementedError()
@abc.abstractmethod
def write_account_data(
self, file_name: str, account_data: Mapping[str, JsonDict]
) -> None:
"""Write the account data of a user.
Args:
file_name: file name to write data
account_data: mapping of global or room account_data
"""
raise NotImplementedError()
@abc.abstractmethod
def write_media_id(self, media_id: str, media_metadata: JsonDict) -> None:
"""Write the media's metadata of a user.
Exports only the metadata, as this can be fetched from the database via
read only. In order to access the files, a connection to the correct
media repository would be required.
Args:
media_id: ID of the media.
media_metadata: Metadata of one media file.
"""
@abc.abstractmethod
def finished(self) -> Any:
"""Called when all data has successfully been exported and written.

View File

@ -737,7 +737,7 @@ class ApplicationServicesHandler:
)
ret = []
for success, result in results:
for (success, result) in results:
if success:
ret.extend(result)

View File

@ -201,7 +201,7 @@ class AuthHandler:
for auth_checker_class in INTERACTIVE_AUTH_CHECKERS:
inst = auth_checker_class(hs)
if inst.is_enabled():
self.checkers[inst.AUTH_TYPE] = inst
self.checkers[inst.AUTH_TYPE] = inst # type: ignore
self.bcrypt_rounds = hs.config.registration.bcrypt_rounds
@ -815,6 +815,7 @@ class AuthHandler:
now_ms = self._clock.time_msec()
if existing_token.expiry_ts is not None and existing_token.expiry_ts < now_ms:
raise SynapseError(
HTTPStatus.FORBIDDEN,
"The supplied refresh token has expired",
@ -1542,17 +1543,6 @@ class AuthHandler:
async def add_threepid(
self, user_id: str, medium: str, address: str, validated_at: int
) -> None:
"""
Adds an association between a user's Matrix ID and a third-party ID (email,
phone number).
Args:
user_id: The ID of the user to associate.
medium: The medium of the third-party ID (email, msisdn).
address: The address of the third-party ID (i.e. an email address).
validated_at: The timestamp in ms of when the validation that the user owns
this third-party ID occurred.
"""
# check if medium has a valid value
if medium not in ["email", "msisdn"]:
raise SynapseError(
@ -1577,44 +1567,43 @@ class AuthHandler:
user_id, medium, address, validated_at, self.hs.get_clock().time_msec()
)
# Inform Synapse modules that a 3PID association has been created.
await self._third_party_rules.on_add_user_third_party_identifier(
user_id, medium, address
)
# Deprecated method for informing Synapse modules that a 3PID association
# has successfully been created.
await self._third_party_rules.on_threepid_bind(user_id, medium, address)
async def delete_local_threepid(
self, user_id: str, medium: str, address: str
) -> None:
"""Deletes an association between a third-party ID and a user ID from the local
database. This method does not unbind the association from any identity servers.
If `medium` is 'email' and a pusher is associated with this third-party ID, the
pusher will also be deleted.
async def delete_threepid(
self, user_id: str, medium: str, address: str, id_server: Optional[str] = None
) -> bool:
"""Attempts to unbind the 3pid on the identity servers and deletes it
from the local database.
Args:
user_id: ID of user to remove the 3pid from.
medium: The medium of the 3pid being removed: "email" or "msisdn".
address: The 3pid address to remove.
id_server: Use the given identity server when unbinding
any threepids. If None then will attempt to unbind using the
identity server specified when binding (if known).
Returns:
Returns True if successfully unbound the 3pid on
the identity server, False if identity server doesn't support the
unbind API.
"""
# 'Canonicalise' email addresses as per above
if medium == "email":
address = canonicalise_email(address)
await self.store.user_delete_threepid(user_id, medium, address)
# Inform Synapse modules that a 3PID association has been deleted.
await self._third_party_rules.on_remove_user_third_party_identifier(
user_id, medium, address
identity_handler = self.hs.get_identity_handler()
result = await identity_handler.try_unbind_threepid(
user_id, {"medium": medium, "address": address, "id_server": id_server}
)
await self.store.user_delete_threepid(user_id, medium, address)
if medium == "email":
await self.store.delete_pusher_by_app_id_pushkey_user_id(
app_id="m.email", pushkey=address, user_id=user_id
)
return result
async def hash(self, password: str) -> str:
"""Computes a secure hash of password.
@ -2271,6 +2260,7 @@ class PasswordAuthProvider:
async def on_logged_out(
self, user_id: str, device_id: Optional[str], access_token: str
) -> None:
# call all of the on_logged_out callbacks
for callback in self.on_logged_out_callbacks:
try:
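Earlier in this file's diff, the `delete_local_threepid` / `delete_threepid` hunk couples removal of an email 3PID with removal of the matching `m.email` pusher keyed by that address. A standalone sketch of that coupling, with a toy store standing in for Synapse's storage methods:

from typing import List, Tuple


class ToyStore:
    """Stand-in for the two storage calls used in the hunk above."""

    def __init__(self) -> None:
        self.threepids: List[Tuple[str, str, str]] = []  # (user_id, medium, address)
        self.pushers: List[Tuple[str, str, str]] = []    # (app_id, pushkey, user_id)

    async def user_delete_threepid(self, user_id: str, medium: str, address: str) -> None:
        self.threepids = [t for t in self.threepids if t != (user_id, medium, address)]

    async def delete_pusher_by_app_id_pushkey_user_id(
        self, app_id: str, pushkey: str, user_id: str
    ) -> None:
        self.pushers = [p for p in self.pushers if p != (app_id, pushkey, user_id)]


async def delete_local_threepid(store: ToyStore, user_id: str, medium: str, address: str) -> None:
    # Remove the local association first...
    await store.user_delete_threepid(user_id, medium, address)
    # ...then, for email addresses, drop the email pusher keyed by that address,
    # mirroring the app_id="m.email", pushkey=address pattern shown above.
    if medium == "email":
        await store.delete_pusher_by_app_id_pushkey_user_id(
            app_id="m.email", pushkey=address, user_id=user_id
        )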

View File

@ -100,28 +100,31 @@ class DeactivateAccountHandler:
# unbinding
identity_server_supports_unbinding = True
# Attempt to unbind any known bound threepids to this account from identity
# server(s).
bound_threepids = await self.store.user_get_bound_threepids(user_id)
for threepid in bound_threepids:
# Retrieve the 3PIDs this user has bound to an identity server
threepids = await self.store.user_get_bound_threepids(user_id)
for threepid in threepids:
try:
result = await self._identity_handler.try_unbind_threepid(
user_id, threepid["medium"], threepid["address"], id_server
user_id,
{
"medium": threepid["medium"],
"address": threepid["address"],
"id_server": id_server,
},
)
identity_server_supports_unbinding &= result
except Exception:
# Do we want this to be a fatal error or should we carry on?
logger.exception("Failed to remove threepid from ID server")
raise SynapseError(400, "Failed to remove threepid from ID server")
identity_server_supports_unbinding &= result
# Remove any local threepid associations for this account.
local_threepids = await self.store.user_get_threepids(user_id)
for threepid in local_threepids:
await self._auth_handler.delete_local_threepid(
await self.store.user_delete_threepid(
user_id, threepid["medium"], threepid["address"]
)
# Remove all 3PIDs this user has bound to the homeserver
await self.store.user_delete_threepids(user_id)
# delete any devices belonging to the user, which will also
# delete corresponding access tokens.
await self._device_handler.delete_all_devices_for_user(user_id)
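Both sides of the deactivation hunk fold the per-identity-server results into a single `identity_server_supports_unbinding` flag with `&=`; they mainly differ in whether that fold happens inside or after the exception handling. A small sketch of the aggregation pattern in isolation (the unbind callable and the raised error type are stand-ins):

from typing import Awaitable, Callable, Dict, List

# Stand-in for the identity handler's unbind call: returns True if the
# identity server supported the unbind API, False otherwise.
UnbindFn = Callable[[str, Dict[str, str]], Awaitable[bool]]


async def unbind_all(unbind: UnbindFn, user_id: str, threepids: List[Dict[str, str]]) -> bool:
    """Return True only if every contacted identity server supported unbinding."""
    supports_unbinding = True
    for threepid in threepids:
        try:
            result = await unbind(user_id, threepid)
        except Exception:
            # Mirrors the hunk above: a failed unbind is surfaced to the caller
            # rather than silently skipped.
            raise RuntimeError("Failed to remove threepid from ID server")
        supports_unbinding &= result
    return supports_unbinding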

View File

@ -14,7 +14,7 @@
import logging
import string
from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence
from typing import TYPE_CHECKING, Iterable, List, Optional
from typing_extensions import Literal
@ -490,8 +490,7 @@ class DirectoryHandler:
)
)
if canonical_alias:
# Ensure we do not mutate room_aliases.
room_aliases = list(room_aliases) + [canonical_alias]
room_aliases.append(canonical_alias)
if not self.config.roomdirectory.is_publishing_room_allowed(
user_id, room_id, room_aliases
@ -502,11 +501,9 @@ class DirectoryHandler:
raise SynapseError(403, "Not allowed to publish room")
# Check if publishing is blocked by a third party module
allowed_by_third_party_rules = (
await (
self.third_party_event_rules.check_visibility_can_be_modified(
room_id, visibility
)
allowed_by_third_party_rules = await (
self.third_party_event_rules.check_visibility_can_be_modified(
room_id, visibility
)
)
if not allowed_by_third_party_rules:
@ -536,7 +533,7 @@ class DirectoryHandler:
async def get_aliases_for_room(
self, requester: Requester, room_id: str
) -> Sequence[str]:
) -> List[str]:
"""
Get a list of the aliases that currently point to this room on this server
"""

View File

@ -159,22 +159,19 @@ class E2eKeysHandler:
# A map of destination -> user ID -> device IDs.
remote_queries_not_in_cache: Dict[str, Dict[str, Iterable[str]]] = {}
if remote_queries:
user_ids = set()
user_and_device_ids: List[Tuple[str, str]] = []
query_list: List[Tuple[str, Optional[str]]] = []
for user_id, device_ids in remote_queries.items():
if device_ids:
user_and_device_ids.extend(
query_list.extend(
(user_id, device_id) for device_id in device_ids
)
else:
user_ids.add(user_id)
query_list.append((user_id, None))
(
user_ids_not_in_cache,
remote_results,
) = await self.store.get_user_devices_from_cache(
user_ids, user_and_device_ids
)
) = await self.store.get_user_devices_from_cache(query_list)
# Check that the homeserver still shares a room with all cached users.
# Note that this check may be slightly racy when a remote user leaves a
@ -1301,20 +1298,6 @@ class E2eKeysHandler:
return desired_key_data
async def is_cross_signing_set_up_for_user(self, user_id: str) -> bool:
"""Checks if the user has cross-signing set up
Args:
user_id: The user to check
Returns:
True if the user has cross-signing set up, False otherwise
"""
existing_master_key = await self.store.get_e2e_cross_signing_key(
user_id, "master"
)
return existing_master_key is not None
def _check_cross_signing_key(
key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
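In the device-query hunk above, one version builds a single `query_list` of `(user_id, device_id)` pairs, where a `None` device ID stands for a whole-user query, while the other keeps a separate set of user IDs alongside explicit pairs. A sketch of the single-list shape with toy data and no cache behind it:

from typing import Dict, Iterable, List, Optional, Tuple

remote_queries: Dict[str, Iterable[str]] = {
    "@alice:remote.example": ["DEVICE_A", "DEVICE_B"],  # specific devices
    "@bob:remote.example": [],                          # empty: query all devices
}

# (user_id, device_id) pairs; None marks a whole-user query.
query_list: List[Tuple[str, Optional[str]]] = []
for user_id, device_ids in remote_queries.items():
    if device_ids:
        query_list.extend((user_id, device_id) for device_id in device_ids)
    else:
        query_list.append((user_id, None))

assert ("@alice:remote.example", "DEVICE_A") in query_list
assert ("@bob:remote.example", None) in query_list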

View File

@ -188,6 +188,7 @@ class E2eRoomKeysHandler:
# XXX: perhaps we should use a finer grained lock here?
async with self._upload_linearizer.queue(user_id):
# Check that the version we're trying to upload is the current version
try:
version_info = await self.store.get_e2e_room_keys_version_info(user_id)

View File

@ -202,7 +202,7 @@ class EventAuthHandler:
state_ids: StateMap[str],
room_version: RoomVersion,
user_id: str,
prev_membership: Optional[str],
prev_member_event: Optional[EventBase],
) -> None:
"""
Check whether a user can join a room without an invite due to restricted join rules.
@ -214,14 +214,15 @@ class EventAuthHandler:
state_ids: The state of the room as it currently is.
room_version: The room version of the room being joined.
user_id: The user joining the room.
prev_membership: The current membership state for this user. `None` if the
user has never joined the room (equivalent to "leave").
prev_member_event: The current membership event for this user.
Raises:
AuthError if the user cannot join the room.
"""
# If the member is invited or currently joined, then nothing to do.
if prev_membership in (Membership.JOIN, Membership.INVITE):
if prev_member_event and (
prev_member_event.membership in (Membership.JOIN, Membership.INVITE)
):
return
# This is not a room with a restricted join rule, so we don't need to do the
@ -236,6 +237,7 @@ class EventAuthHandler:
# in any of them.
allowed_rooms = await self.get_rooms_that_allow_join(state_ids)
if not await self.is_user_in_rooms(allowed_rooms, user_id):
# If this is a remote request, the user might be in an allowed room
# that we do not know about.
if get_domain_from_id(user_id) != self._server_name:
@ -253,14 +255,13 @@ class EventAuthHandler:
)
async def has_restricted_join_rules(
self, partial_state_ids: StateMap[str], room_version: RoomVersion
self, state_ids: StateMap[str], room_version: RoomVersion
) -> bool:
"""
Return if the room has the proper join rules set for access via rooms.
Args:
state_ids: The state of the room as it currently is. May be full or partial
state.
state_ids: The state of the room as it currently is.
room_version: The room version of the room to query.
Returns:
@ -271,7 +272,7 @@ class EventAuthHandler:
return False
# If there's no join rule, then it defaults to invite (so this doesn't apply).
join_rules_event_id = partial_state_ids.get((EventTypes.JoinRules, ""), None)
join_rules_event_id = state_ids.get((EventTypes.JoinRules, ""), None)
if not join_rules_event_id:
return False
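The two sides of the `check_restricted_join_rules` hunk differ only in whether the caller passes the previous membership as a bare string or as the previous member event. Both reduce to the same early return when the user is already joined or invited; a side-by-side sketch (the membership constants below are plain string literals standing in for Synapse's):

from typing import Optional

# Stand-ins for synapse.api.constants.Membership
JOIN = "join"
INVITE = "invite"


class ToyMemberEvent:
    def __init__(self, membership: str) -> None:
        self.membership = membership


def already_allowed_from_membership(prev_membership: Optional[str]) -> bool:
    # Variant taking the membership string (None if the user never joined).
    return prev_membership in (JOIN, INVITE)


def already_allowed_from_event(prev_member_event: Optional[ToyMemberEvent]) -> bool:
    # Variant taking the previous m.room.member event, if any.
    return prev_member_event is not None and prev_member_event.membership in (JOIN, INVITE)


assert already_allowed_from_membership("join")
assert not already_allowed_from_membership(None)
assert already_allowed_from_event(ToyMemberEvent("invite"))
assert not already_allowed_from_event(None)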

View File

@ -23,7 +23,7 @@ from synapse.events.utils import SerializeEventConfig
from synapse.handlers.presence import format_user_presence_state
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, Requester, UserID
from synapse.types import JsonDict, UserID
from synapse.visibility import filter_events_for_client
if TYPE_CHECKING:
@ -46,12 +46,13 @@ class EventStreamHandler:
async def get_stream(
self,
requester: Requester,
auth_user_id: str,
pagin_config: PaginationConfig,
timeout: int = 0,
as_client_event: bool = True,
affect_presence: bool = True,
room_id: Optional[str] = None,
is_guest: bool = False,
) -> JsonDict:
"""Fetches the events stream for a given user."""
@ -61,12 +62,13 @@ class EventStreamHandler:
raise SynapseError(403, "This room has been blocked on this server")
# send any outstanding server notices to the user.
await self._server_notices_sender.on_user_syncing(requester.user.to_string())
await self._server_notices_sender.on_user_syncing(auth_user_id)
auth_user = UserID.from_string(auth_user_id)
presence_handler = self.hs.get_presence_handler()
context = await presence_handler.user_syncing(
requester.user.to_string(),
auth_user_id,
affect_presence=affect_presence,
presence_state=PresenceState.ONLINE,
)
@ -80,10 +82,10 @@ class EventStreamHandler:
timeout = random.randint(int(timeout * 0.9), int(timeout * 1.1))
stream_result = await self.notifier.get_events_for(
requester.user,
auth_user,
pagin_config,
timeout,
is_guest=requester.is_guest,
is_guest=is_guest,
explicit_room_id=room_id,
)
events = stream_result.events
@ -100,7 +102,7 @@ class EventStreamHandler:
if event.membership != Membership.JOIN:
continue
# Send down presence.
if event.state_key == requester.user.to_string():
if event.state_key == auth_user_id:
# Send down presence for everyone in the room.
users: Iterable[str] = await self.store.get_users_in_room(
event.room_id
@ -122,9 +124,7 @@ class EventStreamHandler:
chunks = self._event_serializer.serialize_events(
events,
time_now,
config=SerializeEventConfig(
as_client_event=as_client_event, requester=requester
),
config=SerializeEventConfig(as_client_event=as_client_event),
)
chunk = {

View File

@ -49,7 +49,6 @@ from synapse.api.errors import (
FederationPullAttemptBackoffError,
HttpResponseException,
NotFoundError,
PartialStateConflictError,
RequestSendFailed,
SynapseError,
)
@ -57,7 +56,7 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.crypto.event_signing import compute_event_signature
from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase
from synapse.events.snapshot import EventContext, UnpersistedEventContextBase
from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator
from synapse.federation.federation_client import InvalidResponseError
from synapse.http.servlet import assert_params_in_dict
@ -69,6 +68,7 @@ from synapse.replication.http.federation import (
ReplicationCleanRoomRestServlet,
ReplicationStoreRoomOnOutlierMembershipRestServlet,
)
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import JsonDict, StrCollection, get_domain_from_id
from synapse.types.state import StateFilter
@ -952,20 +952,7 @@ class FederationHandler:
#
# Note that this requires the /send_join request to come back to the
# same server.
prev_event_ids = None
if room_version.msc3083_join_rules:
# Note that the room's state can change out from under us and render our
# nice join rules-conformant event non-conformant by the time we build the
# event. When this happens, our validation at the end fails and we respond
# to the requesting server with a 403, which is misleading — it indicates
# that the user is not allowed to join the room and the joining server
# should not bother retrying via this homeserver or any others, when
# in fact we've just messed up with building the event.
#
# To reduce the likelihood of this race, we capture the forward extremities
# of the room (prev_event_ids) just before fetching the current state, and
# hope that the state we fetch corresponds to the prev events we chose.
prev_event_ids = await self.store.get_prev_events_for_room(room_id)
state_ids = await self._state_storage_controller.get_current_state_ids(
room_id
)
@ -1003,21 +990,15 @@ class FederationHandler:
)
try:
(
event,
unpersisted_context,
) = await self.event_creation_handler.create_new_client_event(
builder=builder,
prev_event_ids=prev_event_ids,
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
except SynapseError as e:
logger.warning("Failed to create join to %s because %s", room_id, e)
raise
# Ensure the user can even join the room.
await self._federation_event_handler.check_join_restrictions(
unpersisted_context, event
)
await self._federation_event_handler.check_join_restrictions(context, event)
# The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_join_request`
@ -1197,7 +1178,7 @@ class FederationHandler:
},
)
event, _ = await self.event_creation_handler.create_new_client_event(
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
@ -1247,13 +1228,12 @@ class FederationHandler:
},
)
(
event,
unpersisted_context,
) = await self.event_creation_handler.create_new_client_event(builder=builder)
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
event_allowed, _ = await self.third_party_event_rules.check_event_allowed(
event, unpersisted_context
event, context
)
if not event_allowed:
logger.warning("Creation of knock %s forbidden by third-party rules", event)
@ -1426,20 +1406,15 @@ class FederationHandler:
try:
(
event,
unpersisted_context,
context,
) = await self.event_creation_handler.create_new_client_event(
builder=builder
)
(
event,
unpersisted_context,
) = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, unpersisted_context
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)
context = await unpersisted_context.persist(event)
EventValidator().validate_new(event, self.config)
# We need to tell the transaction queue to send this out, even
@ -1508,19 +1483,14 @@ class FederationHandler:
try:
(
event,
unpersisted_context,
context,
) = await self.event_creation_handler.create_new_client_event(
builder=builder
)
(
event,
unpersisted_context,
) = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, unpersisted_context
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)
context = await unpersisted_context.persist(event)
try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(event)
@ -1552,8 +1522,8 @@ class FederationHandler:
room_version_obj: RoomVersion,
event_dict: JsonDict,
event: EventBase,
context: UnpersistedEventContextBase,
) -> Tuple[EventBase, UnpersistedEventContextBase]:
context: EventContext,
) -> Tuple[EventBase, EventContext]:
key = (
EventTypes.ThirdPartyInvite,
event.content["third_party_invite"]["signed"]["token"],
@ -1587,14 +1557,11 @@ class FederationHandler:
room_version_obj, event_dict
)
EventValidator().validate_builder(builder, self.hs.config)
(
event,
unpersisted_context,
) = await self.event_creation_handler.create_new_client_event(builder=builder)
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
EventValidator().validate_new(event, self.config)
return event, unpersisted_context
return event, context
async def _check_signature(self, event: EventBase, context: EventContext) -> None:
"""
@ -1894,11 +1861,6 @@ class FederationHandler:
logger.info("Updating current state for %s", room_id)
# TODO(faster_joins): notify workers in notify_room_un_partial_stated
# https://github.com/matrix-org/synapse/issues/12994
#
# NB: there's a potential race here. If room is purged just before we
# call this, we _might_ end up inserting rows into current_state_events.
# (The logic is hard to chase through.) We think this is fine, but if
# not the HS admin should purge the room again.
await self.state_handler.update_current_state(room_id)
logger.info("Handling any pending device list updates")

View File

@ -47,7 +47,6 @@ from synapse.api.errors import (
FederationError,
FederationPullAttemptBackoffError,
HttpResponseException,
PartialStateConflictError,
RequestSendFailed,
SynapseError,
)
@ -59,7 +58,7 @@ from synapse.event_auth import (
validate_event_for_room_version,
)
from synapse.events import EventBase
from synapse.events.snapshot import EventContext, UnpersistedEventContextBase
from synapse.events.snapshot import EventContext
from synapse.federation.federation_client import InvalidResponseError, PulledPduInfo
from synapse.logging.context import nested_logging_context
from synapse.logging.opentracing import (
@ -75,6 +74,7 @@ from synapse.replication.http.federation import (
ReplicationFederationSendEventsRestServlet,
)
from synapse.state import StateResolutionStore
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
PersistedEventPosition,
@ -426,9 +426,7 @@ class FederationEventHandler:
return event, context
async def check_join_restrictions(
self,
context: UnpersistedEventContextBase,
event: EventBase,
self, context: EventContext, event: EventBase
) -> None:
"""Check that restrictions in restricted join rules are matched
@ -441,17 +439,16 @@ class FederationEventHandler:
# Check if the user is already in the room or invited to the room.
user_id = event.state_key
prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None)
prev_membership = None
prev_member_event = None
if prev_member_event_id:
prev_member_event = await self._store.get_event(prev_member_event_id)
prev_membership = prev_member_event.membership
# Check if the member should be allowed access via membership in a space.
await self._event_auth_handler.check_restricted_join_rules(
prev_state_ids,
event.room_version,
user_id,
prev_membership,
prev_member_event,
)
@trace
@ -527,57 +524,11 @@ class FederationEventHandler:
"Peristing join-via-remote %s (partial_state: %s)", event, partial_state
)
with nested_logging_context(suffix=event.event_id):
if partial_state:
# When handling a second partial state join into a partial state room,
# the returned state will exclude the membership from the first join. To
# preserve prior memberships, we try to compute the partial state before
# the event ourselves if we know about any of the prev events.
#
# When we don't know about any of the prev events, it's fine to just use
# the returned state, since the new join will create a new forward
# extremity, and leave the forward extremity containing our prior
# memberships alone.
prev_event_ids = set(event.prev_event_ids())
seen_event_ids = await self._store.have_events_in_timeline(
prev_event_ids
)
missing_event_ids = prev_event_ids - seen_event_ids
state_maps_to_resolve: List[StateMap[str]] = []
# Fetch the state after the prev events that we know about.
state_maps_to_resolve.extend(
(
await self._state_storage_controller.get_state_groups_ids(
room_id, seen_event_ids, await_full_state=False
)
).values()
)
# When there are prev events we do not have the state for, we state
# resolve with the state returned by the remote homeserver.
if missing_event_ids or len(state_maps_to_resolve) == 0:
state_maps_to_resolve.append(
{(e.type, e.state_key): e.event_id for e in state}
)
state_ids_before_event = (
await self._state_resolution_handler.resolve_events_with_store(
event.room_id,
room_version.identifier,
state_maps_to_resolve,
event_map=None,
state_res_store=StateResolutionStore(self._store),
)
)
else:
state_ids_before_event = {
(e.type, e.state_key): e.event_id for e in state
}
context = await self._state_handler.compute_event_context(
event,
state_ids_before_event=state_ids_before_event,
state_ids_before_event={
(e.type, e.state_key): e.event_id for e in state
},
partial_state=partial_state,
)
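The large removed block above decides which state maps feed state resolution when handling a second join into a partial-state room: the states after any prev events we already have, plus the remote-supplied state whenever some prev events are unknown or none are known at all. The selection step, reduced to plain set arithmetic (types and names simplified for illustration):

from typing import Dict, List, Set

StateMap = Dict[str, str]  # simplified: state key -> event ID


def pick_state_maps(
    prev_event_ids: Set[str],
    seen_event_ids: Set[str],
    states_after_seen_events: Dict[str, StateMap],
    remote_state: StateMap,
) -> List[StateMap]:
    missing_event_ids = prev_event_ids - seen_event_ids

    maps: List[StateMap] = [
        states_after_seen_events[event_id]
        for event_id in prev_event_ids & seen_event_ids
    ]
    # Include the remote homeserver's state when some prev events are unknown,
    # or when we knew none of them and have nothing else to resolve with.
    if missing_event_ids or not maps:
        maps.append(remote_state)
    return maps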

View File

@ -219,31 +219,28 @@ class IdentityHandler:
data = json_decoder.decode(e.msg) # XXX WAT?
return data
async def try_unbind_threepid(
self, mxid: str, medium: str, address: str, id_server: Optional[str]
) -> bool:
"""Attempt to remove a 3PID from one or more identity servers.
async def try_unbind_threepid(self, mxid: str, threepid: dict) -> bool:
"""Attempt to remove a 3PID from an identity server, or if one is not provided, all
identity servers we're aware the binding is present on
Args:
mxid: Matrix user ID of binding to be removed
medium: The medium of the third-party ID.
address: The address of the third-party ID.
id_server: An identity server to attempt to unbind from. If None,
attempt to remove the association from all identity servers
known to potentially have it.
threepid: Dict with medium & address of binding to be
removed, and an optional id_server.
Raises:
SynapseError: If we failed to contact one or more identity servers.
SynapseError: If we failed to contact the identity server
Returns:
True on success, otherwise False if the identity server doesn't
support unbinding (or no identity server to contact was found).
True on success, otherwise False if the identity
server doesn't support unbinding (or no identity server found to
contact).
"""
if id_server:
id_servers = [id_server]
if threepid.get("id_server"):
id_servers = [threepid["id_server"]]
else:
id_servers = await self.store.get_id_servers_user_bound(
mxid, medium, address
user_id=mxid, medium=threepid["medium"], address=threepid["address"]
)
# We don't know where to unbind, so we don't have a choice but to return
@ -252,21 +249,20 @@ class IdentityHandler:
changed = True
for id_server in id_servers:
changed &= await self._try_unbind_threepid_with_id_server(
mxid, medium, address, id_server
changed &= await self.try_unbind_threepid_with_id_server(
mxid, threepid, id_server
)
return changed
async def _try_unbind_threepid_with_id_server(
self, mxid: str, medium: str, address: str, id_server: str
async def try_unbind_threepid_with_id_server(
self, mxid: str, threepid: dict, id_server: str
) -> bool:
"""Removes a binding from an identity server
Args:
mxid: Matrix user ID of binding to be removed
medium: The medium of the third-party ID
address: The address of the third-party ID
threepid: Dict with medium & address of binding to be removed
id_server: Identity server to unbind from
Raises:
@ -290,7 +286,7 @@ class IdentityHandler:
content = {
"mxid": mxid,
"threepid": {"medium": medium, "address": address},
"threepid": {"medium": threepid["medium"], "address": threepid["address"]},
}
# we abuse the federation http client to sign the request, but we have to send it
@ -323,7 +319,12 @@ class IdentityHandler:
except RequestTimedOutError:
raise SynapseError(500, "Timed out contacting identity server")
await self.store.remove_user_bound_threepid(mxid, medium, address, id_server)
await self.store.remove_user_bound_threepid(
user_id=mxid,
medium=threepid["medium"],
address=threepid["address"],
id_server=id_server,
)
return changed
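The `try_unbind_threepid` hunks differ in call shape: explicit `medium`, `address` and `id_server` parameters on one side, a single `threepid` dict on the other. The same unbind request expressed both ways, against a stand-in handler (only the argument shapes are the point here):

from typing import Optional


class ToyIdentityHandler:
    """Stand-in that just accepts both call shapes."""

    async def try_unbind_threepid_explicit(
        self, mxid: str, medium: str, address: str, id_server: Optional[str]
    ) -> bool:
        return True

    async def try_unbind_threepid_dict(self, mxid: str, threepid: dict) -> bool:
        # threepid carries "medium", "address" and an optional "id_server".
        return True


async def unbind_both_ways(handler: ToyIdentityHandler) -> None:
    # Explicit-argument shape:
    await handler.try_unbind_threepid_explicit(
        "@alice:example.org", "email", "alice@example.org", id_server=None
    )
    # Dict shape:
    await handler.try_unbind_threepid_dict(
        "@alice:example.org",
        {"medium": "email", "address": "alice@example.org", "id_server": None},
    )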

View File

@ -124,6 +124,7 @@ class InitialSyncHandler:
as_client_event: bool = True,
include_archived: bool = False,
) -> JsonDict:
memberships = [Membership.INVITE, Membership.JOIN]
if include_archived:
memberships.append(Membership.LEAVE)
@ -153,8 +154,9 @@ class InitialSyncHandler:
tags_by_room = await self.store.get_tags_for_user(user_id)
account_data = await self.store.get_global_account_data_for_user(user_id)
account_data_by_room = await self.store.get_room_account_data_for_user(user_id)
account_data, account_data_by_room = await self.store.get_account_data_for_user(
user_id
)
public_room_ids = await self.store.get_public_room_ids()
@ -318,9 +320,11 @@ class InitialSyncHandler:
)
is_peeking = member_event_id is None
user_id = requester.user.to_string()
if membership == Membership.JOIN:
result = await self._room_initial_sync_joined(
requester, room_id, pagin_config, membership, is_peeking
user_id, room_id, pagin_config, membership, is_peeking
)
elif membership == Membership.LEAVE:
# The member_event_id will always be available if membership is set
@ -328,16 +332,10 @@ class InitialSyncHandler:
assert member_event_id
result = await self._room_initial_sync_parted(
requester,
room_id,
pagin_config,
membership,
member_event_id,
is_peeking,
user_id, room_id, pagin_config, membership, member_event_id, is_peeking
)
account_data_events = []
user_id = requester.user.to_string()
tags = await self.store.get_tags_for_room(user_id, room_id)
if tags:
account_data_events.append(
@ -354,7 +352,7 @@ class InitialSyncHandler:
async def _room_initial_sync_parted(
self,
requester: Requester,
user_id: str,
room_id: str,
pagin_config: PaginationConfig,
membership: str,
@ -373,17 +371,13 @@ class InitialSyncHandler:
)
messages = await filter_events_for_client(
self._storage_controllers,
requester.user.to_string(),
messages,
is_peeking=is_peeking,
self._storage_controllers, user_id, messages, is_peeking=is_peeking
)
start_token = StreamToken.START.copy_and_replace(StreamKeyType.ROOM, token)
end_token = StreamToken.START.copy_and_replace(StreamKeyType.ROOM, stream_token)
time_now = self.clock.time_msec()
serialize_options = SerializeEventConfig(requester=requester)
return {
"membership": membership,
@ -391,18 +385,14 @@ class InitialSyncHandler:
"messages": {
"chunk": (
# Don't bundle aggregations as this is a deprecated API.
self._event_serializer.serialize_events(
messages, time_now, config=serialize_options
)
self._event_serializer.serialize_events(messages, time_now)
),
"start": await start_token.to_string(self.store),
"end": await end_token.to_string(self.store),
},
"state": (
# Don't bundle aggregations as this is a deprecated API.
self._event_serializer.serialize_events(
room_state.values(), time_now, config=serialize_options
)
self._event_serializer.serialize_events(room_state.values(), time_now)
),
"presence": [],
"receipts": [],
@ -410,7 +400,7 @@ class InitialSyncHandler:
async def _room_initial_sync_joined(
self,
requester: Requester,
user_id: str,
room_id: str,
pagin_config: PaginationConfig,
membership: str,
@ -422,12 +412,9 @@ class InitialSyncHandler:
# TODO: These concurrently
time_now = self.clock.time_msec()
serialize_options = SerializeEventConfig(requester=requester)
# Don't bundle aggregations as this is a deprecated API.
state = self._event_serializer.serialize_events(
current_state.values(),
time_now,
config=serialize_options,
current_state.values(), time_now
)
now_token = self.hs.get_event_sources().get_current_token()
@ -465,10 +452,7 @@ class InitialSyncHandler:
if not receipts:
return []
return ReceiptEventSource.filter_out_private_receipts(
receipts,
requester.user.to_string(),
)
return ReceiptEventSource.filter_out_private_receipts(receipts, user_id)
presence, receipts, (messages, token) = await make_deferred_yieldable(
gather_results(
@ -487,23 +471,20 @@ class InitialSyncHandler:
)
messages = await filter_events_for_client(
self._storage_controllers,
requester.user.to_string(),
messages,
is_peeking=is_peeking,
self._storage_controllers, user_id, messages, is_peeking=is_peeking
)
start_token = now_token.copy_and_replace(StreamKeyType.ROOM, token)
end_token = now_token
time_now = self.clock.time_msec()
ret = {
"room_id": room_id,
"messages": {
"chunk": (
# Don't bundle aggregations as this is a deprecated API.
self._event_serializer.serialize_events(
messages, time_now, config=serialize_options
)
self._event_serializer.serialize_events(messages, time_now)
),
"start": await start_token.to_string(self.store),
"end": await end_token.to_string(self.store),

Some files were not shown because too many files have changed in this diff.