Mirror of https://git.anonymousland.org/anonymousland/synapse-product.git (synced 2025-04-21 05:06:29 -04:00)

Compare commits: meow-patch...master (82 commits)
SHA1
212eb8bf88
a939ba5976
f7f471bb80
7dbc917769
4eddcf6653
a7bdc4a1ed
3ec25f27ab
4f525ff19c
0c6d40f6c4
cca37a0ecc
ddbd79a981
cf1b4fab59
e3b312a8ca
543fcb6f17
932e104468
2af1a982c1
8314646cd3
506e24ffc4
c0854ce65a
869ef75cb7
2a869d257f
a9478e436e
89ae8ce7ca
c114befd6b
c69aae94cd
41f127e068
05e0a4089a
fd9cadcf53
95876cf5f1
242d2a27ce
6b6e91e610
02f74f3a99
848f7e3d5f
7ae4f7236a
15e975f68f
1eea662780
ecbe0ddbe7
c8665dd25d
c4f4dc35cd
8ef324ea6f
33a85cf08c
7ec1f096d3
65f10afb64
916b8061d2
2b78981736
b2fd03d075
69553052cc
d62cd940cb
8c3fa748e6
682d31c702
c369d82df0
e746f80b4f
521026897c
93f7955eba
1cd4fbc51d
189a878a35
b40657314e
4fc8875876
3f2ef205e2
f7e49afb99
d3afe59d5a
80884579f5
229ae5bcec
81a0dc35f7
965956160a
1ff2d20a6f
a74c099ece
1c95ddd09b
b2357a898c
335f52d595
682151a464
f8a584ed02
ec79870f14
1a1738eca2
a068ad7dd4
452b009eb0
adac949a41
9bb2eac719
4ed08ff72e
6def779a1a
91f8de7b56
647ff3ef65
@@ -109,11 +109,26 @@ sytest_tests = [
        "postgres": "multi-postgres",
        "workers": "workers",
    },
    {
        "sytest-tag": "focal",
        "postgres": "multi-postgres",
        "workers": "workers",
        "reactor": "asyncio",
    },
]

if not IS_PR:
    sytest_tests.extend(
        [
            {
                "sytest-tag": "focal",
                "reactor": "asyncio",
            },
            {
                "sytest-tag": "focal",
                "postgres": "postgres",
                "reactor": "asyncio",
            },
            {
                "sytest-tag": "testing",
                "postgres": "postgres",
@@ -8,6 +8,7 @@
!README.rst
!pyproject.toml
!poetry.lock
!requirements.txt
!Cargo.lock
!Cargo.toml
!build_rust.py
@@ -21,4 +21,8 @@ aff1eb7c671b0a3813407321d2702ec46c71fa56
0a00b7ff14890987f09112a2ae696c61001e6cf1

# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
c4268e3da64f1abb5b31deaeb5769adb6510c0a7

# Update black to 23.1.0 (#15103)
9bb2eac71962970d02842bca441f4bcdbbf93a11

2 .github/workflows/docs-pr-netlify.yaml vendored
@@ -14,7 +14,7 @@ jobs:
      # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
      # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
      - name: 📥 Download artifact
        uses: dawidd6/action-download-artifact@b59d8c6a6c5c6c6437954f470d963c0b20ea7415 # v2.25.0
        uses: dawidd6/action-download-artifact@5e780fc7bbd0cac69fc73271ed86edf5dcb72d67 # v2.26.0
        with:
          workflow: docs-pr.yaml
          run_id: ${{ github.event.workflow_run.id }}
4 .github/workflows/docs-pr.yaml vendored
@@ -12,7 +12,7 @@ jobs:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -39,7 +39,7 @@ jobs:
    name: Check links in documentation
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
2 .github/workflows/push_complement_image.yml vendored
@@ -48,7 +48,7 @@ jobs:
        with:
          ref: master
      - name: Login to registry
        uses: docker/login-action@v1
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
4 .github/workflows/tests.yml vendored
@@ -156,7 +156,8 @@ jobs:
        # We pin to a specific commit for paranoia's sake.
        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
        with:
          toolchain: 1.58.1
          # We use nightly so that it correctly groups together imports
          toolchain: nightly-2022-12-01
          components: rustfmt
      - uses: Swatinem/rust-cache@v2

@@ -368,6 +369,7 @@ jobs:
        SYTEST_BRANCH: ${{ github.head_ref }}
        POSTGRES: ${{ matrix.job.postgres && 1}}
        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
        ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }}
        WORKERS: ${{ matrix.job.workers && 1 }}
        BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
        TOP: ${{ github.workspace }}
2 .github/workflows/triage-incoming.yml vendored
@@ -6,7 +6,7 @@ on:

jobs:
  triage:
    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
    with:
      project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
      content_id: ${{ github.event.issue.node_id }}
19 .gitlab-ci.yml Normal file
@@ -0,0 +1,19 @@
image: docker:stable

stages:
- build

build amd64:
  stage: build
  tags:
  - amd64
  only:
  - master
  before_script:
  - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
  script:
  - synversion=$(cat pyproject.toml | grep '^version =' | sed -E 's/^version = "(.+)"$/\1/')
  - docker build --tag $CI_REGISTRY_IMAGE:latest --tag $CI_REGISTRY_IMAGE:$synversion .
  - docker push $CI_REGISTRY_IMAGE:latest
  - docker push $CI_REGISTRY_IMAGE:$synversion
  - docker rmi $CI_REGISTRY_IMAGE:latest $CI_REGISTRY_IMAGE:$synversion
106 CHANGES.md
@@ -1,3 +1,109 @@
Synapse 1.79.0rc1 (2023-03-07)
==============================

Features
--------

- Add two new Third Party Rules module API callbacks: [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier) and [`on_remove_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_remove_user_third_party_identifier). ([\#15044](https://github.com/matrix-org/synapse/issues/15044))
- Experimental support for [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967) to not require UIA for setting up cross-signing on first use. ([\#15077](https://github.com/matrix-org/synapse/issues/15077))
- Add media information to the command line [user data export tool](https://matrix-org.github.io/synapse/v1.79/usage/administration/admin_faq.html#how-can-i-export-user-data). ([\#15107](https://github.com/matrix-org/synapse/issues/15107))
- Add an [admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html) to delete a [specific event report](https://spec.matrix.org/v1.6/client-server-api/#reporting-content). ([\#15116](https://github.com/matrix-org/synapse/issues/15116))
- Add support for knocking to workers. ([\#15133](https://github.com/matrix-org/synapse/issues/15133))
- Allow use of the `/filter` Client-Server APIs on workers. ([\#15134](https://github.com/matrix-org/synapse/issues/15134))
- Update support for [MSC2677](https://github.com/matrix-org/matrix-spec-proposals/pull/2677): remove support for server-side aggregation of reactions. ([\#15172](https://github.com/matrix-org/synapse/issues/15172))
- Stabilise support for [MSC3758](https://github.com/matrix-org/matrix-spec-proposals/pull/3758): `event_property_is` push condition. ([\#15185](https://github.com/matrix-org/synapse/issues/15185))


Bugfixes
--------

- Fix a bug introduced in Synapse 1.75 that caused experimental support for deleting account data to raise an internal server error while using an account data writer worker. ([\#14869](https://github.com/matrix-org/synapse/issues/14869))
- Fix a long-standing bug where Synapse handled an unspecced field on push rules. ([\#15088](https://github.com/matrix-org/synapse/issues/15088))
- Fix a long-standing bug where a URL preview would break if the discovered oEmbed failed to download. ([\#15092](https://github.com/matrix-org/synapse/issues/15092))
- Fix a long-standing bug where an initial sync would not respond to changes to the list of ignored users if there was an initial sync cached. ([\#15163](https://github.com/matrix-org/synapse/issues/15163))
- Add the `transaction_id` in the events included in many endpoints' responses. ([\#15174](https://github.com/matrix-org/synapse/issues/15174))
- Fix a bug introduced in Synapse 1.78.0 where requests to claim dehydrated devices would fail with a `405` error. ([\#15180](https://github.com/matrix-org/synapse/issues/15180))
- Stop applying edits when bundling aggregations, per [MSC3925](https://github.com/matrix-org/matrix-spec-proposals/pull/3925). ([\#15193](https://github.com/matrix-org/synapse/issues/15193))
- Fix a long-standing bug where the user directory search was not case-insensitive for accented characters. ([\#15143](https://github.com/matrix-org/synapse/issues/15143))


Updates to the Docker image
---------------------------

- Improve startup logging in the with-workers Docker image. ([\#15186](https://github.com/matrix-org/synapse/issues/15186))


Improved Documentation
----------------------

- Document how to use caches in a module. ([\#14026](https://github.com/matrix-org/synapse/issues/14026))
- Clarify which worker processes the ThirdPartyRules' [`on_new_event`](https://matrix-org.github.io/synapse/v1.78/modules/third_party_rules_callbacks.html#on_new_event) module API callback runs on. ([\#15071](https://github.com/matrix-org/synapse/issues/15071))
- Document using [Shibboleth](https://www.shibboleth.net/) as an OpenID Provider. ([\#15112](https://github.com/matrix-org/synapse/issues/15112))
- Correct reference to `federation_verify_certificates` in configuration documentation. ([\#15139](https://github.com/matrix-org/synapse/issues/15139))
- Correct small documentation errors in some `MatrixFederationHttpClient` methods. ([\#15148](https://github.com/matrix-org/synapse/issues/15148))
- Correct the description of the behavior of `registration_shared_secret_path` on startup. ([\#15168](https://github.com/matrix-org/synapse/issues/15168))


Deprecations and Removals
-------------------------

- Deprecate the `on_threepid_bind` module callback, to be replaced by [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier). See [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.79/docs/upgrade.md#upgrading-to-v1790). ([\#15044](https://github.com/matrix-org/synapse/issues/15044))
- Remove the unspecced `room_alias` field from the [`/createRoom`](https://spec.matrix.org/v1.6/client-server-api/#post_matrixclientv3createroom) response. ([\#15093](https://github.com/matrix-org/synapse/issues/15093))
- Remove the unspecced `PUT` on the `/knock/{roomIdOrAlias}` endpoint. ([\#15189](https://github.com/matrix-org/synapse/issues/15189))
- Remove the undocumented and unspecced `type` parameter to the `/thumbnail` endpoint. ([\#15137](https://github.com/matrix-org/synapse/issues/15137))
- Remove unspecced and buggy `PUT` method on the unstable `/rooms/<room_id>/batch_send` endpoint. ([\#15199](https://github.com/matrix-org/synapse/issues/15199))


Internal Changes
----------------

- Run the integration test suites with the asyncio reactor enabled in CI. ([\#14101](https://github.com/matrix-org/synapse/issues/14101))
- Batch up storing state groups when creating a new room. ([\#14918](https://github.com/matrix-org/synapse/issues/14918))
- Update [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952) support based on changes to the MSC. ([\#15051](https://github.com/matrix-org/synapse/issues/15051))
- Refactor writing json data in `FileExfiltrationWriter`. ([\#15095](https://github.com/matrix-org/synapse/issues/15095))
- Tighten the login ratelimit defaults. ([\#15135](https://github.com/matrix-org/synapse/issues/15135))
- Fix a typo in an experimental config setting. ([\#15138](https://github.com/matrix-org/synapse/issues/15138))
- Refactor the media modules. ([\#15146](https://github.com/matrix-org/synapse/issues/15146), [\#15175](https://github.com/matrix-org/synapse/issues/15175))
- Improve type hints. ([\#15164](https://github.com/matrix-org/synapse/issues/15164))
- Move `get_event_report` and `get_event_reports_paginate` from `RoomStore` to `RoomWorkerStore`. ([\#15165](https://github.com/matrix-org/synapse/issues/15165))
- Remove dangling reference to being a reference implementation in docstring. ([\#15167](https://github.com/matrix-org/synapse/issues/15167))
- Add an option to force a rebuild of the "editable" complement image. ([\#15184](https://github.com/matrix-org/synapse/issues/15184))
- Use nightly rustfmt in CI. ([\#15188](https://github.com/matrix-org/synapse/issues/15188))
- Add a `get_next_txn` method to `StreamIdGenerator` to match `MultiWriterIdGenerator`. ([\#15191](https://github.com/matrix-org/synapse/issues/15191))
- Combine `AbstractStreamIdTracker` and `AbstractStreamIdGenerator`. ([\#15192](https://github.com/matrix-org/synapse/issues/15192))
- Automatically fix errors with `ruff`. ([\#15194](https://github.com/matrix-org/synapse/issues/15194))
- Refactor database transaction for query users' devices to reduce database pool contention. ([\#15215](https://github.com/matrix-org/synapse/issues/15215))
- Correct `test_icu_word_boundary_punctuation` so that it passes with the ICU versions available in Alpine and macOS. ([\#15177](https://github.com/matrix-org/synapse/issues/15177))

<details><summary>Locked dependency updates</summary>

- Bump actions/checkout from 2 to 3. ([\#15155](https://github.com/matrix-org/synapse/issues/15155))
- Bump black from 22.12.0 to 23.1.0. ([\#15103](https://github.com/matrix-org/synapse/issues/15103))
- Bump dawidd6/action-download-artifact from 2.25.0 to 2.26.0. ([\#15152](https://github.com/matrix-org/synapse/issues/15152))
- Bump docker/login-action from 1 to 2. ([\#15154](https://github.com/matrix-org/synapse/issues/15154))
- Bump matrix-org/backend-meta from 1 to 2. ([\#15156](https://github.com/matrix-org/synapse/issues/15156))
- Bump ruff from 0.0.237 to 0.0.252. ([\#15159](https://github.com/matrix-org/synapse/issues/15159))
- Bump serde_json from 1.0.93 to 1.0.94. ([\#15214](https://github.com/matrix-org/synapse/issues/15214))
- Bump types-commonmark from 0.9.2.1 to 0.9.2.2. ([\#15209](https://github.com/matrix-org/synapse/issues/15209))
- Bump types-opentracing from 2.4.10.1 to 2.4.10.3. ([\#15158](https://github.com/matrix-org/synapse/issues/15158))
- Bump types-pillow from 9.4.0.13 to 9.4.0.17. ([\#15211](https://github.com/matrix-org/synapse/issues/15211))
- Bump types-psycopg2 from 2.9.21.4 to 2.9.21.8. ([\#15210](https://github.com/matrix-org/synapse/issues/15210))
- Bump types-pyopenssl from 22.1.0.2 to 23.0.0.4. ([\#15213](https://github.com/matrix-org/synapse/issues/15213))
- Bump types-setuptools from 67.3.0.1 to 67.4.0.3. ([\#15160](https://github.com/matrix-org/synapse/issues/15160))
- Bump types-setuptools from 67.4.0.3 to 67.5.0.0. ([\#15212](https://github.com/matrix-org/synapse/issues/15212))
- Bump typing-extensions from 4.4.0 to 4.5.0. ([\#15157](https://github.com/matrix-org/synapse/issues/15157))
</details>


Synapse 1.78.0 (2023-02-28)
===========================

Bugfixes
--------

- Fix a bug introduced in Synapse 1.76 where 5s delays would occasionally occur in deployments using workers. ([\#15150](https://github.com/matrix-org/synapse/issues/15150))


Synapse 1.78.0rc1 (2023-02-21)
==============================

4 Cargo.lock generated
@@ -343,9 +343,9 @@ dependencies = [

[[package]]
name = "serde_json"
version = "1.0.93"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea"
dependencies = [
 "itoa",
 "ryu",
61 Dockerfile Normal file
@@ -0,0 +1,61 @@
ARG PYTHON_VERSION=3.11

FROM docker.io/python:${PYTHON_VERSION}-slim as builder

RUN apt-get update && apt-get install -y \
    build-essential \
    libffi-dev \
    libjpeg-dev \
    libpq-dev \
    libssl-dev \
    libwebp-dev \
    libxml++2.6-dev \
    libxslt1-dev \
    zlib1g-dev \
    openssl \
    git \
    curl \
    && rm -rf /var/lib/apt/lists/*

ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo

RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable

COPY synapse /synapse/synapse/
COPY rust /synapse/rust/
COPY README.rst pyproject.toml requirements.txt build_rust.py /synapse/

RUN pip install --prefix="/install" --no-warn-script-location --ignore-installed \
    --no-deps -r /synapse/requirements.txt \
    && pip install --prefix="/install" --no-warn-script-location \
    --no-deps \
    'git+https://github.com/maunium/synapse-simple-antispam#egg=synapse-simple-antispam' \
    'git+https://github.com/devture/matrix-synapse-shared-secret-auth@2.0.2#egg=shared_secret_authenticator' \
    && pip install --prefix="/install" --no-warn-script-location \
    --no-deps /synapse

FROM docker.io/python:${PYTHON_VERSION}-slim

RUN apt-get update && apt-get install -y \
    curl \
    libjpeg62-turbo \
    libpq5 \
    libwebp6 \
    xmlsec1 \
    libjemalloc2 \
    openssl \
    && rm -rf /var/lib/apt/lists/*

COPY --from=builder /install /usr/local

VOLUME ["/data"]
ENV LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libjemalloc.so.2"

ENTRYPOINT ["python3", "-m", "synapse.app.homeserver"]
CMD ["--keys-directory", "/data", "-c", "/data/homeserver.yaml"]

HEALTHCHECK --start-period=5s --interval=1m --timeout=5s \
    CMD curl -fSs http://localhost:8008/health || exit 1
69 README.md Normal file
@@ -0,0 +1,69 @@
# Maunium Synapse
This is a fork of [Synapse] to remove dumb limits and fix bugs that the
upstream devs don't want to fix.

The only official distribution is the docker image in the [GitLab container
registry], but you can also install from source ([upstream instructions]).

The master branch and `:latest` docker tag are upgraded to each upstream
release candidate very soon after release (usually within 10 minutes†). There
are also docker tags for each release, e.g. `:1.75.0`. If you don't want RCs,
use the specific release tags.

†If there are merge conflicts, the update may be delayed for up to a few days
after the full release.

[Synapse]: https://github.com/matrix-org/synapse
[GitLab container registry]: https://mau.dev/maunium/synapse/container_registry
[upstream instructions]: https://github.com/matrix-org/synapse/blob/develop/INSTALL.md#installing-from-source

## List of changes
* Default power level for room creator is 9001 instead of 100.
* Room creator can specify a custom room ID with the `room_id` param in the
  request body. If the room ID is already in use, it will return `M_CONFLICT`.
* ~~URL previewer user agent includes `Bot` so Twitter previews work properly.~~
  Upstreamed after over 2 years 🎉
* ~~Local event creation concurrency is disabled to avoid unnecessary state
  resolution.~~ Upstreamed after over 3 years 🎉
* Register admin API can register invalid user IDs.
* Docker image with jemalloc enabled by default.
* Config option to allow specific users to send events without unnecessary
  validation.
* Config option to allow specific users to receive events that are usually
  filtered away (e.g. `org.matrix.dummy_event` and `m.room.aliases`).
* Config option to allow specific users to use timestamp massaging without
  being appservice users.
* Config option to allow appservices to use MSC2716 batch sending as any local user.
* Removed bad pusher URL validation.
* webp images are thumbnailed to webp instead of jpeg to avoid losing
  transparency.
* Media repo `Cache-Control` header says `immutable` and 1 year for all media
  that exists, as media IDs in Matrix are immutable.
* Allowed sending custom data with read receipts.

You can view the full list of changes on the [meow-patchset] branch.
Additionally, historical patch sets are saved as `meow-patchset-vX` [tags].

[meow-patchset]: https://mau.dev/maunium/synapse/-/compare/patchset-base...meow-patchset
[tags]: https://mau.dev/maunium/synapse/-/tags?search=meow-patchset&sort=updated_desc

## Configuration
Generating a new config will include the `meow` section, but this is here for
reference for existing configs.

```yaml
meow:
  # List of users who aren't subject to unnecessary validation in the C-S API.
  validation_override:
    - "@you:example.com"
  # List of users who will get org.matrix.dummy_event and m.room.aliases events down /sync
  filter_override:
    - "@you:example.com"
  # Whether or not the admin API should be able to register invalid user IDs.
  admin_api_register_invalid: true
  # List of users who can use timestamp massaging without being appservices
  timestamp_override:
    - "@you:example.com"
  # Whether appservices should be allowed to use MSC2716 batch sending as any local user.
  appservice_batch_send_any: false
```
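To make the fork's `room_id` override from the README's change list concrete, here is a minimal sketch of a Client-Server `/createRoom` request that passes the custom `room_id` field in the request body. The homeserver URL, access token, and room ID are placeholders; the field itself is a fork-specific extension rather than part of the Matrix spec, and the `M_CONFLICT` behaviour is as described above.

```python
import requests

ACCESS_TOKEN = "syt_..."  # assumed: a valid access token on a server running this fork
HOMESERVER = "https://matrix.example.com"  # assumed homeserver base URL

resp = requests.post(
    f"{HOMESERVER}/_matrix/client/v3/createRoom",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json={
        "name": "My room",
        # Fork-specific extension: ask for a specific room ID.
        # If the ID is already in use, the server responds with M_CONFLICT.
        "room_id": "!mycustomid:example.com",
    },
    timeout=10,
)
resp.raise_for_status()
print(resp.json()["room_id"])
```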
12 debian/changelog vendored
@@ -1,3 +1,15 @@
matrix-synapse-py3 (1.79.0~rc1) stable; urgency=medium

  * New Synapse release 1.79.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Mar 2023 12:03:49 +0000

matrix-synapse-py3 (1.78.0) stable; urgency=medium

  * New Synapse release 1.78.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Feb 2023 08:56:03 -0800

matrix-synapse-py3 (1.78.0~rc1) stable; urgency=medium

  * Add `matrix-org-archive-keyring` package as recommended.

@@ -142,6 +142,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
            "^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases",
            "^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
            "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
@@ -204,6 +205,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/send",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
            "^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
        ],
@@ -674,17 +676,21 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    if not os.path.exists(config_path):
        log("Generating base homeserver config")
        generate_base_homeserver_config()

    else:
        log("Base homeserver config exists—not regenerating")
    # This script may be run multiple times (mostly by Complement, see note at top of file).
    # Don't re-configure workers in this instance.
    mark_filepath = "/conf/workers_have_been_configured"
    if not os.path.exists(mark_filepath):
        # Always regenerate all other config files
        log("Generating worker config files")
        generate_worker_files(environ, config_path, data_dir)

        # Mark workers as being configured
        with open(mark_filepath, "w") as f:
            f.write("")
    else:
        log("Worker config exists—not regenerating")

    # Lifted right out of start.py
    jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)
@@ -169,3 +169,17 @@ The following fields are returned in the JSON response body:
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
  have a canonical alias set.
* `event_json`: object - Details of the original event that was reported.

# Delete a specific event report

This API deletes a specific event report. If the request is successful, the response body
will be an empty JSON object.

The api is:
```
DELETE /_synapse/admin/v1/event_reports/<report_id>
```

**URL parameters:**

* `report_id`: string - The ID of the event report.
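As a usage illustration for the delete-report endpoint documented above, the snippet below issues the `DELETE` request with an admin access token. The homeserver URL, token, and report ID are placeholder assumptions; the empty-object success response is taken from the documentation.

```python
import requests

ADMIN_TOKEN = "syt_admin_..."  # assumed: access token of a server admin
BASE_URL = "https://matrix.example.com"  # assumed homeserver base URL
report_id = "42"  # assumed: ID of an existing event report

resp = requests.delete(
    f"{BASE_URL}/_synapse/admin/v1/event_reports/{report_id}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
# On success the API returns an empty JSON object.
assert resp.json() == {}
```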
@@ -307,8 +307,8 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a

```python
async def check_media_file_for_spam(
    file_wrapper: "synapse.rest.media.v1.media_storage.ReadableFileWrapper",
    file_info: "synapse.rest.media.v1._base.FileInfo",
    file_wrapper: "synapse.media.media_storage.ReadableFileWrapper",
    file_info: "synapse.media._base.FileInfo",
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```

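For orientation, here is a minimal sketch of a spam-checker module that registers `check_media_file_for_spam` using the new `synapse.media` type paths shown in the hunk above. The size threshold and the use of the `write_chunks_to` streaming helper are illustrative assumptions; only the callback name and signature come from the documentation.

```python
import io
from typing import Union

from synapse.module_api import NOT_SPAM, ModuleApi, errors


class MediaSizeChecker:
    """Example module: reject uploads larger than an arbitrary (assumed) threshold."""

    MAX_BYTES = 10 * 1024 * 1024  # illustrative limit, not a Synapse default

    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            check_media_file_for_spam=self.check_media_file_for_spam,
        )

    async def check_media_file_for_spam(
        self,
        file_wrapper: "synapse.media.media_storage.ReadableFileWrapper",
        file_info: "synapse.media._base.FileInfo",
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]:
        # Read the uploaded file into memory to measure it (fine for a sketch,
        # not ideal for very large uploads).
        buffer = io.BytesIO()
        await file_wrapper.write_chunks_to(buffer.write)
        if buffer.getbuffer().nbytes > self.MAX_BYTES:
            return errors.Codes.FORBIDDEN
        return NOT_SPAM
```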
@@ -146,6 +146,9 @@ Note that this callback is called when the event has already been processed and
into the room, which means this callback cannot be used to deny persisting the event. To
deny an incoming event, see [`check_event_for_spam`](spam_checker_callbacks.md#check_event_for_spam) instead.

For any given event, this callback will be called on every worker process, even if that worker will not end up
acting on that event. This callback will not be called for events that are marked as rejected.

If multiple modules implement this callback, Synapse runs them all in order.

### `check_can_shutdown_room`
@@ -251,6 +254,11 @@ If multiple modules implement this callback, Synapse runs them all in order.

_First introduced in Synapse v1.56.0_

**<span style="color:red">
This callback is deprecated in favour of the `on_add_user_third_party_identifier` callback, which
features the same functionality. The only difference is in name.
</span>**

```python
async def on_threepid_bind(user_id: str, medium: str, address: str) -> None:
```
@@ -265,6 +273,44 @@ server_.

If multiple modules implement this callback, Synapse runs them all in order.

### `on_add_user_third_party_identifier`

_First introduced in Synapse v1.79.0_

```python
async def on_add_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
```

Called after successfully creating an association between a user and a third-party identifier
(email address, phone number). The module is given the Matrix ID of the user the
association is for, as well as the medium (`email` or `msisdn`) and address of the
third-party identifier (i.e. an email address).

Note that this callback is _not_ called if a user attempts to bind their third-party identifier
to an identity server (via a call to [`POST
/_matrix/client/v3/account/3pid/bind`](https://spec.matrix.org/v1.5/client-server-api/#post_matrixclientv3account3pidbind)).

If multiple modules implement this callback, Synapse runs them all in order.

### `on_remove_user_third_party_identifier`

_First introduced in Synapse v1.79.0_

```python
async def on_remove_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
```

Called after successfully removing an association between a user and a third-party identifier
(email address, phone number). The module is given the Matrix ID of the user the
association is for, as well as the medium (`email` or `msisdn`) and address of the
third-party identifier (i.e. an email address).

Note that this callback is _not_ called if a user attempts to unbind their third-party
identifier from an identity server (via a call to [`POST
/_matrix/client/v3/account/3pid/unbind`](https://spec.matrix.org/v1.5/client-server-api/#post_matrixclientv3account3pidunbind)).

If multiple modules implement this callback, Synapse runs them all in order.

## Example

The example below is a module that implements the third-party rules callback
@@ -297,4 +343,4 @@ class EventCensorer:
        )
        event_dict["content"] = new_event_content
        return event_dict
```
```
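To tie the two new callbacks together, here is a minimal sketch of a third-party rules module that registers both `on_add_user_third_party_identifier` and `on_remove_user_third_party_identifier`. The logging behaviour is purely illustrative, and the assumption is that `register_third_party_rules_callbacks` accepts these callback names as keyword arguments from v1.79.0 onwards; only the callback names and signatures come from the documentation above.

```python
import logging
from typing import Any

from synapse.module_api import ModuleApi

logger = logging.getLogger(__name__)


class ThreepidAuditor:
    """Logs when local users add or remove email/msisdn identifiers."""

    def __init__(self, config: Any, api: ModuleApi):
        api.register_third_party_rules_callbacks(
            on_add_user_third_party_identifier=self.on_add,
            on_remove_user_third_party_identifier=self.on_remove,
        )

    async def on_add(self, user_id: str, medium: str, address: str) -> None:
        logger.info("User %s added %s identifier %s", user_id, medium, address)

    async def on_remove(self, user_id: str, medium: str, address: str) -> None:
        logger.info("User %s removed %s identifier %s", user_id, medium, address)
```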
@@ -83,3 +83,59 @@ the callback name as the argument name and the function as its value. A

Callbacks for each category can be found on their respective page of the
[Synapse documentation website](https://matrix-org.github.io/synapse).

## Caching

_Added in Synapse 1.74.0._

Modules can leverage Synapse's caching tools to manage their own cached functions. This
can be helpful for modules that need to repeatedly request the same data from the database
or a remote service.

Functions that need to be wrapped with a cache need to be decorated with a `@cached()`
decorator (which can be imported from `synapse.module_api`) and registered with the
[`ModuleApi.register_cached_function`](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L888)
API when initialising the module. If the module needs to invalidate an entry in a cache,
it needs to use the [`ModuleApi.invalidate_cache`](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L904)
API, with the function to invalidate the cache of and the key(s) of the entry to
invalidate.

Below is an example of a simple module using a cached function:

```python
from typing import Any
from synapse.module_api import cached, ModuleApi

class MyModule:
    def __init__(self, config: Any, api: ModuleApi):
        self.api = api

        # Register the cached function so Synapse knows how to correctly invalidate
        # entries for it.
        self.api.register_cached_function(self.get_department_for_user)

    @cached()
    async def get_department_for_user(self, user_id: str) -> str:
        """A function with a cache."""
        # Request a department from an external service.
        response = await self.http_client.get_json(
            "https://int.example.com/users", {"user_id": user_id}
        )
        return response["department"]

    async def do_something_with_users(self) -> None:
        """Calls the cached function and then invalidates an entry in its cache."""

        user_id = "@alice:example.com"

        # Get the user. Since get_department_for_user is wrapped with a cache,
        # the return value for this user_id will be cached.
        department = await self.get_department_for_user(user_id)

        # Do something with `department`...

        # Let's say something has changed with our user, and the entry we have for
        # them in the cache is out of date, so we want to invalidate it.
        await self.api.invalidate_cache(self.get_department_for_user, (user_id,))
```

See the [`cached` docstring](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L190) for more details.
@@ -590,6 +590,47 @@ oidc_providers:

Note that the fields `client_id` and `client_secret` are taken from the CURL response above.

### Shibboleth with OIDC Plugin

[Shibboleth](https://www.shibboleth.net/) is an open Standard IdP solution widely used by Universities.

1. Shibboleth needs the [OIDC Plugin](https://shibboleth.atlassian.net/wiki/spaces/IDPPLUGINS/pages/1376878976/OIDC+OP) installed and working correctly.
2. Create a new config on the IdP Side, ensure that the `client_id` and `client_secret`
   are randomly generated data.
```json
{
    "client_id": "SOME-CLIENT-ID",
    "client_secret": "SOME-SUPER-SECRET-SECRET",
    "response_types": ["code"],
    "grant_types": ["authorization_code"],
    "scope": "openid profile email",
    "redirect_uris": ["https://[synapse public baseurl]/_synapse/client/oidc/callback"]
}
```

Synapse config:

```yaml
oidc_providers:
  # Shibboleth IDP
  #
  - idp_id: shibboleth
    idp_name: "Shibboleth Login"
    discover: true
    issuer: "https://YOUR-IDP-URL.TLD"
    client_id: "YOUR_CLIENT_ID"
    client_secret: "YOUR-CLIENT-SECRECT-FROM-YOUR-IDP"
    scopes: ["openid", "profile", "email"]
    allow_existing_users: true
    user_profile_method: "userinfo_endpoint"
    user_mapping_provider:
      config:
        subject_claim: "sub"
        localpart_template: "{{ user.sub.split('@')[0] }}"
        display_name_template: "{{ user.name }}"
        email_template: "{{ user.email }}"
```

### Twitch

1. Setup a developer account on [Twitch](https://dev.twitch.tv/)
@@ -88,6 +88,30 @@ process, for example:
    dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

# Upgrading to v1.79.0

## The `on_threepid_bind` module callback method has been deprecated

Synapse v1.79.0 deprecates the
[`on_threepid_bind`](modules/third_party_rules_callbacks.md#on_threepid_bind)
"third-party rules" Synapse module callback method in favour of a new module method,
[`on_add_user_third_party_identifier`](modules/third_party_rules_callbacks.md#on_add_user_third_party_identifier).
`on_threepid_bind` will be removed in a future version of Synapse. You should check whether any Synapse
modules in use in your deployment are making use of `on_threepid_bind`, and update them where possible.

The arguments and functionality of the new method are the same.

The justification behind the name change is that the old method's name, `on_threepid_bind`, was
misleading. A user is considered to "bind" their third-party ID to their Matrix ID only if they
do so via an [identity server](https://spec.matrix.org/latest/identity-service-api/)
(so that users on other homeservers may find them). But this method was not called in that case -
it was only called when a user added a third-party identifier on the local homeserver.

Module developers may also be interested in the related
[`on_remove_user_third_party_identifier`](modules/third_party_rules_callbacks.md#on_remove_user_third_party_identifier)
module callback method that was also added in Synapse v1.79.0. This new method is called when a
user removes a third-party identifier from their account.

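As a concrete illustration of the migration described in the upgrade note above, the sketch below shows a hypothetical module switching from the deprecated `on_threepid_bind` registration to `on_add_user_third_party_identifier`. The handler body is unchanged because, as stated above, the arguments and functionality are the same; the module class and its logic are assumptions for the example.

```python
from typing import Any

from synapse.module_api import ModuleApi


class MyThreepidModule:
    def __init__(self, config: Any, api: ModuleApi):
        # Before (deprecated as of Synapse v1.79.0):
        # api.register_third_party_rules_callbacks(
        #     on_threepid_bind=self.handle_new_threepid,
        # )

        # After: same handler, new callback name.
        api.register_third_party_rules_callbacks(
            on_add_user_third_party_identifier=self.handle_new_threepid,
        )

    async def handle_new_threepid(self, user_id: str, medium: str, address: str) -> None:
        # React to a user adding an email/msisdn on the local homeserver.
        ...
```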
# Upgrading to v1.78.0

## Deprecate the /_synapse/admin/v1/media/<server_name>/delete admin API

@@ -70,13 +70,55 @@ output-directory
│ ├───state
│ ├───invite_state
│ └───knock_state
└───user_data
├───account_data
│ ├───global
│ └───<room_id>
├───connections
├───devices
└───profile
├───user_data
│ ├───account_data
│ │ ├───global
│ │ └───<room_id>
│ ├───connections
│ ├───devices
│ └───profile
└───media_ids
└───<media_id>
```

The `media_ids` folder contains only the metadata of the media uploaded by the user.
It does not contain the media itself.
Furthermore, only the `media_ids` that Synapse manages itself are exported.
If another media repository (e.g. [matrix-media-repo](https://github.com/turt2live/matrix-media-repo))
is used, the data must be exported separately.

With the `media_ids` the media files can be downloaded.
Media that have been sent in encrypted rooms are only retrieved in encrypted form.
The following script can help with download the media files:

```bash
#!/usr/bin/env bash

# Parameters
#
# source_directory: Directory which contains the export with the media_ids.
# target_directory: Directory into which all files are to be downloaded.
# repository_url: Address of the media repository resp. media worker.
# serverName: Name of the server (`server_name` from homeserver.yaml).
#
# Example:
# ./download_media.sh /tmp/export_data/media_ids/ /tmp/export_data/media_files/ http://localhost:8008 matrix.example.com

source_directory=$1
target_directory=$2
repository_url=$3
serverName=$4

mkdir -p $target_directory

for file in $source_directory/*; do
    filename=$(basename ${file})
    url=$repository_url/_matrix/media/v3/download/$serverName/$filename
    echo "Downloading $filename - $url"
    if ! wget -o /dev/null -P $target_directory $url; then
        echo "Could not download $filename"
    fi
done
```

Manually resetting passwords
@@ -87,7 +129,7 @@ can reset a user's password using the [admin API](../../admin_api/user_admin_api

I have a problem with my server. Can I just delete my database and start again?
---
Deleting your database is unlikely to make anything better.
Deleting your database is unlikely to make anything better.

It's easy to make the mistake of thinking that you can start again from a clean
slate by dropping your database, but things don't work like that in a federated
@@ -102,7 +144,7 @@ Come and seek help in https://matrix.to/#/#synapse:matrix.org.

There are two exceptions when it might be sensible to delete your database and start again:
* You have *never* joined any rooms which are federated with other servers. For
  instance, a local deployment which the outside world can't talk to.
  instance, a local deployment which the outside world can't talk to.
* You are changing the `server_name` in the homeserver configuration. In effect
  this makes your server a completely new one from the point of view of the network,
  so in this case it makes sense to start with a clean database.
@@ -115,7 +157,7 @@ Using the following curl command:
curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
```
`<access-token>` - can be obtained in riot by looking in the riot settings, down the bottom is:
Access Token:\<click to reveal\>
Access Token:\<click to reveal\>

`<room-alias>` - the room alias, eg. #my_room:matrix.org this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org

@@ -152,13 +194,13 @@ What are the biggest rooms on my server?
---

```sql
SELECT s.canonical_alias, g.room_id, count(*) AS num_rows
FROM
  state_groups_state AS g,
  room_stats_state AS s
WHERE g.room_id = s.room_id
SELECT s.canonical_alias, g.room_id, count(*) AS num_rows
FROM
  state_groups_state AS g,
  room_stats_state AS s
WHERE g.room_id = s.room_id
GROUP BY s.canonical_alias, g.room_id
ORDER BY num_rows desc
ORDER BY num_rows desc
LIMIT 10;
```

@@ -1105,7 +1105,7 @@ This setting should only be used in very specific cases, such as
federation over Tor hidden services and similar. For private networks
of homeservers, you likely want to use a private CA instead.

Only effective if `federation_verify_certicates` is `true`.
Only effective if `federation_verify_certificates` is `true`.

Example configuration:
```yaml
@@ -1518,11 +1518,11 @@ rc_registration_token_validity:

This option specifies several limits for login:
* `address` ratelimits login requests based on the client's IP
  address. Defaults to `per_second: 0.17`, `burst_count: 3`.
  address. Defaults to `per_second: 0.003`, `burst_count: 5`.

* `account` ratelimits login requests based on the account the
  client is attempting to log into. Defaults to `per_second: 0.17`,
  `burst_count: 3`.
  client is attempting to log into. Defaults to `per_second: 0.03`,
  `burst_count: 5`.

* `failed_attempts` ratelimits login requests based on the account the
  client is attempting to log into, based on the amount of failed login
@@ -2227,8 +2227,8 @@ allows the shared secret to be specified in an external file.

The file should be a plain text file, containing only the shared secret.

If this file does not exist, Synapse will create a new signing
key on startup and store it in this file.
If this file does not exist, Synapse will create a new shared
secret on startup and store it in this file.

Example configuration:
```yaml
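Since the hunk above clarifies that `registration_shared_secret_path` holds a registration shared secret rather than a signing key, here is a hedged sketch of how such a secret is typically used with Synapse's shared-secret registration admin API: fetch a nonce, HMAC-SHA1 the registration parameters with the secret, and post them back. The base URL, credentials, and secret path are placeholders, and the exact endpoint behaviour is an assumption based on the documented shared-secret registration flow rather than on this diff.

```python
import hashlib
import hmac

import requests

BASE_URL = "https://matrix.example.com"  # assumed homeserver base URL
# Assumed path: wherever registration_shared_secret_path points on the server.
SHARED_SECRET = open("/path/to/registration_shared_secret").read().strip()

# 1. Fetch a one-time nonce from the admin register endpoint.
nonce = requests.get(f"{BASE_URL}/_synapse/admin/v1/register", timeout=10).json()["nonce"]

# 2. HMAC-SHA1 the registration parameters with the shared secret.
username, password, admin = "alice", "wonderland", False
mac = hmac.new(SHARED_SECRET.encode("utf8"), digestmod=hashlib.sha1)
mac.update(nonce.encode("utf8"))
mac.update(b"\x00" + username.encode("utf8"))
mac.update(b"\x00" + password.encode("utf8"))
mac.update(b"\x00" + (b"admin" if admin else b"notadmin"))

# 3. Post the registration request with the computed MAC.
resp = requests.post(
    f"{BASE_URL}/_synapse/admin/v1/register",
    json={
        "nonce": nonce,
        "username": username,
        "password": password,
        "admin": admin,
        "mac": mac.hexdigest(),
    },
    timeout=10,
)
resp.raise_for_status()
```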
@@ -232,6 +232,7 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/joined_rooms$
    ^/_matrix/client/v1/rooms/.*/timestamp_to_event$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/search$
    ^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)

    # Encryption requests
    ^/_matrix/client/(r0|v3|unstable)/keys/query$
@@ -251,6 +252,7 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state/
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/join/
    ^/_matrix/client/(api/v1|r0|v3|unstable)/knock/
    ^/_matrix/client/(api/v1|r0|v3|unstable)/profile/

    # Account data requests
3 mypy.ini
@@ -36,9 +36,6 @@ exclude = (?x)
[mypy-synapse.federation.transport.client]
disallow_untyped_defs = False

[mypy-synapse.http.client]
disallow_untyped_defs = False

[mypy-synapse.http.matrixfederationclient]
disallow_untyped_defs = False

178 poetry.lock generated
@@ -90,32 +90,46 @@ typecheck = ["mypy"]

[[package]]
name = "black"
version = "22.12.0"
version = "23.1.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
    {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
    {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
    {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
    {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
    {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
    {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
    {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
    {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
    {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
    {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
    {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
    {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
    {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
    {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
    {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
    {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
    {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
    {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
    {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
    {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
    {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
    {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
    {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
    {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
    {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
    {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
    {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
    {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
    {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
    {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
    {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
    {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
    {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
    {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
    {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
    {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
    {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
]

[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}

@@ -1971,28 +1985,29 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]

[[package]]
name = "ruff"
version = "0.0.237"
version = "0.0.252"
description = "An extremely fast Python linter, written in Rust."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
    {file = "ruff-0.0.237-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:2ea04d826ffca58a7ae926115a801960c757d53c9027f2ca9acbe84c9f2b2f04"},
    {file = "ruff-0.0.237-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8ed113937fab9f73f8c1a6c0350bb4fe03e951370139c6e0adb81f48a8dcf4c6"},
    {file = "ruff-0.0.237-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9bcb71a3efb5fe886eb48d739cfae5df4a15617e7b5a7668aa45ebf74c0d3fa"},
    {file = "ruff-0.0.237-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:80ce10718abbf502818c0d650ebab99fdcef5e937a1ded3884493ddff804373c"},
    {file = "ruff-0.0.237-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cc6cb7c1efcc260df5a939435649610a28f9f438b8b313384c8985ac6574f9f"},
    {file = "ruff-0.0.237-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7eef0c7a1e45a4e30328ae101613575944cbf47a3a11494bf9827722da6c66b3"},
    {file = "ruff-0.0.237-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d122433a21ce4a21fbba34b73fc3add0ccddd1643b3ff5abb8d2767952f872e"},
    {file = "ruff-0.0.237-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b76311335adda4de3c1d471e64e89a49abfeebf02647e3db064e7740e7f36ed6"},
    {file = "ruff-0.0.237-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c5977b643aaf2b6f84641265f835b6c7f67fcca38dbae08c4f15602e084ca0"},
    {file = "ruff-0.0.237-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3d6ed86d0d4d742360a262d52191581f12b669a68e59ae3b52e80d7483b3d7b3"},
    {file = "ruff-0.0.237-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fedfb60f986c26cdb1809db02866e68508db99910c587d2c4066a5c07aa85593"},
    {file = "ruff-0.0.237-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb96796be5919871fa9ae7e88968ba9e14306d9a3f217ca6c204f68a5abeccdd"},
    {file = "ruff-0.0.237-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea239cfedf67b74ea4952e1074bb99a4281c2145441d70bc7e2f058d5c49f1c9"},
    {file = "ruff-0.0.237-py3-none-win32.whl", hash = "sha256:8d6a1d21ae15da2b1dcffeee2606e90de0e6717e72957da7d16ab6ae18dd0058"},
    {file = "ruff-0.0.237-py3-none-win_amd64.whl", hash = "sha256:525e5ec81cee29b993f77976026a6bf44528a14aa6edb1ef47bd8079147395ae"},
    {file = "ruff-0.0.237.tar.gz", hash = "sha256:630c575f543733adf6c19a11d9a02ca9ecc364bd7140af8a4c854d4728be6b56"},
    {file = "ruff-0.0.252-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:349367a227c4db7abbc3a9993efea8a608b5bea4bb4a1e5fc6f0d56819524f92"},
    {file = "ruff-0.0.252-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:ce77f9106d96b4faf7865860fb5155b9deaf6f699d9c279118c5ad947739ecaf"},
    {file = "ruff-0.0.252-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edadb0b050293b4e60dab979ba6a4e734d9c899cbe316a0ee5b65e3cdd39c750"},
    {file = "ruff-0.0.252-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4efdae98937d1e4d23ab0b7fc7e8e6b6836cc7d2d42238ceeacbc793ef780542"},
    {file = "ruff-0.0.252-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8546d879f7d3f669379a03e7b103d90e11901976ab508aeda59c03dfd8a359e"},
    {file = "ruff-0.0.252-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83fdc7169b6c1fb5fe8d1cdf345697f558c1b433ef97df9ca11defa2a8f3ee9e"},
    {file = "ruff-0.0.252-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84ed9be1a17e2a556a571a5b959398633dd10910abd8dcf8b098061e746e892d"},
    {file = "ruff-0.0.252-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f5e77bd9ba4438cf2ee32154e2673afe22f538ef29f5d65ca47e3dc46c42cf8"},
    {file = "ruff-0.0.252-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5179b94b45c0f8512eaff3ab304c14714a46df2e9ca72a9d96084adc376b71"},
    {file = "ruff-0.0.252-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:92efd8a71157595df5bc46aaaa0613d8a2fbc5cddc53ae7b749c16025c324732"},
    {file = "ruff-0.0.252-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd350fc10832cfd28e681d829a8aa83ea3e653326e0ea9d98637dfb8d46177d2"},
    {file = "ruff-0.0.252-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f119240c9631216e846166e06023b1d878e25fbac93bf20da50069e91cfbfaee"},
    {file = "ruff-0.0.252-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5c5a49f89f5ede93d16eddfeeadd7e5739ec703e8f63ac95eac30236b9e49da3"},
    {file = "ruff-0.0.252-py3-none-win32.whl", hash = "sha256:89a897dc743f2fe063483ea666097e72e848f4bbe40493fe0533e61799959f6e"},
    {file = "ruff-0.0.252-py3-none-win_amd64.whl", hash = "sha256:cdc89ad6ff88519b1fb1816ac82a9ad910762c90ff5fd64dda7691b72d36aff7"},
    {file = "ruff-0.0.252-py3-none-win_arm64.whl", hash = "sha256:4b594a17cf53077165429486650658a0e1b2ac6ab88954f5afd50d2b1b5657a9"},
    {file = "ruff-0.0.252.tar.gz", hash = "sha256:6992611ab7bdbe7204e4831c95ddd3febfeece2e6f5e44bbed044454c7db0f63"},
]

[[package]]
@ -2560,66 +2575,14 @@ files = [

[[package]]
name = "types-commonmark"
version = "0.9.2.1"
version = "0.9.2.2"
description = "Typing stubs for commonmark"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-commonmark-0.9.2.1.tar.gz", hash = "sha256:db8277e6aeb83429265eccece98a24954a9a502dde7bc7cf840a8741abd96b86"},
{file = "types_commonmark-0.9.2.1-py3-none-any.whl", hash = "sha256:9d5f500cb7eced801bde728137b0a10667bd853d328db641d03141f189e3aab4"},
]

[[package]]
name = "types-cryptography"
version = "3.3.15"
description = "Typing stubs for cryptography"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-cryptography-3.3.15.tar.gz", hash = "sha256:a7983a75a7b88a18f88832008f0ef140b8d1097888ec1a0824ec8fb7e105273b"},
{file = "types_cryptography-3.3.15-py3-none-any.whl", hash = "sha256:d9b0dd5465d7898d400850e7f35e5518aa93a7e23d3e11757cd81b4777089046"},
]

[package.dependencies]
types-enum34 = "*"
types-ipaddress = "*"

[[package]]
name = "types-docutils"
version = "0.19.1.1"
description = "Typing stubs for docutils"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-docutils-0.19.1.1.tar.gz", hash = "sha256:be0a51ba1c7dd215d9d2df66d6845e63c1009b4bbf4c5beb87a0d9745cdba962"},
{file = "types_docutils-0.19.1.1-py3-none-any.whl", hash = "sha256:a024cada35f0c13cc45eb0b68a102719018a634013690b7fef723bcbfadbd1f1"},
]

[[package]]
name = "types-enum34"
version = "1.1.8"
description = "Typing stubs for enum34"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-enum34-1.1.8.tar.gz", hash = "sha256:6f9c769641d06d73a55e11c14d38ac76fcd37eb545ce79cebb6eec9d50a64110"},
{file = "types_enum34-1.1.8-py3-none-any.whl", hash = "sha256:05058c7a495f6bfaaca0be4aeac3cce5cdd80a2bad2aab01fd49a20bf4a0209d"},
]

[[package]]
name = "types-ipaddress"
version = "1.0.8"
description = "Typing stubs for ipaddress"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-ipaddress-1.0.8.tar.gz", hash = "sha256:a03df3be5935e50ba03fa843daabff539a041a28e73e0fce2c5705bee54d3841"},
{file = "types_ipaddress-1.0.8-py3-none-any.whl", hash = "sha256:4933b74da157ba877b1a705d64f6fa7742745e9ffd65e51011f370c11ebedb55"},
{file = "types-commonmark-0.9.2.2.tar.gz", hash = "sha256:f3259350634c2ce68ae503398430482f7cf44e5cae3d344995e916fbf453b4be"},
{file = "types_commonmark-0.9.2.2-py3-none-any.whl", hash = "sha256:d3d878692615e7fbe47bf19ba67497837b135812d665012a3d42219c1f2c3a61"},
]

[[package]]
@ -2636,54 +2599,54 @@ files = [

[[package]]
name = "types-opentracing"
version = "2.4.10.1"
version = "2.4.10.3"
description = "Typing stubs for opentracing"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-opentracing-2.4.10.1.tar.gz", hash = "sha256:49e7e52b8b6e221865a9201fc8c2df0bcda8e7098d4ebb35903dbfa4b4d29195"},
{file = "types_opentracing-2.4.10.1-py3-none-any.whl", hash = "sha256:eb63394acd793e7d9e327956242349fee14580a87c025408dc268d4dd883cc24"},
{file = "types-opentracing-2.4.10.3.tar.gz", hash = "sha256:b277f114265b41216714f9c77dffcab57038f1730fd141e2c55c5c9f6f2caa87"},
{file = "types_opentracing-2.4.10.3-py3-none-any.whl", hash = "sha256:60244d718fcd9de7043645ecaf597222d550432507098ab2e6268f7b589a7fa7"},
]

[[package]]
name = "types-pillow"
version = "9.4.0.13"
version = "9.4.0.17"
description = "Typing stubs for Pillow"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-Pillow-9.4.0.13.tar.gz", hash = "sha256:4510aa98a28947bf63f2b29edebbd11b7cff8647d90b867cec9b3674c0a8c321"},
{file = "types_Pillow-9.4.0.13-py3-none-any.whl", hash = "sha256:14a8a19021b8fe569a9fef9edc64a8d8a4aef340e38669d4fb3dc05cfd941130"},
{file = "types-Pillow-9.4.0.17.tar.gz", hash = "sha256:7f0e871d2d46fbb6bc7deca3e02dc552cf9c1e8b49deb9595509551be3954e49"},
{file = "types_Pillow-9.4.0.17-py3-none-any.whl", hash = "sha256:f8b848a05f17cb4d53d245c59bf560372b9778d4cfaf9705f6245009bf9f65f3"},
]

[[package]]
name = "types-psycopg2"
version = "2.9.21.4"
version = "2.9.21.8"
description = "Typing stubs for psycopg2"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-psycopg2-2.9.21.4.tar.gz", hash = "sha256:d43dda166a70d073ddac40718e06539836b5844c99b58ef8d4489a8df2edf5c0"},
{file = "types_psycopg2-2.9.21.4-py3-none-any.whl", hash = "sha256:6a05dca0856996aa37d7abe436751803bf47ec006cabbefea092e057f23bc95d"},
{file = "types-psycopg2-2.9.21.8.tar.gz", hash = "sha256:b629440ffcfdebd742fab07f777ff69aefdd19394a138c18e921a1964c3cf5f6"},
{file = "types_psycopg2-2.9.21.8-py3-none-any.whl", hash = "sha256:e747fbec6e0e2502b625bc7686d13cc62fc170e8ae920e5ba27fac946778eeb9"},
]

[[package]]
name = "types-pyopenssl"
version = "22.1.0.2"
version = "23.0.0.4"
description = "Typing stubs for pyOpenSSL"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-pyOpenSSL-22.1.0.2.tar.gz", hash = "sha256:7a350e29e55bc3ee4571f996b4b1c18c4e4098947db45f7485b016eaa35b44bc"},
{file = "types_pyOpenSSL-22.1.0.2-py3-none-any.whl", hash = "sha256:54606a6afb203eb261e0fca9b7f75fa6c24d5ff71e13903c162ffb951c2c64c6"},
{file = "types-pyOpenSSL-23.0.0.4.tar.gz", hash = "sha256:8b3550b6e19d51ce78aabd724b0d8ebd962081a5fce95e7f85a592dfcdbc16bf"},
{file = "types_pyOpenSSL-23.0.0.4-py3-none-any.whl", hash = "sha256:ad49e15bb8bb2f251b8fc24776f414d877629e44b1b049240063ab013b5a6a7d"},
]

[package.dependencies]
types-cryptography = "*"
cryptography = ">=35.0.0"

[[package]]
name = "types-pyyaml"
@ -2714,19 +2677,16 @@ types-urllib3 = "<1.27"

[[package]]
name = "types-setuptools"
version = "67.3.0.1"
version = "67.5.0.0"
description = "Typing stubs for setuptools"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-setuptools-67.3.0.1.tar.gz", hash = "sha256:1a26d373036c720e566823b6edd664a2db4d138b6eeba856721ec1254203474f"},
{file = "types_setuptools-67.3.0.1-py3-none-any.whl", hash = "sha256:a7e0f0816b5b449f5bcdc0efa43da91ff81dbe6941f293a6490d68a450e130a1"},
{file = "types-setuptools-67.5.0.0.tar.gz", hash = "sha256:fa6f231eeb27e86b1d6e8260f73de300e91f99c205b9a5e21debd49f3726a849"},
{file = "types_setuptools-67.5.0.0-py3-none-any.whl", hash = "sha256:f7f4bf4ab777e88631d3a387bbfdd4d480a2a4693ca896130f8ef738370377b8"},
]

[package.dependencies]
types-docutils = "*"

[[package]]
name = "types-urllib3"
version = "1.26.10"
@ -2741,14 +2701,14 @@ files = [

[[package]]
name = "typing-extensions"
version = "4.4.0"
version = "4.5.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
{file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
]

[[package]]
@ -3030,4 +2990,4 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
python-versions = "^3.7.1"
content-hash = "e12077711e5ff83f3c6038ea44c37bd49773799ec8245035b01094b7800c5c92"
content-hash = "7bcffef7b6e6d4b1113222e2ca152b3798c997872789c8a1ea01238f199d56fe"
@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
version = "1.78.0rc1"
version = "1.79.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@ -313,7 +313,7 @@ all = [
# We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
ruff = "0.0.237"
ruff = "0.0.252"

# Typechecking
mypy = "*"
936
requirements.txt
Normal file
936
requirements.txt
Normal file
@ -0,0 +1,936 @@
attrs==22.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \
|
||||
--hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99
|
||||
authlib==1.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a \
|
||||
--hash=sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d
|
||||
automat==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180 \
|
||||
--hash=sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e
|
||||
bcrypt==4.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535 \
|
||||
--hash=sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0 \
|
||||
--hash=sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410 \
|
||||
--hash=sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd \
|
||||
--hash=sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665 \
|
||||
--hash=sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab \
|
||||
--hash=sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71 \
|
||||
--hash=sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215 \
|
||||
--hash=sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b \
|
||||
--hash=sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda \
|
||||
--hash=sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9 \
|
||||
--hash=sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a \
|
||||
--hash=sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344 \
|
||||
--hash=sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f \
|
||||
--hash=sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d \
|
||||
--hash=sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c \
|
||||
--hash=sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c \
|
||||
--hash=sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2 \
|
||||
--hash=sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d \
|
||||
--hash=sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e \
|
||||
--hash=sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3
|
||||
bleach==6.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414 \
|
||||
--hash=sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4
|
||||
canonicaljson==1.6.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:68dfc157b011e07d94bf74b5d4ccc01958584ed942d9dfd5fdd706609e81cd4b \
|
||||
--hash=sha256:806ea6f2cbb7405d20259e1c36dd1214ba5c242fa9165f5bd0bf2081f82c23fb
|
||||
certifi==2022.12.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
|
||||
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
|
||||
cffi==1.15.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
|
||||
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
|
||||
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
|
||||
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
|
||||
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
|
||||
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
|
||||
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
|
||||
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
|
||||
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
|
||||
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
|
||||
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
|
||||
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
|
||||
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
|
||||
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
|
||||
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
|
||||
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
|
||||
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
|
||||
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
|
||||
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
|
||||
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
|
||||
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
|
||||
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
|
||||
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
|
||||
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
|
||||
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
|
||||
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
|
||||
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
|
||||
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
|
||||
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
|
||||
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
|
||||
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
|
||||
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
|
||||
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
|
||||
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
|
||||
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
|
||||
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
|
||||
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
|
||||
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
|
||||
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
|
||||
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
|
||||
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
|
||||
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
|
||||
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
|
||||
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
|
||||
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
|
||||
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
|
||||
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
|
||||
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
|
||||
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
|
||||
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
|
||||
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
|
||||
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
|
||||
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
|
||||
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
|
||||
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
|
||||
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
|
||||
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
|
||||
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
|
||||
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
|
||||
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
|
||||
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
|
||||
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
|
||||
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
|
||||
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
|
||||
charset-normalizer==2.0.12 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
|
||||
--hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
|
||||
constantly==15.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35 \
|
||||
--hash=sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d
|
||||
cryptography==39.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \
|
||||
--hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \
|
||||
--hash=sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885 \
|
||||
--hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \
|
||||
--hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \
|
||||
--hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \
|
||||
--hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \
|
||||
--hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \
|
||||
--hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \
|
||||
--hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \
|
||||
--hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \
|
||||
--hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \
|
||||
--hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \
|
||||
--hash=sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef \
|
||||
--hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \
|
||||
--hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \
|
||||
--hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \
|
||||
--hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \
|
||||
--hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \
|
||||
--hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \
|
||||
--hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \
|
||||
--hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \
|
||||
--hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8
|
||||
frozendict==2.3.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:15b4b18346259392b0d27598f240e9390fafbff882137a9c48a1e0104fb17f78 \
|
||||
--hash=sha256:25a6d2e8b7cf6b6e5677a1a4b53b4073e5d9ec640d1db30dc679627668d25e90 \
|
||||
--hash=sha256:389f395a74eb16992217ac1521e689c1dea2d70113bcb18714669ace1ed623b9 \
|
||||
--hash=sha256:3d8042b7dab5e992e30889c9b71b781d5feef19b372d47d735e4d7d45846fd4a \
|
||||
--hash=sha256:3e93aebc6e69a8ef329bbe9afb8342bd33c7b5c7a0c480cb9f7e60b0cbe48072 \
|
||||
--hash=sha256:3ec86ebf143dd685184215c27ec416c36e0ba1b80d81b1b9482f7d380c049b4e \
|
||||
--hash=sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc \
|
||||
--hash=sha256:5809e6ff6b7257043a486f7a3b73a7da71cf69a38980b4171e4741291d0d9eb3 \
|
||||
--hash=sha256:7c550ed7fdf1962984bec21630c584d722b3ee5d5f57a0ae2527a0121dc0414a \
|
||||
--hash=sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782 \
|
||||
--hash=sha256:95bac22f7f09d81f378f2b3f672b7a50a974ca180feae1507f5e21bc147e8bc8 \
|
||||
--hash=sha256:aca59108b77cadc13ba7dfea7e8f50811208c7652a13dc6c7f92d7782a24d299 \
|
||||
--hash=sha256:b98a0d65a59af6da03f794f90b0c3085a7ee14e7bf8f0ef36b079ee8aa992439 \
|
||||
--hash=sha256:ccb6450a416c9cc9acef7683e637e28356e3ceeabf83521f74cc2718883076b7 \
|
||||
--hash=sha256:d722f3d89db6ae35ef35ecc243c40c800eb344848c83dba4798353312cd37b15 \
|
||||
--hash=sha256:dae686722c144b333c4dbdc16323a5de11406d26b76d2be1cc175f90afacb5ba \
|
||||
--hash=sha256:dbbe1339ac2646523e0bb00d1896085d1f70de23780e4927ca82b36ab8a044d3
|
||||
hiredis==2.2.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:02118dc8545e2371448b9983a0041f12124eea907eb61858f2be8e7c1dfa1e43 \
|
||||
--hash=sha256:03c6a1f6bf2f64f40d076c997cdfcb8b3d1c9557dda6cb7bbad2c5c839921726 \
|
||||
--hash=sha256:0474ab858f5dd15be6b467d89ec14b4c287f53b55ca5455369c3a1a787ef3a24 \
|
||||
--hash=sha256:04c972593f26f4769e2be7058b7928179337593bcfc6a8b6bda87eea807b7cbf \
|
||||
--hash=sha256:0a9493bbc477436a3725e99cfcba768f416ab70ab92956e373d1a3b480b1e204 \
|
||||
--hash=sha256:0e199868fe78c2d175bbb7b88f5daf2eae4a643a62f03f8d6736f9832f04f88b \
|
||||
--hash=sha256:0f2607e08dcb1c5d1e925c451facbfc357927acaa336a004552c32a6dd68e050 \
|
||||
--hash=sha256:0f4b92df1e69dc48411045d2117d1d27ec6b5f0dd2b6501759cea2f6c68d5618 \
|
||||
--hash=sha256:103bde304d558061c4ba1d7ff94351e761da753c28883fd68964f25080152dac \
|
||||
--hash=sha256:14f67987e1d55b197e46729d1497019228ad8c94427bb63500e6f217aa586ca5 \
|
||||
--hash=sha256:1523ec56d711bee863aaaf4325cef4430da3143ec388e60465f47e28818016cd \
|
||||
--hash=sha256:1776db8af168b22588ec10c3df674897b20cc6d25f093cd2724b8b26d7dac057 \
|
||||
--hash=sha256:17e9f363db56a8edb4eff936354cfa273197465bcd970922f3d292032eca87b0 \
|
||||
--hash=sha256:18135ecf28fc6577e71c0f8d8eb2f31e4783020a7d455571e7e5d2793374ce20 \
|
||||
--hash=sha256:19666eb154b7155d043bf941e50d1640125f92d3294e2746df87639cc44a10e6 \
|
||||
--hash=sha256:209b94fa473b39e174b665186cad73206ca849cf6e822900b761e83080f67b06 \
|
||||
--hash=sha256:220b6ac9d3fce60d14ccc34f9790e20a50dc56b6fb747fc357600963c0cf6aca \
|
||||
--hash=sha256:231e5836579fc75b25c6f9bb6213950ea3d39aadcfeb7f880211ca55df968342 \
|
||||
--hash=sha256:2bb682785a37145b209f44f5d5290b0f9f4b56205542fc592d0f1b3d5ffdfcf0 \
|
||||
--hash=sha256:2ed6c948648798b440a9da74db65cdd2ad22f38cf4687f5212df369031394591 \
|
||||
--hash=sha256:2f6e80fb7cd4cc61af95ab2875801e4c36941a956c183297c3273cbfbbefa9d3 \
|
||||
--hash=sha256:33624903dfb629d6f7c17ed353b4b415211c29fd447f31e6bf03361865b97e68 \
|
||||
--hash=sha256:341952a311654c39433c1e0d8d31c2a0c5864b2675ed159ed264ecaa5cfb225b \
|
||||
--hash=sha256:38270042f40ed9e576966c603d06c984c80364b0d9ec86962a31551dae27b0cd \
|
||||
--hash=sha256:3af3071d33432960cba88ce4e4932b508ab3e13ce41431c2a1b2dc9a988f7627 \
|
||||
--hash=sha256:3afc76a012b907895e679d1e6bcc6394845d0cc91b75264711f8caf53d7b0f37 \
|
||||
--hash=sha256:42504e4058246536a9f477f450ab21275126fc5f094be5d5e5290c6de9d855f9 \
|
||||
--hash=sha256:497a8837984ddfbf6f5a4c034c0107f2c5aaaebeebf34e2c6ab591acffce5f12 \
|
||||
--hash=sha256:49a518b456403602775218062a4dd06bed42b26854ff1ff6784cfee2ef6fa347 \
|
||||
--hash=sha256:4e3b8f0eba6d88c2aec63e6d1e38960f8a25c01f9796d32993ffa1cfcf48744c \
|
||||
--hash=sha256:58e51d83b42fdcc29780897641b1dcb30c0e4d3c4f6d9d71d79b2cfec99b8eb7 \
|
||||
--hash=sha256:595474e6c25f1c3c8ec67d587188e7dd47c492829b2c7c5ba1b17ee9e7e9a9ea \
|
||||
--hash=sha256:5cd590dd7858d0107c37b438aa27bbcaa0ba77c5b8eda6ebab7acff0aa89f7d7 \
|
||||
--hash=sha256:5da26970c41084a2ac337a4f075301a78cffb0e0f3df5e98c3049fc95e10725c \
|
||||
--hash=sha256:63f941e77c024be2a1451089e2fdbd5ff450ff0965f49948bbeb383aef1799ea \
|
||||
--hash=sha256:69c32d54ac1f6708145c77d79af12f7448ca1025a0bf912700ad1f0be511026a \
|
||||
--hash=sha256:6afbddc82bbb2c4c405d9a49a056ffe6541f8ad3160df49a80573b399f94ba3a \
|
||||
--hash=sha256:6ba9f425739a55e1409fda5dafad7fdda91c6dcd2b111ba93bb7b53d90737506 \
|
||||
--hash=sha256:6f5f469ba5ae613e4c652cdedfc723aa802329fcc2d65df1e9ab0ac0de34ad9e \
|
||||
--hash=sha256:6fbb1a56d455602bd6c276d5c316ae245111b2dc8158355112f2d905e7471c85 \
|
||||
--hash=sha256:706995fb1173fab7f12110fbad00bb95dd0453336f7f0b341b4ca7b1b9ff0bc7 \
|
||||
--hash=sha256:70db8f514ebcb6f884497c4eee21d0350bbc4102e63502411f8e100cf3b7921e \
|
||||
--hash=sha256:724aed63871bc386d6f28b5f4d15490d84934709f093e021c4abb785e72db5db \
|
||||
--hash=sha256:78f2a53149b116e0088f6eda720574f723fbc75189195aab8a7a2a591ca89cab \
|
||||
--hash=sha256:796b616478a5c1cac83e9e10fcd803e746e5a02461bfa7767aebae8b304e2124 \
|
||||
--hash=sha256:7a5dac3ae05bc64b233f950edf37dce9c904aedbc7e18cfc2adfb98edb85da46 \
|
||||
--hash=sha256:812e27a9b20db967f942306267bcd8b1369d7c171831b6f45d22d75576cd01cd \
|
||||
--hash=sha256:82ad46d1140c5779cd9dfdafc35f47dd09dadff7654d8001c50bb283da82e7c9 \
|
||||
--hash=sha256:8a11250dd0521e9f729325b19ce9121df4cbb80ad3468cc21e56803e8380bc4b \
|
||||
--hash=sha256:8ad00a7621de8ef9ae1616cf24a53d48ad1a699b96668637559a8982d109a800 \
|
||||
--hash=sha256:8fe289556264cb1a2efbcd3d6b3c55e059394ad01b6afa88151264137f85c352 \
|
||||
--hash=sha256:943631a49d7746cd413acaf0b712d030a15f02671af94c54759ba3144351f97a \
|
||||
--hash=sha256:954abb363ed1d18dfb7510dbd89402cb7c21106307e04e2ee7bccf35a134f4dd \
|
||||
--hash=sha256:96745c4cdca261a50bd70c01f14c6c352a48c4d6a78e2d422040fba7919eadef \
|
||||
--hash=sha256:96b079c53b6acd355edb6fe615270613f3f7ddc4159d69837ce15ec518925c40 \
|
||||
--hash=sha256:998ab35070dc81806a23be5de837466a51b25e739fb1a0d5313474d5bb29c829 \
|
||||
--hash=sha256:99b5bcadd5e029234f89d244b86bc8d21093be7ac26111068bebd92a4a95dc73 \
|
||||
--hash=sha256:a0e98106a28fabb672bb014f6c4506cc67491e4cf9ac56d189cbb1e81a9a3e68 \
|
||||
--hash=sha256:a16d81115128e6a9fc6904de051475be195f6c460c9515583dccfd407b16ff78 \
|
||||
--hash=sha256:a386f00800b1b043b091b93850e02814a8b398952438a9d4895bd70f5c80a821 \
|
||||
--hash=sha256:a54d2b3328a2305e0dfb257a4545053fdc64df0c64e0635982e191c846cc0456 \
|
||||
--hash=sha256:a57a4a33a78e94618d026fc68e853d3f71fa4a1d4da7a6e828e927819b001f2d \
|
||||
--hash=sha256:a5e5e51faa7cd02444d4ee1eb59e316c08e974bcfa3a959cb790bc4e9bb616c5 \
|
||||
--hash=sha256:a99c0d50d1a31be285c83301eff4b911dca16aac1c3fe1875c7d6f517a1e9fc4 \
|
||||
--hash=sha256:ae6b356ed166a0ec663a46b547c988815d2b0e5f2d0af31ef34a16cf3ce705d0 \
|
||||
--hash=sha256:b2b847ea3f9af99e02c4c58b7cc6714e105c8d73705e5ff1132e9a249391f688 \
|
||||
--hash=sha256:b90dd0adb1d659f8c94b32556198af1e61e38edd27fc7434d4b3b68ad4e51d37 \
|
||||
--hash=sha256:bad6e9a0e31678ee15ac3ef72e77c08177c86df05c37d2423ff3cded95131e51 \
|
||||
--hash=sha256:bde0178e7e6c49e408b8d3a8c0ec8e69a23e8dc2ae29f87af2d74b21025385dc \
|
||||
--hash=sha256:c233199b9f4dd43e2297577e32ba5fcd0378871a47207bc424d5e5344d030a3e \
|
||||
--hash=sha256:c604919bba041e4c4708ecb0fe6c7c8a92a7f1e886b0ae8d2c13c3e4abfc5eda \
|
||||
--hash=sha256:c65f38418e35970d44f9b5a59533f0f60f14b9f91b712dba51092d2c74d4dcd1 \
|
||||
--hash=sha256:c702dd28d52656bb86f7a2a76ea9341ac434810871b51fcd6cd28c6d7490fbdf \
|
||||
--hash=sha256:c7fd6394779c9a3b324b65394deadb949311662f3770bd34f904b8c04328082c \
|
||||
--hash=sha256:cb59a7535e0b8373f694ce87576c573f533438c5fbee450193333a22118f4a98 \
|
||||
--hash=sha256:cbb3f56d371b560bf39fe45d29c24e3d819ae2399733e2c86394a34e76adab38 \
|
||||
--hash=sha256:d4f6bbe599d255a504ef789c19e23118c654d256343c1ecdf7042fb4b4d0f7fa \
|
||||
--hash=sha256:d77dbc13d55c1d45d6a203da910002fffd13fa310af5e9c5994959587a192789 \
|
||||
--hash=sha256:d87f90064106dfd7d2cc7baeb007a8ca289ee985f4bf64bb627c50cdc34208ed \
|
||||
--hash=sha256:d8849bc74473778c10377f82cf9a534e240734e2f9a92c181ef6d51b4e3d3eb2 \
|
||||
--hash=sha256:d9fbef7f9070055a7cc012ac965e3dbabbf2400b395649ea8d6016dc82a7d13a \
|
||||
--hash=sha256:dcad9c9239845b29f149a895e7e99b8307889cecbfc37b69924c2dad1f4ae4e8 \
|
||||
--hash=sha256:df6325aade17b1f86c8b87f6a1d9549a4184fda00e27e2fca0e5d2a987130365 \
|
||||
--hash=sha256:e57dfcd72f036cce9eab77bc533a932444459f7e54d96a555d25acf2501048be \
|
||||
--hash=sha256:e5945ef29a76ab792973bef1ffa2970d81dd22edb94dfa5d6cba48beb9f51962 \
|
||||
--hash=sha256:ea011b3bfa37f2746737860c1e5ba198b63c9b4764e40b042aac7bd2c258938f \
|
||||
--hash=sha256:ed79f65098c4643cb6ec4530b337535f00b58ea02e25180e3df15e9cc9da58dc \
|
||||
--hash=sha256:f0ccf6fc116795d76bca72aa301a33874c507f9e77402e857d298c73419b5ea3 \
|
||||
--hash=sha256:fec7465caac7b0a36551abb37066221cabf59f776d78fdd58ff17669942b4b41
|
||||
hyperlink==21.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \
|
||||
--hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4
|
||||
idna==3.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
|
||||
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
|
||||
ijson==3.2.0.post0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:00594ed3ef2218fee8c652d9e7f862fb39f8251b67c6379ef12f7e044bf6bbf3 \
|
||||
--hash=sha256:03dfd4c8ed19e704d04b0ad4f34f598dc569fd3f73089f80eed698e7f6069233 \
|
||||
--hash=sha256:09fe3a53e00c59de33b825ba8d6d39f544a7d7180983cd3d6bd2c3794ae35442 \
|
||||
--hash=sha256:0eb838b4e4360e65c00aa13c78b35afc2477759d423b602b60335af5bed3de5b \
|
||||
--hash=sha256:11bb84a53c37e227e733c6dffad2037391cf0b3474bff78596dc4373b02008a0 \
|
||||
--hash=sha256:11dfd64633fe1382c4237477ac3836f682ca17e25e0d0799e84737795b0611df \
|
||||
--hash=sha256:1302dc6490da7d44c3a76a5f0b87d8bec9f918454c6d6e6bf4ed922e47da58bb \
|
||||
--hash=sha256:13f2939db983327dd0492f6c1c0e77be3f2cbf9b620c92c7547d1d2cd6ef0486 \
|
||||
--hash=sha256:158494bfe89ccb32618d0e53b471364080ceb975462ec464d9f9f37d9832b653 \
|
||||
--hash=sha256:183841b8d033ca95457f61fb0719185dc7f51a616070bdf1dcaf03473bed05b2 \
|
||||
--hash=sha256:1a75cfb34217b41136b714985be645f12269e4345da35d7b48aabd317c82fd10 \
|
||||
--hash=sha256:1d64ffaab1d006a4fa9584a4c723e95cc9609bf6c3365478e250cd0bffaaadf3 \
|
||||
--hash=sha256:25919b444426f58dcc62f763d1c6be6297f309da85ecab55f51da6ca86fc9fdf \
|
||||
--hash=sha256:26b57838e712b8852c40ec6d74c6de8bb226446440e1af1354c077a6f81b9142 \
|
||||
--hash=sha256:27409ba44cfd006901971063d37699f72e092b5efaa1586288b5067d80c6b5bd \
|
||||
--hash=sha256:2d50b2ad9c6c51ca160aa60de7f4dacd1357c38d0e503f51aed95c1c1945ff53 \
|
||||
--hash=sha256:2f204f6d4cedeb28326c230a0b046968b5263c234c65a5b18cee22865800fff7 \
|
||||
--hash=sha256:2f9d449f86f8971c24609e319811f7f3b6b734f0218c4a0e799debe19300d15b \
|
||||
--hash=sha256:3b21b1ecd20ed2f918f6f99cdfa68284a416c0f015ffa64b68fa933df1b24d40 \
|
||||
--hash=sha256:3ccc4d4b947549f9c431651c02b95ef571412c78f88ded198612a41d5c5701a0 \
|
||||
--hash=sha256:41e955e173f77f54337fecaaa58a35c464b75e232b1f939b282497134a4d4f0e \
|
||||
--hash=sha256:424232c2bf3e8181f1b572db92c179c2376b57eba9fc8931453fba975f48cb80 \
|
||||
--hash=sha256:434e57e7ec5c334ccb0e67bb4d9e60c264dcb2a3843713dbeb12cb19fe42a668 \
|
||||
--hash=sha256:47a56e3628c227081a2aa58569cbf2af378bad8af648aa904080e87cd6644cfb \
|
||||
--hash=sha256:4d4e143908f47307042c9678803d27706e0e2099d0a6c1988c6cae1da07760bf \
|
||||
--hash=sha256:4e7c4fdc7d24747c8cc7d528c145afda4de23210bf4054bd98cd63bf07e4882d \
|
||||
--hash=sha256:51c1db80d7791fb761ad9a6c70f521acd2c4b0e5afa2fe0d813beb2140d16c37 \
|
||||
--hash=sha256:5242cb2313ba3ece307b426efa56424ac13cc291c36f292b501d412a98ad0703 \
|
||||
--hash=sha256:535665a77408b6bea56eb828806fae125846dff2e2e0ed4cb2e0a8e36244d753 \
|
||||
--hash=sha256:535a59d61b9aef6fc2a3d01564c1151e38e5a44b92cd6583cb4e8ccf0f58043f \
|
||||
--hash=sha256:53f1a13eb99ab514c562869513172135d4b55a914b344e6518ba09ad3ef1e503 \
|
||||
--hash=sha256:5418066666b25b05f2b8ae2698408daa0afa68f07b0b217f2ab24465b7e9cbd9 \
|
||||
--hash=sha256:56500dac8f52989ef7c0075257a8b471cbea8ef77f1044822742b3cbf2246e8b \
|
||||
--hash=sha256:5809752045ef74c26adf159ed03df7fb7e7a8d656992fd7562663ed47d6d39d9 \
|
||||
--hash=sha256:5c93ae4d49d8cf8accfedc8a8e7815851f56ceb6e399b0c186754a68fed22844 \
|
||||
--hash=sha256:5d365df54d18076f1d5f2ffb1eef2ac7f0d067789838f13d393b5586fbb77b02 \
|
||||
--hash=sha256:6def9ac8d73b76cb02e9e9837763f27f71e5e67ec0afae5f1f4cf8f61c39b1ac \
|
||||
--hash=sha256:6ee9537e8a8aa15dd2d0912737aeb6265e781e74f7f7cad8165048fcb5f39230 \
|
||||
--hash=sha256:6eed1ddd3147de49226db4f213851cf7860493a7b6c7bd5e62516941c007094c \
|
||||
--hash=sha256:6fd55f7a46429de95383fc0d0158c1bfb798e976d59d52830337343c2d9bda5c \
|
||||
--hash=sha256:775444a3b647350158d0b3c6c39c88b4a0995643a076cb104bf25042c9aedcf8 \
|
||||
--hash=sha256:79b94662c2e9d366ab362c2c5858097eae0da100dea0dfd340db09ab28c8d5e8 \
|
||||
--hash=sha256:7e0d1713a9074a7677eb8e43f424b731589d1c689d4676e2f57a5ce59d089e89 \
|
||||
--hash=sha256:80a5bd7e9923cab200701f67ad2372104328b99ddf249dbbe8834102c852d316 \
|
||||
--hash=sha256:830de03f391f7e72b8587bb178c22d534da31153e9ee4234d54ef82cde5ace5e \
|
||||
--hash=sha256:84eed88177f6c243c52b280cb094f751de600d98d2221e0dec331920894889ec \
|
||||
--hash=sha256:8f20072376e338af0e51ccecb02335b4e242d55a9218a640f545be7fc64cca99 \
|
||||
--hash=sha256:93aaec00cbde65c192f15c21f3ee44d2ab0c11eb1a35020b5c4c2676f7fe01d0 \
|
||||
--hash=sha256:9829a17f6f78d7f4d0aeff28c126926a1e5f86828ebb60d6a0acfa0d08457f9f \
|
||||
--hash=sha256:986a0347fe19e5117a5241276b72add570839e5bcdc7a6dac4b538c5928eeff5 \
|
||||
--hash=sha256:992e9e68003df32e2aa0f31eb82c0a94f21286203ab2f2b2c666410e17b59d2f \
|
||||
--hash=sha256:9ecbf85a6d73fc72f6534c38f7d92ed15d212e29e0dbe9810a465d61c8a66d23 \
|
||||
--hash=sha256:a340413a9bf307fafd99254a4dd4ac6c567b91a205bf896dde18888315fd7fcd \
|
||||
--hash=sha256:a4465c90b25ca7903410fabe4145e7b45493295cc3b84ec1216653fbe9021276 \
|
||||
--hash=sha256:a7698bc480df76073067017f73ba4139dbaae20f7a6c9a0c7855b9c5e9a62124 \
|
||||
--hash=sha256:a8af68fe579f6f0b9a8b3f033d10caacfed6a4b89b8c7a1d9478a8f5d8aba4a1 \
|
||||
--hash=sha256:a8c84dff2d60ae06d5280ec87cd63050bbd74a90c02bfc7c390c803cfc8ac8fc \
|
||||
--hash=sha256:b3456cd5b16ec9db3ef23dd27f37bf5a14f765e8272e9af3e3de9ee9a4cba867 \
|
||||
--hash=sha256:b3bdd2e12d9b9a18713dd6f3c5ef3734fdab25b79b177054ba9e35ecc746cb6e \
|
||||
--hash=sha256:b3c6cf18b61b94db9590f86af0dd60edbccb36e151643152b8688066f677fbc9 \
|
||||
--hash=sha256:b3e8d46c1004afcf2bf513a8fb575ee2ec3d8009a2668566b5926a2dcf7f1a45 \
|
||||
--hash=sha256:bced6cd5b09d4d002dda9f37292dd58d26eb1c4d0d179b820d3708d776300bb4 \
|
||||
--hash=sha256:bed8dcb7dbfdb98e647ad47676045e0891f610d38095dcfdae468e1e1efb2766 \
|
||||
--hash=sha256:c85892d68895ba7a0b16a0e6b7d9f9a0e30e86f2b1e0f6986243473ba8735432 \
|
||||
--hash=sha256:c8646eb81eec559d7d8b1e51a5087299d06ecab3bc7da54c01f7df94350df135 \
|
||||
--hash=sha256:cd0450e76b9c629b7f86e7d5b91b7cc9c281dd719630160a992b19a856f7bdbd \
|
||||
--hash=sha256:ce4be2beece2629bd24bcab147741d1532bd5ed40fb52f2b4fcde5c5bf606df0 \
|
||||
--hash=sha256:d3e255ef05b434f20fc9d4b18ea15733d1038bec3e4960d772b06216fa79e82d \
|
||||
--hash=sha256:dcec67fc15e5978ad286e8cc2a3f9347076e28e0e01673b5ace18c73da64e3ff \
|
||||
--hash=sha256:e97e6e07851cefe7baa41f1ebf5c0899d2d00d94bfef59825752e4c784bebbe8 \
|
||||
--hash=sha256:eb167ee21d9c413d6b0ab65ec12f3e7ea0122879da8b3569fa1063526f9f03a8 \
|
||||
--hash=sha256:efee1e9b4f691e1086730f3010e31c55625bc2e0f7db292a38a2cdf2774c2e13 \
|
||||
--hash=sha256:f349bee14d0a4a72ba41e1b1cce52af324ebf704f5066c09e3dd04cfa6f545f0 \
|
||||
--hash=sha256:f470f3d750e00df86e03254fdcb422d2f726f4fb3a0d8eeee35e81343985e58a \
|
||||
--hash=sha256:f6464242f7895268d3086d7829ef031b05c77870dad1e13e51ef79d0a9cfe029 \
|
||||
--hash=sha256:f6785ba0f65eb64b1ce3b7fcfec101085faf98f4e77b234f14287fd4138ffb25 \
|
||||
--hash=sha256:fd218b338ac68213c997d4c88437c0e726f16d301616bf837e1468901934042c \
|
||||
--hash=sha256:fe7f414edd69dd9199b0dfffa0ada22f23d8009e10fe2a719e0993b7dcc2e6e2
|
||||
importlib-metadata==6.0.0 ; python_full_version >= "3.7.1" and python_version < "3.8" \
|
||||
--hash=sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad \
|
||||
--hash=sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d
|
||||
importlib-resources==5.4.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
|
||||
--hash=sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45 \
|
||||
--hash=sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b
|
||||
incremental==21.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57 \
|
||||
--hash=sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321
|
||||
jinja2==3.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
|
||||
--hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
|
||||
jsonschema==4.17.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \
|
||||
--hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6
|
||||
lxml==4.9.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7 \
|
||||
--hash=sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726 \
|
||||
--hash=sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03 \
|
||||
--hash=sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140 \
|
||||
--hash=sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a \
|
||||
--hash=sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05 \
|
||||
--hash=sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03 \
|
||||
--hash=sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419 \
|
||||
--hash=sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4 \
|
||||
--hash=sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e \
|
||||
--hash=sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67 \
|
||||
--hash=sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50 \
|
||||
--hash=sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894 \
|
||||
--hash=sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf \
|
||||
--hash=sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947 \
|
||||
--hash=sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1 \
|
||||
--hash=sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd \
|
||||
--hash=sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3 \
|
||||
--hash=sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92 \
|
||||
--hash=sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3 \
|
||||
--hash=sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457 \
|
||||
--hash=sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74 \
|
||||
--hash=sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf \
|
||||
--hash=sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1 \
|
||||
--hash=sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4 \
|
||||
--hash=sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975 \
|
||||
--hash=sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5 \
|
||||
--hash=sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe \
|
||||
--hash=sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7 \
|
||||
--hash=sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1 \
|
||||
--hash=sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2 \
|
||||
--hash=sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409 \
|
||||
--hash=sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f \
|
||||
--hash=sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f \
|
||||
--hash=sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5 \
|
||||
--hash=sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24 \
|
||||
--hash=sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e \
|
||||
--hash=sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4 \
|
||||
--hash=sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a \
|
||||
--hash=sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c \
|
||||
--hash=sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de \
|
||||
--hash=sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f \
|
||||
--hash=sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b \
|
||||
--hash=sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5 \
|
||||
--hash=sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7 \
|
||||
--hash=sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a \
|
||||
--hash=sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c \
|
||||
--hash=sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9 \
|
||||
--hash=sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e \
|
||||
--hash=sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab \
|
||||
--hash=sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941 \
|
||||
--hash=sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5 \
|
||||
--hash=sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45 \
|
||||
--hash=sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7 \
|
||||
--hash=sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892 \
|
||||
--hash=sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746 \
|
||||
--hash=sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c \
|
||||
--hash=sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53 \
|
||||
--hash=sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe \
|
||||
--hash=sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184 \
|
||||
--hash=sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38 \
|
||||
--hash=sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df \
|
||||
--hash=sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9 \
|
||||
--hash=sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b \
|
||||
--hash=sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2 \
|
||||
--hash=sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0 \
|
||||
--hash=sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda \
|
||||
--hash=sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b \
|
||||
--hash=sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5 \
|
||||
--hash=sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380 \
|
||||
--hash=sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33 \
|
||||
--hash=sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8 \
|
||||
--hash=sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1 \
|
||||
--hash=sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889 \
|
||||
--hash=sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9 \
|
||||
--hash=sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f \
|
||||
--hash=sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c
|
||||
markupsafe==2.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3 \
|
||||
--hash=sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8 \
|
||||
--hash=sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759 \
|
||||
--hash=sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed \
|
||||
--hash=sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989 \
|
||||
--hash=sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3 \
|
||||
--hash=sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a \
|
||||
--hash=sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c \
|
||||
--hash=sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c \
|
||||
--hash=sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8 \
|
||||
--hash=sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454 \
|
||||
--hash=sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad \
|
||||
--hash=sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d \
|
||||
--hash=sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635 \
|
||||
--hash=sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61 \
|
||||
--hash=sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea \
|
||||
--hash=sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49 \
|
||||
--hash=sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce \
|
||||
--hash=sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e \
|
||||
--hash=sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f \
|
||||
--hash=sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f \
|
||||
--hash=sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f \
|
||||
--hash=sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7 \
|
||||
--hash=sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a \
|
||||
--hash=sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7 \
|
||||
--hash=sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076 \
|
||||
--hash=sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb \
|
||||
--hash=sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7 \
|
||||
--hash=sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7 \
|
||||
--hash=sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c \
|
||||
--hash=sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26 \
|
||||
--hash=sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c \
|
||||
--hash=sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8 \
|
||||
--hash=sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448 \
|
||||
--hash=sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956 \
|
||||
--hash=sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05 \
|
||||
--hash=sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1 \
|
||||
--hash=sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357 \
|
||||
--hash=sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea \
|
||||
--hash=sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730
|
||||
matrix-common==1.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:524e2785b9b03be4d15f3a8a6b857c5b6af68791ffb1b9918f0ad299abc4db20 \
|
||||
--hash=sha256:62e121cccd9f243417b57ec37a76dc44aeb198a7a5c67afd6b8275992ff2abd1
|
||||
msgpack==1.0.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467 \
|
||||
--hash=sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae \
|
||||
--hash=sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92 \
|
||||
--hash=sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef \
|
||||
--hash=sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624 \
|
||||
--hash=sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227 \
|
||||
--hash=sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88 \
|
||||
--hash=sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9 \
|
||||
--hash=sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8 \
|
||||
--hash=sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd \
|
||||
--hash=sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6 \
|
||||
--hash=sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55 \
|
||||
--hash=sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e \
|
||||
--hash=sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2 \
|
||||
--hash=sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44 \
|
||||
--hash=sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6 \
|
||||
--hash=sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9 \
|
||||
--hash=sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab \
|
||||
--hash=sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae \
|
||||
--hash=sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa \
|
||||
--hash=sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9 \
|
||||
--hash=sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e \
|
||||
--hash=sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250 \
|
||||
--hash=sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce \
|
||||
--hash=sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075 \
|
||||
netaddr==0.8.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
packaging==23.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
parameterized==0.8.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
phonenumbers==8.13.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pillow==9.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pkgutil-resolve-name==1.3.10 ; python_full_version >= "3.7.1" and python_version < "3.9"
prometheus-client==0.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
psycopg2==2.9.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
psycopg2cffi-compat==1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy"
psycopg2cffi==2.9.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy"
pyasn1-modules==0.2.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pyasn1==0.4.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pycparser==2.21 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pydantic==1.10.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pymacaroons==0.13.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pynacl==1.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pyopenssl==23.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pyrsistent==0.18.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
requests==2.27.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
semantic-version==2.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
service-identity==21.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
setuptools-rust==1.5.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
setuptools==65.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
signedjson==1.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
simplejson==3.17.6 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
six==1.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
sortedcontainers==2.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
treq==22.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
twisted-iocpsupport==1.0.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_system == "Windows"
twisted==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
twisted[tls]==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
txredisapi==1.4.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
typing-extensions==4.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0"
urllib3==1.26.12 ; python_full_version >= "3.7.1" and python_version < "4"
webencodings==0.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
zipp==3.7.0 ; python_full_version >= "3.7.1" and python_version < "3.9"
zope-interface==5.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
@@ -14,6 +14,7 @@

#![feature(test)]
use std::collections::BTreeSet;

use synapse::push::{
    evaluator::PushRuleEvaluator, Condition, EventMatchCondition, FilteredPushRules, JsonValue,
    PushRules, SimpleJsonValue,
@@ -44,7 +45,6 @@ fn bench_match_exact(b: &mut Bencher) {
    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        false,
        BTreeSet::new(),
        10,
        Some(0),
        Default::default(),
@@ -53,15 +53,13 @@ fn bench_match_exact(b: &mut Bencher) {
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "room_id".into(),
            pattern: Some("!room:server".into()),
            pattern_type: None,
            pattern: "!room:server".into(),
        },
    ));

@@ -93,7 +91,6 @@ fn bench_match_word(b: &mut Bencher) {
    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        false,
        BTreeSet::new(),
        10,
        Some(0),
        Default::default(),
@@ -102,15 +99,13 @@ fn bench_match_word(b: &mut Bencher) {
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "content.body".into(),
            pattern: Some("test".into()),
            pattern_type: None,
            pattern: "test".into(),
        },
    ));

@@ -142,7 +137,6 @@ fn bench_match_word_miss(b: &mut Bencher) {
    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        false,
        BTreeSet::new(),
        10,
        Some(0),
        Default::default(),
@@ -151,15 +145,13 @@ fn bench_match_word_miss(b: &mut Bencher) {
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "content.body".into(),
            pattern: Some("foobar".into()),
            pattern_type: None,
            pattern: "foobar".into(),
        },
    ));

@@ -191,7 +183,6 @@ fn bench_eval_message(b: &mut Bencher) {
    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        false,
        BTreeSet::new(),
        10,
        Some(0),
        Default::default(),
@@ -200,7 +191,6 @@ fn bench_eval_message(b: &mut Bencher) {
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

@ -21,13 +21,13 @@ use lazy_static::lazy_static;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::KnownCondition;
|
||||
use crate::push::Condition;
|
||||
use crate::push::EventMatchCondition;
|
||||
use crate::push::PushRule;
|
||||
use crate::push::RelatedEventMatchCondition;
|
||||
use crate::push::RelatedEventMatchTypeCondition;
|
||||
use crate::push::SetTweak;
|
||||
use crate::push::TweakValue;
|
||||
use crate::push::{Action, ExactEventMatchCondition, SimpleJsonValue};
|
||||
use crate::push::{Action, EventPropertyIsCondition, SimpleJsonValue};
|
||||
use crate::push::{Condition, EventMatchTypeCondition};
|
||||
use crate::push::{EventMatchCondition, EventMatchPatternType};
|
||||
use crate::push::{EventPropertyIsTypeCondition, PushRule};
|
||||
|
||||
const HIGHLIGHT_ACTION: Action = Action::SetTweak(SetTweak {
|
||||
set_tweak: Cow::Borrowed("highlight"),
|
||||
@ -72,8 +72,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("content.m.relates_to.rel_type"),
|
||||
pattern: Some(Cow::Borrowed("m.replace")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.replace"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[]),
|
||||
@ -86,8 +85,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("content.msgtype"),
|
||||
pattern: Some(Cow::Borrowed("m.notice")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.notice"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::DontNotify]),
|
||||
@ -100,18 +98,15 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.member")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.member"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("content.membership"),
|
||||
pattern: Some(Cow::Borrowed("invite")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("invite"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
Condition::Known(KnownCondition::EventMatchType(EventMatchTypeCondition {
|
||||
key: Cow::Borrowed("state_key"),
|
||||
pattern: None,
|
||||
pattern_type: Some(Cow::Borrowed("user_id")),
|
||||
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserId),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION, SOUND_ACTION]),
|
||||
@ -124,8 +119,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.member")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.member"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::DontNotify]),
|
||||
@ -135,11 +129,10 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
PushRule {
|
||||
rule_id: Cow::Borrowed("global/override/.im.nheko.msc3664.reply"),
|
||||
priority_class: 5,
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatch(
|
||||
RelatedEventMatchCondition {
|
||||
key: Some(Cow::Borrowed("sender")),
|
||||
pattern: None,
|
||||
pattern_type: Some(Cow::Borrowed("user_id")),
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatchType(
|
||||
RelatedEventMatchTypeCondition {
|
||||
key: Cow::Borrowed("sender"),
|
||||
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserId),
|
||||
rel_type: Cow::Borrowed("m.in_reply_to"),
|
||||
include_fallbacks: None,
|
||||
},
|
||||
@ -151,7 +144,12 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
PushRule {
|
||||
rule_id: Cow::Borrowed(".org.matrix.msc3952.is_user_mention"),
|
||||
priority_class: 5,
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::IsUserMention)]),
|
||||
conditions: Cow::Borrowed(&[Condition::Known(
|
||||
KnownCondition::ExactEventPropertyContainsType(EventPropertyIsTypeCondition {
|
||||
key: Cow::Borrowed("content.org.matrix.msc3952.mentions.user_ids"),
|
||||
value_type: Cow::Borrowed(&EventMatchPatternType::UserId),
|
||||
}),
|
||||
)]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
|
||||
default: true,
|
||||
default_enabled: true,
|
||||
@ -168,7 +166,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
rule_id: Cow::Borrowed(".org.matrix.msc3952.is_room_mention"),
|
||||
priority_class: 5,
|
||||
conditions: Cow::Borrowed(&[
|
||||
Condition::Known(KnownCondition::ExactEventMatch(ExactEventMatchCondition {
|
||||
Condition::Known(KnownCondition::EventPropertyIs(EventPropertyIsCondition {
|
||||
key: Cow::Borrowed("content.org.matrix.msc3952.mentions.room"),
|
||||
value: Cow::Borrowed(&SimpleJsonValue::Bool(true)),
|
||||
})),
|
||||
@ -189,8 +187,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
}),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("content.body"),
|
||||
pattern: Some(Cow::Borrowed("@room")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("@room"),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
|
||||
@ -203,13 +200,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.tombstone")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.tombstone"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("state_key"),
|
||||
pattern: Some(Cow::Borrowed("")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed(""),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
|
||||
@ -222,8 +217,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.reaction")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.reaction"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[]),
|
||||
@ -236,13 +230,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.server_acl")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.server_acl"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("state_key"),
|
||||
pattern: Some(Cow::Borrowed("")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed(""),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[]),
|
||||
@ -255,8 +247,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.response")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.response"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[]),
|
||||
@ -268,11 +259,10 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
||||
pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
|
||||
rule_id: Cow::Borrowed("global/content/.m.rule.contains_user_name"),
|
||||
priority_class: 4,
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatchType(
|
||||
EventMatchTypeCondition {
|
||||
key: Cow::Borrowed("content.body"),
|
||||
pattern: None,
|
||||
pattern_type: Some(Cow::Borrowed("user_localpart")),
|
||||
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserLocalpart),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
|
||||
@ -287,8 +277,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.call.invite")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.call.invite"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, RING_ACTION, HIGHLIGHT_FALSE_ACTION]),
|
||||
@ -301,8 +290,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.message")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.message"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -318,8 +306,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.encrypted")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.encrypted"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -338,8 +325,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.encrypted")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.encrypted"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -363,8 +349,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.message")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.message"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -388,8 +373,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.file")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.file"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -413,8 +397,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.image")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.image"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -438,8 +421,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.video")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.video"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -463,8 +445,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.audio")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.audio"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -485,8 +466,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.message")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.message"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
|
||||
@ -499,8 +479,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.encrypted")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.encrypted"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
|
||||
@ -514,8 +493,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.encrypted")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.encrypted"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -534,8 +512,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.message")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.message"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -554,8 +531,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.file")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.file"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -574,8 +550,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.image")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.image"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -594,8 +569,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.video")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.video"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -614,8 +588,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.audio")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.audio"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -633,18 +606,15 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("im.vector.modular.widgets")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("im.vector.modular.widgets"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("content.type"),
|
||||
pattern: Some(Cow::Borrowed("jitsi")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("jitsi"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("state_key"),
|
||||
pattern: Some(Cow::Borrowed("*")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("*"),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
|
||||
@ -660,8 +630,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
}),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.start")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.start"),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
|
||||
@ -674,8 +643,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.start")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.start"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify]),
|
||||
@ -691,8 +659,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
}),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.end")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.end"),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
|
||||
@ -705,8 +672,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.end")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.end"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify]),
|
||||
|
@ -12,9 +12,9 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use crate::push::JsonValue;
|
||||
use anyhow::{Context, Error};
|
||||
use lazy_static::lazy_static;
|
||||
use log::warn;
|
||||
@ -23,9 +23,10 @@ use regex::Regex;
|
||||
|
||||
use super::{
|
||||
utils::{get_glob_matcher, get_localpart_from_id, GlobMatchType},
|
||||
Action, Condition, EventMatchCondition, ExactEventMatchCondition, FilteredPushRules,
|
||||
KnownCondition, RelatedEventMatchCondition, SimpleJsonValue,
|
||||
Action, Condition, EventPropertyIsCondition, FilteredPushRules, KnownCondition,
|
||||
SimpleJsonValue,
|
||||
};
|
||||
use crate::push::{EventMatchPatternType, JsonValue};
|
||||
|
||||
lazy_static! {
|
||||
/// Used to parse the `is` clause in the room member count condition.
|
||||
@ -71,8 +72,6 @@ pub struct PushRuleEvaluator {
|
||||
|
||||
/// True if the event has a mentions property and MSC3952 support is enabled.
|
||||
has_mentions: bool,
|
||||
/// The user mentions that were part of the message.
|
||||
user_mentions: BTreeSet<String>,
|
||||
|
||||
/// The number of users in the room.
|
||||
room_member_count: u64,
|
||||
@ -98,9 +97,6 @@ pub struct PushRuleEvaluator {
|
||||
/// flag as MSC1767 (extensible events core).
|
||||
msc3931_enabled: bool,
|
||||
|
||||
/// If MSC3758 (exact_event_match push rule condition) is enabled.
|
||||
msc3758_exact_event_match: bool,
|
||||
|
||||
/// If MSC3966 (exact_event_property_contains push rule condition) is enabled.
|
||||
msc3966_exact_event_property_contains: bool,
|
||||
}
|
||||
@ -113,7 +109,6 @@ impl PushRuleEvaluator {
|
||||
pub fn py_new(
|
||||
flattened_keys: BTreeMap<String, JsonValue>,
|
||||
has_mentions: bool,
|
||||
user_mentions: BTreeSet<String>,
|
||||
room_member_count: u64,
|
||||
sender_power_level: Option<i64>,
|
||||
notification_power_levels: BTreeMap<String, i64>,
|
||||
@ -121,7 +116,6 @@ impl PushRuleEvaluator {
|
||||
related_event_match_enabled: bool,
|
||||
room_version_feature_flags: Vec<String>,
|
||||
msc3931_enabled: bool,
|
||||
msc3758_exact_event_match: bool,
|
||||
msc3966_exact_event_property_contains: bool,
|
||||
) -> Result<Self, Error> {
|
||||
let body = match flattened_keys.get("content.body") {
|
||||
@ -133,7 +127,6 @@ impl PushRuleEvaluator {
|
||||
flattened_keys,
|
||||
body,
|
||||
has_mentions,
|
||||
user_mentions,
|
||||
room_member_count,
|
||||
notification_power_levels,
|
||||
sender_power_level,
|
||||
@ -141,7 +134,6 @@ impl PushRuleEvaluator {
|
||||
related_event_match_enabled,
|
||||
room_version_feature_flags,
|
||||
msc3931_enabled,
|
||||
msc3758_exact_event_match,
|
||||
msc3966_exact_event_property_contains,
|
||||
})
|
||||
}
|
||||
@ -256,24 +248,83 @@ impl PushRuleEvaluator {
|
||||
};
|
||||
|
||||
let result = match known_condition {
|
||||
KnownCondition::EventMatch(event_match) => {
|
||||
self.match_event_match(event_match, user_id)?
|
||||
}
|
||||
KnownCondition::ExactEventMatch(exact_event_match) => {
|
||||
self.match_exact_event_match(exact_event_match)?
|
||||
}
|
||||
KnownCondition::RelatedEventMatch(event_match) => {
|
||||
self.match_related_event_match(event_match, user_id)?
|
||||
}
|
||||
KnownCondition::ExactEventPropertyContains(exact_event_match) => {
|
||||
self.match_exact_event_property_contains(exact_event_match)?
|
||||
}
|
||||
KnownCondition::IsUserMention => {
|
||||
if let Some(uid) = user_id {
|
||||
self.user_mentions.contains(uid)
|
||||
KnownCondition::EventMatch(event_match) => self.match_event_match(
|
||||
&self.flattened_keys,
|
||||
&event_match.key,
|
||||
&event_match.pattern,
|
||||
)?,
|
||||
KnownCondition::EventMatchType(event_match) => {
|
||||
// The `pattern_type` can either be "user_id" or "user_localpart",
|
||||
// either way if we don't have a `user_id` then the condition can't
|
||||
// match.
|
||||
let user_id = if let Some(user_id) = user_id {
|
||||
user_id
|
||||
} else {
|
||||
false
|
||||
}
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
let pattern = match &*event_match.pattern_type {
|
||||
EventMatchPatternType::UserId => user_id,
|
||||
EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
|
||||
};
|
||||
|
||||
self.match_event_match(&self.flattened_keys, &event_match.key, pattern)?
|
||||
}
|
||||
KnownCondition::EventPropertyIs(event_property_is) => {
|
||||
self.match_event_property_is(event_property_is)?
|
||||
}
|
||||
KnownCondition::RelatedEventMatch(event_match) => self.match_related_event_match(
|
||||
&event_match.rel_type.clone(),
|
||||
event_match.include_fallbacks,
|
||||
event_match.key.clone(),
|
||||
event_match.pattern.clone(),
|
||||
)?,
|
||||
KnownCondition::RelatedEventMatchType(event_match) => {
|
||||
// The `pattern_type` can either be "user_id" or "user_localpart",
|
||||
// either way if we don't have a `user_id` then the condition can't
|
||||
// match.
|
||||
let user_id = if let Some(user_id) = user_id {
|
||||
user_id
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
let pattern = match &*event_match.pattern_type {
|
||||
EventMatchPatternType::UserId => user_id,
|
||||
EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
|
||||
};
|
||||
|
||||
self.match_related_event_match(
|
||||
&event_match.rel_type.clone(),
|
||||
event_match.include_fallbacks,
|
||||
Some(event_match.key.clone()),
|
||||
Some(Cow::Borrowed(pattern)),
|
||||
)?
|
||||
}
|
||||
KnownCondition::ExactEventPropertyContains(event_property_is) => self
|
||||
.match_exact_event_property_contains(
|
||||
event_property_is.key.clone(),
|
||||
event_property_is.value.clone(),
|
||||
)?,
|
||||
KnownCondition::ExactEventPropertyContainsType(exact_event_match) => {
|
||||
// The `pattern_type` can either be "user_id" or "user_localpart",
|
||||
// either way if we don't have a `user_id` then the condition can't
|
||||
// match.
|
||||
let user_id = if let Some(user_id) = user_id {
|
||||
user_id
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
let pattern = match &*exact_event_match.value_type {
|
||||
EventMatchPatternType::UserId => user_id,
|
||||
EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
|
||||
};
|
||||
|
||||
self.match_exact_event_property_contains(
|
||||
exact_event_match.key.clone(),
|
||||
Cow::Borrowed(&SimpleJsonValue::Str(pattern.to_string())),
|
||||
)?
|
||||
}
|
||||
KnownCondition::ContainsDisplayName => {
|
||||
if let Some(dn) = display_name {
|
||||
@ -325,135 +376,18 @@ impl PushRuleEvaluator {
|
||||
/// Evaluates a `event_match` condition.
|
||||
fn match_event_match(
|
||||
&self,
|
||||
event_match: &EventMatchCondition,
|
||||
user_id: Option<&str>,
|
||||
flattened_event: &BTreeMap<String, JsonValue>,
|
||||
key: &str,
|
||||
pattern: &str,
|
||||
) -> Result<bool, Error> {
|
||||
let pattern = if let Some(pattern) = &event_match.pattern {
|
||||
pattern
|
||||
} else if let Some(pattern_type) = &event_match.pattern_type {
|
||||
// The `pattern_type` can either be "user_id" or "user_localpart",
|
||||
// either way if we don't have a `user_id` then the condition can't
|
||||
// match.
|
||||
let user_id = if let Some(user_id) = user_id {
|
||||
user_id
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
match &**pattern_type {
|
||||
"user_id" => user_id,
|
||||
"user_localpart" => get_localpart_from_id(user_id)?,
|
||||
_ => return Ok(false),
|
||||
}
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
let haystack = if let Some(JsonValue::Value(SimpleJsonValue::Str(haystack))) =
|
||||
self.flattened_keys.get(&*event_match.key)
|
||||
flattened_event.get(key)
|
||||
{
|
||||
haystack
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
// For the content.body we match against "words", but for everything
|
||||
// else we match against the entire value.
|
||||
let match_type = if event_match.key == "content.body" {
|
||||
GlobMatchType::Word
|
||||
} else {
|
||||
GlobMatchType::Whole
|
||||
};
|
||||
|
||||
let mut compiled_pattern = get_glob_matcher(pattern, match_type)?;
|
||||
compiled_pattern.is_match(haystack)
|
||||
}
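match_event_match above compiles the pattern with GlobMatchType::Word when the key is content.body and GlobMatchType::Whole otherwise. A rough Python illustration of that distinction using only the standard library; this is not Synapse's matcher, just a sketch of the word-versus-whole-value idea.

import fnmatch

def glob_matches(pattern: str, haystack: str, key: str) -> bool:
    """Word-level glob matching for content.body, whole-value matching otherwise."""
    pattern = pattern.lower()
    haystack = haystack.lower()
    if key == "content.body":
        # Match the glob against each whitespace-separated word.
        return any(fnmatch.fnmatchcase(word, pattern) for word in haystack.split())
    return fnmatch.fnmatchcase(haystack, pattern)

assert glob_matches("caf*", "meet me at the cafe", "content.body")
assert not glob_matches("cafe", "meet me at the cafe", "type")
assert glob_matches("m.room.*", "m.room.message", "type")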
|
||||
|
||||
/// Evaluates a `exact_event_match` condition. (MSC3758)
|
||||
fn match_exact_event_match(
|
||||
&self,
|
||||
exact_event_match: &ExactEventMatchCondition,
|
||||
) -> Result<bool, Error> {
|
||||
// First check if the feature is enabled.
|
||||
if !self.msc3758_exact_event_match {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let value = &exact_event_match.value;
|
||||
|
||||
let haystack = if let Some(JsonValue::Value(haystack)) =
|
||||
self.flattened_keys.get(&*exact_event_match.key)
|
||||
{
|
||||
haystack
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
Ok(haystack == &**value)
|
||||
}
|
||||
|
||||
/// Evaluates a `related_event_match` condition. (MSC3664)
|
||||
fn match_related_event_match(
|
||||
&self,
|
||||
event_match: &RelatedEventMatchCondition,
|
||||
user_id: Option<&str>,
|
||||
) -> Result<bool, Error> {
|
||||
// First check if related event matching is enabled...
|
||||
if !self.related_event_match_enabled {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
// get the related event, fail if there is none.
|
||||
let event = if let Some(event) = self.related_events_flattened.get(&*event_match.rel_type) {
|
||||
event
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
// If we are not matching fallbacks, don't match if our special key indicating this is a
|
||||
// fallback relation is not present.
|
||||
if !event_match.include_fallbacks.unwrap_or(false)
|
||||
&& event.contains_key("im.vector.is_falling_back")
|
||||
{
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
// if we have no key, accept the event as matching, if it existed without matching any
|
||||
// fields.
|
||||
let key = if let Some(key) = &event_match.key {
|
||||
key
|
||||
} else {
|
||||
return Ok(true);
|
||||
};
|
||||
|
||||
let pattern = if let Some(pattern) = &event_match.pattern {
|
||||
pattern
|
||||
} else if let Some(pattern_type) = &event_match.pattern_type {
|
||||
// The `pattern_type` can either be "user_id" or "user_localpart",
|
||||
// either way if we don't have a `user_id` then the condition can't
|
||||
// match.
|
||||
let user_id = if let Some(user_id) = user_id {
|
||||
user_id
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
match &**pattern_type {
|
||||
"user_id" => user_id,
|
||||
"user_localpart" => get_localpart_from_id(user_id)?,
|
||||
_ => return Ok(false),
|
||||
}
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
let haystack =
|
||||
if let Some(JsonValue::Value(SimpleJsonValue::Str(haystack))) = event.get(&**key) {
|
||||
haystack
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
// For the content.body we match against "words", but for everything
|
||||
// else we match against the entire value.
|
||||
let match_type = if key == "content.body" {
|
||||
@ -466,27 +400,78 @@ impl PushRuleEvaluator {
|
||||
compiled_pattern.is_match(haystack)
|
||||
}
|
||||
|
||||
/// Evaluates a `exact_event_property_contains` condition. (MSC3758)
|
||||
fn match_exact_event_property_contains(
|
||||
/// Evaluates a `event_property_is` condition.
|
||||
fn match_event_property_is(
|
||||
&self,
|
||||
exact_event_match: &ExactEventMatchCondition,
|
||||
event_property_is: &EventPropertyIsCondition,
|
||||
) -> Result<bool, Error> {
|
||||
// First check if the feature is enabled.
|
||||
if !self.msc3966_exact_event_property_contains {
|
||||
return Ok(false);
|
||||
}
|
||||
let value = &event_property_is.value;
|
||||
|
||||
let value = &exact_event_match.value;
|
||||
|
||||
let haystack = if let Some(JsonValue::Array(haystack)) =
|
||||
self.flattened_keys.get(&*exact_event_match.key)
|
||||
let haystack = if let Some(JsonValue::Value(haystack)) =
|
||||
self.flattened_keys.get(&*event_property_is.key)
|
||||
{
|
||||
haystack
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
Ok(haystack.contains(&**value))
|
||||
Ok(haystack == &**value)
|
||||
}
|
||||
|
||||
/// Evaluates a `related_event_match` condition. (MSC3664)
|
||||
fn match_related_event_match(
|
||||
&self,
|
||||
rel_type: &str,
|
||||
include_fallbacks: Option<bool>,
|
||||
key: Option<Cow<str>>,
|
||||
pattern: Option<Cow<str>>,
|
||||
) -> Result<bool, Error> {
|
||||
// First check if related event matching is enabled...
|
||||
if !self.related_event_match_enabled {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
// get the related event, fail if there is none.
|
||||
let event = if let Some(event) = self.related_events_flattened.get(rel_type) {
|
||||
event
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
// If we are not matching fallbacks, don't match if our special key indicating this is a
|
||||
// fallback relation is not present.
|
||||
if !include_fallbacks.unwrap_or(false) && event.contains_key("im.vector.is_falling_back") {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
match (key, pattern) {
|
||||
// if we have no key, accept the event as matching.
|
||||
(None, _) => Ok(true),
|
||||
// There was a key, so we *must* have a pattern to go with it.
|
||||
(Some(_), None) => Ok(false),
|
||||
// If there is a key & pattern, check if they're in the flattened event (given by rel_type).
|
||||
(Some(key), Some(pattern)) => self.match_event_match(event, &key, &pattern),
|
||||
}
|
||||
}
|
||||
|
||||
/// Evaluates a `exact_event_property_contains` condition. (MSC3966)
|
||||
fn match_exact_event_property_contains(
|
||||
&self,
|
||||
key: Cow<str>,
|
||||
value: Cow<SimpleJsonValue>,
|
||||
) -> Result<bool, Error> {
|
||||
// First check if the feature is enabled.
|
||||
if !self.msc3966_exact_event_property_contains {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let haystack = if let Some(JsonValue::Array(haystack)) = self.flattened_keys.get(&*key) {
|
||||
haystack
|
||||
} else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
Ok(haystack.contains(&value))
|
||||
}
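Every condition handled above reads from flattened_keys, a map from dot-separated key paths to leaf JSON values. A hedged Python sketch of that flattening idea; the real implementation also handles escaping of dots in keys (MSC3873), which is skipped here.

from typing import Any, Dict

def flatten_event(content: Dict[str, Any], prefix: str = "") -> Dict[str, Any]:
    """Flatten nested JSON into dotted keys, keeping leaf values as-is."""
    flat: Dict[str, Any] = {}
    for key, value in content.items():
        dotted = f"{prefix}{key}"
        if isinstance(value, dict):
            flat.update(flatten_event(value, prefix=f"{dotted}."))
        else:
            flat[dotted] = value
    return flat

event = {
    "type": "m.room.message",
    "content": {"body": "hello", "m.mentions": {"user_ids": ["@alice:example.org"]}},
}
flat = flatten_event(event)
assert flat["content.body"] == "hello"
assert flat["content.m.mentions.user_ids"] == ["@alice:example.org"]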
|
||||
|
||||
/// Match the member count against an 'is' condition
|
||||
@ -523,7 +508,6 @@ fn push_rule_evaluator() {
|
||||
let evaluator = PushRuleEvaluator::py_new(
|
||||
flattened_keys,
|
||||
false,
|
||||
BTreeSet::new(),
|
||||
10,
|
||||
Some(0),
|
||||
BTreeMap::new(),
|
||||
@ -532,7 +516,6 @@ fn push_rule_evaluator() {
|
||||
vec![],
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
@ -555,7 +538,6 @@ fn test_requires_room_version_supports_condition() {
|
||||
let evaluator = PushRuleEvaluator::py_new(
|
||||
flattened_keys,
|
||||
false,
|
||||
BTreeSet::new(),
|
||||
10,
|
||||
Some(0),
|
||||
BTreeMap::new(),
|
||||
@ -564,7 +546,6 @@ fn test_requires_room_version_supports_condition() {
|
||||
flags,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
|
@ -328,14 +328,23 @@ pub enum Condition {
|
||||
#[serde(tag = "kind")]
|
||||
pub enum KnownCondition {
|
||||
EventMatch(EventMatchCondition),
|
||||
#[serde(rename = "com.beeper.msc3758.exact_event_match")]
|
||||
ExactEventMatch(ExactEventMatchCondition),
|
||||
// Identical to event_match but gives predefined patterns. Cannot be added by users.
|
||||
#[serde(skip_deserializing, rename = "event_match")]
|
||||
EventMatchType(EventMatchTypeCondition),
|
||||
EventPropertyIs(EventPropertyIsCondition),
|
||||
#[serde(rename = "im.nheko.msc3664.related_event_match")]
|
||||
RelatedEventMatch(RelatedEventMatchCondition),
|
||||
// Identical to related_event_match but gives predefined patterns. Cannot be added by users.
|
||||
#[serde(skip_deserializing, rename = "im.nheko.msc3664.related_event_match")]
|
||||
RelatedEventMatchType(RelatedEventMatchTypeCondition),
|
||||
#[serde(rename = "org.matrix.msc3966.exact_event_property_contains")]
|
||||
ExactEventPropertyContains(ExactEventMatchCondition),
|
||||
#[serde(rename = "org.matrix.msc3952.is_user_mention")]
|
||||
IsUserMention,
|
||||
ExactEventPropertyContains(EventPropertyIsCondition),
|
||||
// Identical to exact_event_property_contains but gives predefined patterns. Cannot be added by users.
|
||||
#[serde(
|
||||
skip_deserializing,
|
||||
rename = "org.matrix.msc3966.exact_event_property_contains"
|
||||
)]
|
||||
ExactEventPropertyContainsType(EventPropertyIsTypeCondition),
|
||||
ContainsDisplayName,
|
||||
RoomMemberCount {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
@ -362,23 +371,45 @@ impl<'source> FromPyObject<'source> for Condition {
|
||||
}
|
||||
}
|
||||
|
||||
/// The body of a [`Condition::EventMatch`]
|
||||
/// The body of a [`Condition::EventMatch`] with a pattern.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct EventMatchCondition {
|
||||
pub key: Cow<'static, str>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub pattern: Option<Cow<'static, str>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub pattern_type: Option<Cow<'static, str>>,
|
||||
pub pattern: Cow<'static, str>,
|
||||
}
|
||||
|
||||
/// The body of a [`Condition::ExactEventMatch`]
|
||||
#[derive(Serialize, Debug, Clone)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum EventMatchPatternType {
|
||||
UserId,
|
||||
UserLocalpart,
|
||||
}
|
||||
|
||||
/// The body of a [`Condition::EventMatch`] that uses user_id or user_localpart as a pattern.
|
||||
#[derive(Serialize, Debug, Clone)]
|
||||
pub struct EventMatchTypeCondition {
|
||||
pub key: Cow<'static, str>,
|
||||
// During serialization, the pattern_type property gets replaced with a
|
||||
// pattern property of the correct value in synapse.push.clientformat.format_push_rules_for_user.
|
||||
pub pattern_type: Cow<'static, EventMatchPatternType>,
|
||||
}
|
||||
|
||||
/// The body of a [`Condition::EventPropertyIs`]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct ExactEventMatchCondition {
|
||||
pub struct EventPropertyIsCondition {
|
||||
pub key: Cow<'static, str>,
|
||||
pub value: Cow<'static, SimpleJsonValue>,
|
||||
}
|
||||
|
||||
/// The body of a [`Condition::EventPropertyIs`] that uses user_id or user_localpart as a pattern.
|
||||
#[derive(Serialize, Debug, Clone)]
|
||||
pub struct EventPropertyIsTypeCondition {
|
||||
pub key: Cow<'static, str>,
|
||||
// During serialization, the pattern_type property gets replaced with a
|
||||
// pattern property of the correct value in synapse.push.clientformat.format_push_rules_for_user.
|
||||
pub value_type: Cow<'static, EventMatchPatternType>,
|
||||
}
|
||||
|
||||
/// The body of a [`Condition::RelatedEventMatch`]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct RelatedEventMatchCondition {
|
||||
@ -386,8 +417,18 @@ pub struct RelatedEventMatchCondition {
|
||||
pub key: Option<Cow<'static, str>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub pattern: Option<Cow<'static, str>>,
|
||||
pub rel_type: Cow<'static, str>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub pattern_type: Option<Cow<'static, str>>,
|
||||
pub include_fallbacks: Option<bool>,
|
||||
}
|
||||
|
||||
/// The body of a [`Condition::RelatedEventMatch`] that uses user_id or user_localpart as a pattern.
|
||||
#[derive(Serialize, Debug, Clone)]
|
||||
pub struct RelatedEventMatchTypeCondition {
|
||||
// This is only used if pattern_type exists (and thus key must exist), so is
|
||||
// a bit simpler than RelatedEventMatchCondition.
|
||||
pub key: Cow<'static, str>,
|
||||
pub pattern_type: Cow<'static, EventMatchPatternType>,
|
||||
pub rel_type: Cow<'static, str>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub include_fallbacks: Option<bool>,
|
||||
@ -571,8 +612,7 @@ impl FilteredPushRules {
|
||||
fn test_serialize_condition() {
|
||||
let condition = Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: "content.body".into(),
|
||||
pattern: Some("coffee".into()),
|
||||
pattern_type: None,
|
||||
pattern: "coffee".into(),
|
||||
}));
|
||||
|
||||
let json = serde_json::to_string(&condition).unwrap();
|
||||
@ -586,7 +626,33 @@ fn test_serialize_condition() {
|
||||
fn test_deserialize_condition() {
|
||||
let json = r#"{"kind":"event_match","key":"content.body","pattern":"coffee"}"#;
|
||||
|
||||
let _: Condition = serde_json::from_str(json).unwrap();
|
||||
let condition: Condition = serde_json::from_str(json).unwrap();
|
||||
assert!(matches!(
|
||||
condition,
|
||||
Condition::Known(KnownCondition::EventMatch(_))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_serialize_event_match_condition_with_pattern_type() {
|
||||
let condition = Condition::Known(KnownCondition::EventMatchType(EventMatchTypeCondition {
|
||||
key: "content.body".into(),
|
||||
pattern_type: Cow::Owned(EventMatchPatternType::UserId),
|
||||
}));
|
||||
|
||||
let json = serde_json::to_string(&condition).unwrap();
|
||||
assert_eq!(
|
||||
json,
|
||||
r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cannot_deserialize_event_match_condition_with_pattern_type() {
|
||||
let json = r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#;
|
||||
|
||||
let condition: Condition = serde_json::from_str(json).unwrap();
|
||||
assert!(matches!(condition, Condition::Unknown(_)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -600,6 +666,37 @@ fn test_deserialize_unstable_msc3664_condition() {
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_serialize_unstable_msc3664_condition_with_pattern_type() {
|
||||
let condition = Condition::Known(KnownCondition::RelatedEventMatchType(
|
||||
RelatedEventMatchTypeCondition {
|
||||
key: "content.body".into(),
|
||||
pattern_type: Cow::Owned(EventMatchPatternType::UserId),
|
||||
rel_type: "m.in_reply_to".into(),
|
||||
include_fallbacks: Some(true),
|
||||
},
|
||||
));
|
||||
|
||||
let json = serde_json::to_string(&condition).unwrap();
|
||||
assert_eq!(
|
||||
json,
|
||||
r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern_type":"user_id","rel_type":"m.in_reply_to","include_fallbacks":true}"#
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cannot_deserialize_unstable_msc3664_condition_with_pattern_type() {
|
||||
let json = r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern_type":"user_id","rel_type":"m.in_reply_to"}"#;
|
||||
|
||||
let condition: Condition = serde_json::from_str(json).unwrap();
|
||||
// Since pattern is optional on RelatedEventMatch it deserializes it to that
|
||||
// instead of RelatedEventMatchType.
|
||||
assert!(matches!(
|
||||
condition,
|
||||
Condition::Known(KnownCondition::RelatedEventMatch(_))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_deserialize_unstable_msc3931_condition() {
|
||||
let json =
|
||||
@ -613,55 +710,41 @@ fn test_deserialize_unstable_msc3931_condition() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_deserialize_unstable_msc3758_condition() {
|
||||
fn test_deserialize_event_property_is_condition() {
|
||||
// A string condition should work.
|
||||
let json =
|
||||
r#"{"kind":"com.beeper.msc3758.exact_event_match","key":"content.value","value":"foo"}"#;
|
||||
let json = r#"{"kind":"event_property_is","key":"content.value","value":"foo"}"#;
|
||||
|
||||
let condition: Condition = serde_json::from_str(json).unwrap();
|
||||
assert!(matches!(
|
||||
condition,
|
||||
Condition::Known(KnownCondition::ExactEventMatch(_))
|
||||
Condition::Known(KnownCondition::EventPropertyIs(_))
|
||||
));
|
||||
|
||||
// A boolean condition should work.
|
||||
let json =
|
||||
r#"{"kind":"com.beeper.msc3758.exact_event_match","key":"content.value","value":true}"#;
|
||||
let json = r#"{"kind":"event_property_is","key":"content.value","value":true}"#;
|
||||
|
||||
let condition: Condition = serde_json::from_str(json).unwrap();
|
||||
assert!(matches!(
|
||||
condition,
|
||||
Condition::Known(KnownCondition::ExactEventMatch(_))
|
||||
Condition::Known(KnownCondition::EventPropertyIs(_))
|
||||
));
|
||||
|
||||
// An integer condition should work.
|
||||
let json = r#"{"kind":"com.beeper.msc3758.exact_event_match","key":"content.value","value":1}"#;
|
||||
let json = r#"{"kind":"event_property_is","key":"content.value","value":1}"#;
|
||||
|
||||
let condition: Condition = serde_json::from_str(json).unwrap();
|
||||
assert!(matches!(
|
||||
condition,
|
||||
Condition::Known(KnownCondition::ExactEventMatch(_))
|
||||
Condition::Known(KnownCondition::EventPropertyIs(_))
|
||||
));
|
||||
|
||||
// A null condition should work
|
||||
let json =
|
||||
r#"{"kind":"com.beeper.msc3758.exact_event_match","key":"content.value","value":null}"#;
|
||||
let json = r#"{"kind":"event_property_is","key":"content.value","value":null}"#;
|
||||
|
||||
let condition: Condition = serde_json::from_str(json).unwrap();
|
||||
assert!(matches!(
|
||||
condition,
|
||||
Condition::Known(KnownCondition::ExactEventMatch(_))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_deserialize_unstable_msc3952_user_condition() {
|
||||
let json = r#"{"kind":"org.matrix.msc3952.is_user_mention"}"#;
|
||||
|
||||
let condition: Condition = serde_json::from_str(json).unwrap();
|
||||
assert!(matches!(
|
||||
condition,
|
||||
Condition::Known(KnownCondition::IsUserMention)
|
||||
Condition::Known(KnownCondition::EventPropertyIs(_))
|
||||
));
|
||||
}
|
||||
|
||||
|
@@ -59,6 +59,11 @@ Run the complement test suite on Synapse.
is important.
Not suitable for use in CI in case the editable environment is impure.

--rebuild-editable
Force a rebuild of the editable build of Synapse.
This is occasionally useful if the built-in rebuild detection with
--editable fails, e.g. when changing configure_workers_and_start.py.

For help on arguments to 'go test', run 'go help testflag'.
EOF
}
@@ -82,6 +87,9 @@ while [ $# -ge 1 ]; do
"-e"|"--editable")
use_editable_synapse=1
;;
"--rebuild-editable")
rebuild_editable_synapse=1
;;
*)
# unknown arg: presumably an argument to gotest. break the loop.
break
@@ -116,7 +124,9 @@ if [ -n "$use_editable_synapse" ]; then
fi

editable_mount="$(realpath .):/editable-src:z"
if docker inspect complement-synapse-editable &>/dev/null; then
if [ -n "$rebuild_editable_synapse" ]; then
unset skip_docker_build
elif docker inspect complement-synapse-editable &>/dev/null; then
# complement-synapse-editable already exists: see if we can still use it:
# - The Rust module must still be importable; it will fail to import if the Rust source has changed.
# - The Poetry lock file must be the same (otherwise we assume dependencies have changed)
|
||||
|
@@ -112,7 +112,7 @@ python3 -m black "${files[@]}"

# Catch any common programming mistakes in Python code.
# --quiet suppresses the update check.
ruff --quiet "${files[@]}"
ruff --quiet --fix "${files[@]}"

# Catch any common programming mistakes in Rust code.
#
|
||||
|
@ -29,7 +29,6 @@ _Repr = Callable[[], str]
|
||||
def recursive_repr(fillvalue: str = ...) -> Callable[[_Repr], _Repr]: ...
|
||||
|
||||
class SortedList(MutableSequence[_T]):
|
||||
|
||||
DEFAULT_LOAD_FACTOR: int = ...
|
||||
def __init__(
|
||||
self,
|
||||
|
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Set, Tuple, Union
from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Tuple, Union

from synapse.types import JsonDict, JsonValue

@@ -58,7 +58,6 @@ class PushRuleEvaluator:
self,
flattened_keys: Mapping[str, JsonValue],
has_mentions: bool,
user_mentions: Set[str],
room_member_count: int,
sender_power_level: Optional[int],
notification_power_levels: Mapping[str, int],
@@ -66,7 +65,6 @@ class PushRuleEvaluator:
related_event_match_enabled: bool,
room_version_feature_flags: Tuple[str, ...],
msc3931_enabled: bool,
msc3758_exact_event_match: bool,
msc3966_exact_event_property_contains: bool,
): ...
def run(
|
||||
|
@@ -1,5 +1,6 @@
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-9 New Vector Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2023 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

""" This is a reference implementation of a Matrix homeserver.
""" This is an implementation of a Matrix homeserver.
"""

import json
|
||||
|
@ -37,7 +37,7 @@ import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from synapse.rest.media.v1.filepath import MediaFilePaths
|
||||
from synapse.media.filepath import MediaFilePaths
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
@ -47,7 +47,6 @@ def request_registration(
|
||||
_print: Callable[[str], None] = print,
|
||||
exit: Callable[[int], None] = sys.exit,
|
||||
) -> None:
|
||||
|
||||
url = "%s/_synapse/admin/v1/register" % (server_location.rstrip("/"),)
|
||||
|
||||
# Get the nonce
|
||||
@ -154,7 +153,6 @@ def register_new_user(
|
||||
|
||||
|
||||
def main() -> None:
|
||||
|
||||
logging.captureWarnings(True)
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
|
@ -1205,7 +1205,6 @@ class CursesProgress(Progress):
|
||||
if self.finished:
|
||||
status = "Time spent: %s (Done!)" % (duration_str,)
|
||||
else:
|
||||
|
||||
if self.total_processed > 0:
|
||||
left = float(self.total_remaining) / self.total_processed
|
||||
|
||||
|
@ -167,7 +167,6 @@ Worker = collections.namedtuple(
|
||||
|
||||
|
||||
def main() -> None:
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument(
|
||||
|
@@ -254,7 +254,7 @@ class Auth:
raise MissingClientTokenError()

async def validate_appservice_can_control_user_id(
self, app_service: ApplicationService, user_id: str
self, app_service: ApplicationService, user_id: str, allow_any: bool = False
) -> None:
"""Validates that the app service is allowed to control
the given user.
@@ -262,6 +262,7 @@ class Auth:
Args:
app_service: The app service that controls the user
user_id: The author MXID that the app service is controlling
allow_any: Allow the appservice to control any local user

Raises:
AuthError: If the application service is not allowed to control the user
@@ -273,7 +274,7 @@ class Auth:
if app_service.sender == user_id:
pass
# Check to make sure the app service is allowed to control the user
elif not app_service.is_interested_in_user(user_id):
elif not app_service.is_interested_in_user(user_id) and not allow_any:
raise AuthError(
403,
"Application service cannot masquerade as this user (%s)." % user_id,
|
||||
|
@ -213,7 +213,7 @@ def handle_startup_exception(e: Exception) -> NoReturn:
|
||||
def redirect_stdio_to_logs() -> None:
|
||||
streams = [("stdout", LogLevel.info), ("stderr", LogLevel.error)]
|
||||
|
||||
for (stream, level) in streams:
|
||||
for stream, level in streams:
|
||||
oldStream = getattr(sys, stream)
|
||||
loggingFile = LoggingFile(
|
||||
logger=twisted.logger.Logger(namespace=stream),
|
||||
|
@ -44,6 +44,7 @@ from synapse.storage.databases.main.event_push_actions import (
|
||||
)
|
||||
from synapse.storage.databases.main.events_worker import EventsWorkerStore
|
||||
from synapse.storage.databases.main.filtering import FilteringWorkerStore
|
||||
from synapse.storage.databases.main.media_repository import MediaRepositoryStore
|
||||
from synapse.storage.databases.main.profile import ProfileWorkerStore
|
||||
from synapse.storage.databases.main.push_rule import PushRulesWorkerStore
|
||||
from synapse.storage.databases.main.receipts import ReceiptsWorkerStore
|
||||
@ -86,6 +87,7 @@ class AdminCmdSlavedStore(
|
||||
RegistrationWorkerStore,
|
||||
RoomWorkerStore,
|
||||
ProfileWorkerStore,
|
||||
MediaRepositoryStore,
|
||||
):
|
||||
def __init__(
|
||||
self,
|
||||
@ -149,7 +151,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
|
||||
with open(events_file, "a") as f:
|
||||
for event in events:
|
||||
print(json.dumps(event.get_pdu_json()), file=f)
|
||||
json.dump(event.get_pdu_json(), fp=f)
|
||||
|
||||
def write_state(
|
||||
self, room_id: str, event_id: str, state: StateMap[EventBase]
|
||||
@ -162,7 +164,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
|
||||
with open(event_file, "a") as f:
|
||||
for event in state.values():
|
||||
print(json.dumps(event.get_pdu_json()), file=f)
|
||||
json.dump(event.get_pdu_json(), fp=f)
|
||||
|
||||
def write_invite(
|
||||
self, room_id: str, event: EventBase, state: StateMap[EventBase]
|
||||
@ -178,7 +180,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
|
||||
with open(invite_state, "a") as f:
|
||||
for event in state.values():
|
||||
print(json.dumps(event), file=f)
|
||||
json.dump(event, fp=f)
|
||||
|
||||
def write_knock(
|
||||
self, room_id: str, event: EventBase, state: StateMap[EventBase]
|
||||
@ -194,7 +196,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
|
||||
with open(knock_state, "a") as f:
|
||||
for event in state.values():
|
||||
print(json.dumps(event), file=f)
|
||||
json.dump(event, fp=f)
|
||||
|
||||
def write_profile(self, profile: JsonDict) -> None:
|
||||
user_directory = os.path.join(self.base_directory, "user_data")
|
||||
@ -202,7 +204,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
profile_file = os.path.join(user_directory, "profile")
|
||||
|
||||
with open(profile_file, "a") as f:
|
||||
print(json.dumps(profile), file=f)
|
||||
json.dump(profile, fp=f)
|
||||
|
||||
def write_devices(self, devices: List[JsonDict]) -> None:
|
||||
user_directory = os.path.join(self.base_directory, "user_data")
|
||||
@ -211,7 +213,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
|
||||
for device in devices:
|
||||
with open(device_file, "a") as f:
|
||||
print(json.dumps(device), file=f)
|
||||
json.dump(device, fp=f)
|
||||
|
||||
def write_connections(self, connections: List[JsonDict]) -> None:
|
||||
user_directory = os.path.join(self.base_directory, "user_data")
|
||||
@ -220,7 +222,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
|
||||
for connection in connections:
|
||||
with open(connection_file, "a") as f:
|
||||
print(json.dumps(connection), file=f)
|
||||
json.dump(connection, fp=f)
|
||||
|
||||
def write_account_data(
|
||||
self, file_name: str, account_data: Mapping[str, JsonDict]
|
||||
@ -233,7 +235,15 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
account_data_file = os.path.join(account_data_directory, file_name)
|
||||
|
||||
with open(account_data_file, "a") as f:
|
||||
print(json.dumps(account_data), file=f)
|
||||
json.dump(account_data, fp=f)
|
||||
|
||||
def write_media_id(self, media_id: str, media_metadata: JsonDict) -> None:
|
||||
file_directory = os.path.join(self.base_directory, "media_ids")
|
||||
os.makedirs(file_directory, exist_ok=True)
|
||||
media_id_file = os.path.join(file_directory, media_id)
|
||||
|
||||
with open(media_id_file, "w") as f:
|
||||
json.dump(media_metadata, fp=f)
|
||||
|
||||
def finished(self) -> str:
|
||||
return self.base_directory
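The exfiltration writer above switches from print(json.dumps(obj), file=f) to json.dump(obj, fp=f) when appending to the export files. One behavioural difference worth keeping in mind is that json.dump writes no trailing newline, so repeated appends produce concatenated JSON documents rather than one object per line; a small stdlib-only comparison:

import io, json

events = [{"event_id": "$a"}, {"event_id": "$b"}]

jsonl = io.StringIO()
for ev in events:
    print(json.dumps(ev), file=jsonl)      # old style: newline-delimited JSON

concatenated = io.StringIO()
for ev in events:
    json.dump(ev, fp=concatenated)         # new style: no separator is written

assert jsonl.getvalue() == '{"event_id": "$a"}\n{"event_id": "$b"}\n'
assert concatenated.getvalue() == '{"event_id": "$a"}{"event_id": "$b"}'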
|
||||
|
@ -219,7 +219,7 @@ def main() -> None:
|
||||
# memory space and don't need to repeat the work of loading the code!
|
||||
# Instead of using fork() directly, we use the multiprocessing library,
|
||||
# which uses fork() on Unix platforms.
|
||||
for (func, worker_args) in zip(worker_functions, args_by_worker):
|
||||
for func, worker_args in zip(worker_functions, args_by_worker):
|
||||
process = multiprocessing.Process(
|
||||
target=_worker_entrypoint, args=(func, proxy_reactor, worker_args)
|
||||
)
|
||||
|
@ -157,7 +157,6 @@ class GenericWorkerServer(HomeServer):
|
||||
DATASTORE_CLASS = GenericWorkerSlavedStore # type: ignore
|
||||
|
||||
def _listen_http(self, listener_config: ListenerConfig) -> None:
|
||||
|
||||
assert listener_config.http_options is not None
|
||||
|
||||
# We always include a health resource.
|
||||
|
@ -321,7 +321,6 @@ def setup(config_options: List[str]) -> SynapseHomeServer:
|
||||
and not config.registration.registrations_require_3pid
|
||||
and not config.registration.registration_requires_token
|
||||
):
|
||||
|
||||
raise ConfigError(
|
||||
"You have enabled open registration without any verification. This is a known vector for "
|
||||
"spam and abuse. If you would like to allow public registration, please consider adding email, "
|
||||
|
@ -35,6 +35,7 @@ from synapse.config import ( # noqa: F401
|
||||
jwt,
|
||||
key,
|
||||
logger,
|
||||
meow,
|
||||
metrics,
|
||||
modules,
|
||||
oembed,
|
||||
@ -90,6 +91,7 @@ class RootConfig:
|
||||
voip: voip.VoipConfig
|
||||
registration: registration.RegistrationConfig
|
||||
account_validity: account_validity.AccountValidityConfig
|
||||
meow: meow.MeowConfig
|
||||
metrics: metrics.MetricsConfig
|
||||
api: api.ApiConfig
|
||||
appservice: appservice.AppServiceConfig
|
||||
|
@ -22,7 +22,6 @@ from ._base import Config
|
||||
|
||||
|
||||
class ConsentConfig(Config):
|
||||
|
||||
section = "consent"
|
||||
|
||||
def __init__(self, *args: Any):
|
||||
|
@ -154,7 +154,6 @@ class DatabaseConfig(Config):
|
||||
logger.warning(NON_SQLITE_DATABASE_PATH_WARNING)
|
||||
|
||||
def set_databasepath(self, database_path: str) -> None:
|
||||
|
||||
if database_path != ":memory:":
|
||||
database_path = self.abspath(database_path)
|
||||
|
||||
|
@ -166,23 +166,20 @@ class ExperimentalConfig(Config):
|
||||
# MSC3391: Removing account data.
|
||||
self.msc3391_enabled = experimental.get("msc3391_enabled", False)
|
||||
|
||||
# MSC3925: do not replace events with their edits
|
||||
self.msc3925_inhibit_edit = experimental.get("msc3925_inhibit_edit", False)
|
||||
|
||||
# MSC3758: exact_event_match push rule condition
|
||||
self.msc3758_exact_event_match = experimental.get(
|
||||
"msc3758_exact_event_match", False
|
||||
)
|
||||
|
||||
# MSC3873: Disambiguate event_match keys.
|
||||
self.msc3783_escape_event_match_key = experimental.get(
|
||||
"msc3783_escape_event_match_key", False
|
||||
self.msc3873_escape_event_match_key = experimental.get(
|
||||
"msc3873_escape_event_match_key", False
|
||||
)
|
||||
|
||||
# MSC3952: Intentional mentions, this depends on MSC3758.
|
||||
# MSC3966: exact_event_property_contains push rule condition.
|
||||
self.msc3966_exact_event_property_contains = experimental.get(
|
||||
"msc3966_exact_event_property_contains", False
|
||||
)
|
||||
|
||||
# MSC3952: Intentional mentions, this depends on MSC3966.
|
||||
self.msc3952_intentional_mentions = (
|
||||
experimental.get("msc3952_intentional_mentions", False)
|
||||
and self.msc3758_exact_event_match
|
||||
and self.msc3966_exact_event_property_contains
|
||||
)
|
||||
|
||||
# MSC3959: Do not generate notifications for edits.
|
||||
@ -194,3 +191,6 @@ class ExperimentalConfig(Config):
|
||||
self.msc3966_exact_event_property_contains = experimental.get(
|
||||
"msc3966_exact_event_property_contains", False
|
||||
)
|
||||
|
||||
# MSC3967: Do not require UIA when first uploading cross signing keys
|
||||
self.msc3967_enabled = experimental.get("msc3967_enabled", False)
|
||||
|
@ -12,6 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from ._base import RootConfig
|
||||
from .meow import MeowConfig
|
||||
from .account_validity import AccountValidityConfig
|
||||
from .api import ApiConfig
|
||||
from .appservice import AppServiceConfig
|
||||
@ -56,8 +57,8 @@ from .workers import WorkerConfig
|
||||
|
||||
|
||||
class HomeServerConfig(RootConfig):
|
||||
|
||||
config_classes = [
|
||||
MeowConfig,
|
||||
ModulesConfig,
|
||||
ServerConfig,
|
||||
RetentionConfig,
|
||||
|
synapse/config/meow.py (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2020 Maunium
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from ._base import Config
|
||||
|
||||
|
||||
class MeowConfig(Config):
|
||||
"""Meow Configuration
|
||||
Configuration for disabling dumb limits in Synapse
|
||||
"""
|
||||
|
||||
section = "meow"
|
||||
|
||||
def read_config(self, config, **kwargs):
|
||||
meow_config = config.get("meow", {})
|
||||
self.validation_override = set(meow_config.get("validation_override", []))
|
||||
self.filter_override = set(meow_config.get("filter_override", []))
|
||||
self.timestamp_override = set(meow_config.get("timestamp_override", []))
|
||||
self.admin_api_register_invalid = meow_config.get(
|
||||
"admin_api_register_invalid", True
|
||||
)
|
||||
self.appservice_batch_send_any = meow_config.get(
|
||||
"appservice_batch_send_any", False
|
||||
)
|
||||
|
||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||
return """
|
||||
# Configuration for disabling dumb limits in Synapse
|
||||
#
|
||||
#meow:
|
||||
# # List of users who aren't subject to unnecessary validation in the C-S API.
|
||||
# validation_override:
|
||||
# - "@you:example.com"
|
||||
# # List of users who will get org.matrix.dummy_event and m.room.aliases events down /sync
|
||||
# filter_override:
|
||||
# - "@you:example.com"
|
||||
# # List of users who can use timestamp massaging without being appservices
|
||||
# timestamp_override:
|
||||
# - "@you:example.com"
|
||||
# # Whether or not the admin API should be able to register invalid user IDs.
|
||||
# admin_api_register_invalid: true
|
||||
# # Whether appservices should be allowed to use MSC2716 batch sending as any local user.
|
||||
# appservice_batch_send_any: false
|
||||
"""
|
@@ -46,7 +46,6 @@ class RatelimitConfig(Config):
section = "ratelimiting"

def read_config(self, config: JsonDict, **kwargs: Any) -> None:

# Load the new-style messages config if it exists. Otherwise fall back
# to the old method.
if "rc_message" in config:
@@ -87,9 +86,18 @@ class RatelimitConfig(Config):
defaults={"per_second": 0.1, "burst_count": 5},
)

# It is reasonable to login with a bunch of devices at once (i.e. when
# setting up an account), but it is *not* valid to continually be
# logging into new devices.
rc_login_config = config.get("rc_login", {})
self.rc_login_address = RatelimitSettings(rc_login_config.get("address", {}))
self.rc_login_account = RatelimitSettings(rc_login_config.get("account", {}))
self.rc_login_address = RatelimitSettings(
rc_login_config.get("address", {}),
defaults={"per_second": 0.003, "burst_count": 5},
)
self.rc_login_account = RatelimitSettings(
rc_login_config.get("account", {}),
defaults={"per_second": 0.003, "burst_count": 5},
)
self.rc_login_failed_attempts = RatelimitSettings(
rc_login_config.get("failed_attempts", {})
)
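The new rc_login defaults above are expressed as per_second and burst_count. As a rough illustration of what those two numbers mean for a token-bucket style limiter; this is a generic sketch, not Synapse's Ratelimiter.

import time

class TokenBucket:
    """Generic token bucket: burst_count tokens, refilled at per_second tokens/sec."""
    def __init__(self, per_second: float, burst_count: int):
        self.per_second = per_second
        self.burst_count = burst_count
        self.tokens = float(burst_count)
        self.last = time.monotonic()

    def allow(self) -> bool:
        now = time.monotonic()
        self.tokens = min(self.burst_count, self.tokens + (now - self.last) * self.per_second)
        self.last = now
        if self.tokens >= 1:
            self.tokens -= 1
            return True
        return False

# With per_second=0.003 and burst_count=5, five rapid attempts pass and the
# sixth has to wait on the order of minutes for a new token.
login_limiter = TokenBucket(per_second=0.003, burst_count=5)
results = [login_limiter.allow() for _ in range(6)]
assert results == [True, True, True, True, True, False]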
|
||||
|
@@ -47,10 +47,8 @@ THUMBNAIL_SIZE_YAML = """\
THUMBNAIL_SUPPORTED_MEDIA_FORMAT_MAP = {
"image/jpeg": "jpeg",
"image/jpg": "jpeg",
"image/webp": "jpeg",
# Thumbnails can only be jpeg or png. We choose png thumbnails for gif
# because it can have transparency.
"image/gif": "png",
"image/webp": "webp",
"image/gif": "webp",
"image/png": "png",
}

@@ -102,6 +100,10 @@ def parse_thumbnail_requirements(
requirement.append(
ThumbnailRequirement(width, height, method, "image/png")
)
elif thumbnail_format == "webp":
requirement.append(
ThumbnailRequirement(width, height, method, "image/webp")
)
else:
raise Exception(
"Unknown thumbnail mapping from %s to %s. This is a Synapse problem, please report!"
@@ -116,7 +118,6 @@ class ContentRepositoryConfig(Config):
section = "media"

def read_config(self, config: JsonDict, **kwargs: Any) -> None:

# Only enable the media repo if either the media repo is enabled or the
# current worker app is the media repo.
if (
@@ -179,11 +180,13 @@ class ContentRepositoryConfig(Config):
for i, provider_config in enumerate(storage_providers):
# We special case the module "file_system" so as not to need to
# expose FileStorageProviderBackend
if provider_config["module"] == "file_system":
provider_config["module"] = (
"synapse.rest.media.v1.storage_provider"
".FileStorageProviderBackend"
)
if (
provider_config["module"] == "file_system"
or provider_config["module"] == "synapse.rest.media.v1.storage_provider"
):
provider_config[
"module"
] = "synapse.media.storage_provider.FileStorageProviderBackend"

provider_class, parsed_config = load_module(
provider_config, ("media_storage_providers", "<item %i>" % i)
|
||||
|
@ -735,7 +735,6 @@ class ServerConfig(Config):
|
||||
listeners: Optional[List[dict]],
|
||||
**kwargs: Any,
|
||||
) -> str:
|
||||
|
||||
_, bind_port = parse_and_validate_server_name(server_name)
|
||||
if bind_port is not None:
|
||||
unsecure_port = bind_port - 400
|
||||
|
@ -30,7 +30,6 @@ class TlsConfig(Config):
|
||||
section = "tls"
|
||||
|
||||
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
|
||||
|
||||
self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
|
||||
self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))
|
||||
|
||||
|
@ -399,7 +399,7 @@ class Keyring:
|
||||
# We now convert the returned list of results into a map from server
|
||||
# name to key ID to FetchKeyResult, to return.
|
||||
to_return: Dict[str, Dict[str, FetchKeyResult]] = {}
|
||||
for (request, results) in zip(deduped_requests, results_per_request):
|
||||
for request, results in zip(deduped_requests, results_per_request):
|
||||
to_return_by_server = to_return.setdefault(request.server_name, {})
|
||||
for key_id, key_result in results.items():
|
||||
existing = to_return_by_server.get(key_id)
|
||||
|
@ -23,6 +23,7 @@ from synapse.types import JsonDict, StateMap
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.storage.controllers import StorageControllers
|
||||
from synapse.storage.databases import StateGroupDataStore
|
||||
from synapse.storage.databases.main import DataStore
|
||||
from synapse.types.state import StateFilter
|
||||
|
||||
@ -348,6 +349,54 @@ class UnpersistedEventContext(UnpersistedEventContextBase):
|
||||
partial_state: bool
|
||||
state_map_before_event: Optional[StateMap[str]] = None
|
||||
|
||||
@classmethod
|
||||
async def batch_persist_unpersisted_contexts(
|
||||
cls,
|
||||
events_and_context: List[Tuple[EventBase, "UnpersistedEventContextBase"]],
|
||||
room_id: str,
|
||||
last_known_state_group: int,
|
||||
datastore: "StateGroupDataStore",
|
||||
) -> List[Tuple[EventBase, EventContext]]:
|
||||
"""
|
||||
Takes a list of events and their associated unpersisted contexts and persists
|
||||
the unpersisted contexts, returning a list of events and persisted contexts.
|
||||
Note that all the events must be in a linear chain (ie a <- b <- c).
|
||||
|
||||
Args:
|
||||
events_and_context: A list of events and their unpersisted contexts
|
||||
room_id: the room_id for the events
|
||||
last_known_state_group: the last persisted state group
|
||||
datastore: a state datastore
|
||||
"""
|
||||
amended_events_and_context = await datastore.store_state_deltas_for_batched(
|
||||
events_and_context, room_id, last_known_state_group
|
||||
)
|
||||
|
||||
events_and_persisted_context = []
|
||||
for event, unpersisted_context in amended_events_and_context:
|
||||
if event.is_state():
|
||||
context = EventContext(
|
||||
storage=unpersisted_context._storage,
|
||||
state_group=unpersisted_context.state_group_after_event,
|
||||
state_group_before_event=unpersisted_context.state_group_before_event,
|
||||
state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
|
||||
partial_state=unpersisted_context.partial_state,
|
||||
prev_group=unpersisted_context.state_group_before_event,
|
||||
delta_ids=unpersisted_context.state_delta_due_to_event,
|
||||
)
|
||||
else:
|
||||
context = EventContext(
|
||||
storage=unpersisted_context._storage,
|
||||
state_group=unpersisted_context.state_group_after_event,
|
||||
state_group_before_event=unpersisted_context.state_group_before_event,
|
||||
state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
|
||||
partial_state=unpersisted_context.partial_state,
|
||||
prev_group=unpersisted_context.prev_group_for_state_group_before_event,
|
||||
delta_ids=unpersisted_context.delta_ids_to_state_group_before_event,
|
||||
)
|
||||
events_and_persisted_context.append((event, context))
|
||||
return events_and_persisted_context
|
||||
|
||||
async def get_prev_state_ids(
|
||||
self, state_filter: Optional["StateFilter"] = None
|
||||
) -> StateMap[str]:
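A hedged sketch of how the new batch_persist_unpersisted_contexts classmethod above might be called, based only on the signature and docstring shown here; the surrounding variable names and datastore object are assumptions.

# Illustrative only: persisting a linear chain of batched events in one go.
from synapse.events.snapshot import UnpersistedEventContext

async def persist_batch(events_and_unpersisted, room_id, last_known_state_group, datastore):
    # events_and_unpersisted: list of (EventBase, UnpersistedEventContextBase) pairs
    # built earlier in the batch; datastore is the state-group store from the signature.
    events_and_context = await UnpersistedEventContext.batch_persist_unpersisted_contexts(
        events_and_unpersisted,
        room_id,
        last_known_state_group,
        datastore,
    )
    # Each returned pair now carries a persisted EventContext with its state_group,
    # prev_group and delta_ids filled in from the unpersisted data.
    return events_and_context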
|
||||
|
@ -33,8 +33,8 @@ from typing_extensions import Literal
|
||||
import synapse
|
||||
from synapse.api.errors import Codes
|
||||
from synapse.logging.opentracing import trace
|
||||
from synapse.rest.media.v1._base import FileInfo
|
||||
from synapse.rest.media.v1.media_storage import ReadableFileWrapper
|
||||
from synapse.media._base import FileInfo
|
||||
from synapse.media.media_storage import ReadableFileWrapper
|
||||
from synapse.spam_checker_api import RegistrationBehaviour
|
||||
from synapse.types import JsonDict, RoomAlias, UserProfile
|
||||
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
|
||||
|
@ -45,6 +45,8 @@ CHECK_CAN_DEACTIVATE_USER_CALLBACK = Callable[[str, bool], Awaitable[bool]]
|
||||
ON_PROFILE_UPDATE_CALLBACK = Callable[[str, ProfileInfo, bool, bool], Awaitable]
|
||||
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK = Callable[[str, bool, bool], Awaitable]
|
||||
ON_THREEPID_BIND_CALLBACK = Callable[[str, str, str], Awaitable]
|
||||
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
|
||||
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
|
||||
|
||||
|
||||
def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
|
||||
@ -78,7 +80,6 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
|
||||
# correctly, we need to await its result. Therefore it doesn't make a lot of
|
||||
# sense to make it go through the run() wrapper.
|
||||
if f.__name__ == "check_event_allowed":
|
||||
|
||||
# We need to wrap check_event_allowed because its old form would return either
|
||||
# a boolean or a dict, but now we want to return the dict separately from the
|
||||
# boolean.
|
||||
@ -100,7 +101,6 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
|
||||
return wrap_check_event_allowed
|
||||
|
||||
if f.__name__ == "on_create_room":
|
||||
|
||||
# We need to wrap on_create_room because its old form would return a boolean
|
||||
# if the room creation is denied, but now we just want it to raise an
|
||||
# exception.
|
||||
@ -174,6 +174,12 @@ class ThirdPartyEventRules:
|
||||
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
|
||||
] = []
|
||||
self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = []
|
||||
self._on_add_user_third_party_identifier_callbacks: List[
|
||||
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
|
||||
] = []
|
||||
self._on_remove_user_third_party_identifier_callbacks: List[
|
||||
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
|
||||
] = []
|
||||
|
||||
def register_third_party_rules_callbacks(
|
||||
self,
|
||||
@ -193,6 +199,12 @@ class ThirdPartyEventRules:
|
||||
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
|
||||
] = None,
|
||||
on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None,
|
||||
on_add_user_third_party_identifier: Optional[
|
||||
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
|
||||
] = None,
|
||||
on_remove_user_third_party_identifier: Optional[
|
||||
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
|
||||
] = None,
|
||||
) -> None:
|
||||
"""Register callbacks from modules for each hook."""
|
||||
if check_event_allowed is not None:
|
||||
@ -230,6 +242,11 @@ class ThirdPartyEventRules:
|
||||
if on_threepid_bind is not None:
|
||||
self._on_threepid_bind_callbacks.append(on_threepid_bind)
|
||||
|
||||
if on_add_user_third_party_identifier is not None:
|
||||
self._on_add_user_third_party_identifier_callbacks.append(
|
||||
on_add_user_third_party_identifier
|
||||
)
|
||||
|
||||
async def check_event_allowed(
|
||||
self,
|
||||
event: EventBase,
|
||||
@ -513,6 +530,9 @@ class ThirdPartyEventRules:
|
||||
local homeserver, not when it's created on an identity server (and then kept track
|
||||
of so that it can be unbound on the same IS later on).
|
||||
|
||||
THIS MODULE CALLBACK METHOD HAS BEEN DEPRECATED. Please use the
|
||||
`on_add_user_third_party_identifier` callback method instead.
|
||||
|
||||
Args:
|
||||
user_id: the user being associated with the threepid.
|
||||
medium: the threepid's medium.
|
||||
@ -525,3 +545,44 @@ class ThirdPartyEventRules:
|
||||
logger.exception(
|
||||
"Failed to run module API callback %s: %s", callback, e
|
||||
)
|
||||
|
||||
async def on_add_user_third_party_identifier(
|
||||
self, user_id: str, medium: str, address: str
|
||||
) -> None:
|
||||
"""Called when an association between a user's Matrix ID and a third-party ID
|
||||
(email, phone number) has successfully been registered on the homeserver.
|
||||
|
||||
Args:
|
||||
user_id: The User ID included in the association.
|
||||
medium: The medium of the third-party ID (email, msisdn).
|
||||
address: The address of the third-party ID (i.e. an email address).
|
||||
"""
|
||||
for callback in self._on_add_user_third_party_identifier_callbacks:
|
||||
try:
|
||||
await callback(user_id, medium, address)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
"Failed to run module API callback %s: %s", callback, e
|
||||
)
|
||||
|
||||
async def on_remove_user_third_party_identifier(
|
||||
self, user_id: str, medium: str, address: str
|
||||
) -> None:
|
||||
"""Called when an association between a user's Matrix ID and a third-party ID
|
||||
(email, phone number) has been successfully removed on the homeserver.
|
||||
|
||||
This is called *after* any known bindings on identity servers for this
|
||||
association have been removed.
|
||||
|
||||
Args:
|
||||
user_id: The User ID included in the removed association.
|
||||
medium: The medium of the third-party ID (email, msisdn).
|
||||
address: The address of the third-party ID (i.e. an email address).
|
||||
"""
|
||||
for callback in self._on_remove_user_third_party_identifier_callbacks:
|
||||
try:
|
||||
await callback(user_id, medium, address)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
"Failed to run module API callback %s: %s", callback, e
|
||||
)
|
||||
|
@ -38,8 +38,7 @@ from synapse.api.constants import (
|
||||
)
|
||||
from synapse.api.errors import Codes, SynapseError
|
||||
from synapse.api.room_versions import RoomVersion
|
||||
from synapse.types import JsonDict
|
||||
from synapse.util.frozenutils import unfreeze
|
||||
from synapse.types import JsonDict, Requester
|
||||
|
||||
from . import EventBase
|
||||
|
||||
@ -317,8 +316,9 @@ class SerializeEventConfig:
|
||||
as_client_event: bool = True
|
||||
# Function to convert from federation format to client format
|
||||
event_format: Callable[[JsonDict], JsonDict] = format_event_for_client_v1
|
||||
# ID of the user's auth token - used for namespacing of transaction IDs
|
||||
token_id: Optional[int] = None
|
||||
# The entity that requested the event. This is used to determine whether to include
|
||||
# the transaction_id in the unsigned section of the event.
|
||||
requester: Optional[Requester] = None
|
||||
# List of event fields to include. If empty, all fields will be returned.
|
||||
only_event_fields: Optional[List[str]] = None
|
||||
# Some events can have stripped room state stored in the `unsigned` field.
|
||||
@ -368,11 +368,24 @@ def serialize_event(
|
||||
e.unsigned["redacted_because"], time_now_ms, config=config
|
||||
)
|
||||
|
||||
if config.token_id is not None:
|
||||
if config.token_id == getattr(e.internal_metadata, "token_id", None):
|
||||
txn_id = getattr(e.internal_metadata, "txn_id", None)
|
||||
if txn_id is not None:
|
||||
d["unsigned"]["transaction_id"] = txn_id
|
||||
# If we have a txn_id saved in the internal_metadata, we should include it in the
|
||||
# unsigned section of the event if it was sent by the same session as the one
|
||||
# requesting the event.
|
||||
# There is a special case for guests, because they only have one access token
|
||||
# without associated access_token_id, so we always include the txn_id for events
|
||||
# they sent.
|
||||
txn_id = getattr(e.internal_metadata, "txn_id", None)
|
||||
if txn_id is not None and config.requester is not None:
|
||||
event_token_id = getattr(e.internal_metadata, "token_id", None)
|
||||
if config.requester.user.to_string() == e.sender and (
|
||||
(
|
||||
event_token_id is not None
|
||||
and config.requester.access_token_id is not None
|
||||
and event_token_id == config.requester.access_token_id
|
||||
)
|
||||
or config.requester.is_guest
|
||||
):
|
||||
d["unsigned"]["transaction_id"] = txn_id
|
||||
|
||||
# invite_room_state and knock_room_state are a list of stripped room state events
|
||||
# that are meant to provide metadata about a room to an invitee/knocker. They are
|
||||
@ -403,14 +416,6 @@ class EventClientSerializer:
|
||||
clients.
|
||||
"""
|
||||
|
||||
def __init__(self, inhibit_replacement_via_edits: bool = False):
|
||||
"""
|
||||
Args:
|
||||
inhibit_replacement_via_edits: If this is set to True, then events are
|
||||
never replaced by their edits.
|
||||
"""
|
||||
self._inhibit_replacement_via_edits = inhibit_replacement_via_edits
|
||||
|
||||
def serialize_event(
|
||||
self,
|
||||
event: Union[JsonDict, EventBase],
|
||||
@ -418,7 +423,6 @@ class EventClientSerializer:
|
||||
*,
|
||||
config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG,
|
||||
bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None,
|
||||
apply_edits: bool = True,
|
||||
) -> JsonDict:
|
||||
"""Serializes a single event.
|
||||
|
||||
@ -428,10 +432,7 @@ class EventClientSerializer:
|
||||
config: Event serialization config
|
||||
bundle_aggregations: A map from event_id to the aggregations to be bundled
|
||||
into the event.
|
||||
apply_edits: Whether the content of the event should be modified to reflect
|
||||
any replacement in `bundle_aggregations[<event_id>].replace`.
|
||||
See also the `inhibit_replacement_via_edits` constructor arg: if that is
|
||||
set to True, then this argument is ignored.
|
||||
|
||||
Returns:
|
||||
The serialized event
|
||||
"""
|
||||
@ -450,38 +451,10 @@ class EventClientSerializer:
|
||||
config,
|
||||
bundle_aggregations,
|
||||
serialized_event,
|
||||
apply_edits=apply_edits,
|
||||
)
|
||||
|
||||
return serialized_event
|
||||
|
||||
def _apply_edit(
|
||||
self, orig_event: EventBase, serialized_event: JsonDict, edit: EventBase
|
||||
) -> None:
|
||||
"""Replace the content, preserving existing relations of the serialized event.
|
||||
|
||||
Args:
|
||||
orig_event: The original event.
|
||||
serialized_event: The original event, serialized. This is modified.
|
||||
edit: The event which edits the above.
|
||||
"""
|
||||
|
||||
# Ensure we take copies of the edit content, otherwise we risk modifying
|
||||
# the original event.
|
||||
edit_content = edit.content.copy()
|
||||
|
||||
# Unfreeze the event content if necessary, so that we may modify it below
|
||||
edit_content = unfreeze(edit_content)
|
||||
serialized_event["content"] = edit_content.get("m.new_content", {})
|
||||
|
||||
# Check for existing relations
|
||||
relates_to = orig_event.content.get("m.relates_to")
|
||||
if relates_to:
|
||||
# Keep the relations, ensuring we use a dict copy of the original
|
||||
serialized_event["content"]["m.relates_to"] = relates_to.copy()
|
||||
else:
|
||||
serialized_event["content"].pop("m.relates_to", None)
|
||||
|
||||
def _inject_bundled_aggregations(
|
||||
self,
|
||||
event: EventBase,
|
||||
@ -489,7 +462,6 @@ class EventClientSerializer:
|
||||
config: SerializeEventConfig,
|
||||
bundled_aggregations: Dict[str, "BundledAggregations"],
|
||||
serialized_event: JsonDict,
|
||||
apply_edits: bool,
|
||||
) -> None:
|
||||
"""Potentially injects bundled aggregations into the unsigned portion of the serialized event.
|
||||
|
||||
@ -504,9 +476,6 @@ class EventClientSerializer:
|
||||
While serializing the bundled aggregations this map may be searched
|
||||
again for additional events in a recursive manner.
|
||||
serialized_event: The serialized event which may be modified.
|
||||
apply_edits: Whether the content of the event should be modified to reflect
|
||||
any replacement in `aggregations.replace` (subject to the
|
||||
`inhibit_replacement_via_edits` constructor arg).
|
||||
"""
|
||||
|
||||
# We have already checked that aggregations exist for this event.
|
||||
@ -516,22 +485,12 @@ class EventClientSerializer:
|
||||
# being serialized.
|
||||
serialized_aggregations = {}
|
||||
|
||||
if event_aggregations.annotations:
|
||||
serialized_aggregations[
|
||||
RelationTypes.ANNOTATION
|
||||
] = event_aggregations.annotations
|
||||
|
||||
if event_aggregations.references:
|
||||
serialized_aggregations[
|
||||
RelationTypes.REFERENCE
|
||||
] = event_aggregations.references
|
||||
|
||||
if event_aggregations.replace:
|
||||
# If there is an edit, optionally apply it to the event.
|
||||
edit = event_aggregations.replace
|
||||
if apply_edits and not self._inhibit_replacement_via_edits:
|
||||
self._apply_edit(event, serialized_event, edit)
|
||||
|
||||
# Include information about it in the relations dict.
|
||||
#
|
||||
# Matrix spec v1.5 (https://spec.matrix.org/v1.5/client-server-api/#server-side-aggregation-of-mreplace-relationships)
|
||||
@ -539,10 +498,7 @@ class EventClientSerializer:
|
||||
# `sender` of the edit; however MSC3925 proposes extending it to the whole
|
||||
# of the edit, which is what we do here.
|
||||
serialized_aggregations[RelationTypes.REPLACE] = self.serialize_event(
|
||||
edit,
|
||||
time_now,
|
||||
config=config,
|
||||
apply_edits=False,
|
||||
event_aggregations.replace, time_now, config=config
|
||||
)
|
||||
|
||||
# Include any threaded replies to this event.
|
||||
|
@ -43,7 +43,7 @@ class EventValidator:
|
||||
event: The event to validate.
|
||||
config: The homeserver's configuration.
|
||||
"""
|
||||
self.validate_builder(event)
|
||||
self.validate_builder(event, config)
|
||||
|
||||
if event.format_version == EventFormatVersions.ROOM_V1_V2:
|
||||
EventID.from_string(event.event_id)
|
||||
@ -74,6 +74,12 @@ class EventValidator:
|
||||
# Note that only the client controlled portion of the event is
|
||||
# checked, since we trust the portions of the event we created.
|
||||
validate_canonicaljson(event.content)
|
||||
if not 0 < event.origin_server_ts < 2**53:
|
||||
raise SynapseError(400, "Event timestamp is out of range")
|
||||
|
||||
# meow: allow specific users to send potentially dangerous events.
|
||||
if event.sender in config.meow.validation_override:
|
||||
return
|
||||
|
||||
if event.type == EventTypes.Aliases:
|
||||
if "aliases" in event.content:
|
||||
@ -165,7 +171,9 @@ class EventValidator:
|
||||
errcode=Codes.BAD_JSON,
|
||||
)
|
||||
|
||||
def validate_builder(self, event: Union[EventBase, EventBuilder]) -> None:
|
||||
def validate_builder(
|
||||
self, event: Union[EventBase, EventBuilder], config: HomeServerConfig
|
||||
) -> None:
|
||||
"""Validates that the builder/event has roughly the right format. Only
|
||||
checks values that we expect a proto event to have, rather than all the
|
||||
fields an event would have
|
||||
@ -183,6 +191,10 @@ class EventValidator:
|
||||
RoomID.from_string(event.room_id)
|
||||
UserID.from_string(event.sender)
|
||||
|
||||
# meow: allow specific users to send so-called invalid events
|
||||
if event.sender in config.meow.validation_override:
|
||||
return
|
||||
|
||||
if event.type == EventTypes.Message:
|
||||
strings = ["body", "msgtype"]
|
||||
|
||||
|
@ -314,7 +314,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
|
||||
# stream position.
|
||||
keyed_edus = {v: k for k, v in self.keyed_edu_changed.items()[i:j]}
|
||||
|
||||
for ((destination, edu_key), pos) in keyed_edus.items():
|
||||
for (destination, edu_key), pos in keyed_edus.items():
|
||||
rows.append(
|
||||
(
|
||||
pos,
|
||||
@ -329,7 +329,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
|
||||
j = self.edus.bisect_right(to_token) + 1
|
||||
edus = self.edus.items()[i:j]
|
||||
|
||||
for (pos, edu) in edus:
|
||||
for pos, edu in edus:
|
||||
rows.append((pos, EduRow(edu)))
|
||||
|
||||
# Sort rows based on pos
|
||||
|
@ -155,9 +155,6 @@ class AccountDataHandler:
|
||||
max_stream_id = await self._store.remove_account_data_for_room(
|
||||
user_id, room_id, account_data_type
|
||||
)
|
||||
if max_stream_id is None:
|
||||
# The referenced account data did not exist, so no delete occurred.
|
||||
return None
|
||||
|
||||
self._notifier.on_new_event(
|
||||
StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]
|
||||
@ -230,9 +227,6 @@ class AccountDataHandler:
|
||||
max_stream_id = await self._store.remove_account_data_for_user(
|
||||
user_id, account_data_type
|
||||
)
|
||||
if max_stream_id is None:
|
||||
# The referenced account data did not exist, so no delete occurred.
|
||||
return None
|
||||
|
||||
self._notifier.on_new_event(
|
||||
StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]
|
||||
@ -248,7 +242,6 @@ class AccountDataHandler:
|
||||
instance_name=random.choice(self._account_data_writers),
|
||||
user_id=user_id,
|
||||
account_data_type=account_data_type,
|
||||
content={},
|
||||
)
|
||||
return response["max_stream_id"]
|
||||
|
||||
|
@ -252,16 +252,19 @@ class AdminHandler:
|
||||
profile = await self.get_user(UserID.from_string(user_id))
|
||||
if profile is not None:
|
||||
writer.write_profile(profile)
|
||||
logger.info("[%s] Written profile", user_id)
|
||||
|
||||
# Get all devices the user has
|
||||
devices = await self._device_handler.get_devices_by_user(user_id)
|
||||
writer.write_devices(devices)
|
||||
logger.info("[%s] Written %s devices", user_id, len(devices))
|
||||
|
||||
# Get all connections the user has
|
||||
connections = await self.get_whois(UserID.from_string(user_id))
|
||||
writer.write_connections(
|
||||
connections["devices"][""]["sessions"][0]["connections"]
|
||||
)
|
||||
logger.info("[%s] Written %s connections", user_id, len(connections))
|
||||
|
||||
# Get all account data the user has global and in rooms
|
||||
global_data = await self._store.get_global_account_data_for_user(user_id)
|
||||
@ -269,6 +272,29 @@ class AdminHandler:
|
||||
writer.write_account_data("global", global_data)
|
||||
for room_id in by_room_data:
|
||||
writer.write_account_data(room_id, by_room_data[room_id])
|
||||
logger.info(
|
||||
"[%s] Written account data for %s rooms", user_id, len(by_room_data)
|
||||
)
|
||||
|
||||
# Get all media ids the user has
|
||||
limit = 100
|
||||
start = 0
|
||||
while True:
|
||||
media_ids, total = await self._store.get_local_media_by_user_paginate(
|
||||
start, limit, user_id
|
||||
)
|
||||
for media in media_ids:
|
||||
writer.write_media_id(media["media_id"], media)
|
||||
|
||||
logger.info(
|
||||
"[%s] Written %d media_ids of %s",
|
||||
user_id,
|
||||
(start + len(media_ids)),
|
||||
total,
|
||||
)
|
||||
if (start + limit) >= total:
|
||||
break
|
||||
start += limit
|
||||
|
||||
return writer.finished()
|
||||
|
||||
@ -359,6 +385,18 @@ class ExfiltrationWriter(metaclass=abc.ABCMeta):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@abc.abstractmethod
|
||||
def write_media_id(self, media_id: str, media_metadata: JsonDict) -> None:
|
||||
"""Write the media's metadata of a user.
|
||||
Exports only the metadata, as this can be fetched from the database via
|
||||
read only. In order to access the files, a connection to the correct
|
||||
media repository would be required.
|
||||
|
||||
Args:
|
||||
media_id: ID of the media.
|
||||
media_metadata: Metadata of one media file.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def finished(self) -> Any:
|
||||
"""Called when all data has successfully been exported and written.
|
||||
|
@ -737,7 +737,7 @@ class ApplicationServicesHandler:
|
||||
)
|
||||
|
||||
ret = []
|
||||
for (success, result) in results:
|
||||
for success, result in results:
|
||||
if success:
|
||||
ret.extend(result)
|
||||
|
||||
|
@ -815,7 +815,6 @@ class AuthHandler:
|
||||
now_ms = self._clock.time_msec()
|
||||
|
||||
if existing_token.expiry_ts is not None and existing_token.expiry_ts < now_ms:
|
||||
|
||||
raise SynapseError(
|
||||
HTTPStatus.FORBIDDEN,
|
||||
"The supplied refresh token has expired",
|
||||
@ -1543,6 +1542,17 @@ class AuthHandler:
|
||||
async def add_threepid(
|
||||
self, user_id: str, medium: str, address: str, validated_at: int
|
||||
) -> None:
|
||||
"""
|
||||
Adds an association between a user's Matrix ID and a third-party ID (email,
|
||||
phone number).
|
||||
|
||||
Args:
|
||||
user_id: The ID of the user to associate.
|
||||
medium: The medium of the third-party ID (email, msisdn).
|
||||
address: The address of the third-party ID (i.e. an email address).
|
||||
validated_at: The timestamp in ms of when the validation that the user owns
|
||||
this third-party ID occurred.
|
||||
"""
|
||||
# check if medium has a valid value
|
||||
if medium not in ["email", "msisdn"]:
|
||||
raise SynapseError(
|
||||
@ -1567,42 +1577,44 @@ class AuthHandler:
|
||||
user_id, medium, address, validated_at, self.hs.get_clock().time_msec()
|
||||
)
|
||||
|
||||
# Inform Synapse modules that a 3PID association has been created.
|
||||
await self._third_party_rules.on_add_user_third_party_identifier(
|
||||
user_id, medium, address
|
||||
)
|
||||
|
||||
# Deprecated method for informing Synapse modules that a 3PID association
|
||||
# has successfully been created.
|
||||
await self._third_party_rules.on_threepid_bind(user_id, medium, address)
|
||||
|
||||
async def delete_threepid(
|
||||
self, user_id: str, medium: str, address: str, id_server: Optional[str] = None
|
||||
) -> bool:
|
||||
"""Attempts to unbind the 3pid on the identity servers and deletes it
|
||||
from the local database.
|
||||
async def delete_local_threepid(
|
||||
self, user_id: str, medium: str, address: str
|
||||
) -> None:
|
||||
"""Deletes an association between a third-party ID and a user ID from the local
|
||||
database. This method does not unbind the association from any identity servers.
|
||||
|
||||
If `medium` is 'email' and a pusher is associated with this third-party ID, the
|
||||
pusher will also be deleted.
|
||||
|
||||
Args:
|
||||
user_id: ID of user to remove the 3pid from.
|
||||
medium: The medium of the 3pid being removed: "email" or "msisdn".
|
||||
address: The 3pid address to remove.
|
||||
id_server: Use the given identity server when unbinding
|
||||
any threepids. If None then will attempt to unbind using the
|
||||
identity server specified when binding (if known).
|
||||
|
||||
Returns:
|
||||
Returns True if successfully unbound the 3pid on
|
||||
the identity server, False if identity server doesn't support the
|
||||
unbind API.
|
||||
"""
|
||||
|
||||
# 'Canonicalise' email addresses as per above
|
||||
if medium == "email":
|
||||
address = canonicalise_email(address)
|
||||
|
||||
result = await self.hs.get_identity_handler().try_unbind_threepid(
|
||||
user_id, medium, address, id_server
|
||||
await self.store.user_delete_threepid(user_id, medium, address)
|
||||
|
||||
# Inform Synapse modules that a 3PID association has been deleted.
|
||||
await self._third_party_rules.on_remove_user_third_party_identifier(
|
||||
user_id, medium, address
|
||||
)
|
||||
|
||||
await self.store.user_delete_threepid(user_id, medium, address)
|
||||
if medium == "email":
|
||||
await self.store.delete_pusher_by_app_id_pushkey_user_id(
|
||||
app_id="m.email", pushkey=address, user_id=user_id
|
||||
)
|
||||
return result
|
||||
|
||||
async def hash(self, password: str) -> str:
|
||||
"""Computes a secure hash of password.
|
||||
@ -2259,7 +2271,6 @@ class PasswordAuthProvider:
|
||||
async def on_logged_out(
|
||||
self, user_id: str, device_id: Optional[str], access_token: str
|
||||
) -> None:
|
||||
|
||||
# call all of the on_logged_out callbacks
|
||||
for callback in self.on_logged_out_callbacks:
|
||||
try:
|
||||
|
@ -100,26 +100,28 @@ class DeactivateAccountHandler:
|
||||
# unbinding
|
||||
identity_server_supports_unbinding = True
|
||||
|
||||
# Retrieve the 3PIDs this user has bound to an identity server
|
||||
threepids = await self.store.user_get_bound_threepids(user_id)
|
||||
|
||||
for threepid in threepids:
|
||||
# Attempt to unbind any known bound threepids to this account from identity
|
||||
# server(s).
|
||||
bound_threepids = await self.store.user_get_bound_threepids(user_id)
|
||||
for threepid in bound_threepids:
|
||||
try:
|
||||
result = await self._identity_handler.try_unbind_threepid(
|
||||
user_id, threepid["medium"], threepid["address"], id_server
|
||||
)
|
||||
identity_server_supports_unbinding &= result
|
||||
except Exception:
|
||||
# Do we want this to be a fatal error or should we carry on?
|
||||
logger.exception("Failed to remove threepid from ID server")
|
||||
raise SynapseError(400, "Failed to remove threepid from ID server")
|
||||
await self.store.user_delete_threepid(
|
||||
|
||||
identity_server_supports_unbinding &= result
|
||||
|
||||
# Remove any local threepid associations for this account.
|
||||
local_threepids = await self.store.user_get_threepids(user_id)
|
||||
for threepid in local_threepids:
|
||||
await self._auth_handler.delete_local_threepid(
|
||||
user_id, threepid["medium"], threepid["address"]
|
||||
)
|
||||
|
||||
# Remove all 3PIDs this user has bound to the homeserver
|
||||
await self.store.user_delete_threepids(user_id)
|
||||
|
||||
# delete any devices belonging to the user, which will also
|
||||
# delete corresponding access tokens.
|
||||
await self._device_handler.delete_all_devices_for_user(user_id)
|
||||
|
@ -71,9 +71,11 @@ class DirectoryHandler:
|
||||
) -> None:
|
||||
# general association creation for both human users and app services
|
||||
|
||||
for wchar in string.whitespace:
|
||||
if wchar in room_alias.localpart:
|
||||
raise SynapseError(400, "Invalid characters in room alias")
|
||||
# meow: allow specific users to include anything in room aliases
|
||||
if creator not in self.config.meow.validation_override:
|
||||
for wchar in string.whitespace:
|
||||
if wchar in room_alias.localpart:
|
||||
raise SynapseError(400, "Invalid characters in room alias")
|
||||
|
||||
if ":" in room_alias.localpart:
|
||||
raise SynapseError(400, "Invalid character in room alias localpart: ':'.")
|
||||
@ -118,7 +120,10 @@ class DirectoryHandler:
|
||||
user_id = requester.user.to_string()
|
||||
room_alias_str = room_alias.to_string()
|
||||
|
||||
if len(room_alias_str) > MAX_ALIAS_LENGTH:
|
||||
if (
|
||||
user_id not in self.hs.config.meow.validation_override
|
||||
and len(room_alias_str) > MAX_ALIAS_LENGTH
|
||||
):
|
||||
raise SynapseError(
|
||||
400,
|
||||
"Can't create aliases longer than %s characters" % MAX_ALIAS_LENGTH,
|
||||
@ -497,9 +502,11 @@ class DirectoryHandler:
|
||||
raise SynapseError(403, "Not allowed to publish room")
|
||||
|
||||
# Check if publishing is blocked by a third party module
|
||||
allowed_by_third_party_rules = await (
|
||||
self.third_party_event_rules.check_visibility_can_be_modified(
|
||||
room_id, visibility
|
||||
allowed_by_third_party_rules = (
|
||||
await (
|
||||
self.third_party_event_rules.check_visibility_can_be_modified(
|
||||
room_id, visibility
|
||||
)
|
||||
)
|
||||
)
|
||||
if not allowed_by_third_party_rules:
|
||||
|
@ -1301,6 +1301,20 @@ class E2eKeysHandler:
|
||||
|
||||
return desired_key_data
|
||||
|
||||
async def is_cross_signing_set_up_for_user(self, user_id: str) -> bool:
|
||||
"""Checks if the user has cross-signing set up
|
||||
|
||||
Args:
|
||||
user_id: The user to check
|
||||
|
||||
Returns:
|
||||
True if the user has cross-signing set up, False otherwise
|
||||
"""
|
||||
existing_master_key = await self.store.get_e2e_cross_signing_key(
|
||||
user_id, "master"
|
||||
)
|
||||
return existing_master_key is not None
|
||||
|
||||
|
||||
def _check_cross_signing_key(
|
||||
key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
|
||||
|
@ -188,7 +188,6 @@ class E2eRoomKeysHandler:
|
||||
|
||||
# XXX: perhaps we should use a finer grained lock here?
|
||||
async with self._upload_linearizer.queue(user_id):
|
||||
|
||||
# Check that the version we're trying to upload is the current version
|
||||
try:
|
||||
version_info = await self.store.get_e2e_room_keys_version_info(user_id)
|
||||
|
@ -236,7 +236,6 @@ class EventAuthHandler:
|
||||
# in any of them.
|
||||
allowed_rooms = await self.get_rooms_that_allow_join(state_ids)
|
||||
if not await self.is_user_in_rooms(allowed_rooms, user_id):
|
||||
|
||||
# If this is a remote request, the user might be in an allowed room
|
||||
# that we do not know about.
|
||||
if get_domain_from_id(user_id) != self._server_name:
|
||||
|
@ -23,7 +23,7 @@ from synapse.events.utils import SerializeEventConfig
|
||||
from synapse.handlers.presence import format_user_presence_state
|
||||
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
|
||||
from synapse.streams.config import PaginationConfig
|
||||
from synapse.types import JsonDict, UserID
|
||||
from synapse.types import JsonDict, Requester, UserID
|
||||
from synapse.visibility import filter_events_for_client
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@ -46,13 +46,12 @@ class EventStreamHandler:
|
||||
|
||||
async def get_stream(
|
||||
self,
|
||||
auth_user_id: str,
|
||||
requester: Requester,
|
||||
pagin_config: PaginationConfig,
|
||||
timeout: int = 0,
|
||||
as_client_event: bool = True,
|
||||
affect_presence: bool = True,
|
||||
room_id: Optional[str] = None,
|
||||
is_guest: bool = False,
|
||||
) -> JsonDict:
|
||||
"""Fetches the events stream for a given user."""
|
||||
|
||||
@ -62,13 +61,12 @@ class EventStreamHandler:
|
||||
raise SynapseError(403, "This room has been blocked on this server")
|
||||
|
||||
# send any outstanding server notices to the user.
|
||||
await self._server_notices_sender.on_user_syncing(auth_user_id)
|
||||
await self._server_notices_sender.on_user_syncing(requester.user.to_string())
|
||||
|
||||
auth_user = UserID.from_string(auth_user_id)
|
||||
presence_handler = self.hs.get_presence_handler()
|
||||
|
||||
context = await presence_handler.user_syncing(
|
||||
auth_user_id,
|
||||
requester.user.to_string(),
|
||||
affect_presence=affect_presence,
|
||||
presence_state=PresenceState.ONLINE,
|
||||
)
|
||||
@ -82,10 +80,10 @@ class EventStreamHandler:
|
||||
timeout = random.randint(int(timeout * 0.9), int(timeout * 1.1))
|
||||
|
||||
stream_result = await self.notifier.get_events_for(
|
||||
auth_user,
|
||||
requester.user,
|
||||
pagin_config,
|
||||
timeout,
|
||||
is_guest=is_guest,
|
||||
is_guest=requester.is_guest,
|
||||
explicit_room_id=room_id,
|
||||
)
|
||||
events = stream_result.events
|
||||
@ -102,7 +100,7 @@ class EventStreamHandler:
|
||||
if event.membership != Membership.JOIN:
|
||||
continue
|
||||
# Send down presence.
|
||||
if event.state_key == auth_user_id:
|
||||
if event.state_key == requester.user.to_string():
|
||||
# Send down presence for everyone in the room.
|
||||
users: Iterable[str] = await self.store.get_users_in_room(
|
||||
event.room_id
|
||||
@ -124,7 +122,9 @@ class EventStreamHandler:
|
||||
chunks = self._event_serializer.serialize_events(
|
||||
events,
|
||||
time_now,
|
||||
config=SerializeEventConfig(as_client_event=as_client_event),
|
||||
config=SerializeEventConfig(
|
||||
as_client_event=as_client_event, requester=requester
|
||||
),
|
||||
)
|
||||
|
||||
chunk = {
|
||||
|
@ -1417,7 +1417,7 @@ class FederationHandler:
|
||||
room_version_obj, event_dict
|
||||
)
|
||||
|
||||
EventValidator().validate_builder(builder)
|
||||
EventValidator().validate_builder(builder, self.hs.config)
|
||||
|
||||
# Try several times, it could fail with PartialStateConflictError
|
||||
# in send_membership_event, cf comment in except block.
|
||||
@ -1586,7 +1586,7 @@ class FederationHandler:
|
||||
builder = self.event_builder_factory.for_room_version(
|
||||
room_version_obj, event_dict
|
||||
)
|
||||
EventValidator().validate_builder(builder)
|
||||
EventValidator().validate_builder(builder, self.hs.config)
|
||||
|
||||
(
|
||||
event,
|
||||
|
@ -124,7 +124,6 @@ class InitialSyncHandler:
|
||||
as_client_event: bool = True,
|
||||
include_archived: bool = False,
|
||||
) -> JsonDict:
|
||||
|
||||
memberships = [Membership.INVITE, Membership.JOIN]
|
||||
if include_archived:
|
||||
memberships.append(Membership.LEAVE)
|
||||
@ -319,11 +318,9 @@ class InitialSyncHandler:
|
||||
)
|
||||
is_peeking = member_event_id is None
|
||||
|
||||
user_id = requester.user.to_string()
|
||||
|
||||
if membership == Membership.JOIN:
|
||||
result = await self._room_initial_sync_joined(
|
||||
user_id, room_id, pagin_config, membership, is_peeking
|
||||
requester, room_id, pagin_config, membership, is_peeking
|
||||
)
|
||||
elif membership == Membership.LEAVE:
|
||||
# The member_event_id will always be available if membership is set
|
||||
@ -331,10 +328,16 @@ class InitialSyncHandler:
|
||||
assert member_event_id
|
||||
|
||||
result = await self._room_initial_sync_parted(
|
||||
user_id, room_id, pagin_config, membership, member_event_id, is_peeking
|
||||
requester,
|
||||
room_id,
|
||||
pagin_config,
|
||||
membership,
|
||||
member_event_id,
|
||||
is_peeking,
|
||||
)
|
||||
|
||||
account_data_events = []
|
||||
user_id = requester.user.to_string()
|
||||
tags = await self.store.get_tags_for_room(user_id, room_id)
|
||||
if tags:
|
||||
account_data_events.append(
|
||||
@ -351,7 +354,7 @@ class InitialSyncHandler:
|
||||
|
||||
async def _room_initial_sync_parted(
|
||||
self,
|
||||
user_id: str,
|
||||
requester: Requester,
|
||||
room_id: str,
|
||||
pagin_config: PaginationConfig,
|
||||
membership: str,
|
||||
@ -370,13 +373,17 @@ class InitialSyncHandler:
|
||||
)
|
||||
|
||||
messages = await filter_events_for_client(
|
||||
self._storage_controllers, user_id, messages, is_peeking=is_peeking
|
||||
self._storage_controllers,
|
||||
requester.user.to_string(),
|
||||
messages,
|
||||
is_peeking=is_peeking,
|
||||
)
|
||||
|
||||
start_token = StreamToken.START.copy_and_replace(StreamKeyType.ROOM, token)
|
||||
end_token = StreamToken.START.copy_and_replace(StreamKeyType.ROOM, stream_token)
|
||||
|
||||
time_now = self.clock.time_msec()
|
||||
serialize_options = SerializeEventConfig(requester=requester)
|
||||
|
||||
return {
|
||||
"membership": membership,
|
||||
@ -384,14 +391,18 @@ class InitialSyncHandler:
|
||||
"messages": {
|
||||
"chunk": (
|
||||
# Don't bundle aggregations as this is a deprecated API.
|
||||
self._event_serializer.serialize_events(messages, time_now)
|
||||
self._event_serializer.serialize_events(
|
||||
messages, time_now, config=serialize_options
|
||||
)
|
||||
),
|
||||
"start": await start_token.to_string(self.store),
|
||||
"end": await end_token.to_string(self.store),
|
||||
},
|
||||
"state": (
|
||||
# Don't bundle aggregations as this is a deprecated API.
|
||||
self._event_serializer.serialize_events(room_state.values(), time_now)
|
||||
self._event_serializer.serialize_events(
|
||||
room_state.values(), time_now, config=serialize_options
|
||||
)
|
||||
),
|
||||
"presence": [],
|
||||
"receipts": [],
|
||||
@ -399,7 +410,7 @@ class InitialSyncHandler:
|
||||
|
||||
async def _room_initial_sync_joined(
|
||||
self,
|
||||
user_id: str,
|
||||
requester: Requester,
|
||||
room_id: str,
|
||||
pagin_config: PaginationConfig,
|
||||
membership: str,
|
||||
@ -411,9 +422,12 @@ class InitialSyncHandler:
|
||||
|
||||
# TODO: These concurrently
|
||||
time_now = self.clock.time_msec()
|
||||
serialize_options = SerializeEventConfig(requester=requester)
|
||||
# Don't bundle aggregations as this is a deprecated API.
|
||||
state = self._event_serializer.serialize_events(
|
||||
current_state.values(), time_now
|
||||
current_state.values(),
|
||||
time_now,
|
||||
config=serialize_options,
|
||||
)
|
||||
|
||||
now_token = self.hs.get_event_sources().get_current_token()
|
||||
@ -451,7 +465,10 @@ class InitialSyncHandler:
|
||||
if not receipts:
|
||||
return []
|
||||
|
||||
return ReceiptEventSource.filter_out_private_receipts(receipts, user_id)
|
||||
return ReceiptEventSource.filter_out_private_receipts(
|
||||
receipts,
|
||||
requester.user.to_string(),
|
||||
)
|
||||
|
||||
presence, receipts, (messages, token) = await make_deferred_yieldable(
|
||||
gather_results(
|
||||
@ -470,20 +487,23 @@ class InitialSyncHandler:
|
||||
)
|
||||
|
||||
messages = await filter_events_for_client(
|
||||
self._storage_controllers, user_id, messages, is_peeking=is_peeking
|
||||
self._storage_controllers,
|
||||
requester.user.to_string(),
|
||||
messages,
|
||||
is_peeking=is_peeking,
|
||||
)
|
||||
|
||||
start_token = now_token.copy_and_replace(StreamKeyType.ROOM, token)
|
||||
end_token = now_token
|
||||
|
||||
time_now = self.clock.time_msec()
|
||||
|
||||
ret = {
|
||||
"room_id": room_id,
|
||||
"messages": {
|
||||
"chunk": (
|
||||
# Don't bundle aggregations as this is a deprecated API.
|
||||
self._event_serializer.serialize_events(messages, time_now)
|
||||
self._event_serializer.serialize_events(
|
||||
messages, time_now, config=serialize_options
|
||||
)
|
||||
),
|
||||
"start": await start_token.to_string(self.store),
|
||||
"end": await end_token.to_string(self.store),
|
||||
|
@ -50,7 +50,7 @@ from synapse.event_auth import validate_event_for_room_version
|
||||
from synapse.events import EventBase, relation_from_event
|
||||
from synapse.events.builder import EventBuilder
|
||||
from synapse.events.snapshot import EventContext, UnpersistedEventContextBase
|
||||
from synapse.events.utils import maybe_upsert_event_field
|
||||
from synapse.events.utils import SerializeEventConfig, maybe_upsert_event_field
|
||||
from synapse.events.validator import EventValidator
|
||||
from synapse.handlers.directory import DirectoryHandler
|
||||
from synapse.logging import opentracing
|
||||
@ -245,8 +245,11 @@ class MessageHandler:
|
||||
)
|
||||
room_state = room_state_events[membership_event_id]
|
||||
|
||||
now = self.clock.time_msec()
|
||||
events = self._event_serializer.serialize_events(room_state.values(), now)
|
||||
events = self._event_serializer.serialize_events(
|
||||
room_state.values(),
|
||||
self.clock.time_msec(),
|
||||
config=SerializeEventConfig(requester=requester),
|
||||
)
|
||||
return events
|
||||
|
||||
async def _user_can_see_state_at_event(
|
||||
@ -574,7 +577,7 @@ class EventCreationHandler:
|
||||
state_map: Optional[StateMap[str]] = None,
|
||||
for_batch: bool = False,
|
||||
current_state_group: Optional[int] = None,
|
||||
) -> Tuple[EventBase, EventContext]:
|
||||
) -> Tuple[EventBase, UnpersistedEventContextBase]:
|
||||
"""
|
||||
Given a dict from a client, create a new event. If bool for_batch is true, will
|
||||
create an event using the prev_event_ids, and will create an event context for
|
||||
@ -669,7 +672,7 @@ class EventCreationHandler:
|
||||
room_version_obj, event_dict
|
||||
)
|
||||
|
||||
self.validator.validate_builder(builder)
|
||||
self.validator.validate_builder(builder, self.config)
|
||||
|
||||
if builder.type == EventTypes.Member:
|
||||
membership = builder.content.get("membership", None)
|
||||
@ -721,8 +724,6 @@ class EventCreationHandler:
|
||||
current_state_group=current_state_group,
|
||||
)
|
||||
|
||||
context = await unpersisted_context.persist(event)
|
||||
|
||||
# In an ideal world we wouldn't need the second part of this condition. However,
|
||||
# this behaviour isn't spec'd yet, meaning we should be able to deactivate this
|
||||
# behaviour. Another reason is that this code is also evaluated each time a new
|
||||
@ -739,7 +740,7 @@ class EventCreationHandler:
|
||||
assert state_map is not None
|
||||
prev_event_id = state_map.get((EventTypes.Member, event.sender))
|
||||
else:
|
||||
prev_state_ids = await context.get_prev_state_ids(
|
||||
prev_state_ids = await unpersisted_context.get_prev_state_ids(
|
||||
StateFilter.from_types([(EventTypes.Member, None)])
|
||||
)
|
||||
prev_event_id = prev_state_ids.get((EventTypes.Member, event.sender))
|
||||
@ -764,8 +765,7 @@ class EventCreationHandler:
|
||||
)
|
||||
|
||||
self.validator.validate_new(event, self.config)
|
||||
|
||||
return event, context
|
||||
return event, unpersisted_context
|
||||
|
||||
async def _is_exempt_from_privacy_policy(
|
||||
self, builder: EventBuilder, requester: Requester
|
||||
@ -1005,7 +1005,7 @@ class EventCreationHandler:
|
||||
max_retries = 5
|
||||
for i in range(max_retries):
|
||||
try:
|
||||
event, context = await self.create_event(
|
||||
event, unpersisted_context = await self.create_event(
|
||||
requester,
|
||||
event_dict,
|
||||
txn_id=txn_id,
|
||||
@ -1016,6 +1016,7 @@ class EventCreationHandler:
|
||||
historical=historical,
|
||||
depth=depth,
|
||||
)
|
||||
context = await unpersisted_context.persist(event)
|
||||
|
||||
assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % (
|
||||
event.sender,
|
||||
@ -1190,7 +1191,6 @@ class EventCreationHandler:
|
||||
if for_batch:
|
||||
assert prev_event_ids is not None
|
||||
assert state_map is not None
|
||||
assert current_state_group is not None
|
||||
auth_ids = self._event_auth_handler.compute_auth_events(builder, state_map)
|
||||
event = await builder.build(
|
||||
prev_event_ids=prev_event_ids, auth_event_ids=auth_ids, depth=depth
|
||||
@ -1303,6 +1303,8 @@ class EventCreationHandler:
|
||||
Raises:
|
||||
SynapseError if the event is invalid.
|
||||
"""
|
||||
if event.sender in self.config.meow.validation_override:
|
||||
return
|
||||
|
||||
relation = relation_from_event(event)
|
||||
if not relation:
|
||||
@ -1358,6 +1360,7 @@ class EventCreationHandler:
|
||||
ratelimit: bool = True,
|
||||
extra_users: Optional[List[UserID]] = None,
|
||||
ignore_shadow_ban: bool = False,
|
||||
dont_notify: bool = False,
|
||||
) -> EventBase:
|
||||
"""Processes new events. Please note that if batch persisting events, an error in
|
||||
handling any one of these events will result in all of the events being dropped.
|
||||
@ -1377,6 +1380,8 @@ class EventCreationHandler:
|
||||
ignore_shadow_ban: True if shadow-banned users should be allowed to
|
||||
send this event.
|
||||
|
||||
dont_notify
|
||||
|
||||
Return:
|
||||
If the event was deduplicated, the previous, duplicate, event. Otherwise,
|
||||
`event`.
|
||||
@ -1454,6 +1459,7 @@ class EventCreationHandler:
|
||||
events_and_context=events_and_context,
|
||||
ratelimit=ratelimit,
|
||||
extra_users=extra_users,
|
||||
dont_notify=dont_notify,
|
||||
),
|
||||
run_in_background(
|
||||
self.cache_joined_hosts_for_events, events_and_context
|
||||
@ -1471,6 +1477,7 @@ class EventCreationHandler:
|
||||
events_and_context: List[Tuple[EventBase, EventContext]],
|
||||
ratelimit: bool = True,
|
||||
extra_users: Optional[List[UserID]] = None,
|
||||
dont_notify: bool = False,
|
||||
) -> EventBase:
|
||||
"""Actually persists new events. Should only be called by
|
||||
`handle_new_client_event`, and see its docstring for documentation of
|
||||
@ -1500,6 +1507,7 @@ class EventCreationHandler:
|
||||
requester=requester,
|
||||
ratelimit=ratelimit,
|
||||
extra_users=extra_users,
|
||||
dont_notify=dont_notify,
|
||||
)
|
||||
except SynapseError as e:
|
||||
if e.code == HTTPStatus.CONFLICT:
|
||||
@ -1529,6 +1537,7 @@ class EventCreationHandler:
|
||||
events_and_context,
|
||||
ratelimit=ratelimit,
|
||||
extra_users=extra_users,
|
||||
dont_notify=dont_notify,
|
||||
)
|
||||
|
||||
return event
|
||||
@ -1650,6 +1659,7 @@ class EventCreationHandler:
|
||||
events_and_context: List[Tuple[EventBase, EventContext]],
|
||||
ratelimit: bool = True,
|
||||
extra_users: Optional[List[UserID]] = None,
|
||||
dont_notify: bool = False,
|
||||
) -> EventBase:
|
||||
"""Called when we have fully built the events, have already
|
||||
calculated the push actions for the events, and checked auth.
|
||||
@ -1716,7 +1726,8 @@ class EventCreationHandler:
|
||||
|
||||
await self._maybe_kick_guest_users(event, context)
|
||||
|
||||
if event.type == EventTypes.CanonicalAlias:
|
||||
validation_override = event.sender in self.config.meow.validation_override
|
||||
if event.type == EventTypes.CanonicalAlias and not validation_override:
|
||||
# Validate a newly added alias or newly added alt_aliases.
|
||||
|
||||
original_alias = None
|
||||
@ -1951,7 +1962,7 @@ class EventCreationHandler:
|
||||
pos = PersistedEventPosition(self._instance_name, stream_ordering)
|
||||
events_and_pos.append((event, pos))
|
||||
|
||||
if event.type == EventTypes.Message:
|
||||
if not dont_notify and event.type == EventTypes.Message:
|
||||
# We don't want to block sending messages on any presence code. This
|
||||
# matters as sometimes presence code can take a while.
|
||||
run_as_background_process(
|
||||
@ -1966,7 +1977,10 @@ class EventCreationHandler:
|
||||
except Exception:
|
||||
logger.exception("Error notifying about new room events")
|
||||
|
||||
run_in_background(_notify)
|
||||
if not dont_notify:
|
||||
# Skip notifying clients, this is used for Beeper's custom
|
||||
# batch sending of non-historical messages.
|
||||
run_in_background(_notify)
|
||||
|
||||
return persisted_events[-1]
|
||||
|
||||
@ -2046,7 +2060,7 @@ class EventCreationHandler:
|
||||
max_retries = 5
|
||||
for i in range(max_retries):
|
||||
try:
|
||||
event, context = await self.create_event(
|
||||
event, unpersisted_context = await self.create_event(
|
||||
requester,
|
||||
{
|
||||
"type": EventTypes.Dummy,
|
||||
@ -2055,6 +2069,7 @@ class EventCreationHandler:
|
||||
"sender": user_id,
|
||||
},
|
||||
)
|
||||
context = await unpersisted_context.persist(event)
|
||||
|
||||
event.internal_metadata.proactively_send = False
|
||||
|
||||
@ -2107,7 +2122,7 @@ class EventCreationHandler:
|
||||
builder = self.event_builder_factory.for_room_version(
|
||||
original_event.room_version, third_party_result
|
||||
)
|
||||
self.validator.validate_builder(builder)
|
||||
self.validator.validate_builder(builder, self.config)
|
||||
except SynapseError as e:
|
||||
raise Exception(
|
||||
"Third party rules module created an invalid event: " + e.msg,
|
||||
|
@ -579,7 +579,9 @@ class PaginationHandler:
|
||||
|
||||
time_now = self.clock.time_msec()
|
||||
|
||||
serialize_options = SerializeEventConfig(as_client_event=as_client_event)
|
||||
serialize_options = SerializeEventConfig(
|
||||
as_client_event=as_client_event, requester=requester
|
||||
)
|
||||
|
||||
chunk = {
|
||||
"chunk": (
|
||||
|
@ -777,7 +777,6 @@ class PresenceHandler(BasePresenceHandler):
|
||||
)
|
||||
|
||||
if self.unpersisted_users_changes:
|
||||
|
||||
await self.store.update_presence(
|
||||
[
|
||||
self.user_to_current_state[user_id]
|
||||
@ -823,7 +822,6 @@ class PresenceHandler(BasePresenceHandler):
|
||||
now = self.clock.time_msec()
|
||||
|
||||
with Measure(self.clock, "presence_update_states"):
|
||||
|
||||
# NOTE: We purposefully don't await between now and when we've
|
||||
# calculated what we want to do with the new states, to avoid races.
|
||||
|
||||
|
@ -13,9 +13,10 @@
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from synapse.util.async_helpers import Linearizer
|
||||
from synapse.types import JsonDict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
@ -31,7 +32,11 @@ class ReadMarkerHandler:
|
||||
self.read_marker_linearizer = Linearizer(name="read_marker")
|
||||
|
||||
async def received_client_read_marker(
|
||||
self, room_id: str, user_id: str, event_id: str
|
||||
self,
|
||||
room_id: str,
|
||||
user_id: str,
|
||||
event_id: str,
|
||||
extra_content: Optional[JsonDict] = None,
|
||||
) -> None:
|
||||
"""Updates the read marker for a given user in a given room if the event ID given
|
||||
is ahead in the stream relative to the current read marker.
|
||||
@ -54,7 +59,7 @@ class ReadMarkerHandler:
|
||||
)
|
||||
|
||||
if should_update:
|
||||
content = {"event_id": event_id}
|
||||
content = {"event_id": event_id, **(extra_content or {})}
|
||||
await self.account_data_handler.add_account_data_to_room(
|
||||
user_id, room_id, "m.fully_read", content
|
||||
)
|
||||
|
@ -161,6 +161,7 @@ class ReceiptsHandler:
|
||||
user_id: str,
|
||||
event_id: str,
|
||||
thread_id: Optional[str],
|
||||
extra_content: Optional[JsonDict] = None,
|
||||
) -> None:
|
||||
"""Called when a client tells us a local user has read up to the given
|
||||
event_id in the room.
|
||||
@ -171,7 +172,7 @@ class ReceiptsHandler:
|
||||
user_id=user_id,
|
||||
event_ids=[event_id],
|
||||
thread_id=thread_id,
|
||||
data={"ts": int(self.clock.time_msec())},
|
||||
data={"ts": int(self.clock.time_msec()), **(extra_content or {})},
|
||||
)
|
||||
|
||||
is_new = await self._handle_new_receipts([receipt])
|
||||
|
@ -141,22 +141,25 @@ class RegistrationHandler:
|
||||
localpart: str,
|
||||
guest_access_token: Optional[str] = None,
|
||||
assigned_user_id: Optional[str] = None,
|
||||
allow_invalid: bool = False,
|
||||
inhibit_user_in_use_error: bool = False,
|
||||
) -> None:
|
||||
if types.contains_invalid_mxid_characters(localpart):
|
||||
raise SynapseError(
|
||||
400,
|
||||
"User ID can only contain characters a-z, 0-9, or '=_-./'",
|
||||
Codes.INVALID_USERNAME,
|
||||
)
|
||||
# meow: allow admins to register invalid user ids
|
||||
if not allow_invalid:
|
||||
if types.contains_invalid_mxid_characters(localpart):
|
||||
raise SynapseError(
|
||||
400,
|
||||
"User ID can only contain characters a-z, 0-9, or '=_-./'",
|
||||
Codes.INVALID_USERNAME,
|
||||
)
|
||||
|
||||
if not localpart:
|
||||
raise SynapseError(400, "User ID cannot be empty", Codes.INVALID_USERNAME)
|
||||
if not localpart:
|
||||
raise SynapseError(400, "User ID cannot be empty", Codes.INVALID_USERNAME)
|
||||
|
||||
if localpart[0] == "_":
|
||||
raise SynapseError(
|
||||
400, "User ID may not begin with _", Codes.INVALID_USERNAME
|
||||
)
|
||||
if localpart[0] == "_":
|
||||
raise SynapseError(
|
||||
400, "User ID may not begin with _", Codes.INVALID_USERNAME
|
||||
)
|
||||
|
||||
user = UserID(localpart, self.hs.hostname)
|
||||
user_id = user.to_string()
|
||||
@ -170,14 +173,16 @@ class RegistrationHandler:
|
||||
"A different user ID has already been registered for this session",
|
||||
)
|
||||
|
||||
self.check_user_id_not_appservice_exclusive(user_id)
|
||||
# meow: allow admins to register reserved user ids and long user ids
|
||||
if not allow_invalid:
|
||||
self.check_user_id_not_appservice_exclusive(user_id)
|
||||
|
||||
if len(user_id) > MAX_USERID_LENGTH:
|
||||
raise SynapseError(
|
||||
400,
|
||||
"User ID may not be longer than %s characters" % (MAX_USERID_LENGTH,),
|
||||
Codes.INVALID_USERNAME,
|
||||
)
|
||||
if len(user_id) > MAX_USERID_LENGTH:
|
||||
raise SynapseError(
|
||||
400,
|
||||
"User ID may not be longer than %s characters" % (MAX_USERID_LENGTH,),
|
||||
Codes.INVALID_USERNAME,
|
||||
)
|
||||
|
||||
users = await self.store.get_users_by_id_case_insensitive(user_id)
|
||||
if users:
|
||||
@ -287,7 +292,12 @@ class RegistrationHandler:
|
||||
await self.auth_blocking.check_auth_blocking(threepid=threepid)
|
||||
|
||||
if localpart is not None:
|
||||
await self.check_username(localpart, guest_access_token=guest_access_token)
|
||||
allow_invalid = by_admin and self.hs.config.meow.admin_api_register_invalid
|
||||
await self.check_username(
|
||||
localpart,
|
||||
guest_access_token=guest_access_token,
|
||||
allow_invalid=allow_invalid,
|
||||
)
|
||||
|
||||
was_guest = guest_access_token is not None
|
||||
|
||||
@ -476,7 +486,7 @@ class RegistrationHandler:
|
||||
# create room expects the localpart of the room alias
|
||||
config["room_alias_name"] = room_alias.localpart
|
||||
|
||||
info, _ = await room_creation_handler.create_room(
|
||||
room_id, _, _ = await room_creation_handler.create_room(
|
||||
fake_requester,
|
||||
config=config,
|
||||
ratelimit=False,
|
||||
@ -490,7 +500,7 @@ class RegistrationHandler:
|
||||
user_id, authenticated_entity=self._server_name
|
||||
),
|
||||
target=UserID.from_string(user_id),
|
||||
room_id=info["room_id"],
|
||||
room_id=room_id,
|
||||
# Since it was just created, there are no remote hosts.
|
||||
remote_room_hosts=[],
|
||||
action="join",
|
||||
|
@ -20,6 +20,7 @@ import attr
|
||||
from synapse.api.constants import Direction, EventTypes, RelationTypes
|
||||
from synapse.api.errors import SynapseError
|
||||
from synapse.events import EventBase, relation_from_event
|
||||
from synapse.events.utils import SerializeEventConfig
|
||||
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
||||
from synapse.logging.opentracing import trace
|
||||
from synapse.storage.databases.main.relations import ThreadsNextBatch, _RelatedEvent
|
||||
@ -60,13 +61,12 @@ class BundledAggregations:
|
||||
Some values require additional processing during serialization.
|
||||
"""
|
||||
|
||||
annotations: Optional[JsonDict] = None
|
||||
references: Optional[JsonDict] = None
|
||||
replace: Optional[EventBase] = None
|
||||
thread: Optional[_ThreadAggregation] = None
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return bool(self.annotations or self.references or self.replace or self.thread)
|
||||
return bool(self.references or self.replace or self.thread)
|
||||
|
||||
|
||||
class RelationsHandler:
|
||||
@ -152,16 +152,23 @@ class RelationsHandler:
|
||||
)
|
||||
|
||||
now = self._clock.time_msec()
|
||||
serialize_options = SerializeEventConfig(requester=requester)
|
||||
return_value: JsonDict = {
|
||||
"chunk": self._event_serializer.serialize_events(
|
||||
events, now, bundle_aggregations=aggregations
|
||||
events,
|
||||
now,
|
||||
bundle_aggregations=aggregations,
|
||||
config=serialize_options,
|
||||
),
|
||||
}
|
||||
if include_original_event:
|
||||
# Do not bundle aggregations when retrieving the original event because
|
||||
# we want the content before relations are applied to it.
|
||||
return_value["original_event"] = self._event_serializer.serialize_event(
|
||||
event, now, bundle_aggregations=None
|
||||
event,
|
||||
now,
|
||||
bundle_aggregations=None,
|
||||
config=serialize_options,
|
||||
)
|
||||
|
||||
if next_token:
|
||||
@ -227,67 +234,6 @@ class RelationsHandler:
|
||||
e.msg,
|
||||
)
|
||||
|
||||
async def get_annotations_for_events(
|
||||
self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset()
|
||||
) -> Dict[str, List[JsonDict]]:
|
||||
"""Get a list of annotations to the given events, grouped by event type and
|
||||
aggregation key, sorted by count.
|
||||
|
||||
This is used e.g. to get the what and how many reactions have happened
|
||||
on an event.
|
||||
|
||||
Args:
|
||||
event_ids: Fetch events that relate to these event IDs.
|
||||
ignored_users: The users ignored by the requesting user.
|
||||
|
||||
Returns:
|
||||
A map of event IDs to a list of groups of annotations that match.
|
||||
Each entry is a dict with `type`, `key` and `count` fields.
|
||||
"""
|
||||
# Get the base results for all users.
|
||||
full_results = await self._main_store.get_aggregation_groups_for_events(
|
||||
event_ids
|
||||
)
|
||||
|
||||
# Avoid additional logic if there are no ignored users.
|
||||
if not ignored_users:
|
||||
return {
|
||||
event_id: results
|
||||
for event_id, results in full_results.items()
|
||||
if results
|
||||
}
|
||||
|
||||
# Then subtract off the results for any ignored users.
|
||||
ignored_results = await self._main_store.get_aggregation_groups_for_users(
|
||||
[event_id for event_id, results in full_results.items() if results],
|
||||
ignored_users,
|
||||
)
|
||||
|
||||
filtered_results = {}
|
||||
for event_id, results in full_results.items():
|
||||
# If no annotations, skip.
|
||||
if not results:
|
||||
continue
|
||||
|
||||
# If there are not ignored results for this event, copy verbatim.
|
||||
if event_id not in ignored_results:
|
||||
filtered_results[event_id] = results
|
||||
continue
|
||||
|
||||
# Otherwise, subtract out the ignored results.
|
||||
event_ignored_results = ignored_results[event_id]
|
||||
for result in results:
|
||||
key = (result["type"], result["key"])
|
||||
if key in event_ignored_results:
|
||||
# Ensure to not modify the cache.
|
||||
result = result.copy()
|
||||
result["count"] -= event_ignored_results[key]
|
||||
if result["count"] <= 0:
|
||||
continue
|
||||
filtered_results.setdefault(event_id, []).append(result)
|
||||
|
||||
return filtered_results
|
||||
|
||||
async def get_references_for_events(
|
||||
self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset()
|
||||
) -> Dict[str, List[_RelatedEvent]]:
|
||||
@ -531,17 +477,6 @@ class RelationsHandler:
|
||||
# (as that is what makes it part of the thread).
|
||||
relations_by_id[latest_thread_event.event_id] = RelationTypes.THREAD
|
||||
|
||||
async def _fetch_annotations() -> None:
|
||||
"""Fetch any annotations (ie, reactions) to bundle with this event."""
|
||||
annotations_by_event_id = await self.get_annotations_for_events(
|
||||
events_by_id.keys(), ignored_users=ignored_users
|
||||
)
|
||||
for event_id, annotations in annotations_by_event_id.items():
|
||||
if annotations:
|
||||
results.setdefault(event_id, BundledAggregations()).annotations = {
|
||||
"chunk": annotations
|
||||
}
|
||||
|
||||
async def _fetch_references() -> None:
|
||||
"""Fetch any references to bundle with this event."""
|
||||
references_by_event_id = await self.get_references_for_events(
|
||||
@ -575,7 +510,6 @@ class RelationsHandler:
|
||||
await make_deferred_yieldable(
|
||||
gather_results(
|
||||
(
|
||||
run_in_background(_fetch_annotations),
|
||||
run_in_background(_fetch_references),
|
||||
run_in_background(_fetch_edits),
|
||||
)
|
||||
|
@ -51,6 +51,7 @@ from synapse.api.filtering import Filter
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
|
||||
from synapse.event_auth import validate_event_for_room_version
|
||||
from synapse.events import EventBase
|
||||
from synapse.events.snapshot import UnpersistedEventContext
|
||||
from synapse.events.utils import copy_and_fixup_power_levels_contents
|
||||
from synapse.handlers.relations import BundledAggregations
|
||||
from synapse.module_api import NOT_SPAM
|
||||
@ -211,7 +212,7 @@ class RoomCreationHandler:
|
||||
# the required power level to send the tombstone event.
|
||||
(
|
||||
tombstone_event,
|
||||
tombstone_context,
|
||||
tombstone_unpersisted_context,
|
||||
) = await self.event_creation_handler.create_event(
|
||||
requester,
|
||||
{
|
||||
@ -225,6 +226,9 @@ class RoomCreationHandler:
|
||||
},
|
||||
},
|
||||
)
|
||||
tombstone_context = await tombstone_unpersisted_context.persist(
|
||||
tombstone_event
|
||||
)
|
||||
validate_event_for_room_version(tombstone_event)
|
||||
await self._event_auth_handler.check_auth_rules_from_context(
|
||||
tombstone_event
|
||||
@ -690,13 +694,14 @@ class RoomCreationHandler:
|
||||
config: JsonDict,
|
||||
ratelimit: bool = True,
|
||||
creator_join_profile: Optional[JsonDict] = None,
|
||||
) -> Tuple[dict, int]:
|
||||
) -> Tuple[str, Optional[RoomAlias], int]:
|
||||
"""Creates a new room.
|
||||
|
||||
Args:
|
||||
requester:
|
||||
The user who requested the room creation.
|
||||
config : A dict of configuration options.
|
||||
requester: The user who requested the room creation.
|
||||
config: A dict of configuration options. This will be the body of
|
||||
a /createRoom request; see
|
||||
https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3createroom
|
||||
ratelimit: set to False to disable the rate limiter
|
||||
|
||||
creator_join_profile:
|
||||
@ -707,14 +712,17 @@ class RoomCreationHandler:
|
||||
`avatar_url` and/or `displayname`.
|
||||
|
||||
Returns:
|
||||
First, a dict containing the keys `room_id` and, if an alias
|
||||
was, requested, `room_alias`. Secondly, the stream_id of the
|
||||
last persisted event.
|
||||
A 3-tuple containing:
|
||||
- the room ID;
|
||||
- if requested, the room alias, otherwise None; and
|
||||
- the `stream_id` of the last persisted event.
|
||||
Raises:
|
||||
SynapseError if the room ID couldn't be stored, 3pid invitation config
|
||||
validation failed, or something went horribly wrong.
|
||||
ResourceLimitError if server is blocked to some resource being
|
||||
exceeded
|
||||
SynapseError:
|
||||
if the room ID couldn't be stored, 3pid invitation config
|
||||
validation failed, or something went horribly wrong.
|
||||
ResourceLimitError:
|
||||
if server is blocked to some resource being
|
||||
exceeded
|
||||
"""
|
||||
user_id = requester.user.to_string()
|
||||
|
||||
@@ -857,16 +865,30 @@ class RoomCreationHandler:
visibility = config.get("visibility", "private")
is_public = visibility == "public"

room_id = await self._generate_and_create_room_id(
creator_id=user_id,
is_public=is_public,
room_version=room_version,
)
if "room_id" in config:
room_id = config["room_id"]
try:
await self.store.store_room(
room_id=room_id,
room_creator_user_id=user_id,
is_public=is_public,
room_version=room_version,
)
except StoreError:
raise SynapseError(409, "Room ID already in use", errcode="M_CONFLICT")
else:
room_id = await self._generate_and_create_room_id(
creator_id=user_id,
is_public=is_public,
room_version=room_version,
)

# Check whether this visibility value is blocked by a third party module
allowed_by_third_party_rules = await (
self.third_party_event_rules.check_visibility_can_be_modified(
room_id, visibility
allowed_by_third_party_rules = (
await (
self.third_party_event_rules.check_visibility_can_be_modified(
room_id, visibility
)
)
)
if not allowed_by_third_party_rules:
@@ -1024,11 +1046,6 @@ class RoomCreationHandler:
last_sent_event_id = member_event_id
depth += 1

result = {"room_id": room_id}

if room_alias:
result["room_alias"] = room_alias.to_string()

# Always wait for room creation to propagate before returning
await self._replication.wait_for_stream_position(
self.hs.config.worker.events_shard_config.get_instance(room_id),
@@ -1036,7 +1053,7 @@ class RoomCreationHandler:
last_stream_id,
)

return result, last_stream_id
return room_id, room_alias, last_stream_id

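The fork-specific `room_id` override above can be exercised from a caller roughly like the sketch below; the room ID shown is made up and the error handling mirrors the 409 raised in the hunk above.

# Illustrative only: requesting a fixed room ID via the config body.
from synapse.api.errors import SynapseError


async def create_with_fixed_id(hs, requester):
    try:
        room_id, _alias, _stream_id = await hs.get_room_creation_handler().create_room(
            requester,
            config={"room_id": "!fixedroom:example.com", "preset": "private_chat"},
        )
        return room_id
    except SynapseError as e:
        # Raised with HTTP 409 / M_CONFLICT when the chosen room ID is taken.
        assert e.code == 409
        raise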
async def _send_events_for_new_room(
|
||||
self,
|
||||
@ -1091,7 +1108,7 @@ class RoomCreationHandler:
|
||||
content: JsonDict,
|
||||
for_batch: bool,
|
||||
**kwargs: Any,
|
||||
) -> Tuple[EventBase, synapse.events.snapshot.EventContext]:
|
||||
) -> Tuple[EventBase, synapse.events.snapshot.UnpersistedEventContextBase]:
|
||||
"""
|
||||
Creates an event and associated event context.
|
||||
Args:
|
||||
@ -1110,20 +1127,23 @@ class RoomCreationHandler:
|
||||
|
||||
event_dict = create_event_dict(etype, content, **kwargs)
|
||||
|
||||
new_event, new_context = await self.event_creation_handler.create_event(
|
||||
(
|
||||
new_event,
|
||||
new_unpersisted_context,
|
||||
) = await self.event_creation_handler.create_event(
|
||||
creator,
|
||||
event_dict,
|
||||
prev_event_ids=prev_event,
|
||||
depth=depth,
|
||||
state_map=state_map,
|
||||
for_batch=for_batch,
|
||||
current_state_group=current_state_group,
|
||||
)
|
||||
|
||||
depth += 1
|
||||
prev_event = [new_event.event_id]
|
||||
state_map[(new_event.type, new_event.state_key)] = new_event.event_id
|
||||
|
||||
return new_event, new_context
|
||||
return new_event, new_unpersisted_context
|
||||
|
||||
try:
|
||||
config = self._presets_dict[preset_config]
|
||||
@ -1133,10 +1153,10 @@ class RoomCreationHandler:
|
||||
)
|
||||
|
||||
creation_content.update({"creator": creator_id})
|
||||
creation_event, creation_context = await create_event(
|
||||
creation_event, unpersisted_creation_context = await create_event(
|
||||
EventTypes.Create, creation_content, False
|
||||
)
|
||||
|
||||
creation_context = await unpersisted_creation_context.persist(creation_event)
|
||||
logger.debug("Sending %s in new room", EventTypes.Member)
|
||||
ev = await self.event_creation_handler.handle_new_client_event(
|
||||
requester=creator,
|
||||
@ -1180,11 +1200,10 @@ class RoomCreationHandler:
|
||||
power_event, power_context = await create_event(
|
||||
EventTypes.PowerLevels, pl_content, True
|
||||
)
|
||||
current_state_group = power_context._state_group
|
||||
events_to_send.append((power_event, power_context))
|
||||
else:
|
||||
power_level_content: JsonDict = {
|
||||
"users": {creator_id: 100},
|
||||
"users": {creator_id: 9001},
|
||||
"users_default": 0,
|
||||
"events": {
|
||||
EventTypes.Name: 50,
|
||||
@ -1229,14 +1248,12 @@ class RoomCreationHandler:
|
||||
power_level_content,
|
||||
True,
|
||||
)
|
||||
current_state_group = pl_context._state_group
|
||||
events_to_send.append((pl_event, pl_context))
|
||||
|
||||
if room_alias and (EventTypes.CanonicalAlias, "") not in initial_state:
|
||||
room_alias_event, room_alias_context = await create_event(
|
||||
EventTypes.CanonicalAlias, {"alias": room_alias.to_string()}, True
|
||||
)
|
||||
current_state_group = room_alias_context._state_group
|
||||
events_to_send.append((room_alias_event, room_alias_context))
|
||||
|
||||
if (EventTypes.JoinRules, "") not in initial_state:
|
||||
@ -1245,7 +1262,6 @@ class RoomCreationHandler:
|
||||
{"join_rule": config["join_rules"]},
|
||||
True,
|
||||
)
|
||||
current_state_group = join_rules_context._state_group
|
||||
events_to_send.append((join_rules_event, join_rules_context))
|
||||
|
||||
if (EventTypes.RoomHistoryVisibility, "") not in initial_state:
|
||||
@ -1254,7 +1270,6 @@ class RoomCreationHandler:
|
||||
{"history_visibility": config["history_visibility"]},
|
||||
True,
|
||||
)
|
||||
current_state_group = visibility_context._state_group
|
||||
events_to_send.append((visibility_event, visibility_context))
|
||||
|
||||
if config["guest_can_join"]:
|
||||
@ -1264,14 +1279,12 @@ class RoomCreationHandler:
|
||||
{EventContentFields.GUEST_ACCESS: GuestAccess.CAN_JOIN},
|
||||
True,
|
||||
)
|
||||
current_state_group = guest_access_context._state_group
|
||||
events_to_send.append((guest_access_event, guest_access_context))
|
||||
|
||||
for (etype, state_key), content in initial_state.items():
|
||||
event, context = await create_event(
|
||||
etype, content, True, state_key=state_key
|
||||
)
|
||||
current_state_group = context._state_group
|
||||
events_to_send.append((event, context))
|
||||
|
||||
if config["encrypted"]:
|
||||
@ -1283,9 +1296,16 @@ class RoomCreationHandler:
|
||||
)
|
||||
events_to_send.append((encryption_event, encryption_context))
|
||||
|
||||
datastore = self.hs.get_datastores().state
|
||||
events_and_context = (
|
||||
await UnpersistedEventContext.batch_persist_unpersisted_contexts(
|
||||
events_to_send, room_id, current_state_group, datastore
|
||||
)
|
||||
)
|
||||
|
||||
last_event = await self.event_creation_handler.handle_new_client_event(
|
||||
creator,
|
||||
events_to_send,
|
||||
events_and_context,
|
||||
ignore_shadow_ban=True,
|
||||
ratelimit=False,
|
||||
)
|
||||
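Condensed, the batching pattern introduced in the hunks above looks roughly like the sketch below; names follow the diff, while the event payload and the surrounding variables are placeholders, and current_state_group is carried over from the power-levels context as in the diff.

from synapse.api.constants import EventTypes
from synapse.events.snapshot import UnpersistedEventContext


async def send_room_state_batch(hs, creator, create_event, room_id, current_state_group):
    # Collect (event, unpersisted context) pairs from the local create_event
    # helper, persist the contexts in one go, then send them as a single batch.
    events_to_send = []

    event, unpersisted_context = await create_event(
        EventTypes.JoinRules, {"join_rule": "invite"}, True
    )
    events_to_send.append((event, unpersisted_context))

    datastore = hs.get_datastores().state
    events_and_context = (
        await UnpersistedEventContext.batch_persist_unpersisted_contexts(
            events_to_send, room_id, current_state_group, datastore
        )
    )
    return await hs.get_event_creation_handler().handle_new_client_event(
        creator,
        events_and_context,
        ignore_shadow_ban=True,
        ratelimit=False,
    )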
@@ -1825,7 +1845,7 @@ class RoomShutdownHandler:
new_room_user_id, authenticated_entity=requester_user_id
)

info, stream_id = await self._room_creation_handler.create_room(
new_room_id, _, stream_id = await self._room_creation_handler.create_room(
room_creator_requester,
config={
"preset": RoomCreationPreset.PUBLIC_CHAT,
@@ -1834,7 +1854,6 @@ class RoomShutdownHandler:
},
ratelimit=False,
)
new_room_id = info["room_id"]

logger.info(
"Shutting down room %r, joining to new room: %r", room_id, new_room_id
@@ -1887,6 +1906,7 @@ class RoomShutdownHandler:

# Join users to new room
if new_room_user_id:
assert new_room_id is not None
await self.room_member_handler.update_membership(
requester=target_requester,
target=target_requester.user,
@@ -1919,6 +1939,7 @@ class RoomShutdownHandler:

aliases_for_room = await self.store.get_aliases_for_room(room_id)

assert new_room_id is not None
await self.store.update_aliases_for_room(
room_id, new_room_id, requester_user_id
)

@ -21,6 +21,7 @@ class RoomBatchHandler:
|
||||
self.event_creation_handler = hs.get_event_creation_handler()
|
||||
self.room_member_handler = hs.get_room_member_handler()
|
||||
self.auth = hs.get_auth()
|
||||
self.allow_send_any = self.hs.config.meow.appservice_batch_send_any
|
||||
|
||||
async def inherit_depth_from_prev_ids(self, prev_event_ids: List[str]) -> int:
|
||||
"""Finds the depth which would sort it after the most-recent
|
||||
@ -118,7 +119,9 @@ class RoomBatchHandler:
|
||||
Requester object
|
||||
"""
|
||||
|
||||
await self.auth.validate_appservice_can_control_user_id(app_service, user_id)
|
||||
await self.auth.validate_appservice_can_control_user_id(
|
||||
app_service, user_id, allow_any=self.allow_send_any
|
||||
)
|
||||
|
||||
return create_requester(user_id, app_service=app_service)
|
||||
|
||||
@@ -271,6 +274,8 @@ class RoomBatchHandler:
inherited_depth: int,
initial_state_event_ids: List[str],
app_service_requester: Requester,
beeper_new_messages: bool,
beeper_initial_prev_event_ids: List[str] = None,
) -> List[str]:
"""Create and persists all events provided sequentially. Handles the
complexity of creating events in chronological order so they can
@@ -290,21 +295,24 @@ class RoomBatchHandler:
the start of the historical batch since it's floating with no
prev_events to derive state from automatically.
app_service_requester: The requester of an application service.
beeper_new_messages: Is this a batch of new events rather than history?
beeper_initial_prev_event_ids: prev_event_ids for the first event to send.

Returns:
List of persisted event IDs
"""
assert app_service_requester.app_service

# We expect the first event in a historical batch to be an insertion event
assert events_to_create[0]["type"] == EventTypes.MSC2716_INSERTION
# We expect the last event in a historical batch to be an batch event
assert events_to_create[-1]["type"] == EventTypes.MSC2716_BATCH
if not beeper_new_messages:
# We expect the first event in a historical batch to be an insertion event
assert events_to_create[0]["type"] == EventTypes.MSC2716_INSERTION
# We expect the last event in a historical batch to be an batch event
assert events_to_create[-1]["type"] == EventTypes.MSC2716_BATCH

# Make the historical event chain float off on its own by specifying no
# prev_events for the first event in the chain which causes the HS to
# ask for the state at the start of the batch later.
prev_event_ids: List[str] = []
prev_event_ids: List[str] = beeper_initial_prev_event_ids or []

event_ids = []
events_to_persist = []
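To make the two modes easier to compare, here is an illustrative helper (not part of the diff) that summarises how the first event of a batch is created in each mode, following the branches above and the allow_no_prev_events / historical flags used further down:

from typing import List, Optional


def first_event_create_kwargs(
    beeper_new_messages: bool,
    beeper_initial_prev_event_ids: Optional[List[str]],
) -> dict:
    """Illustrative summary only, not production code."""
    if beeper_new_messages:
        # New messages hang off the caller-supplied prev events and are sent
        # forwards like ordinary client events.
        return {
            "prev_event_ids": beeper_initial_prev_event_ids or [],
            "allow_no_prev_events": False,
            "historical": False,
        }
    # Historical batches float with no prev_events; state comes from the
    # initial_state_event_ids instead, and events are marked historical.
    return {
        "prev_event_ids": [],
        "allow_no_prev_events": True,
        "historical": True,
    }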
@ -327,7 +335,7 @@ class RoomBatchHandler:
|
||||
# Mark all events as historical
|
||||
event_dict["content"][EventContentFields.MSC2716_HISTORICAL] = True
|
||||
|
||||
event, context = await self.event_creation_handler.create_event(
|
||||
event, unpersisted_context = await self.event_creation_handler.create_event(
|
||||
await self.create_requester_for_user_id_from_app_service(
|
||||
ev["sender"], app_service_requester.app_service
|
||||
),
|
||||
@ -335,17 +343,17 @@ class RoomBatchHandler:
|
||||
# Only the first event (which is the insertion event) in the
|
||||
# chain should be floating. The rest should hang off each other
|
||||
# in a chain.
|
||||
allow_no_prev_events=index == 0,
|
||||
allow_no_prev_events=index == 0 and not beeper_new_messages,
|
||||
prev_event_ids=event_dict.get("prev_events"),
|
||||
# Since the first event (which is the insertion event) in the
|
||||
# chain is floating with no `prev_events`, it can't derive state
|
||||
# from anywhere automatically. So we need to set some state
|
||||
# explicitly.
|
||||
state_event_ids=initial_state_event_ids if index == 0 else None,
|
||||
historical=True,
|
||||
historical=not beeper_new_messages,
|
||||
depth=inherited_depth,
|
||||
)
|
||||
|
||||
context = await unpersisted_context.persist(event)
|
||||
assert context._state_group
|
||||
|
||||
# Normally this is done when persisting the event but we have to
|
||||
@ -370,11 +378,23 @@ class RoomBatchHandler:
|
||||
event_ids.append(event_id)
|
||||
prev_event_ids = [event_id]
|
||||
|
||||
if beeper_new_messages:
|
||||
for index, (event, context) in enumerate(events_to_persist):
|
||||
await self.event_creation_handler.handle_new_client_event(
|
||||
await self.create_requester_for_user_id_from_app_service(
|
||||
event.sender, app_service_requester.app_service
|
||||
),
|
||||
event=event,
|
||||
context=context,
|
||||
dont_notify=index < len(events_to_persist) - 1,
|
||||
)
|
||||
return event_ids
|
||||
|
||||
# Persist events in reverse-chronological order so they have the
|
||||
# correct stream_ordering as they are backfilled (which decrements).
|
||||
# Events are sorted by (topological_ordering, stream_ordering)
|
||||
# where topological_ordering is just depth.
|
||||
for (event, context) in reversed(events_to_persist):
|
||||
for event, context in reversed(events_to_persist):
|
||||
# This call can't raise `PartialStateConflictError` since we forbid
|
||||
# use of the historical batch API during partial state
|
||||
await self.event_creation_handler.handle_new_client_event(
|
||||
@ -394,6 +414,8 @@ class RoomBatchHandler:
|
||||
inherited_depth: int,
|
||||
initial_state_event_ids: List[str],
|
||||
app_service_requester: Requester,
|
||||
beeper_new_messages: bool,
|
||||
beeper_initial_prev_event_ids: List[str] = None,
|
||||
) -> Tuple[List[str], str]:
|
||||
"""
|
||||
Handles creating and persisting all of the historical events as well as
|
||||
@ -415,6 +437,8 @@ class RoomBatchHandler:
|
||||
`/batch_send?prev_event_id=$abc` plus the outcome of
|
||||
`persist_state_events_at_start`
|
||||
app_service_requester: The requester of an application service.
|
||||
beeper_new_messages: Is this a batch of new events rather than history?
|
||||
beeper_initial_prev_event_ids: prev_event_ids for the first event to send.
|
||||
|
||||
Returns:
|
||||
Tuple containing a list of created events and the next_batch_id
|
||||
@ -435,8 +459,9 @@ class RoomBatchHandler:
|
||||
# the last event we're inserting
|
||||
"origin_server_ts": last_event_in_batch["origin_server_ts"],
|
||||
}
|
||||
# Add the batch event to the end of the batch (newest-in-time)
|
||||
events_to_create.append(batch_event)
|
||||
if not beeper_new_messages:
|
||||
# Add the batch event to the end of the batch (newest-in-time)
|
||||
events_to_create.append(batch_event)
|
||||
|
||||
# Add an "insertion" event to the start of each batch (next to the oldest-in-time
|
||||
# event in the batch) so the next batch can be connected to this one.
|
||||
@ -451,8 +476,9 @@ class RoomBatchHandler:
|
||||
next_batch_id = insertion_event["content"][
|
||||
EventContentFields.MSC2716_NEXT_BATCH_ID
|
||||
]
|
||||
# Prepend the insertion event to the start of the batch (oldest-in-time)
|
||||
events_to_create = [insertion_event] + events_to_create
|
||||
if not beeper_new_messages:
|
||||
# Prepend the insertion event to the start of the batch (oldest-in-time)
|
||||
events_to_create = [insertion_event] + events_to_create
|
||||
|
||||
# Create and persist all of the historical events
|
||||
event_ids = await self.persist_historical_events(
|
||||
@ -461,6 +487,8 @@ class RoomBatchHandler:
|
||||
inherited_depth=inherited_depth,
|
||||
initial_state_event_ids=initial_state_event_ids,
|
||||
app_service_requester=app_service_requester,
|
||||
beeper_new_messages=beeper_new_messages,
|
||||
beeper_initial_prev_event_ids=beeper_initial_prev_event_ids,
|
||||
)
|
||||
|
||||
return event_ids, next_batch_id
|
||||
|
@ -207,6 +207,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
|
||||
@abc.abstractmethod
|
||||
async def remote_knock(
|
||||
self,
|
||||
requester: Requester,
|
||||
remote_room_hosts: List[str],
|
||||
room_id: str,
|
||||
user: UserID,
|
||||
@ -414,7 +415,10 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
|
||||
max_retries = 5
|
||||
for i in range(max_retries):
|
||||
try:
|
||||
event, context = await self.event_creation_handler.create_event(
|
||||
(
|
||||
event,
|
||||
unpersisted_context,
|
||||
) = await self.event_creation_handler.create_event(
|
||||
requester,
|
||||
{
|
||||
"type": EventTypes.Member,
|
||||
@ -435,7 +439,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
|
||||
outlier=outlier,
|
||||
historical=historical,
|
||||
)
|
||||
|
||||
context = await unpersisted_context.persist(event)
|
||||
prev_state_ids = await context.get_prev_state_ids(
|
||||
StateFilter.from_types([(EventTypes.Member, None)])
|
||||
)
|
||||
@ -723,26 +727,6 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
|
||||
content.pop("displayname", None)
|
||||
content.pop("avatar_url", None)
|
||||
|
||||
if len(content.get("displayname") or "") > MAX_DISPLAYNAME_LEN:
|
||||
raise SynapseError(
|
||||
400,
|
||||
f"Displayname is too long (max {MAX_DISPLAYNAME_LEN})",
|
||||
errcode=Codes.BAD_JSON,
|
||||
)
|
||||
|
||||
if len(content.get("avatar_url") or "") > MAX_AVATAR_URL_LEN:
|
||||
raise SynapseError(
|
||||
400,
|
||||
f"Avatar URL is too long (max {MAX_AVATAR_URL_LEN})",
|
||||
errcode=Codes.BAD_JSON,
|
||||
)
|
||||
|
||||
if "avatar_url" in content and content.get("avatar_url") is not None:
|
||||
if not await self.profile_handler.check_avatar_size_and_mime_type(
|
||||
content["avatar_url"],
|
||||
):
|
||||
raise SynapseError(403, "This avatar is not allowed", Codes.FORBIDDEN)
|
||||
|
||||
# The event content should *not* include the authorising user as
|
||||
# it won't be properly signed. Strip it out since it might come
|
||||
# back from a client updating a display name / avatar.
|
||||
@ -1070,7 +1054,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
|
||||
)
|
||||
|
||||
return await self.remote_knock(
|
||||
remote_room_hosts, room_id, target, content
|
||||
requester, remote_room_hosts, room_id, target, content
|
||||
)
|
||||
|
||||
return await self._local_membership_update(
|
||||
@ -1944,7 +1928,10 @@ class RoomMemberMasterHandler(RoomMemberHandler):
|
||||
max_retries = 5
|
||||
for i in range(max_retries):
|
||||
try:
|
||||
event, context = await self.event_creation_handler.create_event(
|
||||
(
|
||||
event,
|
||||
unpersisted_context,
|
||||
) = await self.event_creation_handler.create_event(
|
||||
requester,
|
||||
event_dict,
|
||||
txn_id=txn_id,
|
||||
@ -1952,6 +1939,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
|
||||
auth_event_ids=auth_event_ids,
|
||||
outlier=True,
|
||||
)
|
||||
context = await unpersisted_context.persist(event)
|
||||
event.internal_metadata.out_of_band_membership = True
|
||||
|
||||
result_event = (
|
||||
@ -1977,6 +1965,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
|
||||
|
||||
async def remote_knock(
|
||||
self,
|
||||
requester: Requester,
|
||||
remote_room_hosts: List[str],
|
||||
room_id: str,
|
||||
user: UserID,
|
||||
|
@ -113,6 +113,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
|
||||
|
||||
async def remote_knock(
|
||||
self,
|
||||
requester: Requester,
|
||||
remote_room_hosts: List[str],
|
||||
room_id: str,
|
||||
user: UserID,
|
||||
@ -123,9 +124,10 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
|
||||
Implements RoomMemberHandler.remote_knock
|
||||
"""
|
||||
ret = await self._remote_knock_client(
|
||||
requester=requester,
|
||||
remote_room_hosts=remote_room_hosts,
|
||||
room_id=room_id,
|
||||
user=user,
|
||||
user_id=user.to_string(),
|
||||
content=content,
|
||||
)
|
||||
return ret["event_id"], ret["stream_id"]
|
||||
|
@@ -23,7 +23,8 @@ from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import NotFoundError, SynapseError
from synapse.api.filtering import Filter
from synapse.events import EventBase
from synapse.types import JsonDict, StrCollection, StreamKeyType, UserID
from synapse.events.utils import SerializeEventConfig
from synapse.types import JsonDict, Requester, StrCollection, StreamKeyType, UserID
from synapse.types.state import StateFilter
from synapse.visibility import filter_events_for_client

@ -109,12 +110,12 @@ class SearchHandler:
|
||||
return historical_room_ids
|
||||
|
||||
async def search(
|
||||
self, user: UserID, content: JsonDict, batch: Optional[str] = None
|
||||
self, requester: Requester, content: JsonDict, batch: Optional[str] = None
|
||||
) -> JsonDict:
|
||||
"""Performs a full text search for a user.
|
||||
|
||||
Args:
|
||||
user: The user performing the search.
|
||||
requester: The user performing the search.
|
||||
content: Search parameters
|
||||
batch: The next_batch parameter. Used for pagination.
|
||||
|
||||
@ -199,7 +200,7 @@ class SearchHandler:
|
||||
)
|
||||
|
||||
return await self._search(
|
||||
user,
|
||||
requester,
|
||||
batch_group,
|
||||
batch_group_key,
|
||||
batch_token,
|
||||
@ -217,7 +218,7 @@ class SearchHandler:
|
||||
|
||||
async def _search(
|
||||
self,
|
||||
user: UserID,
|
||||
requester: Requester,
|
||||
batch_group: Optional[str],
|
||||
batch_group_key: Optional[str],
|
||||
batch_token: Optional[str],
|
||||
@ -235,7 +236,7 @@ class SearchHandler:
|
||||
"""Performs a full text search for a user.
|
||||
|
||||
Args:
|
||||
user: The user performing the search.
|
||||
requester: The user performing the search.
|
||||
batch_group: Pagination information.
|
||||
batch_group_key: Pagination information.
|
||||
batch_token: Pagination information.
|
||||
@ -269,7 +270,7 @@ class SearchHandler:
|
||||
|
||||
# TODO: Search through left rooms too
|
||||
rooms = await self.store.get_rooms_for_local_user_where_membership_is(
|
||||
user.to_string(),
|
||||
requester.user.to_string(),
|
||||
membership_list=[Membership.JOIN],
|
||||
# membership_list=[Membership.JOIN, Membership.LEAVE, Membership.Ban],
|
||||
)
|
||||
@ -303,13 +304,13 @@ class SearchHandler:
|
||||
|
||||
if order_by == "rank":
|
||||
search_result, sender_group = await self._search_by_rank(
|
||||
user, room_ids, search_term, keys, search_filter
|
||||
requester.user, room_ids, search_term, keys, search_filter
|
||||
)
|
||||
# Unused return values for rank search.
|
||||
global_next_batch = None
|
||||
elif order_by == "recent":
|
||||
search_result, global_next_batch = await self._search_by_recent(
|
||||
user,
|
||||
requester.user,
|
||||
room_ids,
|
||||
search_term,
|
||||
keys,
|
||||
@ -334,7 +335,7 @@ class SearchHandler:
|
||||
assert after_limit is not None
|
||||
|
||||
contexts = await self._calculate_event_contexts(
|
||||
user,
|
||||
requester.user,
|
||||
search_result.allowed_events,
|
||||
before_limit,
|
||||
after_limit,
|
||||
@ -363,27 +364,37 @@ class SearchHandler:
|
||||
# The returned events.
|
||||
search_result.allowed_events,
|
||||
),
|
||||
user.to_string(),
|
||||
requester.user.to_string(),
|
||||
)
|
||||
|
||||
# We're now about to serialize the events. We should not make any
|
||||
# blocking calls after this. Otherwise, the 'age' will be wrong.
|
||||
|
||||
time_now = self.clock.time_msec()
|
||||
serialize_options = SerializeEventConfig(requester=requester)
|
||||
|
||||
for context in contexts.values():
|
||||
context["events_before"] = self._event_serializer.serialize_events(
|
||||
context["events_before"], time_now, bundle_aggregations=aggregations
|
||||
context["events_before"],
|
||||
time_now,
|
||||
bundle_aggregations=aggregations,
|
||||
config=serialize_options,
|
||||
)
|
||||
context["events_after"] = self._event_serializer.serialize_events(
|
||||
context["events_after"], time_now, bundle_aggregations=aggregations
|
||||
context["events_after"],
|
||||
time_now,
|
||||
bundle_aggregations=aggregations,
|
||||
config=serialize_options,
|
||||
)
|
||||
|
||||
results = [
|
||||
{
|
||||
"rank": search_result.rank_map[e.event_id],
|
||||
"result": self._event_serializer.serialize_event(
|
||||
e, time_now, bundle_aggregations=aggregations
|
||||
e,
|
||||
time_now,
|
||||
bundle_aggregations=aggregations,
|
||||
config=serialize_options,
|
||||
),
|
||||
"context": contexts.get(e.event_id, {}),
|
||||
}
|
||||
@ -398,7 +409,9 @@ class SearchHandler:
|
||||
|
||||
if state_results:
|
||||
rooms_cat_res["state"] = {
|
||||
room_id: self._event_serializer.serialize_events(state_events, time_now)
|
||||
room_id: self._event_serializer.serialize_events(
|
||||
state_events, time_now, config=serialize_options
|
||||
)
|
||||
for room_id, state_events in state_results.items()
|
||||
}
|
@@ -1175,7 +1175,6 @@ class SyncHandler:
for e in await sync_config.filter_collection.filter_room_state(
list(state.values())
)
if e.type != EventTypes.Aliases  # until MSC2261 or alternative solution
}

async def _find_missing_partial_state_memberships(
@@ -1297,7 +1296,6 @@ class SyncHandler:
return RoomNotifCounts.empty()

with Measure(self.clock, "unread_notifs_for_room_id"):

return await self.store.get_unread_event_push_actions_by_room_for_user(
room_id,
sync_config.user.to_string(),

@@ -44,6 +44,7 @@ from twisted.internet.interfaces import (
IAddress,
IDelayedCall,
IHostResolution,
IOpenSSLContextFactory,
IReactorCore,
IReactorPluggableNameResolver,
IReactorTime,
@@ -958,8 +959,8 @@ class InsecureInterceptableContextFactory(ssl.ContextFactory):
self._context = SSL.Context(SSL.SSLv23_METHOD)
self._context.set_verify(VERIFY_NONE, lambda *_: False)

def getContext(self, hostname=None, port=None):
def getContext(self) -> SSL.Context:
return self._context

def creatorForNetloc(self, hostname: bytes, port: int):
def creatorForNetloc(self, hostname: bytes, port: int) -> IOpenSSLContextFactory:
return self

@ -440,7 +440,7 @@ class MatrixFederationHttpClient:
|
||||
Args:
|
||||
request: details of request to be sent
|
||||
|
||||
retry_on_dns_fail: true if the request should be retied on DNS failures
|
||||
retry_on_dns_fail: true if the request should be retried on DNS failures
|
||||
|
||||
timeout: number of milliseconds to wait for the response headers
|
||||
(including connecting to the server), *for each attempt*.
|
||||
@ -475,7 +475,7 @@ class MatrixFederationHttpClient:
|
||||
(except 429).
|
||||
NotRetryingDestination: If we are not yet ready to retry this
|
||||
server.
|
||||
FederationDeniedError: If this destination is not on our
|
||||
FederationDeniedError: If this destination is not on our
|
||||
federation whitelist
|
||||
RequestSendFailed: If there were problems connecting to the
|
||||
remote, due to e.g. DNS failures, connection timeouts etc.
|
||||
@ -871,7 +871,7 @@ class MatrixFederationHttpClient:
|
||||
(except 429).
|
||||
NotRetryingDestination: If we are not yet ready to retry this
|
||||
server.
|
||||
FederationDeniedError: If this destination is not on our
|
||||
FederationDeniedError: If this destination is not on our
|
||||
federation whitelist
|
||||
RequestSendFailed: If there were problems connecting to the
|
||||
remote, due to e.g. DNS failures, connection timeouts etc.
|
||||
@ -958,7 +958,7 @@ class MatrixFederationHttpClient:
|
||||
(except 429).
|
||||
NotRetryingDestination: If we are not yet ready to retry this
|
||||
server.
|
||||
FederationDeniedError: If this destination is not on our
|
||||
FederationDeniedError: If this destination is not on our
|
||||
federation whitelist
|
||||
RequestSendFailed: If there were problems connecting to the
|
||||
remote, due to e.g. DNS failures, connection timeouts etc.
|
||||
@ -1036,6 +1036,8 @@ class MatrixFederationHttpClient:
|
||||
args: A dictionary used to create query strings, defaults to
|
||||
None.
|
||||
|
||||
retry_on_dns_fail: true if the request should be retried on DNS failures
|
||||
|
||||
timeout: number of milliseconds to wait for the response.
|
||||
self._default_timeout (60s) by default.
|
||||
|
||||
@ -1063,7 +1065,7 @@ class MatrixFederationHttpClient:
|
||||
(except 429).
|
||||
NotRetryingDestination: If we are not yet ready to retry this
|
||||
server.
|
||||
FederationDeniedError: If this destination is not on our
|
||||
FederationDeniedError: If this destination is not on our
|
||||
federation whitelist
|
||||
RequestSendFailed: If there were problems connecting to the
|
||||
remote, due to e.g. DNS failures, connection timeouts etc.
|
||||
@ -1141,7 +1143,7 @@ class MatrixFederationHttpClient:
|
||||
(except 429).
|
||||
NotRetryingDestination: If we are not yet ready to retry this
|
||||
server.
|
||||
FederationDeniedError: If this destination is not on our
|
||||
FederationDeniedError: If this destination is not on our
|
||||
federation whitelist
|
||||
RequestSendFailed: If there were problems connecting to the
|
||||
remote, due to e.g. DNS failures, connection timeouts etc.
|
||||
@ -1197,7 +1199,7 @@ class MatrixFederationHttpClient:
|
||||
(except 429).
|
||||
NotRetryingDestination: If we are not yet ready to retry this
|
||||
server.
|
||||
FederationDeniedError: If this destination is not on our
|
||||
FederationDeniedError: If this destination is not on our
|
||||
federation whitelist
|
||||
RequestSendFailed: If there were problems connecting to the
|
||||
remote, due to e.g. DNS failures, connection timeouts etc.
|
||||
|
@ -524,6 +524,7 @@ def whitelisted_homeserver(destination: str) -> bool:
|
||||
|
||||
# Start spans and scopes
|
||||
|
||||
|
||||
# Could use kwargs but I want these to be explicit
|
||||
def start_active_span(
|
||||
operation_name: str,
|
||||
|
synapse/media/_base.py (new file, 481 lines)
@@ -0,0 +1,481 @@
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import urllib
|
||||
from abc import ABC, abstractmethod
|
||||
from types import TracebackType
|
||||
from typing import Awaitable, Dict, Generator, List, Optional, Tuple, Type
|
||||
|
||||
import attr
|
||||
|
||||
from twisted.internet.interfaces import IConsumer
|
||||
from twisted.protocols.basic import FileSender
|
||||
from twisted.web.server import Request
|
||||
|
||||
from synapse.api.errors import Codes, SynapseError, cs_error
|
||||
from synapse.http.server import finish_request, respond_with_json
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.logging.context import make_deferred_yieldable
|
||||
from synapse.util.stringutils import is_ascii, parse_and_validate_server_name
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# list all text content types that will have the charset default to UTF-8 when
|
||||
# none is given
|
||||
TEXT_CONTENT_TYPES = [
|
||||
"text/css",
|
||||
"text/csv",
|
||||
"text/html",
|
||||
"text/calendar",
|
||||
"text/plain",
|
||||
"text/javascript",
|
||||
"application/json",
|
||||
"application/ld+json",
|
||||
"application/rtf",
|
||||
"image/svg+xml",
|
||||
"text/xml",
|
||||
]
|
||||
|
||||
|
||||
def parse_media_id(request: Request) -> Tuple[str, str, Optional[str]]:
|
||||
"""Parses the server name, media ID and optional file name from the request URI
|
||||
|
||||
Also performs some rough validation on the server name.
|
||||
|
||||
Args:
|
||||
request: The `Request`.
|
||||
|
||||
Returns:
|
||||
A tuple containing the parsed server name, media ID and optional file name.
|
||||
|
||||
Raises:
|
||||
SynapseError(404): if parsing or validation fail for any reason
|
||||
"""
|
||||
try:
|
||||
# The type on postpath seems incorrect in Twisted 21.2.0.
|
||||
postpath: List[bytes] = request.postpath # type: ignore
|
||||
assert postpath
|
||||
|
||||
# This allows users to append e.g. /test.png to the URL. Useful for
|
||||
# clients that parse the URL to see content type.
|
||||
server_name_bytes, media_id_bytes = postpath[:2]
|
||||
server_name = server_name_bytes.decode("utf-8")
|
||||
media_id = media_id_bytes.decode("utf8")
|
||||
|
||||
# Validate the server name, raising if invalid
|
||||
parse_and_validate_server_name(server_name)
|
||||
|
||||
file_name = None
|
||||
if len(postpath) > 2:
|
||||
try:
|
||||
file_name = urllib.parse.unquote(postpath[-1].decode("utf-8"))
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
return server_name, media_id, file_name
|
||||
except Exception:
|
||||
raise SynapseError(
|
||||
404, "Invalid media id token %r" % (request.postpath,), Codes.UNKNOWN
|
||||
)
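A typical caller of parse_media_id looks something like the sketch below; the servlet shape and the repository methods named here are assumptions for illustration, not part of this file.

async def handle_download(self, request: SynapseRequest) -> None:
    # Hypothetical download servlet body built on parse_media_id.
    server_name, media_id, file_name = parse_media_id(request)
    if server_name == self.server_name:
        await self.media_repo.get_local_media(request, media_id, file_name)
    else:
        await self.media_repo.get_remote_media(
            request, server_name, media_id, file_name
        )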
|
||||
|
||||
|
||||
def respond_404(request: SynapseRequest) -> None:
|
||||
respond_with_json(
|
||||
request,
|
||||
404,
|
||||
cs_error("Not found %r" % (request.postpath,), code=Codes.NOT_FOUND),
|
||||
send_cors=True,
|
||||
)
|
||||
|
||||
|
||||
async def respond_with_file(
|
||||
request: SynapseRequest,
|
||||
media_type: str,
|
||||
file_path: str,
|
||||
file_size: Optional[int] = None,
|
||||
upload_name: Optional[str] = None,
|
||||
) -> None:
|
||||
logger.debug("Responding with %r", file_path)
|
||||
|
||||
if os.path.isfile(file_path):
|
||||
if file_size is None:
|
||||
stat = os.stat(file_path)
|
||||
file_size = stat.st_size
|
||||
|
||||
add_file_headers(request, media_type, file_size, upload_name)
|
||||
|
||||
with open(file_path, "rb") as f:
|
||||
await make_deferred_yieldable(FileSender().beginFileTransfer(f, request))
|
||||
|
||||
finish_request(request)
|
||||
else:
|
||||
respond_404(request)
|
||||
|
||||
|
||||
def add_file_headers(
|
||||
request: Request,
|
||||
media_type: str,
|
||||
file_size: Optional[int],
|
||||
upload_name: Optional[str],
|
||||
) -> None:
|
||||
"""Adds the correct response headers in preparation for responding with the
|
||||
media.
|
||||
|
||||
Args:
|
||||
request
|
||||
media_type: The media/content type.
|
||||
file_size: Size in bytes of the media, if known.
|
||||
upload_name: The name of the requested file, if any.
|
||||
"""
|
||||
|
||||
def _quote(x: str) -> str:
|
||||
return urllib.parse.quote(x.encode("utf-8"))
|
||||
|
||||
# Default to a UTF-8 charset for text content types.
|
||||
# ex, uses UTF-8 for 'text/css' but not 'text/css; charset=UTF-16'
|
||||
if media_type.lower() in TEXT_CONTENT_TYPES:
|
||||
content_type = media_type + "; charset=UTF-8"
|
||||
else:
|
||||
content_type = media_type
|
||||
|
||||
request.setHeader(b"Content-Type", content_type.encode("UTF-8"))
|
||||
if upload_name:
|
||||
# RFC6266 section 4.1 [1] defines both `filename` and `filename*`.
|
||||
#
|
||||
# `filename` is defined to be a `value`, which is defined by RFC2616
|
||||
# section 3.6 [2] to be a `token` or a `quoted-string`, where a `token`
|
||||
# is (essentially) a single US-ASCII word, and a `quoted-string` is a
|
||||
# US-ASCII string surrounded by double-quotes, using backslash as an
|
||||
# escape character. Note that %-encoding is *not* permitted.
|
||||
#
|
||||
# `filename*` is defined to be an `ext-value`, which is defined in
|
||||
# RFC5987 section 3.2.1 [3] to be `charset "'" [ language ] "'" value-chars`,
|
||||
# where `value-chars` is essentially a %-encoded string in the given charset.
|
||||
#
|
||||
# [1]: https://tools.ietf.org/html/rfc6266#section-4.1
|
||||
# [2]: https://tools.ietf.org/html/rfc2616#section-3.6
|
||||
# [3]: https://tools.ietf.org/html/rfc5987#section-3.2.1
|
||||
|
||||
# We avoid the quoted-string version of `filename`, because (a) synapse didn't
|
||||
# correctly interpret those as of 0.99.2 and (b) they are a bit of a pain and we
|
||||
# may as well just do the filename* version.
|
||||
if _can_encode_filename_as_token(upload_name):
|
||||
disposition = "inline; filename=%s" % (upload_name,)
|
||||
else:
|
||||
disposition = "inline; filename*=utf-8''%s" % (_quote(upload_name),)
|
||||
|
||||
request.setHeader(b"Content-Disposition", disposition.encode("ascii"))
|
||||
|
||||
# cache for at least a day.
|
||||
# XXX: we might want to turn this off for data we don't want to
|
||||
# recommend caching as it's sensitive or private - or at least
|
||||
# select private. don't bother setting Expires as all our
|
||||
# clients are smart enough to be happy with Cache-Control
|
||||
request.setHeader(
|
||||
b"Cache-Control", b"public,immutable,max-age=86400,s-maxage=86400"
|
||||
)
|
||||
if file_size is not None:
|
||||
request.setHeader(b"Content-Length", b"%d" % (file_size,))
|
||||
|
||||
# Tell web crawlers to not index, archive, or follow links in media. This
|
||||
# should help to prevent things in the media repo from showing up in web
|
||||
# search results.
|
||||
request.setHeader(b"X-Robots-Tag", "noindex, nofollow, noarchive, noimageindex")
|
||||
|
||||
|
||||
# separators as defined in RFC2616. SP and HT are handled separately.
|
||||
# see _can_encode_filename_as_token.
|
||||
_FILENAME_SEPARATOR_CHARS = {
|
||||
"(",
|
||||
")",
|
||||
"<",
|
||||
">",
|
||||
"@",
|
||||
",",
|
||||
";",
|
||||
":",
|
||||
"\\",
|
||||
'"',
|
||||
"/",
|
||||
"[",
|
||||
"]",
|
||||
"?",
|
||||
"=",
|
||||
"{",
|
||||
"}",
|
||||
}
|
||||
|
||||
|
||||
def _can_encode_filename_as_token(x: str) -> bool:
|
||||
for c in x:
|
||||
# from RFC2616:
|
||||
#
|
||||
# token = 1*<any CHAR except CTLs or separators>
|
||||
#
|
||||
# separators = "(" | ")" | "<" | ">" | "@"
|
||||
# | "," | ";" | ":" | "\" | <">
|
||||
# | "/" | "[" | "]" | "?" | "="
|
||||
# | "{" | "}" | SP | HT
|
||||
#
|
||||
# CHAR = <any US-ASCII character (octets 0 - 127)>
|
||||
#
|
||||
# CTL = <any US-ASCII control character
|
||||
# (octets 0 - 31) and DEL (127)>
|
||||
#
|
||||
if ord(c) >= 127 or ord(c) <= 32 or c in _FILENAME_SEPARATOR_CHARS:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
async def respond_with_responder(
|
||||
request: SynapseRequest,
|
||||
responder: "Optional[Responder]",
|
||||
media_type: str,
|
||||
file_size: Optional[int],
|
||||
upload_name: Optional[str] = None,
|
||||
) -> None:
|
||||
"""Responds to the request with given responder. If responder is None then
|
||||
returns 404.
|
||||
|
||||
Args:
|
||||
request
|
||||
responder
|
||||
media_type: The media/content type.
|
||||
file_size: Size in bytes of the media. If not known it should be None
|
||||
upload_name: The name of the requested file, if any.
|
||||
"""
|
||||
if not responder:
|
||||
respond_404(request)
|
||||
return
|
||||
|
||||
# If we have a responder we *must* use it as a context manager.
|
||||
with responder:
|
||||
if request._disconnected:
|
||||
logger.warning(
|
||||
"Not sending response to request %s, already disconnected.", request
|
||||
)
|
||||
return
|
||||
|
||||
logger.debug("Responding to media request with responder %s", responder)
|
||||
add_file_headers(request, media_type, file_size, upload_name)
|
||||
try:
|
||||
await responder.write_to_consumer(request)
|
||||
except Exception as e:
|
||||
# The majority of the time this will be due to the client having gone
|
||||
# away. Unfortunately, Twisted simply throws a generic exception at us
|
||||
# in that case.
|
||||
logger.warning("Failed to write to consumer: %s %s", type(e), e)
|
||||
|
||||
# Unregister the producer, if it has one, so Twisted doesn't complain
|
||||
if request.producer:
|
||||
request.unregisterProducer()
|
||||
|
||||
finish_request(request)
|
||||
|
||||
|
||||
class Responder(ABC):
|
||||
"""Represents a response that can be streamed to the requester.
|
||||
|
||||
Responder is a context manager which *must* be used, so that any resources
|
||||
held can be cleaned up.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def write_to_consumer(self, consumer: IConsumer) -> Awaitable:
|
||||
"""Stream response into consumer
|
||||
|
||||
Args:
|
||||
consumer: The consumer to stream into.
|
||||
|
||||
Returns:
|
||||
Resolves once the response has finished being written
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def __enter__(self) -> None: # noqa: B027
|
||||
pass
|
||||
|
||||
def __exit__( # noqa: B027
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc_val: Optional[BaseException],
|
||||
exc_tb: Optional[TracebackType],
|
||||
) -> None:
|
||||
pass
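A minimal concrete Responder, in the spirit of (but not identical to) the FileResponder that Synapse ships, may help make the contract clearer:

from typing import Awaitable, BinaryIO

from twisted.internet.interfaces import IConsumer
from twisted.protocols.basic import FileSender

from synapse.logging.context import make_deferred_yieldable
from synapse.media._base import Responder


class OpenFileResponder(Responder):
    """Streams an already-open file to the consumer and closes it afterwards."""

    def __init__(self, open_file: BinaryIO):
        self.open_file = open_file

    def write_to_consumer(self, consumer: IConsumer) -> Awaitable:
        return make_deferred_yieldable(
            FileSender().beginFileTransfer(self.open_file, consumer)
        )

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        # The context-manager exit is where held resources are released.
        self.open_file.close()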
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class ThumbnailInfo:
|
||||
"""Details about a generated thumbnail."""
|
||||
|
||||
width: int
|
||||
height: int
|
||||
method: str
|
||||
# Content type of thumbnail, e.g. image/png
|
||||
type: str
|
||||
# The size of the media file, in bytes.
|
||||
length: Optional[int] = None
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class FileInfo:
|
||||
"""Details about a requested/uploaded file."""
|
||||
|
||||
# The server name where the media originated from, or None if local.
|
||||
server_name: Optional[str]
|
||||
# The local ID of the file. For local files this is the same as the media_id
|
||||
file_id: str
|
||||
# If the file is for the url preview cache
|
||||
url_cache: bool = False
|
||||
# Whether the file is a thumbnail or not.
|
||||
thumbnail: Optional[ThumbnailInfo] = None
|
||||
|
||||
# The below properties exist to maintain compatibility with third-party modules.
|
||||
@property
|
||||
def thumbnail_width(self) -> Optional[int]:
|
||||
if not self.thumbnail:
|
||||
return None
|
||||
return self.thumbnail.width
|
||||
|
||||
@property
|
||||
def thumbnail_height(self) -> Optional[int]:
|
||||
if not self.thumbnail:
|
||||
return None
|
||||
return self.thumbnail.height
|
||||
|
||||
@property
|
||||
def thumbnail_method(self) -> Optional[str]:
|
||||
if not self.thumbnail:
|
||||
return None
|
||||
return self.thumbnail.method
|
||||
|
||||
@property
|
||||
def thumbnail_type(self) -> Optional[str]:
|
||||
if not self.thumbnail:
|
||||
return None
|
||||
return self.thumbnail.type
|
||||
|
||||
@property
|
||||
def thumbnail_length(self) -> Optional[int]:
|
||||
if not self.thumbnail:
|
||||
return None
|
||||
return self.thumbnail.length
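Example construction (all values made up) showing how the compatibility properties mirror the nested ThumbnailInfo:

thumb = ThumbnailInfo(width=320, height=240, method="scale", type="image/png")
info = FileInfo(server_name="matrix.example.org", file_id="abc123", thumbnail=thumb)

assert info.thumbnail_width == 320
assert info.thumbnail_type == "image/png"
# Without a thumbnail, every compatibility property returns None.
assert FileInfo(server_name=None, file_id="local123").thumbnail_method is None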
|
||||
|
||||
|
||||
def get_filename_from_headers(headers: Dict[bytes, List[bytes]]) -> Optional[str]:
|
||||
"""
|
||||
Get the filename of the downloaded file by inspecting the
|
||||
Content-Disposition HTTP header.
|
||||
|
||||
Args:
|
||||
headers: The HTTP request headers.
|
||||
|
||||
Returns:
|
||||
The filename, or None.
|
||||
"""
|
||||
content_disposition = headers.get(b"Content-Disposition", [b""])
|
||||
|
||||
# No header, bail out.
|
||||
if not content_disposition[0]:
|
||||
return None
|
||||
|
||||
_, params = _parse_header(content_disposition[0])
|
||||
|
||||
upload_name = None
|
||||
|
||||
# First check if there is a valid UTF-8 filename
|
||||
upload_name_utf8 = params.get(b"filename*", None)
|
||||
if upload_name_utf8:
|
||||
if upload_name_utf8.lower().startswith(b"utf-8''"):
|
||||
upload_name_utf8 = upload_name_utf8[7:]
|
||||
# We have a filename*= section. This MUST be ASCII, and any UTF-8
|
||||
# bytes are %-quoted.
|
||||
try:
|
||||
# Once it is decoded, we can then unquote the %-encoded
|
||||
# parts strictly into a unicode string.
|
||||
upload_name = urllib.parse.unquote(
|
||||
upload_name_utf8.decode("ascii"), errors="strict"
|
||||
)
|
||||
except UnicodeDecodeError:
|
||||
# Incorrect UTF-8.
|
||||
pass
|
||||
|
||||
# If there isn't check for an ascii name.
|
||||
if not upload_name:
|
||||
upload_name_ascii = params.get(b"filename", None)
|
||||
if upload_name_ascii and is_ascii(upload_name_ascii):
|
||||
upload_name = upload_name_ascii.decode("ascii")
|
||||
|
||||
# This may be None here, indicating we did not find a matching name.
|
||||
return upload_name
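Two quick illustrations of get_filename_from_headers with hand-written header dicts (the values are examples, not captured traffic):

headers = {b"Content-Disposition": [b"inline; filename*=utf-8''cat%20photo.png"]}
assert get_filename_from_headers(headers) == "cat photo.png"

headers = {b"Content-Disposition": [b'attachment; filename="notes.txt"']}
assert get_filename_from_headers(headers) == "notes.txt"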
|
||||
|
||||
|
||||
def _parse_header(line: bytes) -> Tuple[bytes, Dict[bytes, bytes]]:
|
||||
"""Parse a Content-type like header.
|
||||
|
||||
Cargo-culted from `cgi`, but works on bytes rather than strings.
|
||||
|
||||
Args:
|
||||
line: header to be parsed
|
||||
|
||||
Returns:
|
||||
The main content-type, followed by the parameter dictionary
|
||||
"""
|
||||
parts = _parseparam(b";" + line)
|
||||
key = next(parts)
|
||||
pdict = {}
|
||||
for p in parts:
|
||||
i = p.find(b"=")
|
||||
if i >= 0:
|
||||
name = p[:i].strip().lower()
|
||||
value = p[i + 1 :].strip()
|
||||
|
||||
# strip double-quotes
|
||||
if len(value) >= 2 and value[0:1] == value[-1:] == b'"':
|
||||
value = value[1:-1]
|
||||
value = value.replace(b"\\\\", b"\\").replace(b'\\"', b'"')
|
||||
pdict[name] = value
|
||||
|
||||
return key, pdict
|
||||
|
||||
|
||||
def _parseparam(s: bytes) -> Generator[bytes, None, None]:
|
||||
"""Generator which splits the input on ;, respecting double-quoted sequences
|
||||
|
||||
Cargo-culted from `cgi`, but works on bytes rather than strings.
|
||||
|
||||
Args:
|
||||
s: header to be parsed
|
||||
|
||||
Returns:
|
||||
The split input
|
||||
"""
|
||||
while s[:1] == b";":
|
||||
s = s[1:]
|
||||
|
||||
# look for the next ;
|
||||
end = s.find(b";")
|
||||
|
||||
# if there is an odd number of " marks between here and the next ;, skip to the
|
||||
# next ; instead
|
||||
while end > 0 and (s.count(b'"', 0, end) - s.count(b'\\"', 0, end)) % 2:
|
||||
end = s.find(b";", end + 1)
|
||||
|
||||
if end < 0:
|
||||
end = len(s)
|
||||
f = s[:end]
|
||||
yield f.strip()
|
||||
s = s[end:]
|
@@ -32,18 +32,10 @@ from synapse.api.errors import (
RequestSendFailed,
SynapseError,
)
from synapse.config._base import ConfigError
from synapse.config.repository import ThumbnailRequirement
from synapse.http.server import UnrecognizedRequestResource
from synapse.http.site import SynapseRequest
from synapse.logging.context import defer_to_thread
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import random_string

from ._base import (
from synapse.media._base import (
FileInfo,
Responder,
ThumbnailInfo,
@@ -51,15 +43,15 @@ from ._base import (
respond_404,
respond_with_responder,
)
from .config_resource import MediaConfigResource
from .download_resource import DownloadResource
from .filepath import MediaFilePaths
from .media_storage import MediaStorage
from .preview_url_resource import PreviewUrlResource
from .storage_provider import StorageProviderWrapper
from .thumbnail_resource import ThumbnailResource
from .thumbnailer import Thumbnailer, ThumbnailError
from .upload_resource import UploadResource
from synapse.media.filepath import MediaFilePaths
from synapse.media.media_storage import MediaStorage
from synapse.media.storage_provider import StorageProviderWrapper
from synapse.media.thumbnailer import Thumbnailer, ThumbnailError
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import random_string

if TYPE_CHECKING:
from synapse.server import HomeServer
@ -1044,69 +1036,3 @@ class MediaRepository:
|
||||
removed_media.append(media_id)
|
||||
|
||||
return removed_media, len(removed_media)
|
||||
|
||||
|
||||
class MediaRepositoryResource(UnrecognizedRequestResource):
|
||||
"""File uploading and downloading.
|
||||
|
||||
Uploads are POSTed to a resource which returns a token which is used to GET
|
||||
the download::
|
||||
|
||||
=> POST /_matrix/media/r0/upload HTTP/1.1
|
||||
Content-Type: <media-type>
|
||||
Content-Length: <content-length>
|
||||
|
||||
<media>
|
||||
|
||||
<= HTTP/1.1 200 OK
|
||||
Content-Type: application/json
|
||||
|
||||
{ "content_uri": "mxc://<server-name>/<media-id>" }
|
||||
|
||||
=> GET /_matrix/media/r0/download/<server-name>/<media-id> HTTP/1.1
|
||||
|
||||
<= HTTP/1.1 200 OK
|
||||
Content-Type: <media-type>
|
||||
Content-Disposition: attachment;filename=<upload-filename>
|
||||
|
||||
<media>
|
||||
|
||||
Clients can get thumbnails by supplying a desired width and height and
|
||||
thumbnailing method::
|
||||
|
||||
=> GET /_matrix/media/r0/thumbnail/<server_name>
|
||||
/<media-id>?width=<w>&height=<h>&method=<m> HTTP/1.1
|
||||
|
||||
<= HTTP/1.1 200 OK
|
||||
Content-Type: image/jpeg or image/png
|
||||
|
||||
<thumbnail>
|
||||
|
||||
The thumbnail methods are "crop" and "scale". "scale" tries to return an
|
||||
image where either the width or the height is smaller than the requested
|
||||
size. The client should then scale and letterbox the image if it needs to
|
||||
fit within a given rectangle. "crop" tries to return an image where the
|
||||
width and height are close to the requested size and the aspect matches
|
||||
the requested size. The client should scale the image if it needs to fit
|
||||
within a given rectangle.
|
||||
"""
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
# If we're not configured to use it, raise if we somehow got here.
|
||||
if not hs.config.media.can_load_media_repo:
|
||||
raise ConfigError("Synapse is not configured to use a media repo.")
|
||||
|
||||
super().__init__()
|
||||
media_repo = hs.get_media_repository()
|
||||
|
||||
self.putChild(b"upload", UploadResource(hs, media_repo))
|
||||
self.putChild(b"download", DownloadResource(hs, media_repo))
|
||||
self.putChild(
|
||||
b"thumbnail", ThumbnailResource(hs, media_repo, media_repo.media_storage)
|
||||
)
|
||||
if hs.config.media.url_preview_enabled:
|
||||
self.putChild(
|
||||
b"preview_url",
|
||||
PreviewUrlResource(hs, media_repo, media_repo.media_storage),
|
||||
)
|
||||
self.putChild(b"config", MediaConfigResource(hs))
|
synapse/media/media_storage.py (new file, 374 lines)
@@ -0,0 +1,374 @@
|
||||
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import contextlib
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
IO,
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Awaitable,
|
||||
BinaryIO,
|
||||
Callable,
|
||||
Generator,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Type,
|
||||
)
|
||||
|
||||
import attr
|
||||
|
||||
from twisted.internet.defer import Deferred
|
||||
from twisted.internet.interfaces import IConsumer
|
||||
from twisted.protocols.basic import FileSender
|
||||
|
||||
import synapse
|
||||
from synapse.api.errors import NotFoundError
|
||||
from synapse.logging.context import defer_to_thread, make_deferred_yieldable
|
||||
from synapse.util import Clock
|
||||
from synapse.util.file_consumer import BackgroundFileConsumer
|
||||
|
||||
from ._base import FileInfo, Responder
|
||||
from .filepath import MediaFilePaths
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.media.storage_provider import StorageProvider
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MediaStorage:
|
||||
"""Responsible for storing/fetching files from local sources.
|
||||
|
||||
Args:
|
||||
hs
|
||||
local_media_directory: Base path where we store media on disk
|
||||
filepaths
|
||||
storage_providers: List of StorageProvider that are used to fetch and store files.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hs: "HomeServer",
|
||||
local_media_directory: str,
|
||||
filepaths: MediaFilePaths,
|
||||
storage_providers: Sequence["StorageProvider"],
|
||||
):
|
||||
self.hs = hs
|
||||
self.reactor = hs.get_reactor()
|
||||
self.local_media_directory = local_media_directory
|
||||
self.filepaths = filepaths
|
||||
self.storage_providers = storage_providers
|
||||
self.spam_checker = hs.get_spam_checker()
|
||||
self.clock = hs.get_clock()
|
||||
|
||||
async def store_file(self, source: IO, file_info: FileInfo) -> str:
|
||||
"""Write `source` to the on disk media store, and also any other
|
||||
configured storage providers
|
||||
|
||||
Args:
|
||||
source: A file like object that should be written
|
||||
file_info: Info about the file to store
|
||||
|
||||
Returns:
|
||||
the file path written to in the primary media store
|
||||
"""
|
||||
|
||||
with self.store_into_file(file_info) as (f, fname, finish_cb):
|
||||
# Write to the main repository
|
||||
await self.write_to_file(source, f)
|
||||
await finish_cb()
|
||||
|
||||
return fname
|
||||
|
||||
async def write_to_file(self, source: IO, output: IO) -> None:
|
||||
"""Asynchronously write the `source` to `output`."""
|
||||
await defer_to_thread(self.reactor, _write_file_synchronously, source, output)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def store_into_file(
|
||||
self, file_info: FileInfo
|
||||
) -> Generator[Tuple[BinaryIO, str, Callable[[], Awaitable[None]]], None, None]:
|
||||
"""Context manager used to get a file like object to write into, as
|
||||
described by file_info.
|
||||
|
||||
Actually yields a 3-tuple (file, fname, finish_cb), where file is a file
|
||||
like object that can be written to, fname is the absolute path of file
|
||||
on disk, and finish_cb is a function that returns an awaitable.
|
||||
|
||||
fname can be used to read the contents from after upload, e.g. to
|
||||
generate thumbnails.
|
||||
|
||||
finish_cb must be called and waited on after the file has been
|
||||
successfully been written to. Should not be called if there was an
|
||||
error.
|
||||
|
||||
Args:
|
||||
file_info: Info about the file to store
|
||||
|
||||
Example:
|
||||
|
||||
with media_storage.store_into_file(info) as (f, fname, finish_cb):
|
||||
# .. write into f ...
|
||||
await finish_cb()
|
||||
"""
|
||||
|
||||
path = self._file_info_to_path(file_info)
|
||||
fname = os.path.join(self.local_media_directory, path)
|
||||
|
||||
dirname = os.path.dirname(fname)
|
||||
os.makedirs(dirname, exist_ok=True)
|
||||
|
||||
finished_called = [False]
|
||||
|
||||
try:
|
||||
with open(fname, "wb") as f:
|
||||
|
||||
async def finish() -> None:
|
||||
# Ensure that all writes have been flushed and close the
|
||||
# file.
|
||||
f.flush()
|
||||
f.close()
|
||||
|
||||
spam_check = await self.spam_checker.check_media_file_for_spam(
|
||||
ReadableFileWrapper(self.clock, fname), file_info
|
||||
)
|
||||
if spam_check != synapse.module_api.NOT_SPAM:
|
||||
logger.info("Blocking media due to spam checker")
|
||||
# Note that we'll delete the stored media, due to the
|
||||
# try/except below. The media also won't be stored in
|
||||
# the DB.
|
||||
# We currently ignore any additional field returned by
|
||||
# the spam-check API.
|
||||
raise SpamMediaException(errcode=spam_check[0])
|
||||
|
||||
for provider in self.storage_providers:
|
||||
await provider.store_file(path, file_info)
|
||||
|
||||
finished_called[0] = True
|
||||
|
||||
yield f, fname, finish
|
||||
except Exception as e:
|
||||
try:
|
||||
os.remove(fname)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
raise e from None
|
||||
|
||||
if not finished_called:
|
||||
raise Exception("Finished callback not called")
|
||||
|
||||

    async def fetch_media(self, file_info: FileInfo) -> Optional[Responder]:
        """Attempts to fetch media described by file_info from the local cache
        and configured storage providers.

        Args:
            file_info: Metadata about the media file

        Returns:
            A Responder if the file was found, otherwise None.
        """
        paths = [self._file_info_to_path(file_info)]

        # fallback for remote thumbnails with no method in the filename
        if file_info.thumbnail and file_info.server_name:
            paths.append(
                self.filepaths.remote_media_thumbnail_rel_legacy(
                    server_name=file_info.server_name,
                    file_id=file_info.file_id,
                    width=file_info.thumbnail.width,
                    height=file_info.thumbnail.height,
                    content_type=file_info.thumbnail.type,
                )
            )

        for path in paths:
            local_path = os.path.join(self.local_media_directory, path)
            if os.path.exists(local_path):
                logger.debug("responding with local file %s", local_path)
                return FileResponder(open(local_path, "rb"))
            logger.debug("local file %s did not exist", local_path)

        for provider in self.storage_providers:
            for path in paths:
                res: Any = await provider.fetch(path, file_info)
                if res:
                    logger.debug("Streaming %s from %s", path, provider)
                    return res
                logger.debug("%s not found on %s", path, provider)

        return None
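
# Illustrative sketch (assumed names, not part of the diff): consuming the
# Responder returned by fetch_media. `request` is assumed to implement
# Twisted's IConsumer; the real request-serving helpers live elsewhere.
async def serve_media_example(media_storage, file_info, request) -> None:
    responder = await media_storage.fetch_media(file_info)
    if responder is None:
        raise NotFoundError()
    with responder:
        # Streams the file to the consumer and closes it when done.
        await responder.write_to_consumer(request)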

    async def ensure_media_is_in_local_cache(self, file_info: FileInfo) -> str:
        """Ensures that the given file is in the local cache. Attempts to
        download it from storage providers if it isn't.

        Args:
            file_info: Metadata about the media file

        Returns:
            Full path to the local file
        """
        path = self._file_info_to_path(file_info)
        local_path = os.path.join(self.local_media_directory, path)
        if os.path.exists(local_path):
            return local_path

        # Fallback for paths without method names
        # Should be removed in the future
        if file_info.thumbnail and file_info.server_name:
            legacy_path = self.filepaths.remote_media_thumbnail_rel_legacy(
                server_name=file_info.server_name,
                file_id=file_info.file_id,
                width=file_info.thumbnail.width,
                height=file_info.thumbnail.height,
                content_type=file_info.thumbnail.type,
            )
            legacy_local_path = os.path.join(self.local_media_directory, legacy_path)
            if os.path.exists(legacy_local_path):
                return legacy_local_path

        dirname = os.path.dirname(local_path)
        os.makedirs(dirname, exist_ok=True)

        for provider in self.storage_providers:
            res: Any = await provider.fetch(path, file_info)
            if res:
                with res:
                    consumer = BackgroundFileConsumer(
                        open(local_path, "wb"), self.reactor
                    )
                    await res.write_to_consumer(consumer)
                    await consumer.wait()
                return local_path

        raise NotFoundError()
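
# Illustrative sketch (assumed names, not part of the diff): pulling a file
# into the local cache before doing purely local work on it, e.g. thumbnailing.
async def read_cached_media_example(media_storage, file_info) -> bytes:
    local_path = await media_storage.ensure_media_is_in_local_cache(file_info)
    with open(local_path, "rb") as f:
        return f.read()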

    def _file_info_to_path(self, file_info: FileInfo) -> str:
        """Converts file_info into a relative path.

        The path is suitable for storing files under a directory, e.g. used to
        store files on local FS under the base media repository directory.
        """
        if file_info.url_cache:
            if file_info.thumbnail:
                return self.filepaths.url_cache_thumbnail_rel(
                    media_id=file_info.file_id,
                    width=file_info.thumbnail.width,
                    height=file_info.thumbnail.height,
                    content_type=file_info.thumbnail.type,
                    method=file_info.thumbnail.method,
                )
            return self.filepaths.url_cache_filepath_rel(file_info.file_id)

        if file_info.server_name:
            if file_info.thumbnail:
                return self.filepaths.remote_media_thumbnail_rel(
                    server_name=file_info.server_name,
                    file_id=file_info.file_id,
                    width=file_info.thumbnail.width,
                    height=file_info.thumbnail.height,
                    content_type=file_info.thumbnail.type,
                    method=file_info.thumbnail.method,
                )
            return self.filepaths.remote_media_filepath_rel(
                file_info.server_name, file_info.file_id
            )

        if file_info.thumbnail:
            return self.filepaths.local_media_thumbnail_rel(
                media_id=file_info.file_id,
                width=file_info.thumbnail.width,
                height=file_info.thumbnail.height,
                content_type=file_info.thumbnail.type,
                method=file_info.thumbnail.method,
            )
        return self.filepaths.local_media_filepath_rel(file_info.file_id)
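
# Illustrative sketch (not part of the diff): how the branches above map to
# paths, using assumed FileInfo arguments (the real constructor may differ).
# No server_name and no thumbnail resolves to local media:
#     _file_info_to_path(FileInfo(server_name=None, file_id="abc123"))
#     -> filepaths.local_media_filepath_rel("abc123")
# A server_name with no thumbnail resolves to remote media:
#     _file_info_to_path(FileInfo(server_name="example.org", file_id="abc123"))
#     -> filepaths.remote_media_filepath_rel("example.org", "abc123")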


def _write_file_synchronously(source: IO, dest: IO) -> None:
    """Write `source` to the file like `dest` synchronously. Should be called
    from a thread.

    Args:
        source: A file like object that's to be written
        dest: A file like object to be written to
    """
    source.seek(0)  # Ensure we read from the start of the file
    shutil.copyfileobj(source, dest)


class FileResponder(Responder):
    """Wraps an open file that can be sent to a request.

    Args:
        open_file: A file like object to be streamed to the client,
            closed when finished streaming.
    """

    def __init__(self, open_file: IO):
        self.open_file = open_file

    def write_to_consumer(self, consumer: IConsumer) -> Deferred:
        return make_deferred_yieldable(
            FileSender().beginFileTransfer(self.open_file, consumer)
        )

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        self.open_file.close()


class SpamMediaException(NotFoundError):
    """The media was blocked by a spam checker, so we simply 404 the request (in
    the same way as if it was quarantined).
    """


@attr.s(slots=True, auto_attribs=True)
class ReadableFileWrapper:
    """Wrapper that allows reading a file in chunks, yielding to the reactor,
    and writing to a callback.

    This is a simplified `FileSender` that takes an IO object rather than an
    `IConsumer`.
    """

    CHUNK_SIZE = 2**14

    clock: Clock
    path: str

    async def write_chunks_to(self, callback: Callable[[bytes], object]) -> None:
        """Reads the file in chunks and calls the callback with each chunk."""

        with open(self.path, "rb") as file:
            while True:
                chunk = file.read(self.CHUNK_SIZE)
                if not chunk:
                    break

                callback(chunk)

                # We yield to the reactor by sleeping for 0 seconds.
                await self.clock.sleep(0)
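
# Illustrative sketch (not part of the diff): the kind of consumer
# write_chunks_to is designed for, e.g. a spam-checker callback hashing the
# uploaded media without reading it into memory in one go.
import hashlib

async def hash_media_example(wrapper: ReadableFileWrapper) -> str:
    digest = hashlib.sha256()
    # hashlib's update() accepts each bytes chunk; write_chunks_to yields to
    # the reactor between chunks.
    await wrapper.write_chunks_to(digest.update)
    return digest.hexdigest()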

@ -18,7 +18,7 @@ from typing import TYPE_CHECKING, List, Optional

import attr

from synapse.rest.media.v1.preview_html import parse_html_description
from synapse.media.preview_html import parse_html_description
from synapse.types import JsonDict
from synapse.util import json_decoder

181
synapse/media/storage_provider.py
Normal file
@ -0,0 +1,181 @@
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import abc
import logging
import os
import shutil
from typing import TYPE_CHECKING, Callable, Optional

from synapse.config._base import Config
from synapse.logging.context import defer_to_thread, run_in_background
from synapse.util.async_helpers import maybe_awaitable

from ._base import FileInfo, Responder
from .media_storage import FileResponder

logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from synapse.server import HomeServer


class StorageProvider(metaclass=abc.ABCMeta):
    """A storage provider is a service that can store uploaded media and
    retrieve them.
    """

    @abc.abstractmethod
    async def store_file(self, path: str, file_info: FileInfo) -> None:
        """Store the file described by file_info. The actual contents can be
        retrieved by reading the file in file_info.upload_path.

        Args:
            path: Relative path of file in local cache
            file_info: The metadata of the file.
        """

    @abc.abstractmethod
    async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
        """Attempt to fetch the file described by file_info and stream it back
        to the caller.

        Args:
            path: Relative path of file in local cache
            file_info: The metadata of the file.

        Returns:
            A Responder if the provider has the file, otherwise None.
        """
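
# Illustrative sketch (not something this commit adds): a minimal provider
# implementing the interface above. It only logs stores and never serves
# fetches.
class LoggingStorageProvider(StorageProvider):
    async def store_file(self, path: str, file_info: FileInfo) -> None:
        logger.info("Would store %s in a durable backend", path)

    async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
        # Nothing is ever stored, so nothing can be fetched.
        return None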


class StorageProviderWrapper(StorageProvider):
    """Wraps a storage provider and provides various config options.

    Args:
        backend: The storage provider to wrap.
        store_local: Whether to store new local files or not.
        store_synchronous: Whether to wait for the file to be successfully
            uploaded, or to do the upload in the background.
        store_remote: Whether remote media should be uploaded.
    """

    def __init__(
        self,
        backend: StorageProvider,
        store_local: bool,
        store_synchronous: bool,
        store_remote: bool,
    ):
        self.backend = backend
        self.store_local = store_local
        self.store_synchronous = store_synchronous
        self.store_remote = store_remote

    def __str__(self) -> str:
        return "StorageProviderWrapper[%s]" % (self.backend,)

    async def store_file(self, path: str, file_info: FileInfo) -> None:
        if not file_info.server_name and not self.store_local:
            return None

        if file_info.server_name and not self.store_remote:
            return None

        if file_info.url_cache:
            # The URL preview cache is short lived and not worth offloading or
            # backing up.
            return None

        if self.store_synchronous:
            # store_file is supposed to return an Awaitable, but guard
            # against improper implementations.
            await maybe_awaitable(self.backend.store_file(path, file_info))  # type: ignore
        else:
            # TODO: Handle errors.
            async def store() -> None:
                try:
                    return await maybe_awaitable(
                        self.backend.store_file(path, file_info)
                    )
                except Exception:
                    logger.exception("Error storing file")

            run_in_background(store)

    async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
        if file_info.url_cache:
            # Files in the URL preview cache definitely aren't stored here,
            # so avoid any potentially slow I/O or network access.
            return None

        # fetch is supposed to return an Awaitable, but guard
        # against improper implementations.
        return await maybe_awaitable(self.backend.fetch(path, file_info))
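
# Illustrative sketch (assumed values, not part of the diff): wrapping the
# sketch provider above so local uploads are mirrored synchronously while
# remote media and URL previews are skipped.
wrapped_provider = StorageProviderWrapper(
    LoggingStorageProvider(),
    store_local=True,
    store_synchronous=True,
    store_remote=False,
)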


class FileStorageProviderBackend(StorageProvider):
    """A storage provider that stores files in a directory on a filesystem.

    Args:
        hs: The running HomeServer.
        config: The config returned by `parse_config`.
    """

    def __init__(self, hs: "HomeServer", config: str):
        self.hs = hs
        self.cache_directory = hs.config.media.media_store_path
        self.base_directory = config

    def __str__(self) -> str:
        return "FileStorageProviderBackend[%s]" % (self.base_directory,)

    async def store_file(self, path: str, file_info: FileInfo) -> None:
        """See StorageProvider.store_file"""

        primary_fname = os.path.join(self.cache_directory, path)
        backup_fname = os.path.join(self.base_directory, path)

        dirname = os.path.dirname(backup_fname)
        os.makedirs(dirname, exist_ok=True)

        # mypy needs help inferring the type of the second parameter, which is generic
        shutil_copyfile: Callable[[str, str], str] = shutil.copyfile
        await defer_to_thread(
            self.hs.get_reactor(),
            shutil_copyfile,
            primary_fname,
            backup_fname,
        )

    async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
        """See StorageProvider.fetch"""

        backup_fname = os.path.join(self.base_directory, path)
        if os.path.isfile(backup_fname):
            return FileResponder(open(backup_fname, "rb"))

        return None

    @staticmethod
    def parse_config(config: dict) -> str:
        """Called on startup to parse the config supplied. This should parse
        the config and raise if there is a problem.

        The returned value is passed into the constructor.

        In this case we only care about a single param, the directory, so let's
        just pull that out.
        """
        return Config.ensure_directory(config["directory"])
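
# Illustrative sketch (assumed values, not part of the diff): the
# parse_config/constructor handshake described above. `hs` stands in for the
# running HomeServer and the directory path is made up.
backend_config = FileStorageProviderBackend.parse_config(
    {"directory": "/mnt/media-backup"}
)
backup_provider = FileStorageProviderBackend(hs, backend_config)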

@ -38,8 +38,7 @@ class ThumbnailError(Exception):


class Thumbnailer:

    FORMATS = {"image/jpeg": "JPEG", "image/png": "PNG"}
    FORMATS = {"image/jpeg": "JPEG", "image/png": "PNG", "image/webp": "WEBP"}

    @staticmethod
    def set_limits(max_image_pixels: int) -> None:
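
# Illustrative sketch (not part of the diff): how a content-type to Pillow
# format mapping like FORMATS is typically used when encoding a thumbnail.
# Pillow's Image.save is real; the helper itself is an assumption, and it
# expects an image already in a mode the target format can encode.
from io import BytesIO

from PIL import Image

def encode_thumbnail_example(image: Image.Image, content_type: str) -> bytes:
    formats = {"image/jpeg": "JPEG", "image/png": "PNG", "image/webp": "WEBP"}
    output = BytesIO()
    image.save(output, format=formats[content_type])
    return output.getvalue()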

Some files were not shown because too many files have changed in this diff.