Mirror of https://git.anonymousland.org/anonymousland/synapse.git (synced 2025-03-13 11:46:46 -04:00)

Compare commits: 207 commits, meow-patch ... master
Commits in this comparison (SHA1s, in page order; author and date columns did not survive the mirror):

638ea15f8e, 6d103373e2, 735e4d1f9d, 79d2e2e79c, 89a71e7390, c0772b4461, 8aa121c2be,
cf2f2934ad, 56efa9b167, 9b2ab506c5, 84b06fc893, 675ff0d5d0, 157092d97a, 6204c3663e,
72d2ceaa9a, 2a234b788e, 6f68e32bfb, 91c3f32673, ae4acda1bb, d9f694932c, a3bad89d57,
9228ae633f, 9d641d88b7, f0d8f66eaa, 5350b5d04d, 78cdb72cd6, d0541e36c0, 753d1d9cde,
5282ba1e2b, 57481ca694, 8a47bf13ef, 2e936afd5f, 4c8ada3904, 383f78503a, 9f7d6c6bc1,
bd4d958aaf, 96f163d932, 4fc85e5a92, 7a892ce793, 7d3ea4886c, 316044d6fa, fae4a2c066,
1ad142782a, 43411a0fd8, ce00e57a2a, d5324ee111, 5f7c908280, 5b70f240cf, 68a6717312,
e6af49fbea, 98fd558382, 3b0083c92a, cabe4a3005, 7f02fafa28, 7655bc0542, 4b8c9c340c,
1bc9985eb7, 72f3f23c4d, b32014578a, 8f2a3cbb70, a9216edbaa, 882911a863, 9b1f99ba6b,
1bc4feb6c9, d09760c62f, 527512b811, 9f5d7d5ba2, 1e1c220084, 72832a6158, 96bcc5d902,
ec9224bf9a, b6aef59334, f11fe931f5, 827f198177, a5fb382a29, 5ab7146e19, 63e25010d6,
25006acc17, f75a041f59, eee26138fe, 099b69fb1c, 1870b44d23, 2cfa6a3001, 14d8d41658,
3d70cc393f, 66fc166b96, afb216c202, b0a0fb5c97, 1f5473465d, 4953cd71df, f54f877f27,
3bf973edc7, 121fce7500, 63d87c08c8, d0fe417f5c, de92fb6a28, 603ef58549, 003a25ae5c,
8b1af08c6e, e7b559d2ca, 7961c6bee7, a1c9869394, 5e21e15f96, edcf938173, c071cd5a0e,
d4eba4409f, 408f60540f, 023f215c68, f167b35de9, 6326d744c9, ff155f7891, 4bb26c95a9,
e157c63f68, ce54477f6f, caf43c3d7c, 3d060eae6c, e7c3832ba6, be4ea209e8, 88efc75bab,
f4fc83ac75, a368d30c1c, 9418344db4, 20ed8c926b, 47bc84dd53, 820f02b70b, 212eb8bf88,
a939ba5976, f7f471bb80, 7dbc917769, 4eddcf6653, a7bdc4a1ed, 3ec25f27ab, 4f525ff19c,
0c6d40f6c4, cca37a0ecc, ddbd79a981, cf1b4fab59, e3b312a8ca, 543fcb6f17, 932e104468,
2af1a982c1, 8314646cd3, 506e24ffc4, c0854ce65a, 869ef75cb7, 2a869d257f, a9478e436e,
89ae8ce7ca, c114befd6b, c69aae94cd, 41f127e068, 05e0a4089a, fd9cadcf53, 95876cf5f1,
242d2a27ce, 6b6e91e610, 02f74f3a99, 848f7e3d5f, 7ae4f7236a, 15e975f68f, 1eea662780,
ecbe0ddbe7, c8665dd25d, c4f4dc35cd, 8ef324ea6f, 33a85cf08c, 7ec1f096d3, 65f10afb64,
916b8061d2, 2b78981736, b2fd03d075, 69553052cc, d62cd940cb, 8c3fa748e6, 682d31c702,
c369d82df0, e746f80b4f, 521026897c, 93f7955eba, 1cd4fbc51d, 189a878a35, b40657314e,
4fc8875876, 3f2ef205e2, f7e49afb99, d3afe59d5a, 80884579f5, 229ae5bcec, 81a0dc35f7,
965956160a, 1ff2d20a6f, a74c099ece, 1c95ddd09b, b2357a898c, 335f52d595, 682151a464,
f8a584ed02, ec79870f14, 1a1738eca2, a068ad7dd4, 452b009eb0, adac949a41, 9bb2eac719,
4ed08ff72e, 6def779a1a, 91f8de7b56, 647ff3ef65
@@ -109,11 +109,26 @@ sytest_tests = [
         "postgres": "multi-postgres",
         "workers": "workers",
     },
+    {
+        "sytest-tag": "focal",
+        "postgres": "multi-postgres",
+        "workers": "workers",
+        "reactor": "asyncio",
+    },
 ]

 if not IS_PR:
     sytest_tests.extend(
         [
+            {
+                "sytest-tag": "focal",
+                "reactor": "asyncio",
+            },
+            {
+                "sytest-tag": "focal",
+                "postgres": "postgres",
+                "reactor": "asyncio",
+            },
             {
                 "sytest-tag": "testing",
                 "postgres": "postgres",
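The hunk above extends a Python helper that builds the SyTest job matrix with asyncio-reactor variants. As an illustrative sketch (not the repository's actual script), a matrix like this is typically serialized to JSON and exposed as a step output for GitHub Actions to consume:

```python
import json
import os

# Entries shaped like the dicts in the hunk above.
sytest_tests = [
    {"sytest-tag": "focal", "postgres": "multi-postgres", "workers": "workers"},
    {
        "sytest-tag": "focal",
        "postgres": "multi-postgres",
        "workers": "workers",
        "reactor": "asyncio",
    },
]

# GitHub Actions reads step outputs from the file named by $GITHUB_OUTPUT.
out_path = os.environ.get("GITHUB_OUTPUT", "/dev/stdout")
with open(out_path, "a") as f:
    f.write("sytest_test_matrix=" + json.dumps(sytest_tests) + "\n")
```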
@@ -35,9 +35,9 @@ sed -i \
 # compatible (as far the package metadata declares, anyway); pip's package resolver
 # is more lax.
 #
-# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
-# toml file. This means we don't have to ensure compatibility between old deps and
-# dev tools.
+# Rather than `poetry install --no-dev`, we drop all dev dependencies and the dev-docs
+# group from the toml file. This means we don't have to ensure compatibility between
+# old deps and dev tools.

 pip install toml wheel

@@ -47,6 +47,7 @@ with open('pyproject.toml', 'r') as f:
     data = toml.loads(f.read())

 del data['tool']['poetry']['dev-dependencies']
+del data['tool']['poetry']['group']['dev-docs']

 with open('pyproject.toml', 'w') as f:
     toml.dump(data, f)
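Pieced together, the snippet the two hunks above edit amounts to the following self-contained step (a sketch assuming the third-party `toml` package installed by the `pip install toml wheel` line just above):

```python
import toml

with open("pyproject.toml", "r") as f:
    data = toml.loads(f.read())

# Drop dev dependencies and the dev-docs group, so the pinned old runtime deps
# never need to be co-installable with current dev tooling.
del data["tool"]["poetry"]["dev-dependencies"]
del data["tool"]["poetry"]["group"]["dev-docs"]

with open("pyproject.toml", "w") as f:
    toml.dump(data, f)
```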
@@ -8,6 +8,7 @@
 !README.rst
 !pyproject.toml
 !poetry.lock
+!requirements.txt
 !Cargo.lock
 !Cargo.toml
 !build_rust.py
@@ -21,4 +21,8 @@ aff1eb7c671b0a3813407321d2702ec46c71fa56
 0a00b7ff14890987f09112a2ae696c61001e6cf1

 # Convert tests/rest/admin/test_room.py to unix file endings (#7953).
 c4268e3da64f1abb5b31deaeb5769adb6510c0a7

+# Update black to 23.1.0 (#15103)
+9bb2eac71962970d02842bca441f4bcdbbf93a11
12  .github/workflows/docker.yml  vendored
@@ -10,6 +10,7 @@ on:

 permissions:
   contents: read
+  packages: write

 jobs:
   build:
@@ -34,11 +35,20 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

+      - name: Log in to GHCR
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
       - name: Calculate docker image tag
         id: set-tag
         uses: docker/metadata-action@master
         with:
-          images: matrixdotorg/synapse
+          images: |
+            docker.io/matrixdotorg/synapse
+            ghcr.io/matrix-org/synapse
           flavor: |
             latest=false
           tags: |
2  .github/workflows/docs-pr-netlify.yaml  vendored
@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@b59d8c6a6c5c6c6437954f470d963c0b20ea7415 # v2.25.0
+        uses: dawidd6/action-download-artifact@5e780fc7bbd0cac69fc73271ed86edf5dcb72d67 # v2.26.0
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}
4  .github/workflows/docs-pr.yaml  vendored
@@ -12,7 +12,7 @@ jobs:
     name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -39,7 +39,7 @@ jobs:
     name: Check links in documentation
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
79  .github/workflows/docs.yaml  vendored
@@ -13,25 +13,10 @@ on:
   workflow_dispatch:

 jobs:
-  pages:
-    name: GitHub Pages
+  pre:
+    name: Calculate variables for GitHub Pages deployment
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-
-      - name: Setup mdbook
-        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
-        with:
-          mdbook-version: '0.4.17'
-
-      - name: Build the documentation
-        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
-        # However, we're using docs/README.md for other purposes and need to pick a new page
-        # as the default. Let's opt for the welcome page instead.
-        run: |
-          mdbook build
-          cp book/welcome_and_overview.html book/index.html
-
       # Figure out the target directory.
       #
       # The target directory depends on the name of the branch
@@ -55,11 +40,65 @@ jobs:

           # finally, set the 'branch-version' var.
           echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
+
+    outputs:
+      branch-version: ${{ steps.vars.outputs.branch-version }}
+
+################################################################################
+  pages-docs:
+    name: GitHub Pages
+    runs-on: ubuntu-latest
+    needs:
+      - pre
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup mdbook
+        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+        with:
+          mdbook-version: '0.4.17'
+
+      - name: Build the documentation
+        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
+        # However, we're using docs/README.md for other purposes and need to pick a new page
+        # as the default. Let's opt for the welcome page instead.
+        run: |
+          mdbook build
+          cp book/welcome_and_overview.html book/index.html
+
       # Deploy to the target directory.
       - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2
+        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
-          destination_dir: ./${{ steps.vars.outputs.branch-version }}
+          destination_dir: ./${{ needs.pre.outputs.branch-version }}
+
+################################################################################
+  pages-devdocs:
+    name: GitHub Pages (developer docs)
+    runs-on: ubuntu-latest
+    needs:
+      - pre
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: "Set up Sphinx"
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          python-version: "3.x"
+          poetry-version: "1.3.2"
+          groups: "dev-docs"
+          extras: ""
+
+      - name: Build the documentation
+        run: |
+          cd dev-docs
+          poetry run make html
+
+      # Deploy to the target directory.
+      - name: Deploy to gh pages
+        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          publish_dir: ./dev-docs/_build/html
+          destination_dir: ./dev-docs/${{ needs.pre.outputs.branch-version }}
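The `pre` job computes a `branch-version` output that both deploy jobs consume via `needs.pre.outputs.branch-version`. The shell that derives it is only partially shown above; a hypothetical Python sketch of that kind of ref-to-directory mapping (the names and rules here are assumptions for illustration, not the workflow's actual logic):

```python
import re


def branch_version(ref: str) -> str:
    """Map a git ref to the docs directory it should publish under (hypothetical)."""
    branch = ref.removeprefix("refs/heads/").removeprefix("refs/tags/")
    # Hypothetical rule: tags such as v1.81.0rc1 publish under their major.minor dir.
    m = re.match(r"v(\d+)\.(\d+)", branch)
    return f"v{m.group(1)}.{m.group(2)}" if m else branch


assert branch_version("refs/heads/develop") == "develop"
assert branch_version("refs/tags/v1.81.0rc1") == "v1.81"
```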
6  .github/workflows/latest_deps.yml  vendored
@@ -27,7 +27,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: stable
       - uses: Swatinem/rust-cache@v2
@@ -61,7 +61,7 @@ jobs:
       - uses: actions/checkout@v3

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: stable
       - uses: Swatinem/rust-cache@v2
@@ -134,7 +134,7 @@ jobs:
       - uses: actions/checkout@v3

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: stable
       - uses: Swatinem/rust-cache@v2
2  .github/workflows/push_complement_image.yml  vendored
@@ -48,7 +48,7 @@ jobs:
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@v1
+        uses: docker/login-action@v2
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
4  .github/workflows/release-artifacts.yml  vendored
@@ -4,13 +4,15 @@ name: Build release artifacts

 on:
   # we build on PRs and develop to (hopefully) get early warning
-  # of things breaking (but only build one set of debs)
+  # of things breaking (but only build one set of debs). PRs skip
+  # building wheels on macOS & ARM.
   pull_request:
   push:
     branches: ["develop", "release-*"]

     # we do the full build on tags.
     tags: ["v*"]
+  merge_group:
   workflow_dispatch:

 concurrency:
39  .github/workflows/tests.yml  vendored
@@ -4,6 +4,7 @@ on:
   push:
     branches: ["develop", "release-*"]
   pull_request:
+  merge_group:
   workflow_dispatch:

 concurrency:
@@ -33,6 +34,14 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
+      - name: Install Rust
+        # There don't seem to be versioned releases of this action per se: for each rust
+        # version there is a branch which gets constantly rebased on top of master.
+        # We pin to a specific commit for paranoia's sake.
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        with:
+          toolchain: 1.58.1
+      - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
@@ -94,6 +103,14 @@ jobs:
       - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
+      - name: Install Rust
+        # There don't seem to be versioned releases of this action per se: for each rust
+        # version there is a branch which gets constantly rebased on top of master.
+        # We pin to a specific commit for paranoia's sake.
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        with:
+          toolchain: 1.58.1
+      - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
           poetry-version: "1.3.2"
@@ -112,7 +129,7 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: 1.58.1
           components: clippy
@@ -134,7 +151,7 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: nightly-2022-12-01
           components: clippy
@@ -154,9 +171,10 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
-          toolchain: 1.58.1
+          # We use nightly so that it correctly groups together imports
+          toolchain: nightly-2022-12-01
           components: rustfmt
       - uses: Swatinem/rust-cache@v2

@@ -221,7 +239,7 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: 1.58.1
       - uses: Swatinem/rust-cache@v2
@@ -266,7 +284,7 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: 1.58.1
       - uses: Swatinem/rust-cache@v2
@@ -368,6 +386,7 @@ jobs:
       SYTEST_BRANCH: ${{ github.head_ref }}
       POSTGRES: ${{ matrix.job.postgres && 1}}
       MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
+      ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }}
       WORKERS: ${{ matrix.job.workers && 1 }}
       BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
       TOP: ${{ github.workspace }}
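The env block above relies on the GitHub Actions `&&` expression trick: `${{ matrix.job.postgres && 1 }}` yields `1` when the matrix entry sets that key and an empty value otherwise. A rough Python analogue of that mapping (illustrative only, not part of the workflow):

```python
def sytest_env(job: dict) -> dict:
    """Map one matrix entry to the env flags the workflow above derives from it."""
    return {
        "POSTGRES": "1" if job.get("postgres") else "",
        "MULTI_POSTGRES": "1" if job.get("postgres") == "multi-postgres" else "",
        "ASYNCIO_REACTOR": "1" if job.get("reactor") == "asyncio" else "",
        "WORKERS": "1" if job.get("workers") else "",
    }


print(sytest_env({"sytest-tag": "focal", "postgres": "multi-postgres", "reactor": "asyncio"}))
```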
@@ -386,7 +405,7 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: 1.58.1
       - uses: Swatinem/rust-cache@v2
@@ -531,7 +550,7 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: 1.58.1
       - uses: Swatinem/rust-cache@v2
@@ -562,7 +581,7 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: 1.58.1
       - uses: Swatinem/rust-cache@v2
@@ -585,7 +604,7 @@ jobs:
         # There don't seem to be versioned releases of this action per se: for each rust
         # version there is a branch which gets constantly rebased on top of master.
         # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: nightly-2022-12-01
       - uses: Swatinem/rust-cache@v2
2  .github/workflows/triage-incoming.yml  vendored
@@ -6,7 +6,7 @@ on:

 jobs:
   triage:
-    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
+    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
     with:
       project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
      content_id: ${{ github.event.issue.node_id }}
15  .github/workflows/twisted_trunk.yml  vendored
@@ -5,6 +5,13 @@ on:
     - cron: 0 8 * * *

   workflow_dispatch:
+    inputs:
+      twisted_ref:
+        description: Commit, branch or tag to checkout from upstream Twisted.
+        required: false
+        default: 'trunk'
+        type: string
+

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -18,7 +25,7 @@ jobs:
       - uses: actions/checkout@v3

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: stable
       - uses: Swatinem/rust-cache@v2
@@ -29,7 +36,7 @@ jobs:
           extras: "all"
       - run: |
           poetry remove twisted
-          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref }}
           poetry install --no-interaction --extras "all test"
       - name: Remove warn_unused_ignores from mypy config
         run: sed '/warn_unused_ignores = True/d' -i mypy.ini
@@ -43,7 +50,7 @@ jobs:
       - run: sudo apt-get -qq install xmlsec1

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: stable
       - uses: Swatinem/rust-cache@v2
@@ -82,7 +89,7 @@ jobs:
       - uses: actions/checkout@v3

       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
         with:
           toolchain: stable
       - uses: Swatinem/rust-cache@v2
3  .gitignore  vendored
@@ -53,6 +53,7 @@ __pycache__/
 /coverage.*
 /dist/
 /docs/build/
+/dev-docs/_build/
 /htmlcov
 /pip-wheel-metadata/

@@ -61,7 +62,7 @@ book/

 # complement
 /complement-*
-/master.tar.gz
+/main.tar.gz

 # rust
 /target/
19  .gitlab-ci.yml  Normal file
@@ -0,0 +1,19 @@
image: docker:stable

stages:
  - build

build amd64:
  stage: build
  tags:
    - amd64
  only:
    - master
  before_script:
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
  script:
    - synversion=$(cat pyproject.toml | grep '^version =' | sed -E 's/^version = "(.+)"$/\1/')
    - docker build --tag $CI_REGISTRY_IMAGE:latest --tag $CI_REGISTRY_IMAGE:$synversion .
    - docker push $CI_REGISTRY_IMAGE:latest
    - docker push $CI_REGISTRY_IMAGE:$synversion
    - docker rmi $CI_REGISTRY_IMAGE:latest $CI_REGISTRY_IMAGE:$synversion
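The `script:` step above scrapes the package version out of `pyproject.toml` with `grep`/`sed`. A small Python equivalent, assuming the Poetry-style layout Synapse uses (`version = "..."` under `[tool.poetry]`) and Python 3.11+ for the stdlib `tomllib`:

```python
import tomllib

with open("pyproject.toml", "rb") as f:
    version = tomllib.load(f)["tool"]["poetry"]["version"]

# e.g. the value used to tag the image: $CI_REGISTRY_IMAGE:<version>
print(version)
```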
296  CHANGES.md
@@ -1,3 +1,297 @@
Synapse 1.81.0rc1 (2023-04-04)
==============================

Synapse now attempts the versioned appservice paths before falling back to the
[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
Usage of the legacy routes should be considered deprecated.

Additionally, Synapse has supported sending the application service access token
via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization)
since v1.70.0. For backwards compatibility it is *also* sent as the `access_token`
query parameter. This is insecure and should be considered deprecated.

A future version of Synapse (v1.88.0 or later) will remove support for legacy
application service routes and query parameter authorization.
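For illustration only (this sketch is not Synapse code; the URL and token are placeholders): a homeserver calling an application service per the note above would send the token both in the spec's `Authorization` header and, for backwards compatibility, in the deprecated `access_token` query parameter.

```python
import requests  # assumption: the requests library is available


def call_appservice(url: str, as_token: str) -> requests.Response:
    """Send an appservice transaction authenticated both ways, per the note above."""
    return requests.put(
        url,
        headers={"Authorization": f"Bearer {as_token}"},  # spec'd, since Synapse v1.70.0
        params={"access_token": as_token},  # legacy query parameter, deprecated
        json={"events": []},
    )
```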

Features
--------

- Add the ability to enable/disable registrations when in the OIDC flow. ([\#14978](https://github.com/matrix-org/synapse/issues/14978))
- Add a primitive helper script for listing worker endpoints. ([\#15243](https://github.com/matrix-org/synapse/issues/15243))
- Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). ([\#15314](https://github.com/matrix-org/synapse/issues/15314), [\#15321](https://github.com/matrix-org/synapse/issues/15321))
- Allow loading `/password_policy` endpoint on workers. ([\#15331](https://github.com/matrix-org/synapse/issues/15331))
- Add experimental support for Unix sockets. Contributed by Jason Little. ([\#15353](https://github.com/matrix-org/synapse/issues/15353))
- Build Debian packages for Ubuntu 23.04 (Lunar Lobster). ([\#15381](https://github.com/matrix-org/synapse/issues/15381))


Bugfixes
--------

- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled. ([\#15295](https://github.com/matrix-org/synapse/issues/15295))
- Fix a bug introduced in Synapse v1.55.0 which could delay remote homeservers being able to decrypt encrypted messages sent by local users. ([\#15297](https://github.com/matrix-org/synapse/issues/15297))
- Add a check to [SQLite port_db script](https://matrix-org.github.io/synapse/latest/postgres.html#porting-from-sqlite)
  to ensure that the sqlite database passed to the script exists before trying to port from it. ([\#15306](https://github.com/matrix-org/synapse/issues/15306))
- Fix a bug introduced in Synapse 1.76.0 where responses from worker deployments could include an internal `_INT_STREAM_POS` key. ([\#15309](https://github.com/matrix-org/synapse/issues/15309))
- Fix a long-standing bug that Synapse only used the [legacy appservice routes](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes). ([\#15317](https://github.com/matrix-org/synapse/issues/15317))
- Fix a long-standing bug preventing users from rejoining rooms after being banned and unbanned over federation. Contributed by Nico. ([\#15323](https://github.com/matrix-org/synapse/issues/15323))
- Fix bug in worker mode where on a rolling restart of workers the "typing" worker would consume 100% CPU until it got restarted. ([\#15332](https://github.com/matrix-org/synapse/issues/15332))
- Fix a long-standing bug where some to_device messages could be dropped when using workers. ([\#15349](https://github.com/matrix-org/synapse/issues/15349))
- Fix a bug introduced in Synapse 1.70.0 where the background sync from a faster join could spin for hours when one of the events involved had been marked for backoff. ([\#15351](https://github.com/matrix-org/synapse/issues/15351))
- Fix missing app variable in mail subject for password resets. Contributed by Cyberes. ([\#15352](https://github.com/matrix-org/synapse/issues/15352))
- Fix a rare bug introduced in Synapse 1.66.0 where initial syncs would fail when the user had been kicked from a faster joined room that had not finished syncing. ([\#15383](https://github.com/matrix-org/synapse/issues/15383))


Improved Documentation
----------------------

- Fix a typo in login requests ratelimit defaults. ([\#15341](https://github.com/matrix-org/synapse/issues/15341))
- Add some clarification to the doc/comments regarding TCP replication. ([\#15354](https://github.com/matrix-org/synapse/issues/15354))
- Note that Synapse 1.74 queued a rebuild of the user directory tables. ([\#15386](https://github.com/matrix-org/synapse/issues/15386))


Internal Changes
----------------

- Use `immutabledict` instead of `frozendict`. ([\#15113](https://github.com/matrix-org/synapse/issues/15113))
- Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. ([\#15265](https://github.com/matrix-org/synapse/issues/15265), [\#15336](https://github.com/matrix-org/synapse/issues/15336))
- Make the pushers rely on the `device_id` instead of the `access_token_id` for various operations. ([\#15280](https://github.com/matrix-org/synapse/issues/15280))
- Bump sentry-sdk from 1.15.0 to 1.17.0. ([\#15285](https://github.com/matrix-org/synapse/issues/15285))
- Allow running the Twisted trunk job against other branches. ([\#15302](https://github.com/matrix-org/synapse/issues/15302))
- Remind the releaser to ask for changelog feedback in [#synapse-dev](https://matrix.to/#/#synapse-dev:matrix.org). ([\#15303](https://github.com/matrix-org/synapse/issues/15303))
- Bump dtolnay/rust-toolchain from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d. ([\#15304](https://github.com/matrix-org/synapse/issues/15304))
- Reject events with an invalid "mentions" property per [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). ([\#15311](https://github.com/matrix-org/synapse/issues/15311))
- As an optimisation, use `TRUNCATE` on Postgres when clearing the user directory tables. ([\#15316](https://github.com/matrix-org/synapse/issues/15316))
- Fix `.gitignore` rule for the Complement source tarball downloaded automatically by `complement.sh`. ([\#15319](https://github.com/matrix-org/synapse/issues/15319))
- Bump serde from 1.0.157 to 1.0.158. ([\#15324](https://github.com/matrix-org/synapse/issues/15324))
- Bump regex from 1.7.1 to 1.7.3. ([\#15325](https://github.com/matrix-org/synapse/issues/15325))
- Bump types-pyopenssl from 23.0.0.4 to 23.1.0.0. ([\#15326](https://github.com/matrix-org/synapse/issues/15326))
- Bump furo from 2022.12.7 to 2023.3.23. ([\#15327](https://github.com/matrix-org/synapse/issues/15327))
- Bump ruff from 0.0.252 to 0.0.259. ([\#15328](https://github.com/matrix-org/synapse/issues/15328))
- Bump cryptography from 40.0.0 to 40.0.1. ([\#15329](https://github.com/matrix-org/synapse/issues/15329))
- Bump mypy-zope from 0.9.0 to 0.9.1. ([\#15330](https://github.com/matrix-org/synapse/issues/15330))
- Speed up unit tests when using SQLite3. ([\#15334](https://github.com/matrix-org/synapse/issues/15334))
- Speed up pydantic CI job. ([\#15339](https://github.com/matrix-org/synapse/issues/15339))
- Speed up sample config CI job. ([\#15340](https://github.com/matrix-org/synapse/issues/15340))
- Fix copyright year in SSO footer template. ([\#15358](https://github.com/matrix-org/synapse/issues/15358))
- Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3. ([\#15369](https://github.com/matrix-org/synapse/issues/15369))
- Bump serde from 1.0.158 to 1.0.159. ([\#15370](https://github.com/matrix-org/synapse/issues/15370))
- Bump serde_json from 1.0.94 to 1.0.95. ([\#15371](https://github.com/matrix-org/synapse/issues/15371))
- Speed up membership queries for users with forgotten rooms. ([\#15385](https://github.com/matrix-org/synapse/issues/15385))


Synapse 1.80.0 (2023-03-28)
===========================

No significant changes since 1.80.0rc2.


Synapse 1.80.0rc2 (2023-03-22)
==============================

Bugfixes
--------

- Fix a bug in which the [`POST /_matrix/client/v3/rooms/{roomId}/report/{eventId}`](https://spec.matrix.org/v1.6/client-server-api/#post_matrixclientv3roomsroomidreporteventid) endpoint would return the wrong error if the user did not have permission to view the event. This aligns Synapse's implementation with [MSC2249](https://github.com/matrix-org/matrix-spec-proposals/pull/2249). ([\#15298](https://github.com/matrix-org/synapse/issues/15298), [\#15300](https://github.com/matrix-org/synapse/issues/15300))
- Fix a bug introduced in Synapse 1.75.0rc1 where the [SQLite port_db script](https://matrix-org.github.io/synapse/latest/postgres.html#porting-from-sqlite)
  would fail to open the SQLite database. ([\#15301](https://github.com/matrix-org/synapse/issues/15301))


Synapse 1.80.0rc1 (2023-03-21)
==============================

Features
--------

- Stabilise support for [MSC3966](https://github.com/matrix-org/matrix-spec-proposals/pull/3966): `event_property_contains` push condition. ([\#15187](https://github.com/matrix-org/synapse/issues/15187))
- Implement [MSC2659](https://github.com/matrix-org/matrix-spec-proposals/pull/2659): application service ping endpoint. Contributed by Tulir @ Beeper. ([\#15249](https://github.com/matrix-org/synapse/issues/15249))
- Allow loading `/register/available` endpoint on workers. ([\#15268](https://github.com/matrix-org/synapse/issues/15268))
- Improve performance of creating and authenticating events. ([\#15195](https://github.com/matrix-org/synapse/issues/15195))
- Add topic and name events to group of events that are batch persisted when creating a room. ([\#15229](https://github.com/matrix-org/synapse/issues/15229))


Bugfixes
--------

- Fix a long-standing bug in which the user directory would assume any remote membership state events represent a profile change. ([\#14755](https://github.com/matrix-org/synapse/issues/14755), [\#14756](https://github.com/matrix-org/synapse/issues/14756))
- Implement [MSC3873](https://github.com/matrix-org/matrix-spec-proposals/pull/3873) to fix a long-standing bug where properties with dots were handled ambiguously in push rules. ([\#15190](https://github.com/matrix-org/synapse/issues/15190))
- Faster joins: Fix a bug introduced in Synapse 1.66 where spurious "Failed to find memberships ..." errors would be logged. ([\#15232](https://github.com/matrix-org/synapse/issues/15232))
- Fix a long-standing error when sending message into deleted room. ([\#15235](https://github.com/matrix-org/synapse/issues/15235))


Updates to the Docker image
---------------------------

- Ensure the Dockerfile builds on platforms that don't have a `cryptography` wheel. ([\#15239](https://github.com/matrix-org/synapse/issues/15239))
- Mirror images to the GitHub Container Registry (`ghcr.io/matrix-org/synapse`). ([\#15281](https://github.com/matrix-org/synapse/issues/15281), [\#15282](https://github.com/matrix-org/synapse/issues/15282))


Improved Documentation
----------------------

- Add a missing endpoint to the workers documentation. ([\#15223](https://github.com/matrix-org/synapse/issues/15223))


Internal Changes
----------------

- Add additional functionality to declaring worker types when starting Complement in worker mode. ([\#14921](https://github.com/matrix-org/synapse/issues/14921))
- Add `Synapse-Trace-Id` to `access-control-expose-headers` header. ([\#14974](https://github.com/matrix-org/synapse/issues/14974))
- Make the `HttpTransactionCache` use the `Requester` in addition to just the `Request` to build the transaction key. ([\#15200](https://github.com/matrix-org/synapse/issues/15200))
- Improve log lines when purging rooms. ([\#15222](https://github.com/matrix-org/synapse/issues/15222))
- Improve type hints. ([\#15230](https://github.com/matrix-org/synapse/issues/15230), [\#15231](https://github.com/matrix-org/synapse/issues/15231), [\#15238](https://github.com/matrix-org/synapse/issues/15238))
- Move various module API callback registration methods to a dedicated class. ([\#15237](https://github.com/matrix-org/synapse/issues/15237))
- Configure GitHub Actions for merge queues. ([\#15244](https://github.com/matrix-org/synapse/issues/15244))
- Add schema comments about the `destinations` and `destination_rooms` tables. ([\#15247](https://github.com/matrix-org/synapse/issues/15247))
- Skip processing of auto-join room behaviour if there are no auto-join rooms configured. ([\#15262](https://github.com/matrix-org/synapse/issues/15262))
- Remove unused store method `_set_destination_retry_timings_emulated`. ([\#15266](https://github.com/matrix-org/synapse/issues/15266))
- Reorganize URL preview code. ([\#15269](https://github.com/matrix-org/synapse/issues/15269))
- Clean-up direct TCP replication code. ([\#15272](https://github.com/matrix-org/synapse/issues/15272), [\#15274](https://github.com/matrix-org/synapse/issues/15274))
- Make `configure_workers_and_start` script used in Complement tests compatible with older versions of Python. ([\#15275](https://github.com/matrix-org/synapse/issues/15275))
- Add a `/versions` flag for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). ([\#15293](https://github.com/matrix-org/synapse/issues/15293))
- Bump hiredis from 2.2.1 to 2.2.2. ([\#15252](https://github.com/matrix-org/synapse/issues/15252))
- Bump serde from 1.0.152 to 1.0.155. ([\#15253](https://github.com/matrix-org/synapse/issues/15253))
- Bump pysaml2 from 7.2.1 to 7.3.1. ([\#15254](https://github.com/matrix-org/synapse/issues/15254))
- Bump msgpack from 1.0.4 to 1.0.5. ([\#15255](https://github.com/matrix-org/synapse/issues/15255))
- Bump gitpython from 3.1.30 to 3.1.31. ([\#15256](https://github.com/matrix-org/synapse/issues/15256))
- Bump cryptography from 39.0.1 to 39.0.2. ([\#15257](https://github.com/matrix-org/synapse/issues/15257))
- Bump pydantic from 1.10.4 to 1.10.6. ([\#15286](https://github.com/matrix-org/synapse/issues/15286))
- Bump serde from 1.0.155 to 1.0.157. ([\#15287](https://github.com/matrix-org/synapse/issues/15287))
- Bump anyhow from 1.0.69 to 1.0.70. ([\#15288](https://github.com/matrix-org/synapse/issues/15288))
- Bump txredisapi from 1.4.7 to 1.4.9. ([\#15289](https://github.com/matrix-org/synapse/issues/15289))
- Bump pygithub from 1.57 to 1.58.1. ([\#15290](https://github.com/matrix-org/synapse/issues/15290))
- Bump types-requests from 2.28.11.12 to 2.28.11.15. ([\#15291](https://github.com/matrix-org/synapse/issues/15291))


Synapse 1.79.0 (2023-03-14)
===========================

No significant changes since 1.79.0rc2.


Synapse 1.79.0rc2 (2023-03-13)
==============================

Bugfixes
--------

- Fix a bug introduced in Synapse 1.79.0rc1 where attempting to register a `on_remove_user_third_party_identifier` module API callback would be a no-op. ([\#15227](https://github.com/matrix-org/synapse/issues/15227))
- Fix a rare bug introduced in Synapse 1.73 where events could remain unsent to other homeservers after a faster-join to a room. ([\#15248](https://github.com/matrix-org/synapse/issues/15248))


Internal Changes
----------------

- Refactor `filter_events_for_server`. ([\#15240](https://github.com/matrix-org/synapse/issues/15240))


Synapse 1.79.0rc1 (2023-03-07)
==============================

Features
--------

- Add two new Third Party Rules module API callbacks: [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier) and [`on_remove_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_remove_user_third_party_identifier). ([\#15044](https://github.com/matrix-org/synapse/issues/15044))
- Experimental support for [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967) to not require UIA for setting up cross-signing on first use. ([\#15077](https://github.com/matrix-org/synapse/issues/15077))
- Add media information to the command line [user data export tool](https://matrix-org.github.io/synapse/v1.79/usage/administration/admin_faq.html#how-can-i-export-user-data). ([\#15107](https://github.com/matrix-org/synapse/issues/15107))
- Add an [admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html) to delete a [specific event report](https://spec.matrix.org/v1.6/client-server-api/#reporting-content). ([\#15116](https://github.com/matrix-org/synapse/issues/15116))
- Add support for knocking to workers. ([\#15133](https://github.com/matrix-org/synapse/issues/15133))
- Allow use of the `/filter` Client-Server APIs on workers. ([\#15134](https://github.com/matrix-org/synapse/issues/15134))
- Update support for [MSC2677](https://github.com/matrix-org/matrix-spec-proposals/pull/2677): remove support for server-side aggregation of reactions. ([\#15172](https://github.com/matrix-org/synapse/issues/15172))
- Stabilise support for [MSC3758](https://github.com/matrix-org/matrix-spec-proposals/pull/3758): `event_property_is` push condition. ([\#15185](https://github.com/matrix-org/synapse/issues/15185))


Bugfixes
--------

- Fix a bug introduced in Synapse 1.75 that caused experimental support for deleting account data to raise an internal server error while using an account data writer worker. ([\#14869](https://github.com/matrix-org/synapse/issues/14869))
- Fix a long-standing bug where Synapse handled an unspecced field on push rules. ([\#15088](https://github.com/matrix-org/synapse/issues/15088))
- Fix a long-standing bug where a URL preview would break if the discovered oEmbed failed to download. ([\#15092](https://github.com/matrix-org/synapse/issues/15092))
- Fix a long-standing bug where an initial sync would not respond to changes to the list of ignored users if there was an initial sync cached. ([\#15163](https://github.com/matrix-org/synapse/issues/15163))
- Add the `transaction_id` in the events included in many endpoints' responses. ([\#15174](https://github.com/matrix-org/synapse/issues/15174))
- Fix a bug introduced in Synapse 1.78.0 where requests to claim dehydrated devices would fail with a `405` error. ([\#15180](https://github.com/matrix-org/synapse/issues/15180))
- Stop applying edits when bundling aggregations, per [MSC3925](https://github.com/matrix-org/matrix-spec-proposals/pull/3925). ([\#15193](https://github.com/matrix-org/synapse/issues/15193))
- Fix a long-standing bug where the user directory search was not case-insensitive for accented characters. ([\#15143](https://github.com/matrix-org/synapse/issues/15143))


Updates to the Docker image
---------------------------

- Improve startup logging in the with-workers Docker image. ([\#15186](https://github.com/matrix-org/synapse/issues/15186))


Improved Documentation
----------------------

- Document how to use caches in a module. ([\#14026](https://github.com/matrix-org/synapse/issues/14026))
- Clarify which worker processes the ThirdPartyRules' [`on_new_event`](https://matrix-org.github.io/synapse/v1.78/modules/third_party_rules_callbacks.html#on_new_event) module API callback runs on. ([\#15071](https://github.com/matrix-org/synapse/issues/15071))
- Document using [Shibboleth](https://www.shibboleth.net/) as an OpenID Provider. ([\#15112](https://github.com/matrix-org/synapse/issues/15112))
- Correct reference to `federation_verify_certificates` in configuration documentation. ([\#15139](https://github.com/matrix-org/synapse/issues/15139))
- Correct small documentation errors in some `MatrixFederationHttpClient` methods. ([\#15148](https://github.com/matrix-org/synapse/issues/15148))
- Correct the description of the behavior of `registration_shared_secret_path` on startup. ([\#15168](https://github.com/matrix-org/synapse/issues/15168))


Deprecations and Removals
-------------------------

- Deprecate the `on_threepid_bind` module callback, to be replaced by [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier). See [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.79/docs/upgrade.md#upgrading-to-v1790). ([\#15044](https://github.com/matrix-org/synapse/issues/15044))
- Remove the unspecced `room_alias` field from the [`/createRoom`](https://spec.matrix.org/v1.6/client-server-api/#post_matrixclientv3createroom) response. ([\#15093](https://github.com/matrix-org/synapse/issues/15093))
- Remove the unspecced `PUT` on the `/knock/{roomIdOrAlias}` endpoint. ([\#15189](https://github.com/matrix-org/synapse/issues/15189))
- Remove the undocumented and unspecced `type` parameter to the `/thumbnail` endpoint. ([\#15137](https://github.com/matrix-org/synapse/issues/15137))
- Remove unspecced and buggy `PUT` method on the unstable `/rooms/<room_id>/batch_send` endpoint. ([\#15199](https://github.com/matrix-org/synapse/issues/15199))


Internal Changes
----------------

- Run the integration test suites with the asyncio reactor enabled in CI. ([\#14101](https://github.com/matrix-org/synapse/issues/14101))
- Batch up storing state groups when creating a new room. ([\#14918](https://github.com/matrix-org/synapse/issues/14918))
- Update [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952) support based on changes to the MSC. ([\#15051](https://github.com/matrix-org/synapse/issues/15051))
- Refactor writing json data in `FileExfiltrationWriter`. ([\#15095](https://github.com/matrix-org/synapse/issues/15095))
- Tighten the login ratelimit defaults. ([\#15135](https://github.com/matrix-org/synapse/issues/15135))
- Fix a typo in an experimental config setting. ([\#15138](https://github.com/matrix-org/synapse/issues/15138))
- Refactor the media modules. ([\#15146](https://github.com/matrix-org/synapse/issues/15146), [\#15175](https://github.com/matrix-org/synapse/issues/15175))
- Improve type hints. ([\#15164](https://github.com/matrix-org/synapse/issues/15164))
- Move `get_event_report` and `get_event_reports_paginate` from `RoomStore` to `RoomWorkerStore`. ([\#15165](https://github.com/matrix-org/synapse/issues/15165))
- Remove dangling reference to being a reference implementation in docstring. ([\#15167](https://github.com/matrix-org/synapse/issues/15167))
- Add an option to force a rebuild of the "editable" complement image. ([\#15184](https://github.com/matrix-org/synapse/issues/15184))
- Use nightly rustfmt in CI. ([\#15188](https://github.com/matrix-org/synapse/issues/15188))
- Add a `get_next_txn` method to `StreamIdGenerator` to match `MultiWriterIdGenerator`. ([\#15191](https://github.com/matrix-org/synapse/issues/15191))
- Combine `AbstractStreamIdTracker` and `AbstractStreamIdGenerator`. ([\#15192](https://github.com/matrix-org/synapse/issues/15192))
- Automatically fix errors with `ruff`. ([\#15194](https://github.com/matrix-org/synapse/issues/15194))
- Refactor database transaction for query users' devices to reduce database pool contention. ([\#15215](https://github.com/matrix-org/synapse/issues/15215))
- Correct `test_icu_word_boundary_punctuation` so that it passes with the ICU versions available in Alpine and macOS. ([\#15177](https://github.com/matrix-org/synapse/issues/15177))

<details><summary>Locked dependency updates</summary>

- Bump actions/checkout from 2 to 3. ([\#15155](https://github.com/matrix-org/synapse/issues/15155))
- Bump black from 22.12.0 to 23.1.0. ([\#15103](https://github.com/matrix-org/synapse/issues/15103))
- Bump dawidd6/action-download-artifact from 2.25.0 to 2.26.0. ([\#15152](https://github.com/matrix-org/synapse/issues/15152))
- Bump docker/login-action from 1 to 2. ([\#15154](https://github.com/matrix-org/synapse/issues/15154))
- Bump matrix-org/backend-meta from 1 to 2. ([\#15156](https://github.com/matrix-org/synapse/issues/15156))
- Bump ruff from 0.0.237 to 0.0.252. ([\#15159](https://github.com/matrix-org/synapse/issues/15159))
- Bump serde_json from 1.0.93 to 1.0.94. ([\#15214](https://github.com/matrix-org/synapse/issues/15214))
- Bump types-commonmark from 0.9.2.1 to 0.9.2.2. ([\#15209](https://github.com/matrix-org/synapse/issues/15209))
- Bump types-opentracing from 2.4.10.1 to 2.4.10.3. ([\#15158](https://github.com/matrix-org/synapse/issues/15158))
- Bump types-pillow from 9.4.0.13 to 9.4.0.17. ([\#15211](https://github.com/matrix-org/synapse/issues/15211))
- Bump types-psycopg2 from 2.9.21.4 to 2.9.21.8. ([\#15210](https://github.com/matrix-org/synapse/issues/15210))
- Bump types-pyopenssl from 22.1.0.2 to 23.0.0.4. ([\#15213](https://github.com/matrix-org/synapse/issues/15213))
- Bump types-setuptools from 67.3.0.1 to 67.4.0.3. ([\#15160](https://github.com/matrix-org/synapse/issues/15160))
- Bump types-setuptools from 67.4.0.3 to 67.5.0.0. ([\#15212](https://github.com/matrix-org/synapse/issues/15212))
- Bump typing-extensions from 4.4.0 to 4.5.0. ([\#15157](https://github.com/matrix-org/synapse/issues/15157))
</details>


Synapse 1.78.0 (2023-02-28)
===========================

Bugfixes
--------

- Fix a bug introduced in Synapse 1.76 where 5s delays would occasionally occur in deployments using workers. ([\#15150](https://github.com/matrix-org/synapse/issues/15150))


Synapse 1.78.0rc1 (2023-02-21)
==============================

@@ -182,7 +476,7 @@ Those who are `poetry install`ing from source using our lockfile should ensure t
 Notes on faster joins
 ---------------------

-The faster joins project sees the most benefit when joining a room with a large number of members (joined or historical). We expect it to be particularly useful for joining large public rooms like the [Matrix HQ](https://matrix.to/#/#matrix:matrix.org) or [Synapse Admins](https://matrix.to/#/#synapse:matrix.org) rooms.
+The faster joins project sees the most benefit when joining a room with a large number of members (joined or historical). We expect it to be particularly useful for joining large public rooms like the [Matrix HQ](https://matrix.to/#/#matrix:matrix.org) or [Synapse Admins](https://matrix.to/#/#synapse:matrix.org) rooms.

 After a faster join, Synapse considers that room "partially joined". In this state, you should be able to
49  Cargo.lock  generated
@@ -13,9 +13,9 @@ dependencies = [

 [[package]]
 name = "anyhow"
-version = "1.0.69"
+version = "1.0.70"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800"
+checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"

 [[package]]
 name = "arc-swap"
@@ -185,9 +185,9 @@ dependencies = [

 [[package]]
 name = "proc-macro2"
-version = "1.0.46"
+version = "1.0.52"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
+checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224"
 dependencies = [
  "unicode-ident",
 ]
@@ -250,7 +250,7 @@ dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
  "quote",
- "syn",
+ "syn 1.0.104",
 ]

 [[package]]
@@ -261,7 +261,7 @@ checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 1.0.104",
 ]

 [[package]]
@@ -276,9 +276,9 @@ dependencies = [

 [[package]]
 name = "quote"
-version = "1.0.21"
+version = "1.0.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
 dependencies = [
  "proc-macro2",
 ]
@@ -294,9 +294,9 @@ dependencies = [

 [[package]]
 name = "regex"
-version = "1.7.1"
+version = "1.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
+checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -305,9 +305,9 @@ dependencies = [

 [[package]]
 name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"

 [[package]]
 name = "ryu"
@@ -323,29 +323,29 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

 [[package]]
 name = "serde"
-version = "1.0.152"
+version = "1.0.159"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065"
 dependencies = [
  "serde_derive",
 ]

 [[package]]
 name = "serde_derive"
-version = "1.0.152"
+version = "1.0.159"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.10",
 ]

 [[package]]
 name = "serde_json"
-version = "1.0.93"
+version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
+checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744"
 dependencies = [
  "itoa",
  "ryu",
@@ -375,6 +375,17 @@ dependencies = [
  "unicode-ident",
 ]

+[[package]]
+name = "syn"
+version = "2.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]

 [[package]]
 name = "synapse"
 version = "0.1.0"
Dockerfile (new file, 61 lines)
@@ -0,0 +1,61 @@
ARG PYTHON_VERSION=3.11

FROM docker.io/python:${PYTHON_VERSION}-slim as builder

RUN apt-get update && apt-get install -y \
    build-essential \
    libffi-dev \
    libjpeg-dev \
    libpq-dev \
    libssl-dev \
    libwebp-dev \
    libxml++2.6-dev \
    libxslt1-dev \
    zlib1g-dev \
    openssl \
    git \
    curl \
    && rm -rf /var/lib/apt/lists/*

ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo

RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable

COPY synapse /synapse/synapse/
COPY rust /synapse/rust/
COPY README.rst pyproject.toml requirements.txt build_rust.py /synapse/

RUN pip install --prefix="/install" --no-warn-script-location --ignore-installed \
    --no-deps -r /synapse/requirements.txt \
    && pip install --prefix="/install" --no-warn-script-location \
    --no-deps \
    'git+https://github.com/maunium/synapse-simple-antispam#egg=synapse-simple-antispam' \
    'git+https://github.com/devture/matrix-synapse-shared-secret-auth@2.0.2#egg=shared_secret_authenticator' \
    && pip install --prefix="/install" --no-warn-script-location \
    --no-deps /synapse

FROM docker.io/python:${PYTHON_VERSION}-slim

RUN apt-get update && apt-get install -y \
    curl \
    libjpeg62-turbo \
    libpq5 \
    libwebp6 \
    xmlsec1 \
    libjemalloc2 \
    openssl \
    && rm -rf /var/lib/apt/lists/*

COPY --from=builder /install /usr/local

VOLUME ["/data"]
ENV LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libjemalloc.so.2"

ENTRYPOINT ["python3", "-m", "synapse.app.homeserver"]
CMD ["--keys-directory", "/data", "-c", "/data/homeserver.yaml"]

HEALTHCHECK --start-period=5s --interval=1m --timeout=5s \
    CMD curl -fSs http://localhost:8008/health || exit 1
README.md (new file, 69 lines)
@@ -0,0 +1,69 @@
# Maunium Synapse

This is a fork of [Synapse] to remove dumb limits and fix bugs that the
upstream devs don't want to fix.

The only official distribution is the docker image in the [GitLab container
registry], but you can also install from source ([upstream instructions]).

The master branch and `:latest` docker tag are upgraded to each upstream
release candidate very soon after release (usually within 10 minutes†). There
are also docker tags for each release, e.g. `:1.75.0`. If you don't want RCs,
use the specific release tags.

†If there are merge conflicts, the update may be delayed for up to a few days
after the full release.

[Synapse]: https://github.com/matrix-org/synapse
[GitLab container registry]: https://mau.dev/maunium/synapse/container_registry
[upstream instructions]: https://github.com/matrix-org/synapse/blob/develop/INSTALL.md#installing-from-source

## List of changes
* Default power level for room creator is 9001 instead of 100.
* Room creator can specify a custom room ID with the `room_id` param in the
  request body. If the room ID is already in use, it will return `M_CONFLICT`.
* ~~URL previewer user agent includes `Bot` so Twitter previews work properly.~~
  Upstreamed after over 2 years 🎉
* ~~Local event creation concurrency is disabled to avoid unnecessary state
  resolution.~~ Upstreamed after over 3 years 🎉
* Register admin API can register invalid user IDs.
* Docker image with jemalloc enabled by default.
* Config option to allow specific users to send events without unnecessary
  validation.
* Config option to allow specific users to receive events that are usually
  filtered away (e.g. `org.matrix.dummy_event` and `m.room.aliases`).
* Config option to allow specific users to use timestamp massaging without
  being appservice users.
* Config option to allow appservices to use MSC2716 batch sending as any local user.
* Removed bad pusher URL validation.
* webp images are thumbnailed to webp instead of jpeg to avoid losing
  transparency.
* Media repo `Cache-Control` header says `immutable` and 1 year for all media
  that exists, as media IDs in Matrix are immutable.
* Allowed sending custom data with read receipts.

You can view the full list of changes on the [meow-patchset] branch.
Additionally, historical patch sets are saved as `meow-patchset-vX` [tags].

[meow-patchset]: https://mau.dev/maunium/synapse/-/compare/patchset-base...meow-patchset
[tags]: https://mau.dev/maunium/synapse/-/tags?search=meow-patchset&sort=updated_desc

## Configuration
Generating a new config will include the `meow` section, but this is here for
reference for existing configs.

```yaml
meow:
  # List of users who aren't subject to unnecessary validation in the C-S API.
  validation_override:
    - "@you:example.com"
  # List of users who will get org.matrix.dummy_event and m.room.aliases events down /sync
  filter_override:
    - "@you:example.com"
  # Whether or not the admin API should be able to register invalid user IDs.
  admin_api_register_invalid: true
  # List of users who can use timestamp massaging without being appservices
  timestamp_override:
    - "@you:example.com"
  # Whether appservices should be allowed to use MSC2716 batch sending as any local user.
  appservice_batch_send_any: false
```
debian/changelog (vendored, 48 lines changed)
@@ -1,3 +1,51 @@
matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium

  * New Synapse release 1.81.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Apr 2023 14:29:03 +0100

matrix-synapse-py3 (1.80.0) stable; urgency=medium

  * New Synapse release 1.80.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Mar 2023 11:10:33 +0100

matrix-synapse-py3 (1.80.0~rc2) stable; urgency=medium

  * New Synapse release 1.80.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 22 Mar 2023 08:30:16 -0700

matrix-synapse-py3 (1.80.0~rc1) stable; urgency=medium

  * New Synapse release 1.80.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Mar 2023 10:56:08 -0700

matrix-synapse-py3 (1.79.0) stable; urgency=medium

  * New Synapse release 1.79.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Mar 2023 16:14:50 +0100

matrix-synapse-py3 (1.79.0~rc2) stable; urgency=medium

  * New Synapse release 1.79.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 13 Mar 2023 12:54:21 +0000

matrix-synapse-py3 (1.79.0~rc1) stable; urgency=medium

  * New Synapse release 1.79.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Mar 2023 12:03:49 +0000

matrix-synapse-py3 (1.78.0) stable; urgency=medium

  * New Synapse release 1.78.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Feb 2023 08:56:03 -0800

matrix-synapse-py3 (1.78.0~rc1) stable; urgency=medium

  * Add `matrix-org-archive-keyring` package as recommended.
dev-docs/Makefile (new file, 20 lines)
@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
dev-docs/conf.py (new file, 50 lines)
@@ -0,0 +1,50 @@
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information

project = "Synapse development"
copyright = "2023, The Matrix.org Foundation C.I.C."
author = "The Synapse Maintainers and Community"

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

extensions = [
    "autodoc2",
    "myst_parser",
]

templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]


# -- Options for Autodoc2 ----------------------------------------------------

autodoc2_docstring_parser_regexes = [
    # this will render all docstrings as 'MyST' Markdown
    (r".*", "myst"),
]

autodoc2_packages = [
    {
        "path": "../synapse",
        # Don't render documentation for everything as a matter of course
        "auto_mode": False,
    },
]


# -- Options for MyST (Markdown) ---------------------------------------------

# myst_heading_anchors = 2


# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = "furo"
html_static_path = ["_static"]
dev-docs/index.rst (new file, 22 lines)
@@ -0,0 +1,22 @@
.. Synapse Developer Documentation documentation master file, created by
   sphinx-quickstart on Mon Mar 13 08:59:51 2023.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to the Synapse Developer Documentation!
===========================================================

.. toctree::
   :maxdepth: 2
   :caption: Contents:

   modules/federation_sender



Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
dev-docs/modules/federation_sender.md (new file, 5 lines)
@@ -0,0 +1,5 @@
Federation Sender
=================

```{autodoc2-docstring} synapse.federation.sender
```
@@ -37,9 +37,24 @@ RUN \
     --mount=type=cache,target=/var/cache/apt,sharing=locked \
     --mount=type=cache,target=/var/lib/apt,sharing=locked \
     apt-get update -qq && apt-get install -yqq \
-      build-essential git libffi-dev libssl-dev \
+      build-essential curl git libffi-dev libssl-dev \
     && rm -rf /var/lib/apt/lists/*

+# Install rust and ensure it's in the PATH.
+# (Rust may be needed to compile `cryptography`---which is one of poetry's
+# dependencies---on platforms that don't have a `cryptography` wheel.)
+ENV RUSTUP_HOME=/rust
+ENV CARGO_HOME=/cargo
+ENV PATH=/cargo/bin:/rust/bin:$PATH
+RUN mkdir /rust /cargo
+
+RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
+
+# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
+# set to true, so we expose it as a build-arg.
+ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
+
 # We install poetry in its own build stage to avoid its dependencies conflicting with
 # synapse's dependencies.
 RUN --mount=type=cache,target=/root/.cache/pip \
@@ -51,8 +51,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
   # -z True if the length of string is zero.
   if [[ -z "$SYNAPSE_WORKER_TYPES" ]]; then
     export SYNAPSE_WORKER_TYPES="\
-      event_persister, \
-      event_persister, \
+      event_persister:2, \
       background_worker, \
       frontend_proxy, \
       event_creator, \
@@ -64,7 +63,8 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
       synchrotron, \
       client_reader, \
       appservice, \
-      pusher"
+      pusher, \
+      stream_writers=account_data+presence+receipts+to_device+typing"

   fi
   log "Workers requested: $SYNAPSE_WORKER_TYPES"
@@ -19,8 +19,15 @@
 # The environment variables it reads are:
 # * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
 # * SYNAPSE_REPORT_STATS: Whether to report stats.
-# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKER_CONFIG
-#   below. Leave empty for no workers.
+# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKERS_CONFIG
+#   below. Leave empty for no workers. Add a ':' and a number at the end to
+#   multiply that worker. Append multiple worker types with '+' to merge the
+#   worker types into a single worker. Add a name and a '=' to the front of a
+#   worker type to give this instance a name in logs and nginx.
+#   Examples:
+#   SYNAPSE_WORKER_TYPES='event_persister, federation_sender, client_reader'
+#   SYNAPSE_WORKER_TYPES='event_persister:2, federation_sender:2, client_reader'
+#   SYNAPSE_WORKER_TYPES='stream_writers=account_data+presence+typing'
 # * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
 #   will be treated as Application Service registration files.
 # * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
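The worker-type grammar documented in this hunk can be illustrated standalone. Below is a small self-contained sketch (ours, not part of the diff; the real parsing lives in the `parse_worker_types` helper added further down) showing how a `SYNAPSE_WORKER_TYPES` value expands into named workers:

```python
# Illustrative re-implementation of the naming rules documented above.
# Not Synapse code: the real parsing is done by parse_worker_types() below.
from collections import defaultdict
from typing import Dict, List, Set


def expand(env_value: str) -> Dict[str, Set[str]]:
    counters: Dict[str, int] = defaultdict(int)
    requests: List[str] = []
    # Apply the ':N' multiplier.
    for item in (x.strip() for x in env_value.split(",") if x.strip()):
        base, _, count = item.partition(":")
        requests.extend([base] * (int(count) if count else 1))
    names: Dict[str, Set[str]] = {}
    for request in requests:
        # An optional 'name=' prefix overrides the generated base name.
        name, _, types = request.rpartition("=")
        type_set = {t.strip() for t in types.split("+")}
        base_name = name or "+".join(sorted(type_set))
        counters[base_name] += 1
        names[f"{base_name}{counters[base_name]}"] = type_set
    return names


print(expand("event_persister:2, federation_sender"))
# {'event_persister1': {'event_persister'},
#  'event_persister2': {'event_persister'},
#  'federation_sender1': {'federation_sender'}}
```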
@@ -40,16 +47,33 @@
 import os
 import platform
+import re
 import subprocess
 import sys
+from collections import defaultdict
+from itertools import chain
 from pathlib import Path
-from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional, Set
+from typing import (
+    Any,
+    Dict,
+    List,
+    Mapping,
+    MutableMapping,
+    NoReturn,
+    Optional,
+    Set,
+    SupportsIndex,
+)

 import yaml
 from jinja2 import Environment, FileSystemLoader

 MAIN_PROCESS_HTTP_LISTENER_PORT = 8080

+# A simple name used as a placeholder in the WORKERS_CONFIG below. This will be replaced
+# during processing with the name of the worker.
+WORKER_PLACEHOLDER_NAME = "placeholder_name"
+
 # Workers with exposed endpoints needs either "client", "federation", or "media" listener_resources
 # Watching /_matrix/client needs a "client" listener
 # Watching /_matrix/federation needs a "federation" listener
@@ -70,11 +94,13 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "endpoint_patterns": [
             "^/_matrix/client/(api/v1|r0|v3|unstable)/user_directory/search$"
         ],
-        "shared_extra_conf": {"update_user_directory_from_worker": "user_dir1"},
+        "shared_extra_conf": {
+            "update_user_directory_from_worker": WORKER_PLACEHOLDER_NAME
+        },
         "worker_extra_conf": "",
     },
     "media_repository": {
-        "app": "synapse.app.media_repository",
+        "app": "synapse.app.generic_worker",
         "listener_resources": ["media"],
         "endpoint_patterns": [
             "^/_matrix/media/",
@@ -87,7 +113,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         # The first configured media worker will run the media background jobs
         "shared_extra_conf": {
             "enable_media_repo": False,
-            "media_instance_running_background_jobs": "media_repository1",
+            "media_instance_running_background_jobs": WORKER_PLACEHOLDER_NAME,
         },
         "worker_extra_conf": "enable_media_repo: true",
     },
@@ -95,7 +121,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "app": "synapse.app.generic_worker",
         "listener_resources": [],
         "endpoint_patterns": [],
-        "shared_extra_conf": {"notify_appservices_from_worker": "appservice1"},
+        "shared_extra_conf": {
+            "notify_appservices_from_worker": WORKER_PLACEHOLDER_NAME
+        },
         "worker_extra_conf": "",
     },
     "federation_sender": {
@@ -135,6 +163,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
             "^/_matrix/client/versions$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
             "^/_matrix/client/(r0|v3|unstable)/register$",
+            "^/_matrix/client/(r0|v3|unstable)/register/available$",
             "^/_matrix/client/(r0|v3|unstable)/auth/.*/fallback/web$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event",
@@ -142,6 +171,8 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
             "^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases",
             "^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
+            "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
+            "^/_matrix/client/(r0|v3|unstable)/password_policy$",
         ],
         "shared_extra_conf": {},
         "worker_extra_conf": "",
@@ -191,9 +222,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "app": "synapse.app.generic_worker",
         "listener_resources": [],
         "endpoint_patterns": [],
-        # This worker cannot be sharded. Therefore there should only ever be one background
-        # worker, and it should be named background_worker1
-        "shared_extra_conf": {"run_background_tasks_on": "background_worker1"},
+        # This worker cannot be sharded. Therefore, there should only ever be one
+        # background worker. This is enforced for the safety of your database.
+        "shared_extra_conf": {"run_background_tasks_on": WORKER_PLACEHOLDER_NAME},
         "worker_extra_conf": "",
     },
     "event_creator": {
@@ -204,6 +235,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
             "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/send",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
             "^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
         ],
@@ -273,7 +305,7 @@ NGINX_LOCATION_CONFIG_BLOCK = """
 """

 NGINX_UPSTREAM_CONFIG_BLOCK = """
-upstream {upstream_worker_type} {{
+upstream {upstream_worker_base_name} {{
 {body}
 }}
 """
@@ -324,7 +356,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:

 def add_worker_roles_to_shared_config(
     shared_config: dict,
-    worker_type: str,
+    worker_types_set: Set[str],
     worker_name: str,
     worker_port: int,
 ) -> None:
@@ -332,22 +364,36 @@ def add_worker_roles_to_shared_config(
     append appropriate worker information to it for the current worker_type instance.

     Args:
-        shared_config: The config dict that all worker instances share (after being converted to YAML)
-        worker_type: The type of worker (one of those defined in WORKERS_CONFIG).
+        shared_config: The config dict that all worker instances share (after being
+            converted to YAML)
+        worker_types_set: The type of worker (one of those defined in WORKERS_CONFIG).
+            This list can be a single worker type or multiple.
         worker_name: The name of the worker instance.
         worker_port: The HTTP replication port that the worker instance is listening on.
     """
-    # The instance_map config field marks the workers that write to various replication streams
+    # The instance_map config field marks the workers that write to various replication
+    # streams
     instance_map = shared_config.setdefault("instance_map", {})

-    # Worker-type specific sharding config
-    if worker_type == "pusher":
+    # This is a list of the stream_writers that there can be only one of. Events can be
+    # sharded, and therefore doesn't belong here.
+    singular_stream_writers = [
+        "account_data",
+        "presence",
+        "receipts",
+        "to_device",
+        "typing",
+    ]
+
+    # Worker-type specific sharding config. Now a single worker can fulfill multiple
+    # roles, check each.
+    if "pusher" in worker_types_set:
         shared_config.setdefault("pusher_instances", []).append(worker_name)

-    elif worker_type == "federation_sender":
+    if "federation_sender" in worker_types_set:
         shared_config.setdefault("federation_sender_instances", []).append(worker_name)

-    elif worker_type == "event_persister":
+    if "event_persister" in worker_types_set:
         # Event persisters write to the events stream, so we need to update
         # the list of event stream writers
         shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
@@ -360,19 +406,154 @@ def add_worker_roles_to_shared_config(
             "port": worker_port,
         }

-    elif worker_type in ["account_data", "presence", "receipts", "to_device", "typing"]:
-        # Update the list of stream writers
-        # It's convenient that the name of the worker type is the same as the stream to write
-        shared_config.setdefault("stream_writers", {}).setdefault(
-            worker_type, []
-        ).append(worker_name)
+    # Update the list of stream writers. It's convenient that the name of the worker
+    # type is the same as the stream to write. Iterate over the whole list in case there
+    # is more than one.
+    for worker in worker_types_set:
+        if worker in singular_stream_writers:
+            shared_config.setdefault("stream_writers", {}).setdefault(
+                worker, []
+            ).append(worker_name)

-        # Map of stream writer instance names to host/ports combos
-        # For now, all stream writers need http replication ports
-        instance_map[worker_name] = {
-            "host": "localhost",
-            "port": worker_port,
-        }
+            # Map of stream writer instance names to host/ports combos
+            # For now, all stream writers need http replication ports
+            instance_map[worker_name] = {
+                "host": "localhost",
+                "port": worker_port,
+            }
+
+
+def merge_worker_template_configs(
+    existing_dict: Optional[Dict[str, Any]],
+    to_be_merged_dict: Dict[str, Any],
+) -> Dict[str, Any]:
+    """When given an existing dict of worker template configuration consisting of both
+    dicts and lists, merge new template data from WORKERS_CONFIG (or create) and
+    return the new dict.
+
+    Args:
+        existing_dict: Either an existing worker template or a fresh blank one.
+        to_be_merged_dict: The template from WORKERS_CONFIGS to be merged into
+            existing_dict.
+    Returns: The newly merged together dict values.
+    """
+    new_dict: Dict[str, Any] = {}
+    if not existing_dict:
+        # It doesn't exist yet, just use the new dict (but take a copy, not a reference)
+        new_dict = to_be_merged_dict.copy()
+    else:
+        for i in to_be_merged_dict.keys():
+            if (i == "endpoint_patterns") or (i == "listener_resources"):
+                # merge the two lists, remove duplicates
+                new_dict[i] = list(set(existing_dict[i] + to_be_merged_dict[i]))
+            elif i == "shared_extra_conf":
+                # merge dictionaries; the worker name will be replaced later
+                new_dict[i] = {**existing_dict[i], **to_be_merged_dict[i]}
+            elif i == "worker_extra_conf":
+                # There is only one worker type that has a 'worker_extra_conf' and it is
+                # the media_repo. Since duplicate worker types on the same worker don't
+                # work, this is fine.
+                new_dict[i] = existing_dict[i] + to_be_merged_dict[i]
+            else:
+                # Everything else should be identical, like "app", which only works
+                # because all apps are now generic_workers.
+                new_dict[i] = to_be_merged_dict[i]
+    return new_dict
+
+
+def insert_worker_name_for_worker_config(
+    existing_dict: Dict[str, Any], worker_name: str
+) -> Dict[str, Any]:
+    """Insert a given worker name into the worker's configuration dict.
+
+    Args:
+        existing_dict: The worker_config dict that is imported into shared_config.
+        worker_name: The name of the worker to insert.
+    Returns: Copy of the dict with newly inserted worker name
+    """
+    dict_to_edit = existing_dict.copy()
+    for k, v in dict_to_edit["shared_extra_conf"].items():
+        # Only proceed if it's the placeholder name string
+        if v == WORKER_PLACEHOLDER_NAME:
+            dict_to_edit["shared_extra_conf"][k] = worker_name
+    return dict_to_edit
+
+
+def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
+    """
+    Apply multiplier (if found) by returning a new expanded list with some basic error
+    checking.
+
+    Args:
+        worker_types: The unprocessed list of requested workers
+    Returns:
+        A new list with all requested workers expanded.
+    """
+    # Checking performed:
+    # 1. if worker:2 or more is declared, it will create additional workers up to number
+    # 2. if worker:1, it will create a single copy of this worker as if no number was
+    #    given
+    # 3. if worker:0 is declared, this worker will be ignored. This is to allow for
+    #    scripting and automated expansion and is intended behaviour.
+    # 4. if worker:NaN or is a negative number, it will error and log it.
+    new_worker_types = []
+    for worker_type in worker_types:
+        if ":" in worker_type:
+            worker_type_components = split_and_strip_string(worker_type, ":", 1)
+            worker_count = 0
+            # Should only be 2 components, a type of worker(s) and an integer as a
+            # string. Cast the number as an int then it can be used as a counter.
+            try:
+                worker_count = int(worker_type_components[1])
+            except ValueError:
+                error(
+                    f"Bad number in worker count for '{worker_type}': "
+                    f"'{worker_type_components[1]}' is not an integer"
+                )
+
+            # As long as there are more than 0, we add one to the list to make below.
+            for _ in range(worker_count):
+                new_worker_types.append(worker_type_components[0])
+
+        else:
+            # If it's not a real worker_type, it will error out later.
+            new_worker_types.append(worker_type)
+    return new_worker_types
+
+
+def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
+    """Helper to check to make sure worker types that cannot have multiples do not.
+
+    Args:
+        worker_type: The type of worker to check against.
+    Returns: True if allowed, False if not
+    """
+    return worker_type not in [
+        "background_worker",
+        "account_data",
+        "presence",
+        "receipts",
+        "typing",
+        "to_device",
+    ]
+
+
+def split_and_strip_string(
+    given_string: str, split_char: str, max_split: SupportsIndex = -1
+) -> List[str]:
+    """
+    Helper to split a string on split_char and strip whitespace from each end of each
+    element.
+    Args:
+        given_string: The string to split
+        split_char: The character to split the string on
+        max_split: kwarg for split() to limit how many times the split() happens
+    Returns:
+        A List of strings
+    """
+    # Removes whitespace from ends of result strings before adding to list. Allow for
+    # overriding 'maxsplit' kwarg, default being -1 to signify no maximum.
+    return [x.strip() for x in given_string.split(split_char, maxsplit=max_split)]


 def generate_base_homeserver_config() -> None:
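To make the merge rules in `merge_worker_template_configs` concrete, here is a hypothetical input/output pair (our illustration, not part of the diff):

```python
# Hypothetical merge of two worker templates, following the rules above:
# lists are unioned and deduplicated, 'shared_extra_conf' dicts are merged,
# and 'worker_extra_conf' strings are concatenated.
existing = {
    "app": "synapse.app.generic_worker",
    "listener_resources": ["client"],
    "endpoint_patterns": ["^/_matrix/client/.*/send"],
    "shared_extra_conf": {},
    "worker_extra_conf": "",
}
incoming = {
    "app": "synapse.app.generic_worker",
    "listener_resources": ["client"],
    "endpoint_patterns": ["^/_matrix/client/.*/messages$"],
    "shared_extra_conf": {},
    "worker_extra_conf": "",
}
# merge_worker_template_configs(existing, incoming) would yield a dict whose
# "endpoint_patterns" contains both patterns exactly once; note the function
# builds that list via list(set(...)), so its ordering is not guaranteed.
```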
@@ -387,29 +568,153 @@ def generate_base_homeserver_config() -> None:
     subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)


+def parse_worker_types(
+    requested_worker_types: List[str],
+) -> Dict[str, Set[str]]:
+    """Read the desired list of requested workers and prepare the data for use in
+    generating worker config files while also checking for potential gotchas.
+
+    Args:
+        requested_worker_types: The list formed from the split environment variable
+            containing the unprocessed requests for workers.
+
+    Returns: A dict of worker names to set of worker types. Format:
+        {'worker_name':
+            {'worker_type', 'worker_type2'}
+        }
+    """
+    # A counter of worker_base_name -> int. Used for determining the name for a given
+    # worker when generating its config file, as each worker's name is just
+    # worker_base_name followed by instance number
+    worker_base_name_counter: Dict[str, int] = defaultdict(int)
+
+    # Similar to above, but more finely grained. This is used to ensure we don't have
+    # more than a single worker for cases where multiples would be bad (e.g. presence).
+    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
+
+    # The final result of all this processing
+    dict_to_return: Dict[str, Set[str]] = {}
+
+    # Handle any multipliers requested for given workers.
+    multiple_processed_worker_types = apply_requested_multiplier_for_worker(
+        requested_worker_types
+    )
+
+    # Process each worker_type_string
+    # Examples of expected formats:
+    #   - requested_name=type1+type2+type3
+    #   - synchrotron
+    #   - event_creator+event_persister
+    for worker_type_string in multiple_processed_worker_types:
+        # First, if a name is requested, use that; otherwise generate one.
+        worker_base_name: str = ""
+        if "=" in worker_type_string:
+            # Split on "=", remove extra whitespace from ends then make list
+            worker_type_split = split_and_strip_string(worker_type_string, "=")
+            if len(worker_type_split) > 2:
+                error(
+                    "There should only be one '=' in the worker type string. "
+                    f"Please fix: {worker_type_string}"
+                )
+
+            # Assign the name
+            worker_base_name = worker_type_split[0]
+
+            if not re.match(r"^[a-zA-Z0-9_+-]*[a-zA-Z_+-]$", worker_base_name):
+                # Apply a fairly narrow regex to the worker names. Some characters
+                # aren't safe for use in file paths or nginx configurations.
+                # Don't allow to end with a number because we'll add a number
+                # ourselves in a moment.
+                error(
+                    "Invalid worker name; please choose a name consisting of "
+                    "alphanumeric letters, _ + -, but not ending with a digit: "
+                    f"{worker_base_name!r}"
+                )
+
+            # Continue processing the remainder of the worker_type string
+            # with the name override removed.
+            worker_type_string = worker_type_split[1]
+
+        # Split the worker_type_string on "+", remove whitespace from ends then make
+        # the list a set so it's deduplicated.
+        worker_types_set: Set[str] = set(
+            split_and_strip_string(worker_type_string, "+")
+        )
+
+        if not worker_base_name:
+            # No base name specified: generate one deterministically from set of
+            # types
+            worker_base_name = "+".join(sorted(worker_types_set))
+
+        # At this point, we have:
+        #   worker_base_name which is the name for the worker, without counter.
+        #   worker_types_set which is the set of worker types for this worker.
+
+        # Validate worker_type and make sure we don't allow sharding for a worker type
+        # that doesn't support it. Will error and stop if it is a problem,
+        # e.g. 'background_worker'.
+        for worker_type in worker_types_set:
+            # Verify this is a real defined worker type. If it's not, stop everything so
+            # it can be fixed.
+            if worker_type not in WORKERS_CONFIG:
+                error(
+                    f"{worker_type} is an unknown worker type! Was found in "
+                    f"'{worker_type_string}'. Please fix!"
+                )
+
+            if worker_type in worker_type_shard_counter:
+                if not is_sharding_allowed_for_worker_type(worker_type):
+                    error(
+                        f"There can be only a single worker with {worker_type} "
+                        "type. Please recount and remove."
+                    )
+            # Not in shard counter, must not have seen it yet, add it.
+            worker_type_shard_counter[worker_type] += 1
+
+        # Generate the number for the worker using incrementing counter
+        worker_base_name_counter[worker_base_name] += 1
+        worker_number = worker_base_name_counter[worker_base_name]
+        worker_name = f"{worker_base_name}{worker_number}"
+
+        if worker_number > 1:
+            # If this isn't the first worker, check that we don't have a confusing
+            # mixture of worker types with the same base name.
+            first_worker_with_base_name = dict_to_return[f"{worker_base_name}1"]
+            if first_worker_with_base_name != worker_types_set:
+                error(
+                    f"Can not use worker_name: '{worker_name}' for worker_type(s): "
+                    f"{worker_types_set!r}. It is already in use by "
+                    f"worker_type(s): {first_worker_with_base_name!r}"
+                )
+
+        dict_to_return[worker_name] = worker_types_set
+
+    return dict_to_return
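As a concrete illustration of the return shape documented above (ours, not from the diff):

```python
# Hypothetical result of parse_worker_types() for
# SYNAPSE_WORKER_TYPES='event_persister:2, stream_writers=account_data+typing'
expected = {
    "event_persister1": {"event_persister"},
    "event_persister2": {"event_persister"},
    "stream_writers1": {"account_data", "typing"},
}
```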
 def generate_worker_files(
-    environ: Mapping[str, str], config_path: str, data_dir: str
+    environ: Mapping[str, str],
+    config_path: str,
+    data_dir: str,
+    requested_worker_types: Dict[str, Set[str]],
 ) -> None:
-    """Read the desired list of workers from environment variables and generate
-    shared homeserver, nginx and supervisord configs.
+    """Read the desired workers (if any) that are passed in and generate shared
+    homeserver, nginx and supervisord configs.

     Args:
         environ: os.environ instance.
         config_path: The location of the generated Synapse main worker config file.
         data_dir: The location of the synapse data directory. Where log and
             user-facing config files live.
+        requested_worker_types: A Dict containing requested workers in the format of
+            {'worker_name1': {'worker_type', ...}}
     """
     # Note that yaml cares about indentation, so care should be taken to insert lines
     # into files at the correct indentation below.

     # shared_config is the contents of a Synapse config file that will be shared amongst
     # the main Synapse process as well as all workers.
     # It is intended mainly for disabling functionality when certain workers are spun up,
     # and adding a replication listener.

-    # First read the original config file and extract the listeners block. Then we'll add
-    # another listener for replication. Later we'll write out the result to the shared
-    # config file.
+    # First read the original config file and extract the listeners block. Then we'll
+    # add another listener for replication. Later we'll write out the result to the
+    # shared config file.
     listeners = [
         {
             "port": 9093,
@@ -425,9 +730,9 @@ def generate_worker_files(
     listeners += original_listeners

     # The shared homeserver config. The contents of which will be inserted into the
-    # base shared worker jinja2 template.
-    #
-    # This config file will be passed to all workers, included Synapse's main process.
+    # base shared worker jinja2 template. This config file will be passed to all
+    # workers, including Synapse's main process. It is intended mainly for disabling
+    # functionality when certain workers are spun up, and adding a replication listener.
     shared_config: Dict[str, Any] = {"listeners": listeners}

     # List of dicts that describe workers.
@@ -435,31 +740,20 @@ def generate_worker_files(
     # program blocks.
     worker_descriptors: List[Dict[str, Any]] = []

-    # Upstreams for load-balancing purposes. This dict takes the form of a worker type to the
-    # ports of each worker. For example:
+    # Upstreams for load-balancing purposes. This dict takes the form of the worker
+    # type to the ports of each worker. For example:
     # {
     #   worker_type: {1234, 1235, ...}}
     # }
     # and will be used to construct 'upstream' nginx directives.
     nginx_upstreams: Dict[str, Set[int]] = {}

-    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
-    # placed after the proxy_pass directive. The main benefit to representing this data as a
-    # dict over a str is that we can easily deduplicate endpoints across multiple instances
-    # of the same worker.
-    #
-    # An nginx site config that will be amended to depending on the workers that are
-    # spun up. To be placed in /etc/nginx/conf.d.
-    nginx_locations = {}
-
-    # Read the desired worker configuration from the environment
-    worker_types_env = environ.get("SYNAPSE_WORKER_TYPES", "").strip()
-    if not worker_types_env:
-        # No workers, just the main process
-        worker_types = []
-    else:
-        # Split type names by comma, ignoring whitespace.
-        worker_types = [x.strip() for x in worker_types_env.split(",")]
+    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
+    # will be placed after the proxy_pass directive. The main benefit to representing
+    # this data as a dict over a str is that we can easily deduplicate endpoints
+    # across multiple instances of the same worker. The final rendering will be combined
+    # with nginx_upstreams and placed in /etc/nginx/conf.d.
+    nginx_locations: Dict[str, str] = {}

     # Create the worker configuration directory if it doesn't already exist
     os.makedirs("/conf/workers", exist_ok=True)
@@ -467,66 +761,57 @@ def generate_worker_files(
     # Start worker ports from this arbitrary port
     worker_port = 18009

-    # A counter of worker_type -> int. Used for determining the name for a given
-    # worker type when generating its config file, as each worker's name is just
-    # worker_type + instance #
-    worker_type_counter: Dict[str, int] = {}
-
     # A list of internal endpoints to healthcheck, starting with the main process
     # which exists even if no workers do.
     healthcheck_urls = ["http://localhost:8080/health"]

-    # For each worker type specified by the user, create config values
-    for worker_type in worker_types:
-        worker_config = WORKERS_CONFIG.get(worker_type)
-        if worker_config:
-            worker_config = worker_config.copy()
-        else:
-            error(worker_type + " is an unknown worker type! Please fix!")
+    # Get the set of all worker types that we have configured
+    all_worker_types_in_use = set(chain(*requested_worker_types.values()))
+    # Map locations to upstreams (corresponding to worker types) in Nginx
+    # but only if we use the appropriate worker type
+    for worker_type in all_worker_types_in_use:
+        for endpoint_pattern in WORKERS_CONFIG[worker_type]["endpoint_patterns"]:
+            nginx_locations[endpoint_pattern] = f"http://{worker_type}"

-        new_worker_count = worker_type_counter.setdefault(worker_type, 0) + 1
-        worker_type_counter[worker_type] = new_worker_count
+    # For each worker type specified by the user, create config values and write its
+    # yaml config file
+    for worker_name, worker_types_set in requested_worker_types.items():
+        # The collected and processed data will live here.
+        worker_config: Dict[str, Any] = {}
+
+        # Merge all worker config templates for this worker into a single config
+        for worker_type in worker_types_set:
+            copy_of_template_config = WORKERS_CONFIG[worker_type].copy()
+
+            # Merge worker type template configuration data. It's a combination of lists
+            # and dicts, so use this helper.
+            worker_config = merge_worker_template_configs(
+                worker_config, copy_of_template_config
+            )
+
+        # Replace placeholder names in the config template with the actual worker name.
+        worker_config = insert_worker_name_for_worker_config(worker_config, worker_name)

-        # Name workers by their type concatenated with an incrementing number
-        # e.g. federation_reader1
-        worker_name = worker_type + str(new_worker_count)
         worker_config.update(
             {"name": worker_name, "port": str(worker_port), "config_path": config_path}
         )

-        # Update the shared config with any worker-type specific options
-        shared_config.update(worker_config["shared_extra_conf"])
+        # Update the shared config with any worker_type specific options. The first of a
+        # given worker_type needs to stay assigned and not be replaced.
+        worker_config["shared_extra_conf"].update(shared_config)
+        shared_config = worker_config["shared_extra_conf"]

         healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

-        # Check if more than one instance of this worker type has been specified
-        worker_type_total_count = worker_types.count(worker_type)
-
         # Update the shared config with sharding-related options if necessary
         add_worker_roles_to_shared_config(
-            shared_config, worker_type, worker_name, worker_port
+            shared_config, worker_types_set, worker_name, worker_port
         )

         # Enable the worker in supervisord
         worker_descriptors.append(worker_config)

-        # Add nginx location blocks for this worker's endpoints (if any are defined)
-        for pattern in worker_config["endpoint_patterns"]:
-            # Determine whether we need to load-balance this worker
-            if worker_type_total_count > 1:
-                # Create or add to a load-balanced upstream for this worker
-                nginx_upstreams.setdefault(worker_type, set()).add(worker_port)
-
-                # Upstreams are named after the worker_type
-                upstream = "http://" + worker_type
-            else:
-                upstream = "http://localhost:%d" % (worker_port,)
-
-            # Note that this endpoint should proxy to this upstream
-            nginx_locations[pattern] = upstream
-
         # Write out the worker's logging config file
-
         log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)

         # Then a worker config file
@@ -537,6 +822,10 @@ def generate_worker_files(
             worker_log_config_filepath=log_config_filepath,
         )

+        # Save this worker's port number to the correct nginx upstreams
+        for worker_type in worker_types_set:
+            nginx_upstreams.setdefault(worker_type, set()).add(worker_port)
+
         worker_port += 1

     # Build the nginx location config blocks
@@ -549,15 +838,14 @@ def generate_worker_files(

     # Determine the load-balancing upstreams to configure
     nginx_upstream_config = ""
-
-    for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
+    for upstream_worker_base_name, upstream_worker_ports in nginx_upstreams.items():
         body = ""
         for port in upstream_worker_ports:
-            body += "    server localhost:%d;\n" % (port,)
+            body += f"    server localhost:{port};\n"

         # Add to the list of configured upstreams
         nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
-            upstream_worker_type=upstream_worker_type,
+            upstream_worker_base_name=upstream_worker_base_name,
             body=body,
         )
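For orientation, here is a standalone sketch (assumed ports; not part of the diff) of what this loop renders for a two-instance `event_persister` base name:

```python
# Standalone sketch of the upstream rendering above, with assumed ports.
NGINX_UPSTREAM_CONFIG_BLOCK = """
upstream {upstream_worker_base_name} {{
{body}
}}
"""

body = "".join(f"    server localhost:{port};\n" for port in (18009, 18010))
print(
    NGINX_UPSTREAM_CONFIG_BLOCK.format(
        upstream_worker_base_name="event_persister", body=body
    )
)
# upstream event_persister {
#     server localhost:18009;
#     server localhost:18010;
#
# }
```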
@@ -578,7 +866,7 @@ def generate_worker_files(
         if reg_path.suffix.lower() in (".yaml", ".yml")
     ]

-    workers_in_use = len(worker_types) > 0
+    workers_in_use = len(requested_worker_types) > 0

     # Shared homeserver config
     convert(
@@ -674,17 +962,34 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     if not os.path.exists(config_path):
         log("Generating base homeserver config")
         generate_base_homeserver_config()
-
-    # This script may be run multiple times (mostly by Complement, see note at top of file).
-    # Don't re-configure workers in this instance.
+    else:
+        log("Base homeserver config exists—not regenerating")
+    # This script may be run multiple times (mostly by Complement, see note at top of
+    # file). Don't re-configure workers in this instance.
     mark_filepath = "/conf/workers_have_been_configured"
     if not os.path.exists(mark_filepath):
+        # Collect and validate worker_type requests
+        # Read the desired worker configuration from the environment
+        worker_types_env = environ.get("SYNAPSE_WORKER_TYPES", "").strip()
+        # Only process worker_types if they exist
+        if not worker_types_env:
+            # No workers, just the main process
+            worker_types = []
+            requested_worker_types: Dict[str, Any] = {}
+        else:
+            # Split type names by comma, ignoring whitespace.
+            worker_types = split_and_strip_string(worker_types_env, ",")
+            requested_worker_types = parse_worker_types(worker_types)
+
         # Always regenerate all other config files
-        generate_worker_files(environ, config_path, data_dir)
+        log("Generating worker config files")
+        generate_worker_files(environ, config_path, data_dir, requested_worker_types)

         # Mark workers as being configured
         with open(mark_filepath, "w") as f:
             f.write("")
+    else:
+        log("Worker config exists—not regenerating")

     # Lifted right out of start.py
     jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)
@@ -169,3 +169,17 @@ The following fields are returned in the JSON response body:
 * `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
   have a canonical alias set.
 * `event_json`: object - Details of the original event that was reported.
+
+# Delete a specific event report
+
+This API deletes a specific event report. If the request is successful, the response body
+will be an empty JSON object.
+
+The API is:
+```
+DELETE /_synapse/admin/v1/event_reports/<report_id>
+```
+
+**URL parameters:**
+
+* `report_id`: string - The ID of the event report.
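For illustration only (not part of the documented API change), a call to the new endpoint might look like this in Python; the homeserver URL, report ID, and admin token are placeholders:

```python
# Hypothetical invocation of the DELETE endpoint documented above.
import urllib.request

req = urllib.request.Request(
    "https://synapse.example.com/_synapse/admin/v1/event_reports/2",
    method="DELETE",
    headers={"Authorization": "Bearer <admin_access_token>"},
)
with urllib.request.urlopen(req) as resp:
    # A successful deletion returns an empty JSON object.
    print(resp.status, resp.read())  # e.g. 200 b'{}'
```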
@@ -307,8 +307,8 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_api.errors.Codes` can be returned by this callback._

 ```python
 async def check_media_file_for_spam(
-    file_wrapper: "synapse.rest.media.v1.media_storage.ReadableFileWrapper",
-    file_info: "synapse.rest.media.v1._base.FileInfo",
+    file_wrapper: "synapse.media.media_storage.ReadableFileWrapper",
+    file_info: "synapse.media._base.FileInfo",
 ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
 ```
@@ -146,6 +146,9 @@ Note that this callback is called when the event has already been processed and persisted
 into the room, which means this callback cannot be used to deny persisting the event. To
 deny an incoming event, see [`check_event_for_spam`](spam_checker_callbacks.md#check_event_for_spam) instead.

+For any given event, this callback will be called on every worker process, even if that worker will not end up
+acting on that event. This callback will not be called for events that are marked as rejected.
+
 If multiple modules implement this callback, Synapse runs them all in order.

 ### `check_can_shutdown_room`
@@ -251,6 +254,11 @@ If multiple modules implement this callback, Synapse runs them all in order.

 _First introduced in Synapse v1.56.0_

+**<span style="color:red">
+This callback is deprecated in favour of the `on_add_user_third_party_identifier` callback, which
+features the same functionality. The only difference is in name.
+</span>**
+
 ```python
 async def on_threepid_bind(user_id: str, medium: str, address: str) -> None:
 ```
@@ -265,6 +273,44 @@ server_.

 If multiple modules implement this callback, Synapse runs them all in order.

+### `on_add_user_third_party_identifier`
+
+_First introduced in Synapse v1.79.0_
+
+```python
+async def on_add_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
+```
+
+Called after successfully creating an association between a user and a third-party identifier
+(email address, phone number). The module is given the Matrix ID of the user the
+association is for, as well as the medium (`email` or `msisdn`) and address of the
+third-party identifier (i.e. an email address).
+
+Note that this callback is _not_ called if a user attempts to bind their third-party identifier
+to an identity server (via a call to [`POST
+/_matrix/client/v3/account/3pid/bind`](https://spec.matrix.org/v1.5/client-server-api/#post_matrixclientv3account3pidbind)).
+
+If multiple modules implement this callback, Synapse runs them all in order.
+
+### `on_remove_user_third_party_identifier`
+
+_First introduced in Synapse v1.79.0_
+
+```python
+async def on_remove_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
+```
+
+Called after successfully removing an association between a user and a third-party identifier
+(email address, phone number). The module is given the Matrix ID of the user the
+association is for, as well as the medium (`email` or `msisdn`) and address of the
+third-party identifier (i.e. an email address).
+
+Note that this callback is _not_ called if a user attempts to unbind their third-party
+identifier from an identity server (via a call to [`POST
+/_matrix/client/v3/account/3pid/unbind`](https://spec.matrix.org/v1.5/client-server-api/#post_matrixclientv3account3pidunbind)).
+
+If multiple modules implement this callback, Synapse runs them all in order.
+
 ## Example

 The example below is a module that implements the third-party rules callback
@@ -297,4 +343,4 @@ class EventCensorer:
         )
         event_dict["content"] = new_event_content
         return event_dict
 ```
@@ -83,3 +83,59 @@ the callback name as the argument name and the function as its value. A

 Callbacks for each category can be found on their respective page of the
 [Synapse documentation website](https://matrix-org.github.io/synapse).
+
+## Caching
+
+_Added in Synapse 1.74.0._
+
+Modules can leverage Synapse's caching tools to manage their own cached functions. This
+can be helpful for modules that need to repeatedly request the same data from the database
+or a remote service.
+
+Functions that need to be wrapped with a cache need to be decorated with a `@cached()`
+decorator (which can be imported from `synapse.module_api`) and registered with the
+[`ModuleApi.register_cached_function`](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L888)
+API when initialising the module. If the module needs to invalidate an entry in a cache,
+it needs to use the [`ModuleApi.invalidate_cache`](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L904)
+API, with the function to invalidate the cache of and the key(s) of the entry to
+invalidate.
+
+Below is an example of a simple module using a cached function:
+
+```python
+from typing import Any
+
+from synapse.module_api import cached, ModuleApi
+
+
+class MyModule:
+    def __init__(self, config: Any, api: ModuleApi):
+        self.api = api
+
+        # Register the cached function so Synapse knows how to correctly invalidate
+        # entries for it.
+        self.api.register_cached_function(self.get_department_for_user)
+
+    @cached()
+    async def get_department_for_user(self, user_id: str) -> str:
+        """A function with a cache."""
+        # Request a department from an external service.
+        response = await self.api.http_client.get_json(
+            "https://int.example.com/users", {"user_id": user_id}
+        )
+        return response["department"]
+
+    async def do_something_with_users(self) -> None:
+        """Calls the cached function and then invalidates an entry in its cache."""
+
+        user_id = "@alice:example.com"
+
+        # Get the user. Since get_department_for_user is wrapped with a cache,
+        # the return value for this user_id will be cached.
+        department = await self.get_department_for_user(user_id)
+
+        # Do something with `department`...
+
+        # Let's say something has changed with our user, and the entry we have for
+        # them in the cache is out of date, so we want to invalidate it.
+        await self.api.invalidate_cache(self.get_department_for_user, (user_id,))
+```
+
+See the [`cached` docstring](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L190) for more details.
@@ -590,6 +590,47 @@ oidc_providers:

 Note that the fields `client_id` and `client_secret` are taken from the CURL response above.

+### Shibboleth with OIDC Plugin
+
+[Shibboleth](https://www.shibboleth.net/) is an open-standard IdP solution widely used by universities.
+
+1. Shibboleth needs the [OIDC Plugin](https://shibboleth.atlassian.net/wiki/spaces/IDPPLUGINS/pages/1376878976/OIDC+OP) installed and working correctly.
+2. Create a new config on the IdP side; ensure that the `client_id` and `client_secret`
+   are randomly generated data.
+```json
+{
+    "client_id": "SOME-CLIENT-ID",
+    "client_secret": "SOME-SUPER-SECRET-SECRET",
+    "response_types": ["code"],
+    "grant_types": ["authorization_code"],
+    "scope": "openid profile email",
+    "redirect_uris": ["https://[synapse public baseurl]/_synapse/client/oidc/callback"]
+}
+```
+
+Synapse config:
+
+```yaml
+oidc_providers:
+  # Shibboleth IDP
+  #
+  - idp_id: shibboleth
+    idp_name: "Shibboleth Login"
+    discover: true
+    issuer: "https://YOUR-IDP-URL.TLD"
+    client_id: "YOUR_CLIENT_ID"
+    client_secret: "YOUR-CLIENT-SECRET-FROM-YOUR-IDP"
+    scopes: ["openid", "profile", "email"]
+    allow_existing_users: true
+    user_profile_method: "userinfo_endpoint"
+    user_mapping_provider:
+      config:
+        subject_claim: "sub"
+        localpart_template: "{{ user.sub.split('@')[0] }}"
+        display_name_template: "{{ user.name }}"
+        email_template: "{{ user.email }}"
+```
+
 ### Twitch

 1. Setup a developer account on [Twitch](https://dev.twitch.tv/)
@@ -26,8 +26,8 @@ for most users.

#### Docker images and Ansible playbooks

There is an official synapse image available at
<https://hub.docker.com/r/matrixdotorg/synapse> or at [`ghcr.io/matrix-org/synapse`](https://ghcr.io/matrix-org/synapse)
which can be used with the docker-compose file available at
[contrib/docker](https://github.com/matrix-org/synapse/tree/develop/contrib/docker).
Further information on this, including configuration options, is available in the README
on hub.docker.com.
@@ -25,7 +25,7 @@ position of all streams. The server then periodically sends `RDATA` commands
which have the format `RDATA <stream_name> <instance_name> <token> <row>`, where
the format of `<row>` is defined by the individual streams. The
`<instance_name>` is the name of the Synapse process that generated the data
(usually "master"). We expect an RDATA for every row in the DB.

Error reporting happens by either the client or server sending an ERROR
command, and usually the connection will be closed.

@@ -107,7 +107,7 @@ reconnect, following the steps above.

If the server sends messages faster than the client can consume them, the
server will first buffer a (fairly large) number of commands and then
disconnect the client. This ensures that we don't queue up an unbounded
number of commands in memory and gives us a potential opportunity to
squawk loudly. When/if the client recovers it can reconnect to the
server and ask for missed messages.

@@ -122,7 +122,7 @@ since these include tokens which can be used to restart the stream on
connection errors.

The client should keep track of the token in the last RDATA command
received for each stream so that on reconnection it can start streaming
from the correct place. Note: not all RDATA have valid tokens due to
batching. See `RdataCommand` for more details.

@@ -188,7 +188,8 @@ client (C):
Two positions are included, the "new" position and the last position sent respectively.
This allows servers to tell instances that the positions have advanced but no
data has been written, without clients needlessly checking to see if they
have missed any updates. Instances will only fetch updates if there is a gap between
their current position and the given last position.
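To make this bookkeeping concrete, below is a minimal sketch of the client-side
token tracking described above. It is illustrative only, not Synapse's actual
implementation; the `"batch"` sentinel for token-less rows follows the
`RdataCommand` convention mentioned above.

```python
from typing import Dict


class ReplicationTokenTracker:
    """Tracks the last token seen per stream, for resuming after reconnection."""

    def __init__(self) -> None:
        self._last_token: Dict[str, int] = {}

    def on_rdata(self, line: str) -> None:
        # RDATA <stream_name> <instance_name> <token> <row>
        _cmd, stream_name, _instance, token, _row = line.split(" ", 4)
        if token != "batch":  # batched rows carry no valid token
            self._last_token[stream_name] = int(token)

    def needs_catchup(self, stream_name: str, new_token: int) -> bool:
        # On POSITION: only fetch missed updates if there is a gap between
        # our current position and the advertised last position.
        current = self._last_token.get(stream_name)
        return current is not None and new_token > current
```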
#### ERROR (S, C)

@@ -88,6 +88,58 @@ process, for example:

dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

# Upgrading to v1.81.0

## Application service path & authentication deprecations

Synapse now attempts the versioned appservice paths before falling back to the
[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
Usage of the legacy routes should be considered deprecated.

Additionally, Synapse has supported sending the application service access token
via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization)
since v1.70.0. For backwards compatibility it is *also* sent as the `access_token`
query parameter. This is insecure and should be considered deprecated.

A future version of Synapse (v1.88.0 or later) will remove support for legacy
application service routes and query parameter authorization.
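Appservice authors who still read the query parameter can migrate defensively.
Below is a minimal, framework-agnostic sketch (the function name is
illustrative) that prefers the `Authorization` header and only falls back to
the deprecated query parameter:

```python
from typing import Mapping, Optional


def extract_as_token(
    headers: Mapping[str, str], query: Mapping[str, str]
) -> Optional[str]:
    # Synapse >= 1.70 sends "Authorization: Bearer <token>"; the
    # access_token query parameter is deprecated and will be removed.
    auth = headers.get("Authorization", "")
    if auth.startswith("Bearer "):
        return auth[len("Bearer "):]
    return query.get("access_token")  # deprecated fallback
```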
# Upgrading to v1.80.0

## Reporting events error code change

Before this update, the
[`POST /_matrix/client/v3/rooms/{roomId}/report/{eventId}`](https://spec.matrix.org/v1.6/client-server-api/#post_matrixclientv3roomsroomidreporteventid)
endpoint would return a `403` if a user attempted to report an event that they did not have access to.
This endpoint will now return a `404` in this case instead.

Clients that implement event reporting should check that their error handling code will handle this
change.
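For example, a client-side helper might treat both codes as "event not
visible". Below is a sketch using the `requests` library; the homeserver URL
and token handling are illustrative:

```python
import requests


def report_event(
    base_url: str, access_token: str, room_id: str, event_id: str, reason: str
) -> None:
    resp = requests.post(
        f"{base_url}/_matrix/client/v3/rooms/{room_id}/report/{event_id}",
        headers={"Authorization": f"Bearer {access_token}"},
        json={"reason": reason, "score": -100},
    )
    if resp.status_code in (403, 404):
        # Servers before Synapse v1.80 return 403 when the user cannot see
        # the event; v1.80 and later return 404. Handle both the same way.
        raise RuntimeError("Cannot report this event: it is not visible to the user")
    resp.raise_for_status()
```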
# Upgrading to v1.79.0

## The `on_threepid_bind` module callback method has been deprecated

Synapse v1.79.0 deprecates the
[`on_threepid_bind`](modules/third_party_rules_callbacks.md#on_threepid_bind)
"third-party rules" Synapse module callback method in favour of a new module method,
[`on_add_user_third_party_identifier`](modules/third_party_rules_callbacks.md#on_add_user_third_party_identifier).
`on_threepid_bind` will be removed in a future version of Synapse. You should check whether any Synapse
modules in use in your deployment are making use of `on_threepid_bind`, and update them where possible.

The arguments and functionality of the new method are the same.

The justification behind the name change is that the old method's name, `on_threepid_bind`, was
misleading. A user is considered to "bind" their third-party ID to their Matrix ID only if they
do so via an [identity server](https://spec.matrix.org/latest/identity-service-api/)
(so that users on other homeservers may find them). But this method was not called in that case -
it was only called when a user added a third-party identifier on the local homeserver.

Module developers may also be interested in the related
[`on_remove_user_third_party_identifier`](modules/third_party_rules_callbacks.md#on_remove_user_third_party_identifier)
module callback method that was also added in Synapse v1.79.0. This new method is called when a
user removes a third-party identifier from their account.
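As a quick orientation for module authors, below is a minimal sketch of a
module registering the new callback (assuming the standard third-party rules
registration API; the logging is illustrative):

```python
import logging

from synapse.module_api import ModuleApi

logger = logging.getLogger(__name__)


class ThreepidAuditModule:
    """Logs third-party identifiers as they are added on the local homeserver."""

    def __init__(self, config: dict, api: ModuleApi):
        api.register_third_party_rules_callbacks(
            on_add_user_third_party_identifier=self.on_add_user_third_party_identifier,
        )

    async def on_add_user_third_party_identifier(
        self, user_id: str, medium: str, address: str
    ) -> None:
        # Called when a user adds a third-party identifier (e.g. an email
        # address) to their account on this homeserver.
        logger.info("%s added %s identifier %s", user_id, medium, address)
```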
# Upgrading to v1.78.0

## Deprecate the `/_synapse/admin/v1/media/<server_name>/delete` admin API

@@ -147,6 +199,17 @@ Docker images and Debian packages need nothing specific as they already
include or specify ICU as an explicit dependency.


## User directory rebuild

Synapse 1.74 queues a background update
[to rebuild the user directory](https://github.com/matrix-org/synapse/pull/14643),
in order to fix missing or erroneous entries.

When this update begins, the user directory will be cleared out and rebuilt from
scratch. User directory lookups will be incomplete until the rebuild completes.
Admins can monitor the rebuild's progress by using the
[Background update Admin API](usage/administration/admin_api/background_updates.md#status).
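For instance, admins could poll that status endpoint until the rebuild
finishes. Below is a sketch using `requests`; the URL and token are
placeholders, and the response shape is summarised from the admin API docs:

```python
import time

import requests

BASE_URL = "https://matrix.example.com"  # placeholder homeserver URL
ADMIN_TOKEN = "<admin-access-token>"     # placeholder admin access token


def wait_for_background_updates(poll_seconds: int = 30) -> None:
    """Polls the background updates status admin API until nothing is running."""
    while True:
        resp = requests.get(
            f"{BASE_URL}/_synapse/admin/v1/background_updates/status",
            headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
        )
        resp.raise_for_status()
        status = resp.json()
        # `current_updates` maps database names to details of the running update.
        if not status.get("current_updates"):
            print("All background updates complete")
            return
        print("Still running on:", ", ".join(status["current_updates"]))
        time.sleep(poll_seconds)
```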

# Upgrading to v1.73.0

## Legacy Prometheus metric names have now been removed
@@ -70,13 +70,55 @@ output-directory
│ ├───state
│ ├───invite_state
│ └───knock_state
├───user_data
│ ├───account_data
│ │ ├───global
│ │ └───<room_id>
│ ├───connections
│ ├───devices
│ └───profile
└───media_ids
    └───<media_id>
```

The `media_ids` folder contains only the metadata of the media uploaded by the user.
It does not contain the media itself.
Furthermore, only the `media_ids` that Synapse manages itself are exported.
If another media repository (e.g. [matrix-media-repo](https://github.com/turt2live/matrix-media-repo))
is used, the data must be exported separately.

With the `media_ids`, the media files can be downloaded.
Media that have been sent in encrypted rooms are only retrieved in encrypted form.
The following script can help with downloading the media files:

```bash
#!/usr/bin/env bash

# Parameters
#
# source_directory: Directory which contains the export with the media_ids.
# target_directory: Directory into which all files are to be downloaded.
# repository_url: Address of the media repository or media worker.
# serverName: Name of the server (`server_name` from homeserver.yaml).
#
# Example:
# ./download_media.sh /tmp/export_data/media_ids/ /tmp/export_data/media_files/ http://localhost:8008 matrix.example.com

source_directory=$1
target_directory=$2
repository_url=$3
serverName=$4

mkdir -p "$target_directory"

for file in "$source_directory"/*; do
    filename=$(basename "$file")
    url="$repository_url/_matrix/media/v3/download/$serverName/$filename"
    echo "Downloading $filename - $url"
    # -o /dev/null discards wget's log output; downloaded files go to -P.
    if ! wget -o /dev/null -P "$target_directory" "$url"; then
        echo "Could not download $filename"
    fi
done
```

Manually resetting passwords
@@ -87,7 +129,7 @@ can reset a user's password using the [admin API](../../admin_api/user_admin_api

I have a problem with my server. Can I just delete my database and start again?
---
Deleting your database is unlikely to make anything better.

It's easy to make the mistake of thinking that you can start again from a clean
slate by dropping your database, but things don't work like that in a federated
@@ -102,7 +144,7 @@ Come and seek help in https://matrix.to/#/#synapse:matrix.org.

There are two exceptions when it might be sensible to delete your database and start again:
* You have *never* joined any rooms which are federated with other servers. For
  instance, a local deployment which the outside world can't talk to.
* You are changing the `server_name` in the homeserver configuration. In effect
  this makes your server a completely new one from the point of view of the network,
  so in this case it makes sense to start with a clean database.
@@ -115,7 +157,7 @@ Using the following curl command:

curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
```
`<access-token>` - can be obtained in Riot by looking in the Riot settings; down the bottom is:
Access Token:\<click to reveal\>

`<room-alias>` - the room alias, e.g. #my_room:matrix.org; this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org

@@ -152,13 +194,13 @@ What are the biggest rooms on my server?
---

```sql
SELECT s.canonical_alias, g.room_id, count(*) AS num_rows
FROM
  state_groups_state AS g,
  room_stats_state AS s
WHERE g.room_id = s.room_id
GROUP BY s.canonical_alias, g.room_id
ORDER BY num_rows desc
LIMIT 10;
```

@@ -1105,7 +1105,7 @@ This setting should only be used in very specific cases, such as
federation over Tor hidden services and similar. For private networks
of homeservers, you likely want to use a private CA instead.

Only effective if `federation_verify_certificates` is `true`.

Example configuration:
```yaml
@@ -1518,11 +1518,11 @@ rc_registration_token_validity:

This option specifies several limits for login:
* `address` ratelimits login requests based on the client's IP
  address. Defaults to `per_second: 0.003`, `burst_count: 5`.

* `account` ratelimits login requests based on the account the
  client is attempting to log into. Defaults to `per_second: 0.003`,
  `burst_count: 5`.

* `failed_attempts` ratelimits login requests based on the account the
  client is attempting to log into, based on the amount of failed login

@@ -2227,8 +2227,8 @@ allows the shared secret to be specified in an external file.

The file should be a plain text file, containing only the shared secret.

If this file does not exist, Synapse will create a new shared
secret on startup and store it in this file.

Example configuration:
```yaml
@@ -3100,6 +3100,11 @@ Options for each entry include:
  match a pre-existing account instead of failing. This could be used if
  switching from password logins to OIDC. Defaults to false.

* `enable_registration`: set to 'false' to disable automatic registration of new
  users. This allows the OIDC SSO flow to be limited to sign in only, rather than
  automatically registering users that have a valid SSO login but do not have
  a pre-registered account. Defaults to true.

* `user_mapping_provider`: Configuration for how attributes returned from an OIDC
  provider are mapped onto a Matrix user. This setting has the following
  sub-properties:
@@ -3216,6 +3221,7 @@ oidc_providers:
  userinfo_endpoint: "https://accounts.example.com/userinfo"
  jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
  skip_verification: true
  enable_registration: true
  user_mapping_provider:
    config:
      subject_claim: "id"

@@ -231,7 +231,9 @@ information.

^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event/
^/_matrix/client/(api/v1|r0|v3|unstable)/joined_rooms$
^/_matrix/client/v1/rooms/.*/timestamp_to_event$
^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases
^/_matrix/client/(api/v1|r0|v3|unstable)/search$
^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)

# Encryption requests
^/_matrix/client/(r0|v3|unstable)/keys/query$
@@ -243,7 +245,9 @@ information.

# Registration/login requests
^/_matrix/client/(api/v1|r0|v3|unstable)/login$
^/_matrix/client/(r0|v3|unstable)/register$
^/_matrix/client/(r0|v3|unstable)/register/available$
^/_matrix/client/v1/register/m.login.registration_token/validity$
^/_matrix/client/(r0|v3|unstable)/password_policy$

# Event sending requests
^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/redact
@@ -251,6 +255,7 @@ information.

^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state/
^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$
^/_matrix/client/(api/v1|r0|v3|unstable)/join/
^/_matrix/client/(api/v1|r0|v3|unstable)/knock/
^/_matrix/client/(api/v1|r0|v3|unstable)/profile/

# Account data requests

mypy.ini (11 lines changed)
@@ -36,9 +36,6 @@ exclude = (?x)

[mypy-synapse.federation.transport.client]
disallow_untyped_defs = False

[mypy-synapse.http.client]
disallow_untyped_defs = False

[mypy-synapse.http.matrixfederationclient]
disallow_untyped_defs = False

@@ -51,9 +48,6 @@ warn_unused_ignores = False

[mypy-synapse.util.caches.treecache]
disallow_untyped_defs = False

[mypy-synapse.storage.database]
disallow_untyped_defs = False

[mypy-tests.util.caches.test_descriptors]
disallow_untyped_defs = False

@@ -77,11 +71,6 @@ ignore_missing_imports = True

[mypy-msgpack]
ignore_missing_imports = True

# Note: WIP stubs available at
# https://github.com/microsoft/python-type-stubs/tree/64934207f523ad6b611e6cfe039d85d7175d7d0d/netaddr
[mypy-netaddr]
ignore_missing_imports = True

[mypy-parameterized.*]
ignore_missing_imports = True

poetry.lock (generated, 2126 lines changed)
File diff suppressed because it is too large

@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
version = "1.81.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -153,15 +153,13 @@ python = "^3.7.1"

# ----------------------
# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
jsonschema = ">=3.0.0"
# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
immutabledict = ">=2.0"
# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
unpaddedbase64 = ">=2.1.0"
# We require 2.0.0 for immutabledict support.
canonicaljson = "^2.0.0"
# we use the type definitions added in signedjson 1.1.
signedjson = "^1.1.0"
# validating SSL certs for IP addresses requires service_identity 18.1.
@@ -313,7 +311,7 @@ all = [

# We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
ruff = "0.0.259"

# Typechecking
mypy = "*"
@@ -321,6 +319,7 @@ mypy-zope = "*"

types-bleach = ">=4.1.0"
types-commonmark = ">=0.9.2"
types-jsonschema = ">=3.2.0"
types-netaddr = ">=0.8.0.6"
types-opentracing = ">=2.4.2"
types-Pillow = ">=8.3.4"
types-psycopg2 = ">=2.9.9"
@@ -351,6 +350,18 @@ towncrier = ">=18.6.0rc1"

# Used for checking the Poetry lockfile
tomli = ">=1.2.3"


# Dependencies for building the development documentation
[tool.poetry.group.dev-docs]
optional = true

[tool.poetry.group.dev-docs.dependencies]
sphinx = {version = "^6.1", python = "^3.8"}
sphinx-autodoc2 = {version = "^0.4.2", python = "^3.8"}
myst-parser = {version = "^1.0.0", python = "^3.8"}
furo = ">=2022.12.7,<2024.0.0"


[build-system]
# The upper bounds here are defensive, intended to prevent situations like
# #13849 and #14079 where we see buildtime or runtime errors caused by build

requirements.txt (new file, 934 lines)
@@ -0,0 +1,934 @@
|
||||
attrs==22.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \
|
||||
--hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99
|
||||
authlib==1.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a \
|
||||
--hash=sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d
|
||||
automat==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180 \
|
||||
--hash=sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e
|
||||
bcrypt==4.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535 \
|
||||
--hash=sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0 \
|
||||
--hash=sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410 \
|
||||
--hash=sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd \
|
||||
--hash=sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665 \
|
||||
--hash=sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab \
|
||||
--hash=sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71 \
|
||||
--hash=sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215 \
|
||||
--hash=sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b \
|
||||
--hash=sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda \
|
||||
--hash=sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9 \
|
||||
--hash=sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a \
|
||||
--hash=sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344 \
|
||||
--hash=sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f \
|
||||
--hash=sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d \
|
||||
--hash=sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c \
|
||||
--hash=sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c \
|
||||
--hash=sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2 \
|
||||
--hash=sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d \
|
||||
--hash=sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e \
|
||||
--hash=sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3
|
||||
bleach==6.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414 \
|
||||
--hash=sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4
|
||||
canonicaljson==2.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:c38a315de3b5a0532f1ec1f9153cd3d716abfc565a558d00a4835428a34fca5b \
|
||||
--hash=sha256:e2fdaef1d7fadc5d9cb59bd3d0d41b064ddda697809ac4325dced721d12f113f
|
||||
certifi==2022.12.7 ; python_full_version >= "3.7.1" and python_version < "4" \
|
||||
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
|
||||
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
|
||||
cffi==1.15.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
|
||||
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
|
||||
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
|
||||
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
|
||||
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
|
||||
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
|
||||
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
|
||||
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
|
||||
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
|
||||
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
|
||||
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
|
||||
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
|
||||
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
|
||||
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
|
||||
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
|
||||
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
|
||||
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
|
||||
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
|
||||
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
|
||||
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
|
||||
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
|
||||
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
|
||||
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
|
||||
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
|
||||
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
|
||||
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
|
||||
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
|
||||
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
|
||||
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
|
||||
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
|
||||
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
|
||||
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
|
||||
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
|
||||
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
|
||||
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
|
||||
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
|
||||
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
|
||||
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
|
||||
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
|
||||
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
|
||||
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
|
||||
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
|
||||
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
|
||||
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
|
||||
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
|
||||
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
|
||||
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
|
||||
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
|
||||
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
|
||||
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
|
||||
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
|
||||
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
|
||||
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
|
||||
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
|
||||
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
|
||||
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
|
||||
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
|
||||
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
|
||||
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
|
||||
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
|
||||
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
|
||||
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
|
||||
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
|
||||
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
|
||||
charset-normalizer==3.1.0 ; python_full_version >= "3.7.1" and python_version < "4" \
|
||||
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
|
||||
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
|
||||
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
|
||||
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
|
||||
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
|
||||
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
|
||||
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
|
||||
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
|
||||
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
|
||||
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
|
||||
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
|
||||
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
|
||||
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
|
||||
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
|
||||
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
|
||||
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
|
||||
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
|
||||
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
|
||||
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
|
||||
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
|
||||
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
|
||||
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
|
||||
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
|
||||
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
|
||||
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
|
||||
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
|
||||
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
|
||||
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
|
||||
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
|
||||
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
|
||||
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
|
||||
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
|
||||
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
|
||||
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
|
||||
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
|
||||
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
|
||||
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
|
||||
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
|
||||
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
|
||||
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
|
||||
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
|
||||
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
|
||||
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
|
||||
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
|
||||
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
|
||||
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
|
||||
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
|
||||
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
|
||||
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
|
||||
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
|
||||
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
|
||||
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
|
||||
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
|
||||
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
|
||||
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
|
||||
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
|
||||
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
|
||||
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
|
||||
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
|
||||
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
|
||||
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
|
||||
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
|
||||
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
|
||||
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
|
||||
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
|
||||
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
|
||||
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
|
||||
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
|
||||
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
|
||||
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
|
||||
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
|
||||
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
|
||||
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
|
||||
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
|
||||
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
|
||||
constantly==15.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35 \
|
||||
--hash=sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d
|
||||
cryptography==40.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405 \
|
||||
--hash=sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356 \
|
||||
--hash=sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472 \
|
||||
--hash=sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41 \
|
||||
--hash=sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a \
|
||||
--hash=sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88 \
|
||||
--hash=sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554 \
|
||||
--hash=sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db \
|
||||
--hash=sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778 \
|
||||
--hash=sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c \
|
||||
--hash=sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917 \
|
||||
--hash=sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797 \
|
||||
--hash=sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160 \
|
||||
--hash=sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c \
|
||||
--hash=sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c \
|
||||
--hash=sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2 \
|
||||
--hash=sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4 \
|
||||
--hash=sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122 \
|
||||
--hash=sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94
|
||||
hiredis==2.2.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:01e2e588392b5fdcc3a6aa0eb62a2eb2a142f829082fa4c3354228029d3aa1ce \
|
||||
--hash=sha256:02b9f928dc6cd43ed0f0ffc1c75fb209fb180f004b7e2e19994805f998d247aa \
|
||||
--hash=sha256:03ab1d545794bb0e09f3b1e2c8b3adcfacd84f6f2d402bfdcd441a98c0e9643c \
|
||||
--hash=sha256:03ab760fc96e0c5d36226eb727f30645bf6a53c97f14bfc0a4d0401bfc9b8af7 \
|
||||
--hash=sha256:03dfb4ab7a2136ce1be305592553f102e1bd91a96068ab2778e3252aed20d9bc \
|
||||
--hash=sha256:0fc1f9a9791d028b2b8afa318ccff734c7fc8861d37a04ca9b3d27c9b05f9718 \
|
||||
--hash=sha256:10dc34854e9acfb3e7cc4157606e2efcb497b1c6fca07bd6c3be34ae5e413f13 \
|
||||
--hash=sha256:14dfccf4696d75395c587a5dafafb4f7aa0a5d55309341d10bc2e7f1eaa20771 \
|
||||
--hash=sha256:1570fe4f93bc1ea487fb566f2b863fd0ed146f643a4ea31e4e07036db9e0c7f8 \
|
||||
--hash=sha256:18103090b8eda9c529830e26594e88b0b1472055785f3ed29b8adc694d03862a \
|
||||
--hash=sha256:1b084fbc3e69f99865242f8e1ccd4ea2a34bf6a3983d015d61133377526c0ce2 \
|
||||
--hash=sha256:1eb39b34d15220095dc49ad1e1082580d35cd3b6d9741def52988b5075e4ff03 \
|
||||
--hash=sha256:1f1c44242c18b1f02e6d1162f133d65d00e09cc10d9165dccc78662def72abc2 \
|
||||
--hash=sha256:20cfbc469400669a5999aa34ccba3872a1e34490ec3d5c84e8c0752c27977b7c \
|
||||
--hash=sha256:20ecbf87aac4f0f33f9c55ae15cb73b485d256c57518c590b7d0c9c152150632 \
|
||||
--hash=sha256:24301ca2bf9b2f843b4c3015c90f161798fa3bbc5b95fd494785751b137dbbe2 \
|
||||
--hash=sha256:2a355aff8dfa02ebfe67f0946dd706e490bddda9ea260afac9cdc43942310c53 \
|
||||
--hash=sha256:2c18b00a382546e19bcda8b83dcca5b6e0dbc238d235723434405f48a18e8f77 \
|
||||
--hash=sha256:2d1ba0799f3487294f72b2157944d5c3a4fb33c99e2d495d63eab98c7ec7234b \
|
||||
--hash=sha256:2ddc573809ca4374da1b24b48604f34f3d5f0911fcccfb1c403ff8d8ca31c232 \
|
||||
--hash=sha256:2e10a66680023bd5c5a3d605dae0844e3dde60eac5b79e39f51395a2aceaf634 \
|
||||
--hash=sha256:2e2f0ce3e8ab1314a52f562386220f6714fd24d7968a95528135ad04e88cc741 \
|
||||
--hash=sha256:2f220b71235d2deab1b4b22681c8aee444720d973b80f1b86a4e2a85f6bcf1e1 \
|
||||
--hash=sha256:339af17bb9817f8acb127247c79a99cad63db6738c0fb2aec9fa3d4f35d2a250 \
|
||||
--hash=sha256:359e662324318baadb768d3c4ade8c4bdcfbb313570eb01e15d75dc5db781815 \
|
||||
--hash=sha256:3645590b9234cafd21c8ecfbf252ad9aa1d67629f4bdc98ba3627f48f8f7b5aa \
|
||||
--hash=sha256:3a5fefac31c84143782ec1ebc323c04e733a6e4bfebcef9907a34e47a465e648 \
|
||||
--hash=sha256:40ff3f1ec3a4046732e9e41df08dcb1a559847196755d295d43e32528aae39e6 \
|
||||
--hash=sha256:449e18506d22af40977abd0f5a8979f57f88d4562fe591478a3438d76a15133d \
|
||||
--hash=sha256:4997f55e1208af95a8fbd0fa187b04c672fcec8f66e49b9ab7fcc45cc1657dc4 \
|
||||
--hash=sha256:4cb992e3f9753c5a0c637f333c2010d1ad702aebf2d730ee4d484f32b19bae97 \
|
||||
--hash=sha256:4ee9fe7cef505e8d925c70bebcc16bfab12aa7af922f948346baffd4730f7b00 \
|
||||
--hash=sha256:5155bc1710df8e21aa48c9b2f4d4e13e4987e1efff363a1ef9c84fae2cc6c145 \
|
||||
--hash=sha256:544d52fde3a8dac7854673eac20deca05214758193c01926ffbb0d57c6bf4ffe \
|
||||
--hash=sha256:55c7e9a9e05f8c0555bfba5c16d98492f8b6db650e56d0c35cc28aeabfc86020 \
|
||||
--hash=sha256:57f73aa04d0b70ff436fb35fa7ea2b796aa7addbd7ebb8d1aa1f3d1b3e4439f1 \
|
||||
--hash=sha256:5dac177a6ab8b4eb4d5e74978c29eef7cc9eef14086f814cb3893f7465578044 \
|
||||
--hash=sha256:5f2cfd323f83985f2bed6ed013107873275025af270485b7d04c338bfb47bd14 \
|
||||
--hash=sha256:632d79fd02b03e8d9fbaebbe40bfe34b920c5d0a9c0ef6270752e0db85208175 \
|
||||
--hash=sha256:674f296c3c89cb53f97aa9ba2508d3f360ad481b9e0c0e3a59b342a15192adaf \
|
||||
--hash=sha256:688b9b7458b4f3f452fea6ed062c04fa1fd9a69d9223d95c6cb052581aba553b \
|
||||
--hash=sha256:6e6ea7532221c97fa6d79f7d19d452cd9d1141d759c54279cc4774ce24728f13 \
|
||||
--hash=sha256:75349f7c8f77eb0fd33ede4575d1e5b0a902a8176a436bf03293d7fec4bd3894 \
|
||||
--hash=sha256:82f869ca44bcafa37cd71cfa1429648fa354d6021dcd72f03a2f66bcb339c546 \
|
||||
--hash=sha256:831461abe5b63e73719621a5f31d8fc175528a05dc09d5a8aa8ef565d6deefa4 \
|
||||
--hash=sha256:855d258e7f1aee3d7fbd5b1dc87790b1b5016e23d369a97b934a25ae7bc0171f \
|
||||
--hash=sha256:8753c561b37cccbda7264c9b4486e206a6318c18377cd647beb3aa41a15a6beb \
|
||||
--hash=sha256:8c3a6998f6f88d7ca4d082fd26525074df13162b274d7c64034784b6fdc56666 \
|
||||
--hash=sha256:8d43a7bba66a800279e33229a206861be09c279e261eaa8db4824e59465f4848 \
|
||||
--hash=sha256:8e16dc949cc2e9c5fbcd08de05b5fb61b89ff65738d772863c5c96248628830e \
|
||||
--hash=sha256:9873898e26e50cd41415e9d1ea128bfdb60eb26abb4f5be28a4500fd7834dc0c \
|
||||
--hash=sha256:990916e8b0b4eedddef787e73549b562f8c9e73a7fea82f9b8ff517806774ad0 \
|
||||
--hash=sha256:99350e89f52186146938bdba0b9c6cd68802c20346707d6ca8366f2d69d89b2f \
|
||||
--hash=sha256:9c270bd0567a9c60673284e000132f603bb4ecbcd707567647a68f85ef45c4d4 \
|
||||
--hash=sha256:9e0f444d9062f7e487ef42bab2fb2e290f1704afcbca48ad3ec23de63eef0fda \
|
||||
--hash=sha256:9eb14339e399554bb436cc4628e8aaa3943adf7afcf34aba4cbd1e3e6b9ec7ec \
|
||||
--hash=sha256:a06d0dd84f10be6b15a92edbca2490b64917280f66d8267c63de99b6550308ad \
|
||||
--hash=sha256:a1ce725542133dbdda9e8704867ef52651886bd1ef568c6fd997a27404381985 \
|
||||
--hash=sha256:a32a4474f7a4abdea954f3365608edee3f90f1de9fa05b81d214d4cad04c718a \
|
||||
--hash=sha256:a7114961ed78d708142f6c6eb1d2ed65dc3da4b5ae8a4660ad889dd7fc891971 \
|
||||
--hash=sha256:a89f5afb9827eab07b9c8c585cd4dc95e5232c727508ae2c935d09531abe9e33 \
|
||||
--hash=sha256:a9b306f4e870747eea8b008dcba2e9f1e4acd12b333a684bc1cc120e633a280e \
|
||||
--hash=sha256:aa90a5ee7a7f30c3d72d3513914b8f51f953a71b8cbd52a241b6db6685e55645 \
|
||||
--hash=sha256:ac15e7e1efca51b4695e540c80c328accb352c9608da7c2df82d1fa1a3c539ef \
|
||||
--hash=sha256:ac7f8d68826f95a3652e44b0c12bfa74d3aa6531d47d5dbe6a2fbfc7979bc20f \
|
||||
--hash=sha256:b083a69e158138ffa95740ff6984d328259387b5596908021b3ccb946469ff66 \
|
||||
--hash=sha256:b11960237a3025bf248135e5b497dc4923e83d137eb798fbfe78b40d57c4b156 \
|
||||
--hash=sha256:b5d290f3d8f7a05c4adbe6c355055b87c7081bfa1eccd1ae5491216307ee5f53 \
|
||||
--hash=sha256:ba6123ff137275e2f4c31fc74b93813fcbb79160d43f5357163e09638c7743de \
|
||||
--hash=sha256:bae004a0b978bf62e38d0eef5ab9156f8101d01167b3ca7054bd0994b773e917 \
|
||||
--hash=sha256:c24d856e13c02bd9d28a189e47be70cbba6f2c2a4bd85a8cc98819db9e7e3e06 \
|
||||
--hash=sha256:c446a2007985ae49c2ecd946dd819dea72b931beb5f647ba08655a1a1e133fa8 \
|
||||
--hash=sha256:c73aa295c5369135247ff63aa1fbb116067485d0506cd787cc0c868e72bbee55 \
|
||||
--hash=sha256:c9488ffb10acc6b121c498875278b0a6715d193742dc92d21a281712169ac06d \
|
||||
--hash=sha256:c95be6f20377d5995ef41a98314542e194d2dc9c2579d8f130a1aea78d48fd42 \
|
||||
--hash=sha256:ccc33d87866d213f84f857a98f69c13f94fbf99a3304e328869890c9e49c8d65 \
|
||||
--hash=sha256:d1acb7c957e5343303b3862947df3232dc7395da320b3b9ae076dfaa56ad59dc \
|
||||
--hash=sha256:d8bc89c7e33fecb083a199ade0131a34d20365a8c32239e218da57290987ca9a \
|
||||
--hash=sha256:d995846acc8e3339fb7833cd19bf6f3946ff5157c8488a4df9c51cd119a36870 \
|
||||
--hash=sha256:e4e2da61a04251121cb551f569c3250e6e27e95f2a80f8351c36822eda1f5d2b \
|
||||
--hash=sha256:e4ec57886f20f4298537cb1ab9dbda98594fb8d7c724c5fbf9a4b55329fd4a63 \
|
||||
--hash=sha256:e61c22fda5fc25d31bbced24a8322d33c5cb8cad9ba698634c16edb5b3e79a91 \
|
||||
--hash=sha256:e7e61ab75b851aac2d6bc634d03738a242a6ef255a44178437b427c5ebac0a87 \
|
||||
--hash=sha256:e86c800c6941698777fc58419216a66a7f76504f1cea72381d2ee206888e964d \
|
||||
--hash=sha256:e97d4e650b8d933a1229f341db92b610fc52b8d752490235977b63b81fbbc2cb \
|
||||
--hash=sha256:eaff526c2fed31c971b0fa338a25237ae5513550ef75d0b85b9420ec778cca45 \
|
||||
--hash=sha256:ed44b3c711cecde920f238ac35f70ac08744f2079b6369655856e43944464a72 \
|
||||
--hash=sha256:f1f1efbe9cc29a3af39cf7eed27225f951aed3f48a1149c7fb74529fb5ab86d4 \
|
||||
--hash=sha256:fd0ca35e2cf44866137cbb5ae7e439fab18a0b0e0e1cf51d45137622d59ec012
|
||||
hyperlink==21.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \
|
||||
--hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4
|
||||
idna==3.4 ; python_full_version >= "3.7.1" and python_version < "4" \
|
||||
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
|
||||
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
|
||||
ijson==3.2.0.post0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:00594ed3ef2218fee8c652d9e7f862fb39f8251b67c6379ef12f7e044bf6bbf3 \
|
||||
--hash=sha256:03dfd4c8ed19e704d04b0ad4f34f598dc569fd3f73089f80eed698e7f6069233 \
|
||||
--hash=sha256:09fe3a53e00c59de33b825ba8d6d39f544a7d7180983cd3d6bd2c3794ae35442 \
|
||||
--hash=sha256:0eb838b4e4360e65c00aa13c78b35afc2477759d423b602b60335af5bed3de5b \
|
||||
--hash=sha256:11bb84a53c37e227e733c6dffad2037391cf0b3474bff78596dc4373b02008a0 \
|
||||
--hash=sha256:11dfd64633fe1382c4237477ac3836f682ca17e25e0d0799e84737795b0611df \
|
||||
--hash=sha256:1302dc6490da7d44c3a76a5f0b87d8bec9f918454c6d6e6bf4ed922e47da58bb \
|
||||
--hash=sha256:13f2939db983327dd0492f6c1c0e77be3f2cbf9b620c92c7547d1d2cd6ef0486 \
|
||||
--hash=sha256:158494bfe89ccb32618d0e53b471364080ceb975462ec464d9f9f37d9832b653 \
|
||||
--hash=sha256:183841b8d033ca95457f61fb0719185dc7f51a616070bdf1dcaf03473bed05b2 \
|
||||
--hash=sha256:1a75cfb34217b41136b714985be645f12269e4345da35d7b48aabd317c82fd10 \
|
||||
--hash=sha256:1d64ffaab1d006a4fa9584a4c723e95cc9609bf6c3365478e250cd0bffaaadf3 \
|
||||
--hash=sha256:25919b444426f58dcc62f763d1c6be6297f309da85ecab55f51da6ca86fc9fdf \
|
||||
--hash=sha256:26b57838e712b8852c40ec6d74c6de8bb226446440e1af1354c077a6f81b9142 \
|
||||
--hash=sha256:27409ba44cfd006901971063d37699f72e092b5efaa1586288b5067d80c6b5bd \
|
||||
--hash=sha256:2d50b2ad9c6c51ca160aa60de7f4dacd1357c38d0e503f51aed95c1c1945ff53 \
|
||||
--hash=sha256:2f204f6d4cedeb28326c230a0b046968b5263c234c65a5b18cee22865800fff7 \
|
||||
--hash=sha256:2f9d449f86f8971c24609e319811f7f3b6b734f0218c4a0e799debe19300d15b \
|
||||
--hash=sha256:3b21b1ecd20ed2f918f6f99cdfa68284a416c0f015ffa64b68fa933df1b24d40 \
|
||||
--hash=sha256:3ccc4d4b947549f9c431651c02b95ef571412c78f88ded198612a41d5c5701a0 \
|
||||
--hash=sha256:41e955e173f77f54337fecaaa58a35c464b75e232b1f939b282497134a4d4f0e \
|
||||
--hash=sha256:424232c2bf3e8181f1b572db92c179c2376b57eba9fc8931453fba975f48cb80 \
|
||||
--hash=sha256:434e57e7ec5c334ccb0e67bb4d9e60c264dcb2a3843713dbeb12cb19fe42a668 \
|
||||
--hash=sha256:47a56e3628c227081a2aa58569cbf2af378bad8af648aa904080e87cd6644cfb \
|
||||
--hash=sha256:4d4e143908f47307042c9678803d27706e0e2099d0a6c1988c6cae1da07760bf \
|
||||
--hash=sha256:4e7c4fdc7d24747c8cc7d528c145afda4de23210bf4054bd98cd63bf07e4882d \
|
||||
--hash=sha256:51c1db80d7791fb761ad9a6c70f521acd2c4b0e5afa2fe0d813beb2140d16c37 \
|
||||
--hash=sha256:5242cb2313ba3ece307b426efa56424ac13cc291c36f292b501d412a98ad0703 \
|
||||
--hash=sha256:535665a77408b6bea56eb828806fae125846dff2e2e0ed4cb2e0a8e36244d753 \
|
||||
--hash=sha256:535a59d61b9aef6fc2a3d01564c1151e38e5a44b92cd6583cb4e8ccf0f58043f \
|
||||
--hash=sha256:53f1a13eb99ab514c562869513172135d4b55a914b344e6518ba09ad3ef1e503 \
|
||||
--hash=sha256:5418066666b25b05f2b8ae2698408daa0afa68f07b0b217f2ab24465b7e9cbd9 \
|
||||
--hash=sha256:56500dac8f52989ef7c0075257a8b471cbea8ef77f1044822742b3cbf2246e8b \
|
||||
--hash=sha256:5809752045ef74c26adf159ed03df7fb7e7a8d656992fd7562663ed47d6d39d9 \
|
||||
--hash=sha256:5c93ae4d49d8cf8accfedc8a8e7815851f56ceb6e399b0c186754a68fed22844 \
|
||||
--hash=sha256:5d365df54d18076f1d5f2ffb1eef2ac7f0d067789838f13d393b5586fbb77b02 \
|
||||
--hash=sha256:6def9ac8d73b76cb02e9e9837763f27f71e5e67ec0afae5f1f4cf8f61c39b1ac \
|
||||
--hash=sha256:6ee9537e8a8aa15dd2d0912737aeb6265e781e74f7f7cad8165048fcb5f39230 \
|
||||
--hash=sha256:6eed1ddd3147de49226db4f213851cf7860493a7b6c7bd5e62516941c007094c \
|
||||
--hash=sha256:6fd55f7a46429de95383fc0d0158c1bfb798e976d59d52830337343c2d9bda5c \
|
||||
--hash=sha256:775444a3b647350158d0b3c6c39c88b4a0995643a076cb104bf25042c9aedcf8 \
|
||||
--hash=sha256:79b94662c2e9d366ab362c2c5858097eae0da100dea0dfd340db09ab28c8d5e8 \
|
||||
--hash=sha256:7e0d1713a9074a7677eb8e43f424b731589d1c689d4676e2f57a5ce59d089e89 \
|
||||
--hash=sha256:80a5bd7e9923cab200701f67ad2372104328b99ddf249dbbe8834102c852d316 \
|
||||
--hash=sha256:830de03f391f7e72b8587bb178c22d534da31153e9ee4234d54ef82cde5ace5e \
|
||||
--hash=sha256:84eed88177f6c243c52b280cb094f751de600d98d2221e0dec331920894889ec \
|
||||
--hash=sha256:8f20072376e338af0e51ccecb02335b4e242d55a9218a640f545be7fc64cca99 \
|
||||
--hash=sha256:93aaec00cbde65c192f15c21f3ee44d2ab0c11eb1a35020b5c4c2676f7fe01d0 \
|
||||
--hash=sha256:9829a17f6f78d7f4d0aeff28c126926a1e5f86828ebb60d6a0acfa0d08457f9f \
|
||||
--hash=sha256:986a0347fe19e5117a5241276b72add570839e5bcdc7a6dac4b538c5928eeff5 \
|
||||
--hash=sha256:992e9e68003df32e2aa0f31eb82c0a94f21286203ab2f2b2c666410e17b59d2f \
|
||||
--hash=sha256:9ecbf85a6d73fc72f6534c38f7d92ed15d212e29e0dbe9810a465d61c8a66d23 \
|
||||
--hash=sha256:a340413a9bf307fafd99254a4dd4ac6c567b91a205bf896dde18888315fd7fcd \
|
||||
--hash=sha256:a4465c90b25ca7903410fabe4145e7b45493295cc3b84ec1216653fbe9021276 \
|
||||
--hash=sha256:a7698bc480df76073067017f73ba4139dbaae20f7a6c9a0c7855b9c5e9a62124 \
|
||||
--hash=sha256:a8af68fe579f6f0b9a8b3f033d10caacfed6a4b89b8c7a1d9478a8f5d8aba4a1 \
|
||||
--hash=sha256:a8c84dff2d60ae06d5280ec87cd63050bbd74a90c02bfc7c390c803cfc8ac8fc \
|
||||
--hash=sha256:b3456cd5b16ec9db3ef23dd27f37bf5a14f765e8272e9af3e3de9ee9a4cba867 \
|
||||
--hash=sha256:b3bdd2e12d9b9a18713dd6f3c5ef3734fdab25b79b177054ba9e35ecc746cb6e \
|
||||
--hash=sha256:b3c6cf18b61b94db9590f86af0dd60edbccb36e151643152b8688066f677fbc9 \
|
||||
--hash=sha256:b3e8d46c1004afcf2bf513a8fb575ee2ec3d8009a2668566b5926a2dcf7f1a45 \
|
||||
--hash=sha256:bced6cd5b09d4d002dda9f37292dd58d26eb1c4d0d179b820d3708d776300bb4 \
|
||||
--hash=sha256:bed8dcb7dbfdb98e647ad47676045e0891f610d38095dcfdae468e1e1efb2766 \
|
||||
--hash=sha256:c85892d68895ba7a0b16a0e6b7d9f9a0e30e86f2b1e0f6986243473ba8735432 \
|
||||
--hash=sha256:c8646eb81eec559d7d8b1e51a5087299d06ecab3bc7da54c01f7df94350df135 \
|
||||
--hash=sha256:cd0450e76b9c629b7f86e7d5b91b7cc9c281dd719630160a992b19a856f7bdbd \
|
||||
--hash=sha256:ce4be2beece2629bd24bcab147741d1532bd5ed40fb52f2b4fcde5c5bf606df0 \
|
||||
--hash=sha256:d3e255ef05b434f20fc9d4b18ea15733d1038bec3e4960d772b06216fa79e82d \
|
||||
--hash=sha256:dcec67fc15e5978ad286e8cc2a3f9347076e28e0e01673b5ace18c73da64e3ff \
|
||||
--hash=sha256:e97e6e07851cefe7baa41f1ebf5c0899d2d00d94bfef59825752e4c784bebbe8 \
|
||||
--hash=sha256:eb167ee21d9c413d6b0ab65ec12f3e7ea0122879da8b3569fa1063526f9f03a8 \
|
||||
--hash=sha256:efee1e9b4f691e1086730f3010e31c55625bc2e0f7db292a38a2cdf2774c2e13 \
|
||||
--hash=sha256:f349bee14d0a4a72ba41e1b1cce52af324ebf704f5066c09e3dd04cfa6f545f0 \
|
||||
--hash=sha256:f470f3d750e00df86e03254fdcb422d2f726f4fb3a0d8eeee35e81343985e58a \
|
||||
--hash=sha256:f6464242f7895268d3086d7829ef031b05c77870dad1e13e51ef79d0a9cfe029 \
|
||||
--hash=sha256:f6785ba0f65eb64b1ce3b7fcfec101085faf98f4e77b234f14287fd4138ffb25 \
|
||||
--hash=sha256:fd218b338ac68213c997d4c88437c0e726f16d301616bf837e1468901934042c \
|
||||
--hash=sha256:fe7f414edd69dd9199b0dfffa0ada22f23d8009e10fe2a719e0993b7dcc2e6e2
|
||||
immutabledict==2.2.3 ; python_full_version >= "3.7.1" and python_version < "4.0" \
|
||||
--hash=sha256:0e1e8a3f2b3ff062daa19795f947e9ec7a58add269d44e34d3ab4319e1343853 \
|
||||
--hash=sha256:a7b078ebcc4a58ddc73b55f808b26e7c8c2d5183fad325615112689e1a63e714
|
||||
importlib-metadata==6.1.0 ; python_full_version >= "3.7.1" and python_version < "3.8" \
|
||||
--hash=sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20 \
|
||||
--hash=sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09
|
||||
importlib-resources==5.12.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
|
||||
--hash=sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6 \
|
||||
--hash=sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a
|
||||
incremental==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0 \
|
||||
--hash=sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51
|
||||
jinja2==3.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
|
||||
--hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
|
||||
jsonschema==4.17.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
|
||||
--hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \
|
||||
--hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6
|
||||
lxml==4.9.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7 \
    --hash=sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726 \
    --hash=sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03 \
    --hash=sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140 \
    --hash=sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a \
    --hash=sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05 \
    --hash=sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03 \
    --hash=sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419 \
    --hash=sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4 \
    --hash=sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e \
    --hash=sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67 \
    --hash=sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50 \
    --hash=sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894 \
    --hash=sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf \
    --hash=sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947 \
    --hash=sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1 \
    --hash=sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd \
    --hash=sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3 \
    --hash=sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92 \
    --hash=sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3 \
    --hash=sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457 \
    --hash=sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74 \
    --hash=sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf \
    --hash=sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1 \
    --hash=sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4 \
    --hash=sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975 \
    --hash=sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5 \
    --hash=sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe \
    --hash=sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7 \
    --hash=sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1 \
    --hash=sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2 \
    --hash=sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409 \
    --hash=sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f \
    --hash=sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f \
    --hash=sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5 \
    --hash=sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24 \
    --hash=sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e \
    --hash=sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4 \
    --hash=sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a \
    --hash=sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c \
    --hash=sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de \
    --hash=sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f \
    --hash=sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b \
    --hash=sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5 \
    --hash=sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7 \
    --hash=sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a \
    --hash=sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c \
    --hash=sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9 \
    --hash=sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e \
    --hash=sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab \
    --hash=sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941 \
    --hash=sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5 \
    --hash=sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45 \
    --hash=sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7 \
    --hash=sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892 \
    --hash=sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746 \
    --hash=sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c \
    --hash=sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53 \
    --hash=sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe \
    --hash=sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184 \
    --hash=sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38 \
    --hash=sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df \
    --hash=sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9 \
    --hash=sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b \
    --hash=sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2 \
    --hash=sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0 \
    --hash=sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda \
    --hash=sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b \
    --hash=sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5 \
    --hash=sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380 \
    --hash=sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33 \
    --hash=sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8 \
    --hash=sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1 \
    --hash=sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889 \
    --hash=sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9 \
    --hash=sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f \
    --hash=sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c
markupsafe==2.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
    --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
    --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
    --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
    --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
    --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
    --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
    --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
    --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
    --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
    --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
    --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
    --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
    --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
    --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
    --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
    --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
    --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
    --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
    --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
    --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
    --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
    --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
    --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
    --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
    --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
    --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
    --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
    --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
    --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
    --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
    --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
    --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
    --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
    --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
    --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
    --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
    --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
    --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
    --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
    --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
    --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
    --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
    --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
    --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
    --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
    --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
    --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
    --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
    --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
matrix-common==1.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:524e2785b9b03be4d15f3a8a6b857c5b6af68791ffb1b9918f0ad299abc4db20 \
    --hash=sha256:62e121cccd9f243417b57ec37a76dc44aeb198a7a5c67afd6b8275992ff2abd1
msgpack==1.0.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164 \
    --hash=sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b \
    --hash=sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c \
    --hash=sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf \
    --hash=sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd \
    --hash=sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d \
    --hash=sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c \
    --hash=sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a \
    --hash=sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e \
    --hash=sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd \
    --hash=sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025 \
    --hash=sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5 \
    --hash=sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705 \
    --hash=sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a \
    --hash=sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d \
    --hash=sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb \
    --hash=sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11 \
    --hash=sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f \
    --hash=sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c \
    --hash=sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d \
    --hash=sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea \
    --hash=sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba \
    --hash=sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87 \
    --hash=sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a \
    --hash=sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c \
    --hash=sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080 \
    --hash=sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198 \
    --hash=sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9 \
    --hash=sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a \
    --hash=sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b \
    --hash=sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f \
    --hash=sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437 \
    --hash=sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f \
    --hash=sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7 \
    --hash=sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2 \
    --hash=sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0 \
    --hash=sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48 \
    --hash=sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898 \
    --hash=sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0 \
    --hash=sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57 \
    --hash=sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8 \
    --hash=sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282 \
    --hash=sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1 \
    --hash=sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82 \
    --hash=sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc \
    --hash=sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb \
    --hash=sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6 \
    --hash=sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7 \
    --hash=sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9 \
    --hash=sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c \
    --hash=sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1 \
    --hash=sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed \
    --hash=sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c \
    --hash=sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c \
    --hash=sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77 \
    --hash=sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81 \
    --hash=sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a \
    --hash=sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3 \
    --hash=sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086 \
    --hash=sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9 \
    --hash=sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f \
    --hash=sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b \
    --hash=sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d
netaddr==0.8.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac \
    --hash=sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243
packaging==23.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \
    --hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97
parameterized==0.8.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c \
    --hash=sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9
phonenumbers==8.13.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:253bb0e01250d21a11f2b42b3e6e161b7f6cb2ac440e2e2a95c1da71d221ee1a \
    --hash=sha256:d3e3555b38c89b121f5b2e917847003bdd07027569d758d5f40156c01aeac089
pillow==9.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33 \
    --hash=sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b \
    --hash=sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e \
    --hash=sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35 \
    --hash=sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153 \
    --hash=sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9 \
    --hash=sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569 \
    --hash=sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57 \
    --hash=sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8 \
    --hash=sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1 \
    --hash=sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264 \
    --hash=sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157 \
    --hash=sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9 \
    --hash=sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133 \
    --hash=sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9 \
    --hash=sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab \
    --hash=sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6 \
    --hash=sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5 \
    --hash=sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df \
    --hash=sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503 \
    --hash=sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b \
    --hash=sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa \
    --hash=sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327 \
    --hash=sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493 \
    --hash=sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d \
    --hash=sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4 \
    --hash=sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4 \
    --hash=sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35 \
    --hash=sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2 \
    --hash=sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c \
    --hash=sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011 \
    --hash=sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a \
    --hash=sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e \
    --hash=sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f \
    --hash=sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848 \
    --hash=sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57 \
    --hash=sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f \
    --hash=sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c \
    --hash=sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9 \
    --hash=sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5 \
    --hash=sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9 \
    --hash=sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d \
    --hash=sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0 \
    --hash=sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1 \
    --hash=sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e \
    --hash=sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815 \
    --hash=sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0 \
    --hash=sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b \
    --hash=sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd \
    --hash=sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c \
    --hash=sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3 \
    --hash=sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab \
    --hash=sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858 \
    --hash=sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5 \
    --hash=sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee \
    --hash=sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343 \
    --hash=sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb \
    --hash=sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47 \
    --hash=sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed \
    --hash=sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837 \
    --hash=sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286 \
    --hash=sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28 \
    --hash=sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628 \
    --hash=sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df \
    --hash=sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d \
    --hash=sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d \
    --hash=sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a \
    --hash=sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6 \
    --hash=sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336 \
    --hash=sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132 \
    --hash=sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070 \
    --hash=sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe \
    --hash=sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a \
    --hash=sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd \
    --hash=sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391 \
    --hash=sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a \
    --hash=sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12
pkgutil-resolve-name==1.3.10 ; python_full_version >= "3.7.1" and python_version < "3.9" \
    --hash=sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174 \
    --hash=sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e
prometheus-client==0.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab \
    --hash=sha256:a03e35b359f14dd1630898543e2120addfdeacd1a6069c1367ae90fd93ad3f48
psycopg2==2.9.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955 \
    --hash=sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa \
    --hash=sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e \
    --hash=sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a \
    --hash=sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5 \
    --hash=sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee \
    --hash=sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad \
    --hash=sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0 \
    --hash=sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a \
    --hash=sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d \
    --hash=sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f \
    --hash=sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2 \
    --hash=sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5
psycopg2cffi-compat==1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
    --hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05
psycopg2cffi==2.9.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
    --hash=sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52
pyasn1-modules==0.2.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
    --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74
pyasn1==0.4.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
    --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
pycparser==2.21 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
    --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e \
    --hash=sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6 \
    --hash=sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd \
    --hash=sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca \
    --hash=sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b \
    --hash=sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a \
    --hash=sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245 \
    --hash=sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d \
    --hash=sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee \
    --hash=sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1 \
    --hash=sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3 \
    --hash=sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d \
    --hash=sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5 \
    --hash=sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914 \
    --hash=sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd \
    --hash=sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1 \
    --hash=sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e \
    --hash=sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e \
    --hash=sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a \
    --hash=sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd \
    --hash=sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f \
    --hash=sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209 \
    --hash=sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d \
    --hash=sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a \
    --hash=sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143 \
    --hash=sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918 \
    --hash=sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52 \
    --hash=sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e \
    --hash=sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f \
    --hash=sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e \
    --hash=sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb \
    --hash=sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe \
    --hash=sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe \
    --hash=sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d \
    --hash=sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209 \
    --hash=sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af
pymacaroons==0.13.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8 \
    --hash=sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907
pynacl==1.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 \
    --hash=sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d \
    --hash=sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93 \
    --hash=sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1 \
    --hash=sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92 \
    --hash=sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff \
    --hash=sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba \
    --hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \
    --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \
    --hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543
pyopenssl==23.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:8cb78010a1eb2c8e24b851693b7b04dfe9b1dc0a5ab3843927b10a85b1dfbb2e \
    --hash=sha256:fb96e936866ad65662c22d0de84ca0fba58397893cdfe0f01334fa93382af23c
pyrsistent==0.19.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8 \
    --hash=sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440 \
    --hash=sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a \
    --hash=sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c \
    --hash=sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3 \
    --hash=sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393 \
    --hash=sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9 \
    --hash=sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da \
    --hash=sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf \
    --hash=sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64 \
    --hash=sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a \
    --hash=sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3 \
    --hash=sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98 \
    --hash=sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2 \
    --hash=sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8 \
    --hash=sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf \
    --hash=sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc \
    --hash=sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7 \
    --hash=sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28 \
    --hash=sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2 \
    --hash=sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b \
    --hash=sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a \
    --hash=sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64 \
    --hash=sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19 \
    --hash=sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1 \
    --hash=sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9 \
    --hash=sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c
pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
    --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
    --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
    --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
    --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
    --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
    --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
    --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
    --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
    --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
    --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
    --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
    --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
    --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
    --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
    --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
    --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
    --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
    --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
    --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
    --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
    --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
    --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
    --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
    --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
    --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
    --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
    --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
    --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
    --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
    --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
    --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
    --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
    --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
    --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
    --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
    --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
    --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
    --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
    --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
requests==2.28.2 ; python_full_version >= "3.7.1" and python_version < "4" \
    --hash=sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa \
    --hash=sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf
semantic-version==2.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c \
    --hash=sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177
service-identity==21.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34 \
    --hash=sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db
setuptools-rust==1.5.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206 \
    --hash=sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7
setuptools==67.6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077 \
    --hash=sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2
signedjson==1.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228 \
    --hash=sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492
six==1.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
    --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
sortedcontainers==2.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \
    --hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0
treq==22.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:27d95b07c5c14be3e7b280416139b036087617ad5595be913b1f9b3ce981b9b2 \
    --hash=sha256:df757e3f141fc782ede076a604521194ffcb40fa2645cf48e5a37060307f52ec
twisted-iocpsupport==1.0.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_system == "Windows" \
    --hash=sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41 \
    --hash=sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d \
    --hash=sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9 \
    --hash=sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf \
    --hash=sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323 \
    --hash=sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32 \
    --hash=sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4 \
    --hash=sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f \
    --hash=sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546 \
    --hash=sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878 \
    --hash=sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565 \
    --hash=sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415
twisted==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31 \
    --hash=sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0
twisted[tls]==22.10.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31 \
    --hash=sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0
txredisapi==1.4.9 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:72e6ad09cc5fffe3bec2e55e5bfb74407bd357565fc212e6003f7e26ef7d8f78 \
    --hash=sha256:c9607062d05e4d0b8ef84719eb76a3fe7d5ccd606a2acf024429da51d6e84559
typing-extensions==4.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
    --hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \
    --hash=sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6 \
    --hash=sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005
urllib3==1.26.15 ; python_full_version >= "3.7.1" and python_version < "4" \
    --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \
    --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42
webencodings==0.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
    --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
zipp==3.15.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \
    --hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \
    --hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556
zope-interface==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
    --hash=sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373 \
    --hash=sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb \
    --hash=sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446 \
    --hash=sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8 \
    --hash=sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c \
    --hash=sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8 \
    --hash=sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2 \
    --hash=sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f \
    --hash=sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f \
    --hash=sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5 \
    --hash=sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85 \
    --hash=sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc \
    --hash=sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788 \
    --hash=sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518 \
    --hash=sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410 \
    --hash=sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464 \
    --hash=sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5 \
    --hash=sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d \
    --hash=sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52 \
    --hash=sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca \
    --hash=sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8 \
    --hash=sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2 \
    --hash=sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f \
    --hash=sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58 \
    --hash=sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a \
    --hash=sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d \
    --hash=sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28 \
    --hash=sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990 \
    --hash=sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995 \
    --hash=sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30
@@ -14,6 +14,7 @@

#![feature(test)]
use std::collections::BTreeSet;

use synapse::push::{
    evaluator::PushRuleEvaluator, Condition, EventMatchCondition, FilteredPushRules, JsonValue,
    PushRules, SimpleJsonValue,
@@ -44,7 +45,6 @@ fn bench_match_exact(b: &mut Bencher) {
    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        false,
        BTreeSet::new(),
        10,
        Some(0),
        Default::default(),
@@ -52,16 +52,13 @@ fn bench_match_exact(b: &mut Bencher) {
        true,
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "room_id".into(),
            pattern: Some("!room:server".into()),
            pattern_type: None,
            pattern: "!room:server".into(),
        },
    ));

@@ -93,7 +90,6 @@ fn bench_match_word(b: &mut Bencher) {
    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        false,
        BTreeSet::new(),
        10,
        Some(0),
        Default::default(),
@@ -101,16 +97,13 @@ fn bench_match_word(b: &mut Bencher) {
        true,
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "content.body".into(),
            pattern: Some("test".into()),
            pattern_type: None,
            pattern: "test".into(),
        },
    ));

@@ -142,7 +135,6 @@ fn bench_match_word_miss(b: &mut Bencher) {
    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        false,
        BTreeSet::new(),
        10,
        Some(0),
        Default::default(),
@@ -150,16 +142,13 @@ fn bench_match_word_miss(b: &mut Bencher) {
        true,
        vec![],
        false,
        false,
        false,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "content.body".into(),
            pattern: Some("foobar".into()),
            pattern_type: None,
            pattern: "foobar".into(),
        },
    ));

@@ -191,7 +180,6 @@ fn bench_eval_message(b: &mut Bencher) {
    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        false,
        BTreeSet::new(),
        10,
        Some(0),
        Default::default(),
@@ -199,8 +187,6 @@ fn bench_eval_message(b: &mut Bencher) {
        true,
        vec![],
        false,
        false,
        false,
    )
    .unwrap();
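The bench hunks above repeat one mechanical change: EventMatchCondition loses its optional pattern/pattern_type pair in favour of a single required pattern, and templated matches move to a dedicated condition type. A minimal, self-contained sketch of that before/after shape follows; these are simplified stand-ins for illustration, not synapse's actual definitions.

use std::borrow::Cow;

// Old shape (reconstructed from the removed lines): two optional fields,
// with "exactly one of pattern / pattern_type is set" left as a runtime
// invariant that every consumer had to re-check.
#[allow(dead_code)]
struct OldEventMatchCondition<'a> {
    key: Cow<'a, str>,
    pattern: Option<Cow<'a, str>>,
    pattern_type: Option<Cow<'a, str>>,
}

// New shape: a literal match always carries a pattern...
struct EventMatchCondition<'a> {
    key: Cow<'a, str>,
    pattern: Cow<'a, str>,
}

// ...and templated matches get their own condition type, so the invalid
// "both set" / "neither set" states can no longer be constructed.
#[allow(dead_code)]
enum EventMatchPatternType {
    UserId,
    UserLocalpart,
}

struct EventMatchTypeCondition<'a> {
    key: Cow<'a, str>,
    pattern_type: &'a EventMatchPatternType,
}

fn main() {
    let literal = EventMatchCondition {
        key: Cow::Borrowed("room_id"),
        pattern: Cow::Borrowed("!room:server"),
    };
    let templated = EventMatchTypeCondition {
        key: Cow::Borrowed("state_key"),
        pattern_type: &EventMatchPatternType::UserId,
    };
    println!("literal: {} ~ {}", literal.key, literal.pattern);
    let expansion = match templated.pattern_type {
        EventMatchPatternType::UserId => "the requesting user's full ID",
        EventMatchPatternType::UserLocalpart => "the requesting user's localpart",
    };
    println!("templated: {} ~ {}", templated.key, expansion);
}

Splitting the variants also explains why the benches now pass pattern: "...".into() directly: the Option wrapper has no remaining purpose once a literal condition always carries a pattern.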
@@ -21,13 +21,13 @@ use lazy_static::lazy_static;
use serde_json::Value;

use super::KnownCondition;
use crate::push::Condition;
use crate::push::EventMatchCondition;
use crate::push::PushRule;
use crate::push::RelatedEventMatchCondition;
use crate::push::RelatedEventMatchTypeCondition;
use crate::push::SetTweak;
use crate::push::TweakValue;
use crate::push::{Action, ExactEventMatchCondition, SimpleJsonValue};
use crate::push::{Action, EventPropertyIsCondition, SimpleJsonValue};
use crate::push::{Condition, EventMatchTypeCondition};
use crate::push::{EventMatchCondition, EventMatchPatternType};
use crate::push::{EventPropertyIsTypeCondition, PushRule};

const HIGHLIGHT_ACTION: Action = Action::SetTweak(SetTweak {
    set_tweak: Cow::Borrowed("highlight"),
@@ -71,9 +71,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        priority_class: 5,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("content.m.relates_to.rel_type"),
                pattern: Some(Cow::Borrowed("m.replace")),
                pattern_type: None,
                key: Cow::Borrowed("content.m\\.relates_to.rel_type"),
                pattern: Cow::Borrowed("m.replace"),
            },
        ))]),
        actions: Cow::Borrowed(&[]),
@@ -86,8 +85,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("content.msgtype"),
                pattern: Some(Cow::Borrowed("m.notice")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.notice"),
            },
        ))]),
        actions: Cow::Borrowed(&[Action::DontNotify]),
@@ -100,18 +98,15 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.member")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.room.member"),
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("content.membership"),
                pattern: Some(Cow::Borrowed("invite")),
                pattern_type: None,
                pattern: Cow::Borrowed("invite"),
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
            Condition::Known(KnownCondition::EventMatchType(EventMatchTypeCondition {
                key: Cow::Borrowed("state_key"),
                pattern: None,
                pattern_type: Some(Cow::Borrowed("user_id")),
                pattern_type: Cow::Borrowed(&EventMatchPatternType::UserId),
            })),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION, SOUND_ACTION]),
@@ -124,8 +119,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.member")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.room.member"),
            },
        ))]),
        actions: Cow::Borrowed(&[Action::DontNotify]),
@@ -135,11 +129,10 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
    PushRule {
        rule_id: Cow::Borrowed("global/override/.im.nheko.msc3664.reply"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatch(
            RelatedEventMatchCondition {
                key: Some(Cow::Borrowed("sender")),
                pattern: None,
                pattern_type: Some(Cow::Borrowed("user_id")),
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatchType(
            RelatedEventMatchTypeCondition {
                key: Cow::Borrowed("sender"),
                pattern_type: Cow::Borrowed(&EventMatchPatternType::UserId),
                rel_type: Cow::Borrowed("m.in_reply_to"),
                include_fallbacks: None,
            },
@@ -151,7 +144,12 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
    PushRule {
        rule_id: Cow::Borrowed(".org.matrix.msc3952.is_user_mention"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::IsUserMention)]),
        conditions: Cow::Borrowed(&[Condition::Known(
            KnownCondition::ExactEventPropertyContainsType(EventPropertyIsTypeCondition {
                key: Cow::Borrowed("content.org\\.matrix\\.msc3952\\.mentions.user_ids"),
                value_type: Cow::Borrowed(&EventMatchPatternType::UserId),
            }),
        )]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
        default: true,
        default_enabled: true,
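A second detail visible in the rewritten rule keys: dots that belong to a field name are now backslash-escaped (for example content.org\.matrix\.msc3952\.mentions.user_ids), which implies the key is treated as a path split on unescaped dots only. A small self-contained sketch of such a splitter, written under that assumption rather than taken from synapse's implementation:

/// Split a dotted key path on unescaped dots; "\." keeps a literal dot
/// inside a single segment. Illustrative only.
fn split_key_path(key: &str) -> Vec<String> {
    let mut parts = Vec::new();
    let mut current = String::new();
    let mut chars = key.chars();
    while let Some(c) = chars.next() {
        match c {
            '\\' => {
                // Keep the escaped character literally (e.g. "\." -> '.').
                if let Some(escaped) = chars.next() {
                    current.push(escaped);
                }
            }
            '.' => parts.push(std::mem::take(&mut current)),
            _ => current.push(c),
        }
    }
    parts.push(current);
    parts
}

fn main() {
    let parts = split_key_path("content.org\\.matrix\\.msc3952\\.mentions.room");
    // Prints ["content", "org.matrix.msc3952.mentions", "room"]
    println!("{:?}", parts);
}

Escaping at the key level keeps lookups into a flattened event unambiguous when field names themselves contain literal dots, as Matrix fields such as org.matrix.msc3952.mentions do.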
@@ -168,8 +166,8 @@
        rule_id: Cow::Borrowed(".org.matrix.msc3952.is_room_mention"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::ExactEventMatch(ExactEventMatchCondition {
                key: Cow::Borrowed("content.org.matrix.msc3952.mentions.room"),
            Condition::Known(KnownCondition::EventPropertyIs(EventPropertyIsCondition {
                key: Cow::Borrowed("content.org\\.matrix\\.msc3952\\.mentions.room"),
                value: Cow::Borrowed(&SimpleJsonValue::Bool(true)),
            })),
            Condition::Known(KnownCondition::SenderNotificationPermission {
@@ -189,8 +187,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
            }),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("content.body"),
                pattern: Some(Cow::Borrowed("@room")),
                pattern_type: None,
                pattern: Cow::Borrowed("@room"),
            })),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
@@ -203,13 +200,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.tombstone")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.room.tombstone"),
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("state_key"),
                pattern: Some(Cow::Borrowed("")),
                pattern_type: None,
                pattern: Cow::Borrowed(""),
            })),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
@@ -222,8 +217,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.reaction")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.reaction"),
            },
        ))]),
        actions: Cow::Borrowed(&[]),
@@ -236,13 +230,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.server_acl")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.room.server_acl"),
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("state_key"),
                pattern: Some(Cow::Borrowed("")),
                pattern_type: None,
                pattern: Cow::Borrowed(""),
            })),
        ]),
        actions: Cow::Borrowed(&[]),
@@ -255,8 +247,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.response")),
                pattern_type: None,
                pattern: Cow::Borrowed("org.matrix.msc3381.poll.response"),
            },
        ))]),
        actions: Cow::Borrowed(&[]),
@@ -268,11 +259,10 @@
pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
    rule_id: Cow::Borrowed("global/content/.m.rule.contains_user_name"),
    priority_class: 4,
    conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
        EventMatchCondition {
    conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatchType(
        EventMatchTypeCondition {
            key: Cow::Borrowed("content.body"),
            pattern: None,
            pattern_type: Some(Cow::Borrowed("user_localpart")),
            pattern_type: Cow::Borrowed(&EventMatchPatternType::UserLocalpart),
        },
    ))]),
    actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
@@ -287,8 +277,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.call.invite")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.call.invite"),
            },
        ))]),
        actions: Cow::Borrowed(&[Action::Notify, RING_ACTION, HIGHLIGHT_FALSE_ACTION]),
@@ -301,8 +290,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.message")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.room.message"),
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),
@@ -318,8 +306,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.encrypted")),
                pattern_type: None,
                pattern: Cow::Borrowed("m.room.encrypted"),
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),
@@ -338,8 +325,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                // MSC3933: Type changed from template rule - see MSC.
                pattern: Some(Cow::Borrowed("org.matrix.msc1767.encrypted")),
                pattern_type: None,
                pattern: Cow::Borrowed("org.matrix.msc1767.encrypted"),
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),
@@ -363,8 +349,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                // MSC3933: Type changed from template rule - see MSC.
                pattern: Some(Cow::Borrowed("org.matrix.msc1767.message")),
                pattern_type: None,
                pattern: Cow::Borrowed("org.matrix.msc1767.message"),
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),
@@ -388,8 +373,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                // MSC3933: Type changed from template rule - see MSC.
                pattern: Some(Cow::Borrowed("org.matrix.msc1767.file")),
                pattern_type: None,
                pattern: Cow::Borrowed("org.matrix.msc1767.file"),
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),
@@ -413,8 +397,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.image")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.image"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -438,8 +421,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.video")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.video"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -463,8 +445,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc1767.audio")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc1767.audio"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::RoomMemberCount {
|
||||
is: Some(Cow::Borrowed("2")),
|
||||
@ -485,8 +466,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.message")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.message"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
|
||||
@ -499,8 +479,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("m.room.encrypted")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.room.encrypted"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
|
||||
@ -514,8 +493,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.encrypted")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.encrypted"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -534,8 +512,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.message")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.message"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -554,8 +531,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.file")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.file"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -574,8 +550,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.image")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.image"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -594,8 +569,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.video")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.video"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -614,8 +588,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
// MSC3933: Type changed from template rule - see MSC.
|
||||
pattern: Some(Cow::Borrowed("m.audio")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("m.audio"),
|
||||
})),
|
||||
// MSC3933: Add condition on top of template rule - see MSC.
|
||||
Condition::Known(KnownCondition::RoomVersionSupports {
|
||||
@ -633,18 +606,15 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("im.vector.modular.widgets")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("im.vector.modular.widgets"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("content.type"),
|
||||
pattern: Some(Cow::Borrowed("jitsi")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("jitsi"),
|
||||
})),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("state_key"),
|
||||
pattern: Some(Cow::Borrowed("*")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("*"),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
|
||||
@ -660,8 +630,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
}),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.start")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.start"),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
|
||||
@ -674,8 +643,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.start")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.start"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify]),
|
||||
@ -691,8 +659,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
}),
|
||||
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.end")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.end"),
|
||||
})),
|
||||
]),
|
||||
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
|
||||
@ -705,8 +672,7 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
|
||||
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
|
||||
EventMatchCondition {
|
||||
key: Cow::Borrowed("type"),
|
||||
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.end")),
|
||||
pattern_type: None,
|
||||
pattern: Cow::Borrowed("org.matrix.msc3381.poll.end"),
|
||||
},
|
||||
))]),
|
||||
actions: Cow::Borrowed(&[Action::Notify]),
|
||||
|
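Aside: every hunk above collapses the pair `pattern: Option<...>` / `pattern_type: Option<...>` into one mandatory `pattern`. A minimal, self-contained sketch of why that helps, assuming serde and serde_json as dependencies (the struct mirrors the shape in the diff, but this is not Synapse's actual crate layout): with a mandatory field, the type can no longer represent the previously-possible invalid state of having neither `pattern` nor `pattern_type` set.

// sketch: the simplified condition body round-trips cleanly through JSON.
use std::borrow::Cow;

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Clone)]
struct EventMatchCondition {
    key: Cow<'static, str>,
    pattern: Cow<'static, str>,
}

fn main() -> Result<(), serde_json::Error> {
    let cond = EventMatchCondition {
        key: Cow::Borrowed("type"),
        pattern: Cow::Borrowed("m.room.message"),
    };
    // Serialises to the same shape the base rules above now hard-code:
    // {"key":"type","pattern":"m.room.message"}
    println!("{}", serde_json::to_string(&cond)?);
    Ok(())
}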
@@ -12,9 +12,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-use std::collections::{BTreeMap, BTreeSet};
+use std::borrow::Cow;
+use std::collections::BTreeMap;

-use crate::push::JsonValue;
 use anyhow::{Context, Error};
 use lazy_static::lazy_static;
 use log::warn;
@@ -23,9 +23,10 @@ use regex::Regex;

 use super::{
     utils::{get_glob_matcher, get_localpart_from_id, GlobMatchType},
-    Action, Condition, EventMatchCondition, ExactEventMatchCondition, FilteredPushRules,
-    KnownCondition, RelatedEventMatchCondition, SimpleJsonValue,
+    Action, Condition, EventPropertyIsCondition, FilteredPushRules, KnownCondition,
+    SimpleJsonValue,
 };
+use crate::push::{EventMatchPatternType, JsonValue};

 lazy_static! {
     /// Used to parse the `is` clause in the room member count condition.
@@ -71,8 +72,6 @@ pub struct PushRuleEvaluator {

     /// True if the event has a mentions property and MSC3952 support is enabled.
     has_mentions: bool,
-    /// The user mentions that were part of the message.
-    user_mentions: BTreeSet<String>,

     /// The number of users in the room.
     room_member_count: u64,
@@ -97,12 +96,6 @@ pub struct PushRuleEvaluator {
     /// If MSC3931 (room version feature flags) is enabled. Usually controlled by the same
     /// flag as MSC1767 (extensible events core).
     msc3931_enabled: bool,
-
-    /// If MSC3758 (exact_event_match push rule condition) is enabled.
-    msc3758_exact_event_match: bool,
-
-    /// If MSC3966 (exact_event_property_contains push rule condition) is enabled.
-    msc3966_exact_event_property_contains: bool,
 }

 #[pymethods]
@@ -113,7 +106,6 @@ impl PushRuleEvaluator {
     pub fn py_new(
         flattened_keys: BTreeMap<String, JsonValue>,
         has_mentions: bool,
-        user_mentions: BTreeSet<String>,
         room_member_count: u64,
         sender_power_level: Option<i64>,
         notification_power_levels: BTreeMap<String, i64>,
@@ -121,8 +113,6 @@ impl PushRuleEvaluator {
         related_event_match_enabled: bool,
         room_version_feature_flags: Vec<String>,
         msc3931_enabled: bool,
-        msc3758_exact_event_match: bool,
-        msc3966_exact_event_property_contains: bool,
     ) -> Result<Self, Error> {
         let body = match flattened_keys.get("content.body") {
             Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone(),
@@ -133,7 +123,6 @@ impl PushRuleEvaluator {
             flattened_keys,
             body,
             has_mentions,
-            user_mentions,
             room_member_count,
             notification_power_levels,
             sender_power_level,
@@ -141,8 +130,6 @@ impl PushRuleEvaluator {
             related_event_match_enabled,
             room_version_feature_flags,
             msc3931_enabled,
-            msc3758_exact_event_match,
-            msc3966_exact_event_property_contains,
         })
     }

@@ -256,24 +243,83 @@ impl PushRuleEvaluator {
         };

         let result = match known_condition {
-            KnownCondition::EventMatch(event_match) => {
-                self.match_event_match(event_match, user_id)?
-            }
-            KnownCondition::ExactEventMatch(exact_event_match) => {
-                self.match_exact_event_match(exact_event_match)?
-            }
-            KnownCondition::RelatedEventMatch(event_match) => {
-                self.match_related_event_match(event_match, user_id)?
-            }
-            KnownCondition::ExactEventPropertyContains(exact_event_match) => {
-                self.match_exact_event_property_contains(exact_event_match)?
-            }
-            KnownCondition::IsUserMention => {
-                if let Some(uid) = user_id {
-                    self.user_mentions.contains(uid)
-                } else {
-                    false
-                }
-            }
+            KnownCondition::EventMatch(event_match) => self.match_event_match(
+                &self.flattened_keys,
+                &event_match.key,
+                &event_match.pattern,
+            )?,
+            KnownCondition::EventMatchType(event_match) => {
+                // The `pattern_type` can either be "user_id" or "user_localpart",
+                // either way if we don't have a `user_id` then the condition can't
+                // match.
+                let user_id = if let Some(user_id) = user_id {
+                    user_id
+                } else {
+                    return Ok(false);
+                };
+
+                let pattern = match &*event_match.pattern_type {
+                    EventMatchPatternType::UserId => user_id,
+                    EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
+                };
+
+                self.match_event_match(&self.flattened_keys, &event_match.key, pattern)?
+            }
+            KnownCondition::EventPropertyIs(event_property_is) => {
+                self.match_event_property_is(event_property_is)?
+            }
+            KnownCondition::RelatedEventMatch(event_match) => self.match_related_event_match(
+                &event_match.rel_type.clone(),
+                event_match.include_fallbacks,
+                event_match.key.clone(),
+                event_match.pattern.clone(),
+            )?,
+            KnownCondition::RelatedEventMatchType(event_match) => {
+                // The `pattern_type` can either be "user_id" or "user_localpart",
+                // either way if we don't have a `user_id` then the condition can't
+                // match.
+                let user_id = if let Some(user_id) = user_id {
+                    user_id
+                } else {
+                    return Ok(false);
+                };
+
+                let pattern = match &*event_match.pattern_type {
+                    EventMatchPatternType::UserId => user_id,
+                    EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
+                };
+
+                self.match_related_event_match(
+                    &event_match.rel_type.clone(),
+                    event_match.include_fallbacks,
+                    Some(event_match.key.clone()),
+                    Some(Cow::Borrowed(pattern)),
+                )?
+            }
+            KnownCondition::EventPropertyContains(event_property_is) => self
+                .match_event_property_contains(
+                    event_property_is.key.clone(),
+                    event_property_is.value.clone(),
+                )?,
+            KnownCondition::ExactEventPropertyContainsType(exact_event_match) => {
+                // The `pattern_type` can either be "user_id" or "user_localpart",
+                // either way if we don't have a `user_id` then the condition can't
+                // match.
+                let user_id = if let Some(user_id) = user_id {
+                    user_id
+                } else {
+                    return Ok(false);
+                };
+
+                let pattern = match &*exact_event_match.value_type {
+                    EventMatchPatternType::UserId => user_id,
+                    EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
+                };
+
+                self.match_event_property_contains(
+                    exact_event_match.key.clone(),
+                    Cow::Borrowed(&SimpleJsonValue::Str(pattern.to_string())),
+                )?
+            }
             KnownCondition::ContainsDisplayName => {
                 if let Some(dn) = display_name {
@@ -325,135 +371,18 @@ impl PushRuleEvaluator {
     /// Evaluates a `event_match` condition.
     fn match_event_match(
         &self,
-        event_match: &EventMatchCondition,
-        user_id: Option<&str>,
+        flattened_event: &BTreeMap<String, JsonValue>,
+        key: &str,
+        pattern: &str,
     ) -> Result<bool, Error> {
-        let pattern = if let Some(pattern) = &event_match.pattern {
-            pattern
-        } else if let Some(pattern_type) = &event_match.pattern_type {
-            // The `pattern_type` can either be "user_id" or "user_localpart",
-            // either way if we don't have a `user_id` then the condition can't
-            // match.
-            let user_id = if let Some(user_id) = user_id {
-                user_id
-            } else {
-                return Ok(false);
-            };
-
-            match &**pattern_type {
-                "user_id" => user_id,
-                "user_localpart" => get_localpart_from_id(user_id)?,
-                _ => return Ok(false),
-            }
-        } else {
-            return Ok(false);
-        };
-
         let haystack = if let Some(JsonValue::Value(SimpleJsonValue::Str(haystack))) =
-            self.flattened_keys.get(&*event_match.key)
+            flattened_event.get(key)
         {
             haystack
         } else {
             return Ok(false);
         };

         // For the content.body we match against "words", but for everything
         // else we match against the entire value.
-        let match_type = if event_match.key == "content.body" {
+        let match_type = if key == "content.body" {
             GlobMatchType::Word
         } else {
             GlobMatchType::Whole
         };

         let mut compiled_pattern = get_glob_matcher(pattern, match_type)?;
         compiled_pattern.is_match(haystack)
     }

-    /// Evaluates a `exact_event_match` condition. (MSC3758)
-    fn match_exact_event_match(
-        &self,
-        exact_event_match: &ExactEventMatchCondition,
-    ) -> Result<bool, Error> {
-        // First check if the feature is enabled.
-        if !self.msc3758_exact_event_match {
-            return Ok(false);
-        }
-
-        let value = &exact_event_match.value;
-
-        let haystack = if let Some(JsonValue::Value(haystack)) =
-            self.flattened_keys.get(&*exact_event_match.key)
-        {
-            haystack
-        } else {
-            return Ok(false);
-        };
-
-        Ok(haystack == &**value)
-    }
-
-    /// Evaluates a `related_event_match` condition. (MSC3664)
-    fn match_related_event_match(
-        &self,
-        event_match: &RelatedEventMatchCondition,
-        user_id: Option<&str>,
-    ) -> Result<bool, Error> {
-        // First check if related event matching is enabled...
-        if !self.related_event_match_enabled {
-            return Ok(false);
-        }
-
-        // get the related event, fail if there is none.
-        let event = if let Some(event) = self.related_events_flattened.get(&*event_match.rel_type) {
-            event
-        } else {
-            return Ok(false);
-        };
-
-        // If we are not matching fallbacks, don't match if our special key indicating this is a
-        // fallback relation is not present.
-        if !event_match.include_fallbacks.unwrap_or(false)
-            && event.contains_key("im.vector.is_falling_back")
-        {
-            return Ok(false);
-        }
-
-        // if we have no key, accept the event as matching, if it existed without matching any
-        // fields.
-        let key = if let Some(key) = &event_match.key {
-            key
-        } else {
-            return Ok(true);
-        };
-
-        let pattern = if let Some(pattern) = &event_match.pattern {
-            pattern
-        } else if let Some(pattern_type) = &event_match.pattern_type {
-            // The `pattern_type` can either be "user_id" or "user_localpart",
-            // either way if we don't have a `user_id` then the condition can't
-            // match.
-            let user_id = if let Some(user_id) = user_id {
-                user_id
-            } else {
-                return Ok(false);
-            };
-
-            match &**pattern_type {
-                "user_id" => user_id,
-                "user_localpart" => get_localpart_from_id(user_id)?,
-                _ => return Ok(false),
-            }
-        } else {
-            return Ok(false);
-        };
-
-        let haystack =
-            if let Some(JsonValue::Value(SimpleJsonValue::Str(haystack))) = event.get(&**key) {
-                haystack
-            } else {
-                return Ok(false);
-            };
-
-        // For the content.body we match against "words", but for everything
-        // else we match against the entire value.
-        let match_type = if key == "content.body" {
@@ -466,27 +395,73 @@ impl PushRuleEvaluator {
         compiled_pattern.is_match(haystack)
     }

-    /// Evaluates a `exact_event_property_contains` condition. (MSC3758)
-    fn match_exact_event_property_contains(
+    /// Evaluates a `event_property_is` condition.
+    fn match_event_property_is(
         &self,
-        exact_event_match: &ExactEventMatchCondition,
+        event_property_is: &EventPropertyIsCondition,
     ) -> Result<bool, Error> {
-        // First check if the feature is enabled.
-        if !self.msc3966_exact_event_property_contains {
-            return Ok(false);
-        }
+        let value = &event_property_is.value;

-        let value = &exact_event_match.value;
-
-        let haystack = if let Some(JsonValue::Array(haystack)) =
-            self.flattened_keys.get(&*exact_event_match.key)
+        let haystack = if let Some(JsonValue::Value(haystack)) =
+            self.flattened_keys.get(&*event_property_is.key)
         {
             haystack
         } else {
             return Ok(false);
         };

-        Ok(haystack.contains(&**value))
+        Ok(haystack == &**value)
     }

+    /// Evaluates a `related_event_match` condition. (MSC3664)
+    fn match_related_event_match(
+        &self,
+        rel_type: &str,
+        include_fallbacks: Option<bool>,
+        key: Option<Cow<str>>,
+        pattern: Option<Cow<str>>,
+    ) -> Result<bool, Error> {
+        // First check if related event matching is enabled...
+        if !self.related_event_match_enabled {
+            return Ok(false);
+        }
+
+        // get the related event, fail if there is none.
+        let event = if let Some(event) = self.related_events_flattened.get(rel_type) {
+            event
+        } else {
+            return Ok(false);
+        };
+
+        // If we are not matching fallbacks, don't match if our special key indicating this is a
+        // fallback relation is not present.
+        if !include_fallbacks.unwrap_or(false) && event.contains_key("im.vector.is_falling_back") {
+            return Ok(false);
+        }
+
+        match (key, pattern) {
+            // if we have no key, accept the event as matching.
+            (None, _) => Ok(true),
+            // There was a key, so we *must* have a pattern to go with it.
+            (Some(_), None) => Ok(false),
+            // If there is a key & pattern, check if they're in the flattened event (given by rel_type).
+            (Some(key), Some(pattern)) => self.match_event_match(event, &key, &pattern),
+        }
+    }
+
+    /// Evaluates a `event_property_contains` condition.
+    fn match_event_property_contains(
+        &self,
+        key: Cow<str>,
+        value: Cow<SimpleJsonValue>,
+    ) -> Result<bool, Error> {
+        let haystack = if let Some(JsonValue::Array(haystack)) = self.flattened_keys.get(&*key) {
+            haystack
+        } else {
+            return Ok(false);
+        };
+
+        Ok(haystack.contains(&value))
+    }
+
     /// Match the member count against an 'is' condition
@@ -523,7 +498,6 @@ fn push_rule_evaluator() {
     let evaluator = PushRuleEvaluator::py_new(
         flattened_keys,
         false,
-        BTreeSet::new(),
         10,
         Some(0),
         BTreeMap::new(),
@@ -531,8 +505,6 @@ fn push_rule_evaluator() {
         true,
         vec![],
         true,
-        true,
-        true,
     )
     .unwrap();
@@ -555,7 +527,6 @@ fn test_requires_room_version_supports_condition() {
     let evaluator = PushRuleEvaluator::py_new(
         flattened_keys,
         false,
-        BTreeSet::new(),
         10,
         Some(0),
         BTreeMap::new(),
@@ -563,8 +534,6 @@ fn test_requires_room_version_supports_condition() {
         false,
         flags,
         true,
-        true,
-        true,
     )
     .unwrap();
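Aside: the new `EventMatchType`, `RelatedEventMatchType`, and `ExactEventPropertyContainsType` arms all resolve `pattern_type` the same way before delegating to the plain matcher. A standalone sketch of that resolution, where `get_localpart` is a simplified, hypothetical stand-in for Synapse's `get_localpart_from_id`:

// sketch: turning a pattern_type plus the requesting user's MXID into a concrete pattern.
enum EventMatchPatternType {
    UserId,
    UserLocalpart,
}

fn get_localpart(user_id: &str) -> Option<&str> {
    // "@alice:example.com" -> "alice"
    user_id.strip_prefix('@')?.split(':').next()
}

fn resolve_pattern<'a>(
    pattern_type: &EventMatchPatternType,
    user_id: &'a str,
) -> Option<&'a str> {
    match pattern_type {
        EventMatchPatternType::UserId => Some(user_id),
        EventMatchPatternType::UserLocalpart => get_localpart(user_id),
    }
}

fn main() {
    let uid = "@alice:example.com";
    assert_eq!(resolve_pattern(&EventMatchPatternType::UserId, uid), Some(uid));
    assert_eq!(
        resolve_pattern(&EventMatchPatternType::UserLocalpart, uid),
        Some("alice")
    );
}

As in the diff, if no user_id is available the condition simply cannot match, which is why each arm returns Ok(false) early.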
@@ -328,14 +328,19 @@ pub enum Condition {
 #[serde(tag = "kind")]
 pub enum KnownCondition {
     EventMatch(EventMatchCondition),
-    #[serde(rename = "com.beeper.msc3758.exact_event_match")]
-    ExactEventMatch(ExactEventMatchCondition),
+    // Identical to event_match but gives predefined patterns. Cannot be added by users.
+    #[serde(skip_deserializing, rename = "event_match")]
+    EventMatchType(EventMatchTypeCondition),
+    EventPropertyIs(EventPropertyIsCondition),
     #[serde(rename = "im.nheko.msc3664.related_event_match")]
     RelatedEventMatch(RelatedEventMatchCondition),
-    #[serde(rename = "org.matrix.msc3966.exact_event_property_contains")]
-    ExactEventPropertyContains(ExactEventMatchCondition),
-    #[serde(rename = "org.matrix.msc3952.is_user_mention")]
-    IsUserMention,
+    // Identical to related_event_match but gives predefined patterns. Cannot be added by users.
+    #[serde(skip_deserializing, rename = "im.nheko.msc3664.related_event_match")]
+    RelatedEventMatchType(RelatedEventMatchTypeCondition),
+    EventPropertyContains(EventPropertyIsCondition),
+    // Identical to exact_event_property_contains but gives predefined patterns. Cannot be added by users.
+    #[serde(skip_deserializing, rename = "event_property_contains")]
+    ExactEventPropertyContainsType(EventPropertyIsTypeCondition),
     ContainsDisplayName,
     RoomMemberCount {
         #[serde(skip_serializing_if = "Option::is_none")]
@@ -362,23 +367,45 @@ impl<'source> FromPyObject<'source> for Condition {
     }
 }

-/// The body of a [`Condition::EventMatch`]
+/// The body of a [`Condition::EventMatch`] with a pattern.
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct EventMatchCondition {
     pub key: Cow<'static, str>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub pattern: Option<Cow<'static, str>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub pattern_type: Option<Cow<'static, str>>,
+    pub pattern: Cow<'static, str>,
 }

-/// The body of a [`Condition::ExactEventMatch`]
+#[derive(Serialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+pub enum EventMatchPatternType {
+    UserId,
+    UserLocalpart,
+}
+
+/// The body of a [`Condition::EventMatch`] that uses user_id or user_localpart as a pattern.
+#[derive(Serialize, Debug, Clone)]
+pub struct EventMatchTypeCondition {
+    pub key: Cow<'static, str>,
+    // During serialization, the pattern_type property gets replaced with a
+    // pattern property of the correct value in synapse.push.clientformat.format_push_rules_for_user.
+    pub pattern_type: Cow<'static, EventMatchPatternType>,
+}
+
+/// The body of a [`Condition::EventPropertyIs`]
 #[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct ExactEventMatchCondition {
+pub struct EventPropertyIsCondition {
     pub key: Cow<'static, str>,
     pub value: Cow<'static, SimpleJsonValue>,
 }

+/// The body of a [`Condition::EventPropertyIs`] that uses user_id or user_localpart as a pattern.
+#[derive(Serialize, Debug, Clone)]
+pub struct EventPropertyIsTypeCondition {
+    pub key: Cow<'static, str>,
+    // During serialization, the pattern_type property gets replaced with a
+    // pattern property of the correct value in synapse.push.clientformat.format_push_rules_for_user.
+    pub value_type: Cow<'static, EventMatchPatternType>,
+}
+
 /// The body of a [`Condition::RelatedEventMatch`]
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct RelatedEventMatchCondition {
@@ -386,8 +413,18 @@ pub struct RelatedEventMatchCondition {
     pub key: Option<Cow<'static, str>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub pattern: Option<Cow<'static, str>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub pattern_type: Option<Cow<'static, str>>,
     pub rel_type: Cow<'static, str>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub include_fallbacks: Option<bool>,
 }

+/// The body of a [`Condition::RelatedEventMatch`] that uses user_id or user_localpart as a pattern.
+#[derive(Serialize, Debug, Clone)]
+pub struct RelatedEventMatchTypeCondition {
+    // This is only used if pattern_type exists (and thus key must exist), so is
+    // a bit simpler than RelatedEventMatchCondition.
+    pub key: Cow<'static, str>,
+    pub pattern_type: Cow<'static, EventMatchPatternType>,
+    pub rel_type: Cow<'static, str>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub include_fallbacks: Option<bool>,
+}
+
@@ -571,8 +608,7 @@ impl FilteredPushRules {
 fn test_serialize_condition() {
     let condition = Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
         key: "content.body".into(),
-        pattern: Some("coffee".into()),
-        pattern_type: None,
+        pattern: "coffee".into(),
     }));

     let json = serde_json::to_string(&condition).unwrap();
@@ -586,7 +622,33 @@ fn test_serialize_condition() {
 fn test_deserialize_condition() {
     let json = r#"{"kind":"event_match","key":"content.body","pattern":"coffee"}"#;

-    let _: Condition = serde_json::from_str(json).unwrap();
+    let condition: Condition = serde_json::from_str(json).unwrap();
+    assert!(matches!(
+        condition,
+        Condition::Known(KnownCondition::EventMatch(_))
+    ));
+}
+
+#[test]
+fn test_serialize_event_match_condition_with_pattern_type() {
+    let condition = Condition::Known(KnownCondition::EventMatchType(EventMatchTypeCondition {
+        key: "content.body".into(),
+        pattern_type: Cow::Owned(EventMatchPatternType::UserId),
+    }));
+
+    let json = serde_json::to_string(&condition).unwrap();
+    assert_eq!(
+        json,
+        r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#
+    )
+}
+
+#[test]
+fn test_cannot_deserialize_event_match_condition_with_pattern_type() {
+    let json = r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#;
+
+    let condition: Condition = serde_json::from_str(json).unwrap();
+    assert!(matches!(condition, Condition::Unknown(_)));
 }

 #[test]
@@ -600,6 +662,37 @@ fn test_deserialize_unstable_msc3664_condition() {
     ));
 }

+#[test]
+fn test_serialize_unstable_msc3664_condition_with_pattern_type() {
+    let condition = Condition::Known(KnownCondition::RelatedEventMatchType(
+        RelatedEventMatchTypeCondition {
+            key: "content.body".into(),
+            pattern_type: Cow::Owned(EventMatchPatternType::UserId),
+            rel_type: "m.in_reply_to".into(),
+            include_fallbacks: Some(true),
+        },
+    ));
+
+    let json = serde_json::to_string(&condition).unwrap();
+    assert_eq!(
+        json,
+        r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern_type":"user_id","rel_type":"m.in_reply_to","include_fallbacks":true}"#
+    )
+}
+
+#[test]
+fn test_cannot_deserialize_unstable_msc3664_condition_with_pattern_type() {
+    let json = r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern_type":"user_id","rel_type":"m.in_reply_to"}"#;
+
+    let condition: Condition = serde_json::from_str(json).unwrap();
+    // Since pattern is optional on RelatedEventMatch it deserializes it to that
+    // instead of RelatedEventMatchType.
+    assert!(matches!(
+        condition,
+        Condition::Known(KnownCondition::RelatedEventMatch(_))
+    ));
+}
+
 #[test]
 fn test_deserialize_unstable_msc3931_condition() {
     let json =
@@ -613,55 +706,41 @@ fn test_deserialize_unstable_msc3931_condition() {
 }

 #[test]
-fn test_deserialize_unstable_msc3758_condition() {
+fn test_deserialize_event_property_is_condition() {
     // A string condition should work.
-    let json =
-        r#"{"kind":"com.beeper.msc3758.exact_event_match","key":"content.value","value":"foo"}"#;
+    let json = r#"{"kind":"event_property_is","key":"content.value","value":"foo"}"#;

     let condition: Condition = serde_json::from_str(json).unwrap();
     assert!(matches!(
         condition,
-        Condition::Known(KnownCondition::ExactEventMatch(_))
+        Condition::Known(KnownCondition::EventPropertyIs(_))
     ));

     // A boolean condition should work.
-    let json =
-        r#"{"kind":"com.beeper.msc3758.exact_event_match","key":"content.value","value":true}"#;
+    let json = r#"{"kind":"event_property_is","key":"content.value","value":true}"#;

     let condition: Condition = serde_json::from_str(json).unwrap();
     assert!(matches!(
         condition,
-        Condition::Known(KnownCondition::ExactEventMatch(_))
+        Condition::Known(KnownCondition::EventPropertyIs(_))
     ));

     // An integer condition should work.
-    let json = r#"{"kind":"com.beeper.msc3758.exact_event_match","key":"content.value","value":1}"#;
+    let json = r#"{"kind":"event_property_is","key":"content.value","value":1}"#;

     let condition: Condition = serde_json::from_str(json).unwrap();
     assert!(matches!(
         condition,
-        Condition::Known(KnownCondition::ExactEventMatch(_))
+        Condition::Known(KnownCondition::EventPropertyIs(_))
     ));

     // A null condition should work
-    let json =
-        r#"{"kind":"com.beeper.msc3758.exact_event_match","key":"content.value","value":null}"#;
+    let json = r#"{"kind":"event_property_is","key":"content.value","value":null}"#;

     let condition: Condition = serde_json::from_str(json).unwrap();
     assert!(matches!(
         condition,
-        Condition::Known(KnownCondition::ExactEventMatch(_))
-    ));
-}
-
-#[test]
-fn test_deserialize_unstable_msc3952_user_condition() {
-    let json = r#"{"kind":"org.matrix.msc3952.is_user_mention"}"#;
-
-    let condition: Condition = serde_json::from_str(json).unwrap();
-    assert!(matches!(
-        condition,
-        Condition::Known(KnownCondition::IsUserMention)
+        Condition::Known(KnownCondition::EventPropertyIs(_))
     ));
 }
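Aside: the enum above leans on serde's variant-level `skip_deserializing`: two variants share the `event_match` kind when serialising, but only the plain one is visible to the deserialiser, which is what keeps `pattern_type` server-internal. A minimal sketch, assuming serde and serde_json (in Synapse the failed parse falls back to `Condition::Unknown` via an untagged wrapper rather than erroring, so the final assert only holds for this stripped-down version):

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "kind")]
enum Condition {
    #[serde(rename = "event_match")]
    EventMatch { key: String, pattern: String },
    // Serialisable under the same public kind, but never produced by deserialisation.
    #[serde(skip_deserializing, rename = "event_match")]
    EventMatchType { key: String, pattern_type: String },
}

fn main() {
    let internal = Condition::EventMatchType {
        key: "content.body".into(),
        pattern_type: "user_id".into(),
    };
    // Serialises under the public "event_match" kind.
    println!("{}", serde_json::to_string(&internal).unwrap());

    // Round-tripping that JSON cannot recreate the internal variant: the
    // skipped variant is invisible to the deserialiser, and the public
    // variant requires `pattern`, so parsing fails here.
    let json = r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#;
    assert!(serde_json::from_str::<Condition>(json).is_err());
}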
@@ -28,6 +28,7 @@ DISTS = (
     "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
     "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04)
     "ubuntu:kinetic",  # 22.10 (EOL 2023-07-20)
+    "ubuntu:lunar",  # 23.04 (EOL 2024-01)
 )

 DESC = """\

@@ -59,6 +59,11 @@ Run the complement test suite on Synapse.
         is important.
         Not suitable for use in CI in case the editable environment is impure.

+  --rebuild-editable
+        Force a rebuild of the editable build of Synapse.
+        This is occasionally useful if the built-in rebuild detection with
+        --editable fails, e.g. when changing configure_workers_and_start.py.
+
 For help on arguments to 'go test', run 'go help testflag'.
 EOF
 }
@@ -82,6 +87,9 @@ while [ $# -ge 1 ]; do
     "-e"|"--editable")
       use_editable_synapse=1
       ;;
+    "--rebuild-editable")
+      rebuild_editable_synapse=1
+      ;;
     *)
       # unknown arg: presumably an argument to gotest. break the loop.
       break
@@ -116,7 +124,9 @@ if [ -n "$use_editable_synapse" ]; then
   fi

   editable_mount="$(realpath .):/editable-src:z"
-  if docker inspect complement-synapse-editable &>/dev/null; then
+  if [ -n "$rebuild_editable_synapse" ]; then
+    unset skip_docker_build
+  elif docker inspect complement-synapse-editable &>/dev/null; then
     # complement-synapse-editable already exists: see if we can still use it:
     # - The Rust module must still be importable; it will fail to import if the Rust source has changed.
     # - The Poetry lock file must be the same (otherwise we assume dependencies have changed)

@@ -91,6 +91,7 @@ else
       "synapse" "docker" "tests"
       "scripts-dev"
       "contrib" "synmark" "stubs" ".ci"
+      "dev-docs"
     )
   fi
 fi
@@ -112,7 +113,7 @@ python3 -m black "${files[@]}"

 # Catch any common programming mistakes in Python code.
 # --quiet suppresses the update check.
-ruff --quiet "${files[@]}"
+ruff --quiet --fix "${files[@]}"

 # Catch any common programming mistakes in Rust code.
 #

@@ -280,7 +280,7 @@ def _prepare() -> None:
     )

     print("Opening the changelog in your browser...")
-    print("Please ask others to give it a check.")
+    print("Please ask #synapse-dev to give it a check.")
     click.launch(
         f"https://github.com/matrix-org/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md"
     )
@@ -1,39 +0,0 @@
-# Copyright 2020 The Matrix.org Foundation C.I.C.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Stub for frozendict.
-
-from __future__ import annotations
-
-from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload
-
-_KT = TypeVar("_KT", bound=Hashable)  # Key type.
-_VT = TypeVar("_VT")  # Value type.
-
-class frozendict(Mapping[_KT, _VT]):
-    @overload
-    def __init__(self, **kwargs: _VT) -> None: ...
-    @overload
-    def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
-    @overload
-    def __init__(
-        self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
-    ) -> None: ...
-    def __getitem__(self, key: _KT) -> _VT: ...
-    def __contains__(self, key: Any) -> bool: ...
-    def copy(self, **add_or_replace: Any) -> frozendict: ...
-    def __iter__(self) -> Iterator[_KT]: ...
-    def __len__(self) -> int: ...
-    def __repr__(self) -> str: ...
-    def __hash__(self) -> int: ...

@@ -29,7 +29,6 @@ _Repr = Callable[[], str]
 def recursive_repr(fillvalue: str = ...) -> Callable[[_Repr], _Repr]: ...

 class SortedList(MutableSequence[_T]):
-
     DEFAULT_LOAD_FACTOR: int = ...
     def __init__(
         self,

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Set, Tuple, Union
+from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Tuple, Union

 from synapse.types import JsonDict, JsonValue

@@ -58,7 +58,6 @@ class PushRuleEvaluator:
         self,
         flattened_keys: Mapping[str, JsonValue],
         has_mentions: bool,
-        user_mentions: Set[str],
         room_member_count: int,
         sender_power_level: Optional[int],
         notification_power_levels: Mapping[str, int],
@@ -66,8 +65,6 @@ class PushRuleEvaluator:
         related_event_match_enabled: bool,
         room_version_feature_flags: Tuple[str, ...],
         msc3931_enabled: bool,
-        msc3758_exact_event_match: bool,
-        msc3966_exact_event_property_contains: bool,
     ): ...
     def run(
         self,
@@ -1,5 +1,6 @@
 # Copyright 2014-2016 OpenMarket Ltd
-# Copyright 2018-9 New Vector Ltd
+# Copyright 2018-2019 New Vector Ltd
+# Copyright 2023 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,12 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-""" This is a reference implementation of a Matrix homeserver.
+""" This is an implementation of a Matrix homeserver.
 """

-import json
 import os
 import sys
+from typing import Any, Dict

 from synapse.util.rust import check_rust_lib_up_to_date
 from synapse.util.stringutils import strtobool
@@ -60,11 +61,20 @@ try:
 except ImportError:
     pass

-# Use the standard library json implementation instead of simplejson.
+# Teach canonicaljson how to serialise immutabledicts.
 try:
-    from canonicaljson import set_json_library
+    from canonicaljson import register_preserialisation_callback
+    from immutabledict import immutabledict

-    set_json_library(json)
+    def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]:
+        try:
+            return d._dict
+        except Exception:
+            # Paranoia: fall back to a `dict()` call, in case a future version of
+            # immutabledict removes `_dict` from the implementation.
+            return dict(d)
+
+    register_preserialisation_callback(immutabledict, _immutabledict_cb)
 except ImportError:
     pass
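Aside: the same design idea, sketched in Rust terms: register the conversion with the serialiser once (here via a `Serialize` impl on a newtype) instead of converting immutable maps at every call site. Assumes serde and serde_json; `ImmutableDict` is an illustrative stand-in, not a real crate type, just as `_immutabledict_cb` above defers to the inner dict.

use std::collections::BTreeMap;

use serde::{Serialize, Serializer};

struct ImmutableDict(BTreeMap<String, String>);

impl Serialize for ImmutableDict {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Serialise exactly as the underlying map would, analogous to dict(d).
        self.0.serialize(serializer)
    }
}

fn main() {
    let d = ImmutableDict(BTreeMap::from([("key".to_string(), "value".to_string())]));
    println!("{}", serde_json::to_string(&d).unwrap()); // {"key":"value"}
}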
synapse/_scripts/generate_workers_map.py (new executable file, 302 lines)
@@ -0,0 +1,302 @@
#!/usr/bin/env python
# Copyright 2022-2023 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Dict, Iterable, Optional, Pattern, Set, Tuple

import yaml

from synapse.config.homeserver import HomeServerConfig
from synapse.federation.transport.server import (
    TransportLayerServer,
    register_servlets as register_federation_servlets,
)
from synapse.http.server import HttpServer, ServletCallback
from synapse.rest import ClientRestResource
from synapse.rest.key.v2 import RemoteKey
from synapse.server import HomeServer
from synapse.storage import DataStore

logger = logging.getLogger("generate_workers_map")


class MockHomeserver(HomeServer):
    DATASTORE_CLASS = DataStore  # type: ignore

    def __init__(self, config: HomeServerConfig, worker_app: Optional[str]) -> None:
        super().__init__(config.server.server_name, config=config)
        self.config.worker.worker_app = worker_app


GROUP_PATTERN = re.compile(r"\(\?P<[^>]+?>(.+?)\)")


@dataclass
class EndpointDescription:
    """
    Describes an endpoint and how it should be routed.
    """

    # The servlet class that handles this endpoint
    servlet_class: object

    # The category of this endpoint. Is read from the `CATEGORY` constant in the servlet
    # class.
    category: Optional[str]

    # TODO:
    # - does it need to be routed based on a stream writer config?
    # - does it benefit from any optimised, but optional, routing?
    # - what 'opinionated synapse worker class' (event_creator, synchrotron, etc) does
    #   it go in?


class EnumerationResource(HttpServer):
    """
    Accepts servlet registrations for the purposes of building up a description of
    all endpoints.
    """

    def __init__(self, is_worker: bool) -> None:
        self.registrations: Dict[Tuple[str, str], EndpointDescription] = {}
        self._is_worker = is_worker

    def register_paths(
        self,
        method: str,
        path_patterns: Iterable[Pattern],
        callback: ServletCallback,
        servlet_classname: str,
    ) -> None:
        # federation servlet callbacks are wrapped, so unwrap them.
        callback = getattr(callback, "__wrapped__", callback)

        # fish out the servlet class
        servlet_class = callback.__self__.__class__  # type: ignore

        if self._is_worker and method in getattr(
            servlet_class, "WORKERS_DENIED_METHODS", ()
        ):
            # This endpoint would cause an error if called on a worker, so pretend it
            # was never registered!
            return

        sd = EndpointDescription(
            servlet_class=servlet_class,
            category=getattr(servlet_class, "CATEGORY", None),
        )

        for pat in path_patterns:
            self.registrations[(method, pat.pattern)] = sd


def get_registered_paths_for_hs(
    hs: HomeServer,
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Given a homeserver, get all registered endpoints and their descriptions.
    """

    enumerator = EnumerationResource(is_worker=hs.config.worker.worker_app is not None)
    ClientRestResource.register_servlets(enumerator, hs)
    federation_server = TransportLayerServer(hs)

    # we can't use `federation_server.register_servlets` but this line does the
    # same thing, only it uses this enumerator
    register_federation_servlets(
        federation_server.hs,
        resource=enumerator,
        ratelimiter=federation_server.ratelimiter,
        authenticator=federation_server.authenticator,
        servlet_groups=federation_server.servlet_groups,
    )

    # the key server endpoints are separate again
    RemoteKey(hs).register(enumerator)

    return enumerator.registrations


def get_registered_paths_for_default(
    worker_app: Optional[str], base_config: HomeServerConfig
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Given the name of a worker application and a base homeserver configuration,
    returns:

        Dict from (method, path) to EndpointDescription

    TODO Don't require passing in a config
    """

    hs = MockHomeserver(base_config, worker_app)
    # TODO We only do this to avoid an error, but don't need the database etc
    hs.setup()
    return get_registered_paths_for_hs(hs)


def elide_http_methods_if_unconflicting(
    registrations: Dict[Tuple[str, str], EndpointDescription],
    all_possible_registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Elides HTTP methods (by replacing them with `*`) if all possible registered methods
    can be handled by the worker whose registration map is `registrations`.

    i.e. the only endpoints left with methods (other than `*`) should be the ones where
    the worker can't handle all possible methods for that path.
    """

    def paths_to_methods_dict(
        methods_and_paths: Iterable[Tuple[str, str]]
    ) -> Dict[str, Set[str]]:
        """
        Given (method, path) pairs, produces a dict from path to set of methods
        available at that path.
        """
        result: Dict[str, Set[str]] = {}
        for method, path in methods_and_paths:
            result.setdefault(path, set()).add(method)
        return result

    all_possible_reg_methods = paths_to_methods_dict(all_possible_registrations)
    reg_methods = paths_to_methods_dict(registrations)

    output = {}

    for path, handleable_methods in reg_methods.items():
        if handleable_methods == all_possible_reg_methods[path]:
            any_method = next(iter(handleable_methods))
            # TODO This assumes that all methods have the same servlet.
            #      I suppose that's possibly dubious?
            output[("*", path)] = registrations[(any_method, path)]
        else:
            for method in handleable_methods:
                output[(method, path)] = registrations[(method, path)]

    return output


def simplify_path_regexes(
    registrations: Dict[Tuple[str, str], EndpointDescription]
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Simplify all the path regexes for the dict of endpoint descriptions,
    so that we don't use the Python-specific regex extensions
    (and also to remove needlessly specific detail).
    """

    def simplify_path_regex(path: str) -> str:
        """
        Given a regex pattern, replaces all named capturing groups (e.g. `(?P<blah>xyz)`)
        with a simpler version available in more common regex dialects (e.g. `.*`).
        """

        # TODO it's hard to choose between these two;
        #      `.*` is a vague simplification
        # return GROUP_PATTERN.sub(r"\1", path)
        return GROUP_PATTERN.sub(r".*", path)

    return {(m, simplify_path_regex(p)): v for (m, p), v in registrations.items()}


def main() -> None:
    parser = argparse.ArgumentParser(
        description=(
            "Updates a synapse database to the latest schema and optionally runs background updates"
            " on it."
        )
    )
    parser.add_argument("-v", action="store_true")
    parser.add_argument(
        "--config-path",
        type=argparse.FileType("r"),
        required=True,
        help="Synapse configuration file",
    )

    args = parser.parse_args()

    # TODO
    # logging.basicConfig(**logging_config)

    # Load, process and sanity-check the config.
    hs_config = yaml.safe_load(args.config_path)

    config = HomeServerConfig()
    config.parse_config_dict(hs_config, "", "")

    master_paths = get_registered_paths_for_default(None, config)
    worker_paths = get_registered_paths_for_default(
        "synapse.app.generic_worker", config
    )

    all_paths = {**master_paths, **worker_paths}

    elided_worker_paths = elide_http_methods_if_unconflicting(worker_paths, all_paths)
    elide_http_methods_if_unconflicting(master_paths, all_paths)

    # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT

    categories_to_methods_and_paths: Dict[
        Optional[str], Dict[Tuple[str, str], EndpointDescription]
    ] = defaultdict(dict)

    for (method, path), desc in elided_worker_paths.items():
        categories_to_methods_and_paths[desc.category][method, path] = desc

    for category, contents in categories_to_methods_and_paths.items():
        print_category(category, contents)


def print_category(
    category_name: Optional[str],
    elided_worker_paths: Dict[Tuple[str, str], EndpointDescription],
) -> None:
    """
    Prints out a category, in documentation page style.

    Example:

    # Category name
    /path/xyz

    GET /path/abc
    """

    if category_name:
        print(f"# {category_name}")
    else:
        print("# (Uncategorised requests)")

    for ln in sorted(
        p for m, p in simplify_path_regexes(elided_worker_paths) if m == "*"
    ):
        print(ln)
    print()
    for ln in sorted(
        f"{m:6} {p}" for m, p in simplify_path_regexes(elided_worker_paths) if m != "*"
    ):
        print(ln)
    print()


if __name__ == "__main__":
    main()
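Aside: the elision step is the subtle part of the script above. A hedged re-sketch of the same algorithm in Rust (illustrative only; the real script operates on Python dicts of servlet registrations): if a worker handles every method registered for a path, the methods collapse to a single `*` entry, otherwise each method is kept.

use std::collections::{BTreeMap, BTreeSet};

fn elide_methods(
    registrations: &BTreeSet<(String, String)>,     // (method, path) a worker handles
    all_registrations: &BTreeSet<(String, String)>, // every registered (method, path)
) -> BTreeSet<(String, String)> {
    // Group methods by path, for both the worker's view and the full view.
    let mut methods_by_path: BTreeMap<&str, BTreeSet<&str>> = BTreeMap::new();
    for (m, p) in all_registrations {
        methods_by_path.entry(p.as_str()).or_default().insert(m.as_str());
    }
    let mut handled_by_path: BTreeMap<&str, BTreeSet<&str>> = BTreeMap::new();
    for (m, p) in registrations {
        handled_by_path.entry(p.as_str()).or_default().insert(m.as_str());
    }

    let mut out = BTreeSet::new();
    for (path, handled) in handled_by_path {
        if Some(&handled) == methods_by_path.get(path) {
            // Worker covers every method for this path: elide to "*".
            out.insert(("*".to_string(), path.to_string()));
        } else {
            for m in handled {
                out.insert((m.to_string(), path.to_string()));
            }
        }
    }
    out
}

fn main() {
    let all: BTreeSet<_> = [("GET", "/sync"), ("GET", "/send"), ("PUT", "/send")]
        .iter()
        .map(|(m, p)| (m.to_string(), p.to_string()))
        .collect();
    let worker: BTreeSet<_> = [("GET", "/sync"), ("GET", "/send")]
        .iter()
        .map(|(m, p)| (m.to_string(), p.to_string()))
        .collect();
    for (m, p) in elide_methods(&worker, &all) {
        println!("{m:6} {p}"); // prints "*  /sync" and "GET  /send"
    }
}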
@ -37,7 +37,7 @@ import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from synapse.rest.media.v1.filepath import MediaFilePaths
|
||||
from synapse.media.filepath import MediaFilePaths
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
|
@ -47,7 +47,6 @@ def request_registration(
|
||||
_print: Callable[[str], None] = print,
|
||||
exit: Callable[[int], None] = sys.exit,
|
||||
) -> None:
|
||||
|
||||
url = "%s/_synapse/admin/v1/register" % (server_location.rstrip("/"),)
|
||||
|
||||
# Get the nonce
|
||||
@ -154,7 +153,6 @@ def register_new_user(
|
||||
|
||||
|
||||
def main() -> None:
|
||||
|
||||
logging.captureWarnings(True)
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
|
@@ -18,6 +18,7 @@
import argparse
import curses
import logging
import os
import sys
import time
import traceback
@@ -67,7 +68,10 @@ from synapse.storage.databases.main.media_repository import (
    MediaRepositoryBackgroundUpdateStore,
)
from synapse.storage.databases.main.presence import PresenceBackgroundUpdateStore
from synapse.storage.databases.main.pusher import PusherWorkerStore
from synapse.storage.databases.main.pusher import (
    PusherBackgroundUpdatesStore,
    PusherWorkerStore,
)
from synapse.storage.databases.main.receipts import ReceiptsBackgroundUpdateStore
from synapse.storage.databases.main.registration import (
    RegistrationBackgroundUpdateStore,
@@ -225,6 +229,7 @@ class Store(
    AccountDataWorkerStore,
    PushRuleStore,
    PusherWorkerStore,
    PusherBackgroundUpdatesStore,
    PresenceBackgroundUpdateStore,
    ReceiptsBackgroundUpdateStore,
    RelationsWorkerStore,
@@ -1205,7 +1210,6 @@ class CursesProgress(Progress):
        if self.finished:
            status = "Time spent: %s (Done!)" % (duration_str,)
        else:

            if self.total_processed > 0:
                left = float(self.total_remaining) / self.total_processed

@@ -1327,10 +1331,17 @@ def main() -> None:
        filename="port-synapse.log" if args.curses else None,
    )

    if not os.path.isfile(args.sqlite_database):
        sys.stderr.write(
            "The sqlite database you specified does not exist, please check that you have the "
            "correct path."
        )
        sys.exit(1)

    sqlite_config = {
        "name": "sqlite3",
        "args": {
            "database": "file:{}?mode=rw".format(args.sqlite_database),
            "database": args.sqlite_database,
            "cp_min": 1,
            "cp_max": 1,
            "check_same_thread": False,
@@ -167,7 +167,6 @@ Worker = collections.namedtuple(


def main() -> None:

    parser = argparse.ArgumentParser()

    parser.add_argument(
@@ -254,7 +254,7 @@ class Auth:
            raise MissingClientTokenError()

    async def validate_appservice_can_control_user_id(
        self, app_service: ApplicationService, user_id: str
        self, app_service: ApplicationService, user_id: str, allow_any: bool = False
    ) -> None:
        """Validates that the app service is allowed to control
        the given user.
@@ -262,6 +262,7 @@ class Auth:
        Args:
            app_service: The app service that controls the user
            user_id: The author MXID that the app service is controlling
            allow_any: Allow the appservice to control any local user

        Raises:
            AuthError: If the application service is not allowed to control the user
@@ -273,7 +274,7 @@ class Auth:
        if app_service.sender == user_id:
            pass
        # Check to make sure the app service is allowed to control the user
        elif not app_service.is_interested_in_user(user_id):
        elif not app_service.is_interested_in_user(user_id) and not allow_any:
            raise AuthError(
                403,
                "Application service cannot masquerade as this user (%s)." % user_id,
@@ -27,7 +27,7 @@ from synapse.util import json_decoder

if typing.TYPE_CHECKING:
    from synapse.config.homeserver import HomeServerConfig
    from synapse.types import JsonDict
    from synapse.types import JsonDict, StrCollection

logger = logging.getLogger(__name__)

@@ -108,6 +108,11 @@ class Codes(str, Enum):

    USER_AWAITING_APPROVAL = "ORG.MATRIX.MSC3866_USER_AWAITING_APPROVAL"

    AS_PING_URL_NOT_SET = "FI.MAU.MSC2659_URL_NOT_SET"
    AS_PING_BAD_STATUS = "FI.MAU.MSC2659_BAD_STATUS"
    AS_PING_CONNECTION_TIMEOUT = "FI.MAU.MSC2659_CONNECTION_TIMEOUT"
    AS_PING_CONNECTION_FAILED = "FI.MAU.MSC2659_CONNECTION_FAILED"

    # Attempt to send a second annotation with the same event type & annotation key
    # MSC2677
    DUPLICATE_ANNOTATION = "M_DUPLICATE_ANNOTATION"
@@ -677,18 +682,27 @@ class FederationPullAttemptBackoffError(RuntimeError):
    Attributes:
        event_id: The event_id which we are refusing to pull
        message: A custom error message that gives more context
        retry_after_ms: The remaining backoff interval, in milliseconds
    """

    def __init__(self, event_ids: List[str], message: Optional[str]):
        self.event_ids = event_ids
    def __init__(
        self, event_ids: "StrCollection", message: Optional[str], retry_after_ms: int
    ):
        event_ids = list(event_ids)

        if message:
            error_message = message
        else:
            error_message = f"Not attempting to pull event_ids={self.event_ids} because we already tried to pull them recently (backing off)."
            error_message = (
                f"Not attempting to pull event_ids={event_ids} because we already "
                "tried to pull them recently (backing off)."
            )

        super().__init__(error_message)

        self.event_ids = event_ids
        self.retry_after_ms = retry_after_ms

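This hunk does not show how callers consume the new retry_after_ms attribute; as a minimal sketch only (the pull_events coroutine and event IDs are hypothetical, not a Synapse API), a caller could honour the backoff like this:

```python
import asyncio

from synapse.api.errors import FederationPullAttemptBackoffError


async def pull_with_backoff(pull_events, event_ids):
    # pull_events is a hypothetical coroutine that raises
    # FederationPullAttemptBackoffError while the backoff window is active.
    try:
        await pull_events(event_ids)
    except FederationPullAttemptBackoffError as e:
        # Wait out the remaining backoff interval, then retry once.
        await asyncio.sleep(e.retry_after_ms / 1000)
        await pull_events(e.event_ids)
```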
class HttpResponseException(CodeMessageException):
    """

@@ -41,7 +41,12 @@ from typing_extensions import ParamSpec

import twisted
from twisted.internet import defer, error, reactor as _reactor
from twisted.internet.interfaces import IOpenSSLContextFactory, IReactorSSL, IReactorTCP
from twisted.internet.interfaces import (
    IOpenSSLContextFactory,
    IReactorSSL,
    IReactorTCP,
    IReactorUNIX,
)
from twisted.internet.protocol import ServerFactory
from twisted.internet.tcp import Port
from twisted.logger import LoggingFile, LogLevel
@@ -56,7 +61,7 @@ from synapse.app.phone_stats_home import start_phone_stats_home
from synapse.config import ConfigError
from synapse.config._base import format_config_error
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import ListenerConfig, ManholeConfig
from synapse.config.server import ListenerConfig, ManholeConfig, TCPListenerConfig
from synapse.crypto import context_factory
from synapse.events.presence_router import load_legacy_presence_router
from synapse.events.spamcheck import load_legacy_spam_checkers
@@ -213,7 +218,7 @@ def handle_startup_exception(e: Exception) -> NoReturn:
def redirect_stdio_to_logs() -> None:
    streams = [("stdout", LogLevel.info), ("stderr", LogLevel.error)]

    for (stream, level) in streams:
    for stream, level in streams:
        oldStream = getattr(sys, stream)
        loggingFile = LoggingFile(
            logger=twisted.logger.Logger(namespace=stream),
@@ -351,6 +356,28 @@ def listen_tcp(
    return r  # type: ignore[return-value]


def listen_unix(
    path: str,
    mode: int,
    factory: ServerFactory,
    reactor: IReactorUNIX = reactor,
    backlog: int = 50,
) -> List[Port]:
    """
    Create a UNIX socket for a given path and 'mode' permission

    Returns:
        list of twisted.internet.tcp.Port listening for TCP connections
    """
    wantPID = True

    return [
        # IReactorUNIX returns an object implementing IListeningPort from listenUNIX,
        # but we know it will be a Port instance.
        cast(Port, reactor.listenUNIX(path, factory, backlog, mode, wantPID))
    ]


def listen_http(
    listener_config: ListenerConfig,
    root_resource: Resource,
@@ -359,18 +386,13 @@ def listen_http(
    context_factory: Optional[IOpenSSLContextFactory],
    reactor: ISynapseReactor = reactor,
) -> List[Port]:
    port = listener_config.port
    bind_addresses = listener_config.bind_addresses
    tls = listener_config.tls

    assert listener_config.http_options is not None

    site_tag = listener_config.http_options.tag
    if site_tag is None:
        site_tag = str(port)
    site_tag = listener_config.get_site_tag()

    site = SynapseSite(
        "synapse.access.%s.%s" % ("https" if tls else "http", site_tag),
        "synapse.access.%s.%s"
        % ("https" if listener_config.is_tls() else "http", site_tag),
        site_tag,
        listener_config,
        root_resource,
@@ -378,25 +400,41 @@ def listen_http(
        max_request_body_size=max_request_body_size,
        reactor=reactor,
    )
    if tls:
        # refresh_certificate should have been called before this.
        assert context_factory is not None
        ports = listen_ssl(
            bind_addresses,
            port,
            site,
            context_factory,
            reactor=reactor,
        )
        logger.info("Synapse now listening on TCP port %d (TLS)", port)

    if isinstance(listener_config, TCPListenerConfig):
        if listener_config.is_tls():
            # refresh_certificate should have been called before this.
            assert context_factory is not None
            ports = listen_ssl(
                listener_config.bind_addresses,
                listener_config.port,
                site,
                context_factory,
                reactor=reactor,
            )
            logger.info(
                "Synapse now listening on TCP port %d (TLS)", listener_config.port
            )
        else:
            ports = listen_tcp(
                listener_config.bind_addresses,
                listener_config.port,
                site,
                reactor=reactor,
            )
            logger.info("Synapse now listening on TCP port %d", listener_config.port)

    else:
        ports = listen_tcp(
            bind_addresses,
            port,
            site,
            reactor=reactor,
        ports = listen_unix(
            listener_config.path, listener_config.mode, site, reactor=reactor
        )
        logger.info("Synapse now listening on TCP port %d", port)
        # getHost() returns a UNIXAddress which contains an instance variable of 'name'
        # encoded as a byte string. Decode as utf-8 so pretty.
        logger.info(
            "Synapse now listening on Unix Socket at: "
            f"{ports[0].getHost().name.decode('utf-8')}"
        )

    return ports

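A rough usage sketch for the new listen_unix helper, assuming a plain Twisted ServerFactory is enough for illustration (the socket path is made up):

```python
from twisted.internet import reactor
from twisted.internet.protocol import ServerFactory

from synapse.app._base import listen_unix

# Hypothetical path and factory, for illustration only.
ports = listen_unix("/run/synapse/public.sock", 0o666, ServerFactory(), reactor=reactor)
# Each returned Port can later be shut down with ports[0].stopListening().
```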
@@ -44,6 +44,7 @@ from synapse.storage.databases.main.event_push_actions import (
)
from synapse.storage.databases.main.events_worker import EventsWorkerStore
from synapse.storage.databases.main.filtering import FilteringWorkerStore
from synapse.storage.databases.main.media_repository import MediaRepositoryStore
from synapse.storage.databases.main.profile import ProfileWorkerStore
from synapse.storage.databases.main.push_rule import PushRulesWorkerStore
from synapse.storage.databases.main.receipts import ReceiptsWorkerStore
@@ -86,6 +87,7 @@ class AdminCmdSlavedStore(
    RegistrationWorkerStore,
    RoomWorkerStore,
    ProfileWorkerStore,
    MediaRepositoryStore,
):
    def __init__(
        self,
@@ -149,7 +151,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):

        with open(events_file, "a") as f:
            for event in events:
                print(json.dumps(event.get_pdu_json()), file=f)
                json.dump(event.get_pdu_json(), fp=f)

    def write_state(
        self, room_id: str, event_id: str, state: StateMap[EventBase]
@@ -162,7 +164,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):

        with open(event_file, "a") as f:
            for event in state.values():
                print(json.dumps(event.get_pdu_json()), file=f)
                json.dump(event.get_pdu_json(), fp=f)

    def write_invite(
        self, room_id: str, event: EventBase, state: StateMap[EventBase]
@@ -178,7 +180,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):

        with open(invite_state, "a") as f:
            for event in state.values():
                print(json.dumps(event), file=f)
                json.dump(event, fp=f)

    def write_knock(
        self, room_id: str, event: EventBase, state: StateMap[EventBase]
@@ -194,7 +196,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):

        with open(knock_state, "a") as f:
            for event in state.values():
                print(json.dumps(event), file=f)
                json.dump(event, fp=f)

    def write_profile(self, profile: JsonDict) -> None:
        user_directory = os.path.join(self.base_directory, "user_data")
@@ -202,7 +204,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
        profile_file = os.path.join(user_directory, "profile")

        with open(profile_file, "a") as f:
            print(json.dumps(profile), file=f)
            json.dump(profile, fp=f)

    def write_devices(self, devices: List[JsonDict]) -> None:
        user_directory = os.path.join(self.base_directory, "user_data")
@@ -211,7 +213,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):

        for device in devices:
            with open(device_file, "a") as f:
                print(json.dumps(device), file=f)
                json.dump(device, fp=f)

    def write_connections(self, connections: List[JsonDict]) -> None:
        user_directory = os.path.join(self.base_directory, "user_data")
@@ -220,7 +222,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):

        for connection in connections:
            with open(connection_file, "a") as f:
                print(json.dumps(connection), file=f)
                json.dump(connection, fp=f)

    def write_account_data(
        self, file_name: str, account_data: Mapping[str, JsonDict]
@@ -233,7 +235,15 @@ class FileExfiltrationWriter(ExfiltrationWriter):
        account_data_file = os.path.join(account_data_directory, file_name)

        with open(account_data_file, "a") as f:
            print(json.dumps(account_data), file=f)
            json.dump(account_data, fp=f)

    def write_media_id(self, media_id: str, media_metadata: JsonDict) -> None:
        file_directory = os.path.join(self.base_directory, "media_ids")
        os.makedirs(file_directory, exist_ok=True)
        media_id_file = os.path.join(file_directory, media_id)

        with open(media_id_file, "w") as f:
            json.dump(media_metadata, fp=f)

    def finished(self) -> str:
        return self.base_directory
@@ -219,7 +219,7 @@ def main() -> None:
    # memory space and don't need to repeat the work of loading the code!
    # Instead of using fork() directly, we use the multiprocessing library,
    # which uses fork() on Unix platforms.
    for (func, worker_args) in zip(worker_functions, args_by_worker):
    for func, worker_args in zip(worker_functions, args_by_worker):
        process = multiprocessing.Process(
            target=_worker_entrypoint, args=(func, proxy_reactor, worker_args)
        )
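One subtlety in the FileExfiltrationWriter change above: print(json.dumps(obj), file=f) emits newline-delimited JSON, whereas json.dump(obj, fp=f) appends objects back to back with no separator. A quick self-contained illustration (the file path is arbitrary):

```python
import json

with open("/tmp/out.jsonl", "a") as f:
    print(json.dumps({"a": 1}), file=f)  # writes '{"a": 1}' followed by a newline
    json.dump({"a": 1}, fp=f)            # writes '{"a": 1}' with no trailing newline
```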
@@ -38,7 +38,7 @@ from synapse.app._base import (
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.config.server import ListenerConfig
from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.server import JsonResource, OptionsResource
from synapse.logging.context import LoggingContext
@@ -157,7 +157,6 @@ class GenericWorkerServer(HomeServer):
    DATASTORE_CLASS = GenericWorkerSlavedStore  # type: ignore

    def _listen_http(self, listener_config: ListenerConfig) -> None:

        assert listener_config.http_options is not None

        # We always include a health resource.
@@ -237,12 +236,18 @@ class GenericWorkerServer(HomeServer):
            if listener.type == "http":
                self._listen_http(listener)
            elif listener.type == "manhole":
                _base.listen_manhole(
                    listener.bind_addresses,
                    listener.port,
                    manhole_settings=self.config.server.manhole_settings,
                    manhole_globals={"hs": self},
                )
                if isinstance(listener, TCPListenerConfig):
                    _base.listen_manhole(
                        listener.bind_addresses,
                        listener.port,
                        manhole_settings=self.config.server.manhole_settings,
                        manhole_globals={"hs": self},
                    )
                else:
                    raise ConfigError(
                        "Can not use a unix socket for manhole at this time."
                    )

            elif listener.type == "metrics":
                if not self.config.metrics.enable_metrics:
                    logger.warning(
@@ -250,10 +255,16 @@ class GenericWorkerServer(HomeServer):
                        "enable_metrics is not True!"
                    )
                else:
                    _base.listen_metrics(
                        listener.bind_addresses,
                        listener.port,
                    )
                    if isinstance(listener, TCPListenerConfig):
                        _base.listen_metrics(
                            listener.bind_addresses,
                            listener.port,
                        )
                    else:
                        raise ConfigError(
                            "Can not use a unix socket for metrics at this time."
                        )

            else:
                logger.warning("Unsupported listener type: %s", listener.type)

@@ -44,7 +44,7 @@ from synapse.app._base import (
)
from synapse.config._base import ConfigError, format_config_error
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import ListenerConfig
from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.additional_resource import AdditionalResource
from synapse.http.server import (
@@ -78,14 +78,13 @@ class SynapseHomeServer(HomeServer):
    DATASTORE_CLASS = DataStore  # type: ignore

    def _listener_http(
        self, config: HomeServerConfig, listener_config: ListenerConfig
        self,
        config: HomeServerConfig,
        listener_config: ListenerConfig,
    ) -> Iterable[Port]:
        port = listener_config.port
        # Must exist since this is an HTTP listener.
        assert listener_config.http_options is not None
        site_tag = listener_config.http_options.tag
        if site_tag is None:
            site_tag = str(port)
        site_tag = listener_config.get_site_tag()

        # We always include a health resource.
        resources: Dict[str, Resource] = {"/health": HealthResource()}
@@ -252,12 +251,17 @@ class SynapseHomeServer(HomeServer):
                    self._listener_http(self.config, listener)
                )
            elif listener.type == "manhole":
                _base.listen_manhole(
                    listener.bind_addresses,
                    listener.port,
                    manhole_settings=self.config.server.manhole_settings,
                    manhole_globals={"hs": self},
                )
                if isinstance(listener, TCPListenerConfig):
                    _base.listen_manhole(
                        listener.bind_addresses,
                        listener.port,
                        manhole_settings=self.config.server.manhole_settings,
                        manhole_globals={"hs": self},
                    )
                else:
                    raise ConfigError(
                        "Can not use a unix socket for manhole at this time."
                    )
            elif listener.type == "metrics":
                if not self.config.metrics.enable_metrics:
                    logger.warning(
@@ -265,10 +269,16 @@ class SynapseHomeServer(HomeServer):
                        "enable_metrics is not True!"
                    )
                else:
                    _base.listen_metrics(
                        listener.bind_addresses,
                        listener.port,
                    )
                    if isinstance(listener, TCPListenerConfig):
                        _base.listen_metrics(
                            listener.bind_addresses,
                            listener.port,
                        )
                    else:
                        raise ConfigError(
                            "Can not use a unix socket for metrics at this time."
                        )

            else:
                # this shouldn't happen, as the listener type should have been checked
                # during parsing
@@ -321,7 +331,6 @@ def setup(config_options: List[str]) -> SynapseHomeServer:
        and not config.registration.registrations_require_3pid
        and not config.registration.registration_requires_token
    ):

        raise ConfigError(
            "You have enabled open registration without any verification. This is a known vector for "
            "spam and abuse. If you would like to allow public registration, please consider adding email, "
@@ -17,6 +17,8 @@ import urllib.parse
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Iterable,
    List,
@@ -24,13 +26,14 @@ from typing import (
    Optional,
    Sequence,
    Tuple,
    TypeVar,
)

from prometheus_client import Counter
from typing_extensions import TypeGuard
from typing_extensions import Concatenate, ParamSpec, TypeGuard

from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind
from synapse.api.errors import CodeMessageException
from synapse.api.errors import CodeMessageException, HttpResponseException
from synapse.appservice import (
    ApplicationService,
    TransactionOneTimeKeysCount,
@@ -38,7 +41,7 @@ from synapse.appservice import (
)
from synapse.events import EventBase
from synapse.events.utils import SerializeEventConfig, serialize_event
from synapse.http.client import SimpleHttpClient
from synapse.http.client import SimpleHttpClient, is_unknown_endpoint
from synapse.types import DeviceListUpdates, JsonDict, ThirdPartyInstanceID
from synapse.util.caches.response_cache import ResponseCache

@@ -78,7 +81,11 @@ sent_todevice_counter = Counter(
HOUR_IN_MS = 60 * 60 * 1000


APP_SERVICE_PREFIX = "/_matrix/app/unstable"
APP_SERVICE_PREFIX = "/_matrix/app/v1"
APP_SERVICE_UNSTABLE_PREFIX = "/_matrix/app/unstable"

P = ParamSpec("P")
R = TypeVar("R")


def _is_valid_3pe_metadata(info: JsonDict) -> bool:
@@ -121,6 +128,47 @@ class ApplicationServiceApi(SimpleHttpClient):
            hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
        )

    async def _send_with_fallbacks(
        self,
        service: "ApplicationService",
        prefixes: List[str],
        path: str,
        func: Callable[Concatenate[str, P], Awaitable[R]],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> R:
        """
        Attempt to call an application service with multiple paths, falling back
        until one succeeds.

        Args:
            service: The application service, this provides the base URL.
            prefixes: A list of paths to try in order for the requests.
            path: A suffix to append to each prefix.
            func: The function to call, the first argument will be the full
                endpoint to fetch. Other arguments are provided by args/kwargs.

        Returns:
            The return value of func.
        """
        for i, prefix in enumerate(prefixes, start=1):
            uri = f"{service.url}{prefix}{path}"
            try:
                return await func(uri, *args, **kwargs)
            except HttpResponseException as e:
                # If an error is received that is due to an unrecognised path,
                # fallback to next path (if one exists). Otherwise, consider it
                # a legitimate error and raise.
                if i < len(prefixes) and is_unknown_endpoint(e):
                    continue
                raise
            except Exception:
                # Unexpected exceptions get sent to the caller.
                raise

        # The function should always exit via the return or raise above this.
        raise RuntimeError("Unexpected fallback behaviour. This should never be seen.")

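Concretely, for a user query the helper tries the stable prefix first and then the legacy unprefixed path; a sketch of the URIs attempted, with a hypothetical appservice base URL:

```python
service_url = "https://as.example.com"  # hypothetical appservice base URL
prefixes = ["/_matrix/app/v1", ""]
path = "/users/%40alice%3Aexample.com"

# URIs attempted in order until one is not an "unknown endpoint":
for prefix in prefixes:
    print(f"{service_url}{prefix}{path}")
# https://as.example.com/_matrix/app/v1/users/%40alice%3Aexample.com
# https://as.example.com/users/%40alice%3Aexample.com
```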
    async def query_user(self, service: "ApplicationService", user_id: str) -> bool:
        if service.url is None:
            return False
@@ -128,10 +176,12 @@ class ApplicationServiceApi(SimpleHttpClient):
        # This is required by the configuration.
        assert service.hs_token is not None

        uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
        try:
            response = await self.get_json(
                uri,
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/users/{urllib.parse.quote(user_id)}",
                self.get_json,
                {"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
@@ -140,9 +190,9 @@ class ApplicationServiceApi(SimpleHttpClient):
        except CodeMessageException as e:
            if e.code == 404:
                return False
            logger.warning("query_user to %s received %s", uri, e.code)
            logger.warning("query_user to %s received %s", service.url, e.code)
        except Exception as ex:
            logger.warning("query_user to %s threw exception %s", uri, ex)
            logger.warning("query_user to %s threw exception %s", service.url, ex)
        return False

    async def query_alias(self, service: "ApplicationService", alias: str) -> bool:
@@ -152,21 +202,23 @@ class ApplicationServiceApi(SimpleHttpClient):
        # This is required by the configuration.
        assert service.hs_token is not None

        uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
        try:
            response = await self.get_json(
                uri,
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/rooms/{urllib.parse.quote(alias)}",
                self.get_json,
                {"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
            if response is not None:  # just an empty json object
                return True
        except CodeMessageException as e:
            logger.warning("query_alias to %s received %s", uri, e.code)
            logger.warning("query_alias to %s received %s", service.url, e.code)
            if e.code == 404:
                return False
        except Exception as ex:
            logger.warning("query_alias to %s threw exception %s", uri, ex)
            logger.warning("query_alias to %s threw exception %s", service.url, ex)
        return False

    async def query_3pe(
@@ -188,25 +240,24 @@ class ApplicationServiceApi(SimpleHttpClient):
        # This is required by the configuration.
        assert service.hs_token is not None

        uri = "%s%s/thirdparty/%s/%s" % (
            service.url,
            APP_SERVICE_PREFIX,
            kind,
            urllib.parse.quote(protocol),
        )
        try:
            args: Mapping[Any, Any] = {
                **fields,
                b"access_token": service.hs_token,
            }
            response = await self.get_json(
                uri,
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
                f"/thirdparty/{kind}/{urllib.parse.quote(protocol)}",
                self.get_json,
                args=args,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
            if not isinstance(response, list):
                logger.warning(
                    "query_3pe to %s returned an invalid response %r", uri, response
                    "query_3pe to %s returned an invalid response %r",
                    service.url,
                    response,
                )
                return []

@@ -216,12 +267,12 @@ class ApplicationServiceApi(SimpleHttpClient):
                    ret.append(r)
                else:
                    logger.warning(
                        "query_3pe to %s returned an invalid result %r", uri, r
                        "query_3pe to %s returned an invalid result %r", service.url, r
                    )

            return ret
        except Exception as ex:
            logger.warning("query_3pe to %s threw exception %s", uri, ex)
            logger.warning("query_3pe to %s threw exception %s", service.url, ex)
            return []

    async def get_3pe_protocol(
@@ -233,21 +284,20 @@ class ApplicationServiceApi(SimpleHttpClient):
        async def _get() -> Optional[JsonDict]:
            # This is required by the configuration.
            assert service.hs_token is not None
            uri = "%s%s/thirdparty/protocol/%s" % (
                service.url,
                APP_SERVICE_PREFIX,
                urllib.parse.quote(protocol),
            )
            try:
                info = await self.get_json(
                    uri,
                info = await self._send_with_fallbacks(
                    service,
                    [APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
                    f"/thirdparty/protocol/{urllib.parse.quote(protocol)}",
                    self.get_json,
                    {"access_token": service.hs_token},
                    headers={"Authorization": [f"Bearer {service.hs_token}"]},
                )

                if not _is_valid_3pe_metadata(info):
                    logger.warning(
                        "query_3pe_protocol to %s did not return a valid result", uri
                        "query_3pe_protocol to %s did not return a valid result",
                        service.url,
                    )
                    return None

@@ -260,12 +310,27 @@ class ApplicationServiceApi(SimpleHttpClient):

                return info
            except Exception as ex:
                logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex)
                logger.warning(
                    "query_3pe_protocol to %s threw exception %s", service.url, ex
                )
                return None

        key = (service.id, protocol)
        return await self.protocol_meta_cache.wrap(key, _get)

    async def ping(self, service: "ApplicationService", txn_id: Optional[str]) -> None:
        # The caller should check that url is set
        assert service.url is not None, "ping called without URL being set"

        # This is required by the configuration.
        assert service.hs_token is not None

        await self.post_json_get_json(
            uri=f"{service.url}{APP_SERVICE_UNSTABLE_PREFIX}/fi.mau.msc2659/ping",
            post_json={"transaction_id": txn_id},
            headers={"Authorization": [f"Bearer {service.hs_token}"]},
        )

    async def push_bulk(
        self,
        service: "ApplicationService",
@@ -305,8 +370,6 @@ class ApplicationServiceApi(SimpleHttpClient):
            )
            txn_id = 0

        uri = service.url + ("/transactions/%s" % urllib.parse.quote(str(txn_id)))

        # Never send ephemeral events to appservices that do not support it
        body: JsonDict = {"events": serialized_events}
        if service.supports_ephemeral:
@@ -338,8 +401,11 @@ class ApplicationServiceApi(SimpleHttpClient):
        }

        try:
            await self.put_json(
                uri=uri,
            await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/transactions/{urllib.parse.quote(str(txn_id))}",
                self.put_json,
                json_body=body,
                args={"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
@@ -347,7 +413,7 @@ class ApplicationServiceApi(SimpleHttpClient):
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug(
                    "push_bulk to %s succeeded! events=%s",
                    uri,
                    service.url,
                    [event.get("event_id") for event in events],
                )
            sent_transactions_counter.labels(service.id).inc()
@@ -358,7 +424,7 @@ class ApplicationServiceApi(SimpleHttpClient):
        except CodeMessageException as e:
            logger.warning(
                "push_bulk to %s received code=%s msg=%s",
                uri,
                service.url,
                e.code,
                e.msg,
                exc_info=logger.isEnabledFor(logging.DEBUG),
@@ -366,7 +432,7 @@ class ApplicationServiceApi(SimpleHttpClient):
        except Exception as ex:
            logger.warning(
                "push_bulk to %s threw exception(%s) %s args=%s",
                uri,
                service.url,
                type(ex).__name__,
                ex,
                ex.args,
@@ -375,6 +441,108 @@ class ApplicationServiceApi(SimpleHttpClient):
            failed_transactions_counter.labels(service.id).inc()
            return False

    async def claim_client_keys(
        self, service: "ApplicationService", query: List[Tuple[str, str, str]]
    ) -> Tuple[Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str]]]:
        """Claim one time keys from an application service.

        Note that any error (including a timeout) is treated as the application
        service having no information.

        Args:
            service: The application service to query.
            query: An iterable of tuples of (user ID, device ID, algorithm).

        Returns:
            A tuple of:
                A map of user ID -> a map device ID -> a map of key ID -> JSON dict.

                A copy of the input which has not been fulfilled because the
                appservice doesn't support this endpoint or has not returned
                data for that tuple.
        """
        if service.url is None:
            return {}, query

        # This is required by the configuration.
        assert service.hs_token is not None

        # Create the expected payload shape.
        body: Dict[str, Dict[str, List[str]]] = {}
        for user_id, device, algorithm in query:
            body.setdefault(user_id, {}).setdefault(device, []).append(algorithm)

        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3983/keys/claim"
        try:
            response = await self.post_json_get_json(
                uri,
                body,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
        except HttpResponseException as e:
            # The appservice doesn't support this endpoint.
            if is_unknown_endpoint(e):
                return {}, query
            logger.warning("claim_keys to %s received %s", uri, e.code)
            return {}, query
        except Exception as ex:
            logger.warning("claim_keys to %s threw exception %s", uri, ex)
            return {}, query

        # Check if the appservice fulfilled all of the queried user/device/algorithms
        # or if some are still missing.
        #
        # TODO This places a lot of faith in the response shape being correct.
        missing = [
            (user_id, device, algorithm)
            for user_id, device, algorithm in query
            if algorithm not in response.get(user_id, {}).get(device, [])
        ]

        return response, missing

    async def query_keys(
        self, service: "ApplicationService", query: Dict[str, List[str]]
    ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]:
        """Query the application service for keys.

        Note that any error (including a timeout) is treated as the application
        service having no information.

        Args:
            service: The application service to query.
            query: An iterable of tuples of (user ID, device ID, algorithm).

        Returns:
            A map of device_keys/master_keys/self_signing_keys/user_signing_keys:

            device_keys is a map of user ID -> a map device ID -> device info.
        """
        if service.url is None:
            return {}

        # This is required by the configuration.
        assert service.hs_token is not None

        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3984/keys/query"
        try:
            response = await self.post_json_get_json(
                uri,
                query,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
        except HttpResponseException as e:
            # The appservice doesn't support this endpoint.
            if is_unknown_endpoint(e):
                return {}
            logger.warning("query_keys to %s received %s", uri, e.code)
            return {}
        except Exception as ex:
            logger.warning("query_keys to %s threw exception %s", uri, ex)
            return {}

        return response
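To make the MSC3983 payload shape concrete: for a single hypothetical query tuple of ("@alice:example.com", "DEV1", "signed_curve25519"), the body assembled by claim_client_keys above would be:

```python
# Illustrative request body for /keys/claim, derived from the loop above.
body = {
    "@alice:example.com": {
        "DEV1": ["signed_curve25519"],
    },
}
```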
    def _serialize(
        self, service: "ApplicationService", events: Iterable[EventBase]
    ) -> List[JsonDict]:
@@ -35,6 +35,7 @@ from synapse.config import (  # noqa: F401
    jwt,
    key,
    logger,
    meow,
    metrics,
    modules,
    oembed,
@@ -90,6 +91,7 @@ class RootConfig:
    voip: voip.VoipConfig
    registration: registration.RegistrationConfig
    account_validity: account_validity.AccountValidityConfig
    meow: meow.MeowConfig
    metrics: metrics.MetricsConfig
    api: api.ApiConfig
    appservice: appservice.AppServiceConfig
@@ -22,7 +22,6 @@ from ._base import Config


class ConsentConfig(Config):

    section = "consent"

    def __init__(self, *args: Any):
@@ -154,7 +154,6 @@ class DatabaseConfig(Config):
            logger.warning(NON_SQLITE_DATABASE_PATH_WARNING)

    def set_databasepath(self, database_path: str) -> None:

        if database_path != ":memory:":
            database_path = self.abspath(database_path)

@@ -74,6 +74,16 @@ class ExperimentalConfig(Config):
            "msc3202_transaction_extensions", False
        )

        # MSC3983: Proxying OTK claim requests to exclusive ASes.
        self.msc3983_appservice_otk_claims: bool = experimental.get(
            "msc3983_appservice_otk_claims", False
        )

        # MSC3984: Proxying key queries to exclusive ASes.
        self.msc3984_appservice_key_query: bool = experimental.get(
            "msc3984_appservice_key_query", False
        )

        # MSC3706 (server-side support for partial state in /send_join responses)
        # Synapse will always serve partial state responses to requests using the stable
        # query parameter `omit_members`. If this flag is set, Synapse will also serve
@@ -166,23 +176,9 @@ class ExperimentalConfig(Config):
        # MSC3391: Removing account data.
        self.msc3391_enabled = experimental.get("msc3391_enabled", False)

        # MSC3925: do not replace events with their edits
        self.msc3925_inhibit_edit = experimental.get("msc3925_inhibit_edit", False)

        # MSC3758: exact_event_match push rule condition
        self.msc3758_exact_event_match = experimental.get(
            "msc3758_exact_event_match", False
        )

        # MSC3873: Disambiguate event_match keys.
        self.msc3783_escape_event_match_key = experimental.get(
            "msc3783_escape_event_match_key", False
        )

        # MSC3952: Intentional mentions, this depends on MSC3758.
        self.msc3952_intentional_mentions = (
            experimental.get("msc3952_intentional_mentions", False)
            and self.msc3758_exact_event_match
        # MSC3952: Intentional mentions, this depends on MSC3966.
        self.msc3952_intentional_mentions = experimental.get(
            "msc3952_intentional_mentions", False
        )

        # MSC3959: Do not generate notifications for edits.
@@ -190,7 +186,8 @@ class ExperimentalConfig(Config):
            "msc3958_supress_edit_notifs", False
        )

        # MSC3966: exact_event_property_contains push rule condition.
        self.msc3966_exact_event_property_contains = experimental.get(
            "msc3966_exact_event_property_contains", False
        )
        # MSC3967: Do not require UIA when first uploading cross signing keys
        self.msc3967_enabled = experimental.get("msc3967_enabled", False)

        # MSC2659: Application service ping endpoint
        self.msc2659_enabled = experimental.get("msc2659_enabled", False)
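As a hedged illustration, the new appservice-related experimental flags read above would be enabled in homeserver.yaml like this (all of them default to false):

```yaml
experimental_features:
  msc3983_appservice_otk_claims: true
  msc3984_appservice_key_query: true
  msc2659_enabled: true
```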
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import RootConfig
from .meow import MeowConfig
from .account_validity import AccountValidityConfig
from .api import ApiConfig
from .appservice import AppServiceConfig
@@ -56,8 +57,8 @@ from .workers import WorkerConfig


class HomeServerConfig(RootConfig):

    config_classes = [
        MeowConfig,
        ModulesConfig,
        ServerConfig,
        RetentionConfig,

synapse/config/meow.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
# Copyright 2020 Maunium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ._base import Config


class MeowConfig(Config):
    """Meow Configuration
    Configuration for disabling dumb limits in Synapse
    """

    section = "meow"

    def read_config(self, config, **kwargs):
        meow_config = config.get("meow", {})
        self.validation_override = set(meow_config.get("validation_override", []))
        self.filter_override = set(meow_config.get("filter_override", []))
        self.timestamp_override = set(meow_config.get("timestamp_override", []))
        self.admin_api_register_invalid = meow_config.get(
            "admin_api_register_invalid", True
        )
        self.appservice_batch_send_any = meow_config.get(
            "appservice_batch_send_any", False
        )

    def generate_config_section(self, config_dir_path, server_name, **kwargs):
        return """
        # Configuration for disabling dumb limits in Synapse
        #
        #meow:
        #  # List of users who aren't subject to unnecessary validation in the C-S API.
        #  validation_override:
        #    - "@you:example.com"
        #  # List of users who will get org.matrix.dummy_event and m.room.aliases events down /sync
        #  filter_override:
        #    - "@you:example.com"
        #  # List of users who can use timestamp massaging without being appservices
        #  timestamp_override:
        #    - "@you:example.com"
        #  # Whether or not the admin API should be able to register invalid user IDs.
        #  admin_api_register_invalid: true
        #  # Whether appservices should be allowed to use MSC2716 batch sending as any local user.
        #  appservice_batch_send_any: false
        """
@@ -136,6 +136,7 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
            "type": "array",
            "items": SsoAttributeRequirement.JSON_SCHEMA,
        },
        "enable_registration": {"type": "boolean"},
    },
}

@@ -306,6 +307,7 @@ def _parse_oidc_config_dict(
        user_mapping_provider_class=user_mapping_provider_class,
        user_mapping_provider_config=user_mapping_provider_config,
        attribute_requirements=attribute_requirements,
        enable_registration=oidc_config.get("enable_registration", True),
    )


@@ -405,3 +407,6 @@ class OidcProviderConfig:

    # required attributes to require in userinfo to allow login/registration
    attribute_requirements: List[SsoAttributeRequirement]

    # Whether automatic registrations are enabled in the OIDC flow. Defaults to True
    enable_registration: bool
@@ -46,7 +46,6 @@ class RatelimitConfig(Config):
    section = "ratelimiting"

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:

        # Load the new-style messages config if it exists. Otherwise fall back
        # to the old method.
        if "rc_message" in config:
@@ -87,9 +86,18 @@ class RatelimitConfig(Config):
            defaults={"per_second": 0.1, "burst_count": 5},
        )

        # It is reasonable to login with a bunch of devices at once (i.e. when
        # setting up an account), but it is *not* valid to continually be
        # logging into new devices.
        rc_login_config = config.get("rc_login", {})
        self.rc_login_address = RatelimitSettings(rc_login_config.get("address", {}))
        self.rc_login_account = RatelimitSettings(rc_login_config.get("account", {}))
        self.rc_login_address = RatelimitSettings(
            rc_login_config.get("address", {}),
            defaults={"per_second": 0.003, "burst_count": 5},
        )
        self.rc_login_account = RatelimitSettings(
            rc_login_config.get("account", {}),
            defaults={"per_second": 0.003, "burst_count": 5},
        )
        self.rc_login_failed_attempts = RatelimitSettings(
            rc_login_config.get("failed_attempts", {})
        )
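For reference, the new login-rate defaults (0.003 requests per second with a burst of 5) remain overridable through the existing rc_login section; a sketch mirroring the defaults:

```yaml
rc_login:
  address:
    per_second: 0.003
    burst_count: 5
  account:
    per_second: 0.003
    burst_count: 5
```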
@@ -47,10 +47,8 @@ THUMBNAIL_SIZE_YAML = """\
THUMBNAIL_SUPPORTED_MEDIA_FORMAT_MAP = {
    "image/jpeg": "jpeg",
    "image/jpg": "jpeg",
    "image/webp": "jpeg",
    # Thumbnails can only be jpeg or png. We choose png thumbnails for gif
    # because it can have transparency.
    "image/gif": "png",
    "image/webp": "webp",
    "image/gif": "webp",
    "image/png": "png",
}

@@ -102,6 +100,10 @@ def parse_thumbnail_requirements(
            requirement.append(
                ThumbnailRequirement(width, height, method, "image/png")
            )
        elif thumbnail_format == "webp":
            requirement.append(
                ThumbnailRequirement(width, height, method, "image/webp")
            )
        else:
            raise Exception(
                "Unknown thumbnail mapping from %s to %s. This is a Synapse problem, please report!"
@@ -116,7 +118,6 @@ class ContentRepositoryConfig(Config):
    section = "media"

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:

        # Only enable the media repo if either the media repo is enabled or the
        # current worker app is the media repo.
        if (
@@ -179,11 +180,13 @@ class ContentRepositoryConfig(Config):
        for i, provider_config in enumerate(storage_providers):
            # We special case the module "file_system" so as not to need to
            # expose FileStorageProviderBackend
            if provider_config["module"] == "file_system":
                provider_config["module"] = (
                    "synapse.rest.media.v1.storage_provider"
                    ".FileStorageProviderBackend"
                )
            if (
                provider_config["module"] == "file_system"
                or provider_config["module"] == "synapse.rest.media.v1.storage_provider"
            ):
                provider_config[
                    "module"
                ] = "synapse.media.storage_provider.FileStorageProviderBackend"

            provider_class, parsed_config = load_module(
                provider_config, ("media_storage_providers", "<item %i>" % i)
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class ListenerConfig:
|
||||
"""Object describing the configuration of a single listener."""
|
||||
class TCPListenerConfig:
|
||||
"""Object describing the configuration of a single TCP listener."""
|
||||
|
||||
port: int = attr.ib(validator=attr.validators.instance_of(int))
|
||||
bind_addresses: List[str]
|
||||
bind_addresses: List[str] = attr.ib(validator=attr.validators.instance_of(List))
|
||||
type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
|
||||
tls: bool = False
|
||||
|
||||
# http_options is only populated if type=http
|
||||
http_options: Optional[HttpListenerConfig] = None
|
||||
|
||||
def get_site_tag(self) -> str:
|
||||
"""Retrieves http_options.tag if it exists, otherwise the port number."""
|
||||
if self.http_options and self.http_options.tag is not None:
|
||||
return self.http_options.tag
|
||||
else:
|
||||
return str(self.port)
|
||||
|
||||
def is_tls(self) -> bool:
|
||||
return self.tls
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class UnixListenerConfig:
|
||||
"""Object describing the configuration of a single Unix socket listener."""
|
||||
|
||||
# Note: unix sockets can not be tls encrypted, so HAVE to be behind a tls-handling
|
||||
# reverse proxy
|
||||
path: str = attr.ib()
|
||||
# A default(0o666) for this is set in parse_listener_def() below
|
||||
mode: int
|
||||
type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
|
||||
|
||||
# http_options is only populated if type=http
|
||||
http_options: Optional[HttpListenerConfig] = None
|
||||
|
||||
def get_site_tag(self) -> str:
|
||||
return "unix"
|
||||
|
||||
def is_tls(self) -> bool:
|
||||
"""Unix sockets can't have TLS"""
|
||||
return False
|
||||
|
||||
|
||||
ListenerConfig = Union[TCPListenerConfig, UnixListenerConfig]
|
||||
|
||||
|
||||
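Putting the two listener shapes side by side, a hedged homeserver.yaml sketch (the socket path is illustrative, and per parse_listener_def below, port and path are mutually exclusive):

```yaml
listeners:
  # Parsed into a TCPListenerConfig
  - port: 8008
    type: http
    tls: false
    bind_addresses: ["127.0.0.1"]
    resources:
      - names: [client, federation]

  # Parsed into a UnixListenerConfig; 'mode' defaults to 0o666 if omitted
  - path: /run/synapse/main.sock
    type: http
    resources:
      - names: [client, federation]
```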
@attr.s(slots=True, frozen=True, auto_attribs=True)
class ManholeConfig:
@@ -531,12 +566,12 @@ class ServerConfig(Config):

        self.listeners = [parse_listener_def(i, x) for i, x in enumerate(listeners)]

        # no_tls is not really supported any more, but let's grandfather it in
        # here.
        # no_tls is not really supported anymore, but let's grandfather it in here.
        if config.get("no_tls", False):
            l2 = []
            for listener in self.listeners:
                if listener.tls:
                if isinstance(listener, TCPListenerConfig) and listener.tls:
                    # Use isinstance() to assert that this listener has a port.
                    logger.info(
                        "Ignoring TLS-enabled listener on port %i due to no_tls",
                        listener.port,
@@ -577,7 +612,7 @@ class ServerConfig(Config):
            )

            self.listeners.append(
                ListenerConfig(
                TCPListenerConfig(
                    port=bind_port,
                    bind_addresses=[bind_host],
                    tls=True,
@@ -589,7 +624,7 @@ class ServerConfig(Config):
            unsecure_port = config.get("unsecure_port", bind_port - 400)
            if unsecure_port:
                self.listeners.append(
                    ListenerConfig(
                    TCPListenerConfig(
                        port=unsecure_port,
                        bind_addresses=[bind_host],
                        tls=False,
@@ -601,7 +636,7 @@ class ServerConfig(Config):
        manhole = config.get("manhole")
        if manhole:
            self.listeners.append(
                ListenerConfig(
                TCPListenerConfig(
                    port=manhole,
                    bind_addresses=["127.0.0.1"],
                    type="manhole",
@@ -648,7 +683,7 @@ class ServerConfig(Config):
            logger.warning(METRICS_PORT_WARNING)

            self.listeners.append(
                ListenerConfig(
                TCPListenerConfig(
                    port=metrics_port,
                    bind_addresses=[config.get("metrics_bind_host", "127.0.0.1")],
                    type="http",
@@ -724,7 +759,7 @@ class ServerConfig(Config):
        self.delete_stale_devices_after = None

    def has_tls_listener(self) -> bool:
        return any(listener.tls for listener in self.listeners)
        return any(listener.is_tls() for listener in self.listeners)

    def generate_config_section(
        self,
@@ -735,7 +770,6 @@ class ServerConfig(Config):
        listeners: Optional[List[dict]],
        **kwargs: Any,
    ) -> str:

        _, bind_port = parse_and_validate_server_name(server_name)
        if bind_port is not None:
            unsecure_port = bind_port - 400
@@ -905,25 +939,25 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
        raise ConfigError(DIRECT_TCP_ERROR, ("listeners", str(num), "type"))

    port = listener.get("port")
    if type(port) is not int:
    socket_path = listener.get("path")
    # Either a port or a path should be declared at a minimum. Using both would be bad.
    if port is not None and not isinstance(port, int):
        raise ConfigError("Listener configuration is lacking a valid 'port' option")
    if socket_path is not None and not isinstance(socket_path, str):
        raise ConfigError("Listener configuration is lacking a valid 'path' option")
    if port and socket_path:
        raise ConfigError(
            "Can not have both a UNIX socket and an IP/port declared for the same "
            "resource!"
        )
    if port is None and socket_path is None:
        raise ConfigError(
            "Must have either a UNIX socket or an IP/port declared for a given "
            "resource!"
        )

    tls = listener.get("tls", False)

    bind_addresses = listener.get("bind_addresses", [])
    bind_address = listener.get("bind_address")
    # if bind_address was specified, add it to the list of addresses
    if bind_address:
        bind_addresses.append(bind_address)

    # if we still have an empty list of addresses, use the default list
    if not bind_addresses:
        if listener_type == "metrics":
            # the metrics listener doesn't support IPv6
            bind_addresses.append("0.0.0.0")
        else:
            bind_addresses.extend(DEFAULT_BIND_ADDRESSES)

    http_config = None
    if listener_type == "http":
        try:
@@ -933,8 +967,12 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
        except ValueError as e:
            raise ConfigError("Unknown listener resource") from e

        # For a unix socket, default x_forwarded to True, as this is the only way of
        # getting a client IP.
        # Note: a reverse proxy is required anyway, as there is no way of exposing a
        # unix socket to the internet.
        http_config = HttpListenerConfig(
            x_forwarded=listener.get("x_forwarded", False),
            x_forwarded=listener.get("x_forwarded", (True if socket_path else False)),
            resources=resources,
            additional_resources=listener.get("additional_resources", {}),
            tag=listener.get("tag"),
@@ -942,7 +980,30 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
            experimental_cors_msc3886=listener.get("experimental_cors_msc3886", False),
        )

    return ListenerConfig(port, bind_addresses, listener_type, tls, http_config)
    if socket_path:
        # TODO: Add in path validation, like if the directory exists and is writable?
        # Set a default for the permission, in case it's left out
        socket_mode = listener.get("mode", 0o666)

        return UnixListenerConfig(socket_path, socket_mode, listener_type, http_config)

    else:
        assert port is not None
        bind_addresses = listener.get("bind_addresses", [])
        bind_address = listener.get("bind_address")
        # if bind_address was specified, add it to the list of addresses
        if bind_address:
            bind_addresses.append(bind_address)

        # if we still have an empty list of addresses, use the default list
        if not bind_addresses:
            if listener_type == "metrics":
                # the metrics listener doesn't support IPv6
                bind_addresses.append("0.0.0.0")
            else:
                bind_addresses.extend(DEFAULT_BIND_ADDRESSES)

        return TCPListenerConfig(port, bind_addresses, listener_type, tls, http_config)


_MANHOLE_SETTINGS_SCHEMA = {
@@ -30,7 +30,6 @@ class TlsConfig(Config):
    section = "tls"

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:

        self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
        self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))

@@ -19,15 +19,18 @@ from typing import Any, Dict, List, Union

import attr

from synapse.types import JsonDict

from ._base import (
from synapse.config._base import (
    Config,
    ConfigError,
    RoutableShardedWorkerHandlingConfig,
    ShardedWorkerHandlingConfig,
)
from .server import DIRECT_TCP_ERROR, ListenerConfig, parse_listener_def
from synapse.config.server import (
    DIRECT_TCP_ERROR,
    TCPListenerConfig,
    parse_listener_def,
)
from synapse.types import JsonDict

_DEPRECATED_WORKER_DUTY_OPTION_USED = """
The '%s' configuration option is deprecated and will be removed in a future
@@ -161,7 +164,7 @@ class WorkerConfig(Config):
        manhole = config.get("worker_manhole")
        if manhole:
            self.worker_listeners.append(
                ListenerConfig(
                TCPListenerConfig(
                    port=manhole,
                    bind_addresses=["127.0.0.1"],
                    type="manhole",
@@ -51,7 +51,7 @@ def check_event_content_hash(
    # some malformed events lack a 'hashes'. Protect against it being missing
    # or a weird type by basically treating it the same as an unhashed event.
    hashes = event.get("hashes")
    # nb it might be a frozendict or a dict
    # nb it might be an immutabledict or a dict
    if not isinstance(hashes, collections.abc.Mapping):
        raise SynapseError(
            400, "Malformed 'hashes': %s" % (type(hashes),), Codes.UNAUTHORIZED
@@ -399,7 +399,7 @@ class Keyring:
        # We now convert the returned list of results into a map from server
        # name to key ID to FetchKeyResult, to return.
        to_return: Dict[str, Dict[str, FetchKeyResult]] = {}
        for (request, results) in zip(deduped_requests, results_per_request):
        for request, results in zip(deduped_requests, results_per_request):
            to_return_by_server = to_return.setdefault(request.server_name, {})
            for key_id, key_result in results.items():
                existing = to_return_by_server.get(key_id)
@@ -168,13 +168,24 @@ async def check_state_independent_auth_rules(
        return

    # 2. Reject if event has auth_events that: ...
    auth_events = await store.get_events(
        event.auth_event_ids(),
        redact_behaviour=EventRedactBehaviour.as_is,
        allow_rejected=True,
    )
    if batched_auth_events:
        auth_events.update(batched_auth_events)
        # Copy the batched auth events to avoid mutating them.
        auth_events = dict(batched_auth_events)
        needed_auth_event_ids = set(event.auth_event_ids()) - batched_auth_events.keys()
        if needed_auth_event_ids:
            auth_events.update(
                await store.get_events(
                    needed_auth_event_ids,
                    redact_behaviour=EventRedactBehaviour.as_is,
                    allow_rejected=True,
                )
            )
    else:
        auth_events = await store.get_events(
            event.auth_event_ids(),
            redact_behaviour=EventRedactBehaviour.as_is,
            allow_rejected=True,
        )

    room_id = event.room_id
    auth_dict: MutableStateMap[str] = {}
@@ -462,7 +462,7 @@ class FrozenEvent(EventBase):
        # Signatures is a dict of dicts, and this is faster than doing a
        # copy.deepcopy
        signatures = {
            name: {sig_id: sig for sig_id, sig in sigs.items()}
            name: dict(sigs.items())
            for name, sigs in event_dict.pop("signatures", {}).items()
        }

@@ -510,7 +510,7 @@ class FrozenEventV2(EventBase):
        # Signatures is a dict of dicts, and this is faster than doing a
        # copy.deepcopy
        signatures = {
            name: {sig_id: sig for sig_id, sig in sigs.items()}
            name: dict(sigs.items())
            for name, sigs in event_dict.pop("signatures", {}).items()
        }
@@ -15,7 +15,7 @@ from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, List, Optional, Tuple

import attr
from frozendict import frozendict
from immutabledict import immutabledict

from synapse.appservice import ApplicationService
from synapse.events import EventBase

@@ -23,6 +23,7 @@ from synapse.types import JsonDict, StateMap

if TYPE_CHECKING:
    from synapse.storage.controllers import StorageControllers
    from synapse.storage.databases import StateGroupDataStore
    from synapse.storage.databases.main import DataStore
    from synapse.types.state import StateFilter

@@ -292,6 +293,7 @@ class EventContext(UnpersistedEventContextBase):
            Maps a (type, state_key) to the event ID of the state event matching
            this tuple.
        """

        assert self.state_group_before_event is not None
        return await self._storage.state.get_state_ids_for_group(
            self.state_group_before_event, state_filter
@@ -348,6 +350,54 @@ class UnpersistedEventContext(UnpersistedEventContextBase):
    partial_state: bool
    state_map_before_event: Optional[StateMap[str]] = None

    @classmethod
    async def batch_persist_unpersisted_contexts(
        cls,
        events_and_context: List[Tuple[EventBase, "UnpersistedEventContextBase"]],
        room_id: str,
        last_known_state_group: int,
        datastore: "StateGroupDataStore",
    ) -> List[Tuple[EventBase, EventContext]]:
        """
        Takes a list of events and their associated unpersisted contexts and persists
        the unpersisted contexts, returning a list of events and persisted contexts.
        Note that all the events must be in a linear chain (ie a <- b <- c).

        Args:
            events_and_context: A list of events and their unpersisted contexts
            room_id: the room_id for the events
            last_known_state_group: the last persisted state group
            datastore: a state datastore
        """
        amended_events_and_context = await datastore.store_state_deltas_for_batched(
            events_and_context, room_id, last_known_state_group
        )

        events_and_persisted_context = []
        for event, unpersisted_context in amended_events_and_context:
            if event.is_state():
                context = EventContext(
                    storage=unpersisted_context._storage,
                    state_group=unpersisted_context.state_group_after_event,
                    state_group_before_event=unpersisted_context.state_group_before_event,
                    state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
                    partial_state=unpersisted_context.partial_state,
                    prev_group=unpersisted_context.state_group_before_event,
                    delta_ids=unpersisted_context.state_delta_due_to_event,
                )
            else:
                context = EventContext(
                    storage=unpersisted_context._storage,
                    state_group=unpersisted_context.state_group_after_event,
                    state_group_before_event=unpersisted_context.state_group_before_event,
                    state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
                    partial_state=unpersisted_context.partial_state,
                    prev_group=unpersisted_context.prev_group_for_state_group_before_event,
                    delta_ids=unpersisted_context.delta_ids_to_state_group_before_event,
                )
            events_and_persisted_context.append((event, context))
        return events_and_persisted_context
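A sketch of how a caller might drive this helper; the accessor spelling `hs.get_datastores().state` and the surrounding variables are assumptions for illustration, not taken from this diff:

    # Sketch: persist the contexts for a linear batch of events in one call.
    events_and_context = await UnpersistedEventContext.batch_persist_unpersisted_contexts(
        events_and_context,                       # List[Tuple[EventBase, UnpersistedEventContextBase]]
        room_id=room_id,
        last_known_state_group=last_state_group,  # the most recently persisted state group
        datastore=hs.get_datastores().state,      # assumed accessor for the state group store
    )
    for event, context in events_and_context:
        assert isinstance(context, EventContext)  # contexts are now persisted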
    async def get_prev_state_ids(
        self, state_filter: Optional["StateFilter"] = None
    ) -> StateMap[str]:

@@ -439,4 +489,4 @@ def _decode_state_dict(
    if input is None:
        return None

    return frozendict({(etype, state_key): v for etype, state_key, v in input})
    return immutabledict({(etype, state_key): v for etype, state_key, v in input})
@@ -33,8 +33,8 @@ from typing_extensions import Literal
import synapse
from synapse.api.errors import Codes
from synapse.logging.opentracing import trace
from synapse.rest.media.v1._base import FileInfo
from synapse.rest.media.v1.media_storage import ReadableFileWrapper
from synapse.media._base import FileInfo
from synapse.media.media_storage import ReadableFileWrapper
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.types import JsonDict, RoomAlias, UserProfile
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
@@ -45,6 +45,8 @@ CHECK_CAN_DEACTIVATE_USER_CALLBACK = Callable[[str, bool], Awaitable[bool]]
ON_PROFILE_UPDATE_CALLBACK = Callable[[str, ProfileInfo, bool, bool], Awaitable]
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK = Callable[[str, bool, bool], Awaitable]
ON_THREEPID_BIND_CALLBACK = Callable[[str, str, str], Awaitable]
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]


def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:

@@ -78,7 +80,6 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
        # correctly, we need to await its result. Therefore it doesn't make a lot of
        # sense to make it go through the run() wrapper.
        if f.__name__ == "check_event_allowed":

            # We need to wrap check_event_allowed because its old form would return either
            # a boolean or a dict, but now we want to return the dict separately from the
            # boolean.

@@ -100,7 +101,6 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
            return wrap_check_event_allowed

        if f.__name__ == "on_create_room":

            # We need to wrap on_create_room because its old form would return a boolean
            # if the room creation is denied, but now we just want it to raise an
            # exception.

@@ -174,6 +174,12 @@ class ThirdPartyEventRules:
            ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
        ] = []
        self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = []
        self._on_add_user_third_party_identifier_callbacks: List[
            ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
        ] = []
        self._on_remove_user_third_party_identifier_callbacks: List[
            ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
        ] = []

    def register_third_party_rules_callbacks(
        self,

@@ -193,6 +199,12 @@ class ThirdPartyEventRules:
            ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
        ] = None,
        on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None,
        on_add_user_third_party_identifier: Optional[
            ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
        ] = None,
        on_remove_user_third_party_identifier: Optional[
            ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
        ] = None,
    ) -> None:
        """Register callbacks from modules for each hook."""
        if check_event_allowed is not None:

@@ -230,6 +242,16 @@ class ThirdPartyEventRules:
        if on_threepid_bind is not None:
            self._on_threepid_bind_callbacks.append(on_threepid_bind)

        if on_add_user_third_party_identifier is not None:
            self._on_add_user_third_party_identifier_callbacks.append(
                on_add_user_third_party_identifier
            )

        if on_remove_user_third_party_identifier is not None:
            self._on_remove_user_third_party_identifier_callbacks.append(
                on_remove_user_third_party_identifier
            )

    async def check_event_allowed(
        self,
        event: EventBase,

@@ -513,6 +535,9 @@ class ThirdPartyEventRules:
        local homeserver, not when it's created on an identity server (and then kept track
        of so that it can be unbound on the same IS later on).

        THIS MODULE CALLBACK METHOD HAS BEEN DEPRECATED. Please use the
        `on_add_user_third_party_identifier` callback method instead.

        Args:
            user_id: the user being associated with the threepid.
            medium: the threepid's medium.

@@ -525,3 +550,44 @@ class ThirdPartyEventRules:
                logger.exception(
                    "Failed to run module API callback %s: %s", callback, e
                )

    async def on_add_user_third_party_identifier(
        self, user_id: str, medium: str, address: str
    ) -> None:
        """Called when an association between a user's Matrix ID and a third-party ID
        (email, phone number) has successfully been registered on the homeserver.

        Args:
            user_id: The User ID included in the association.
            medium: The medium of the third-party ID (email, msisdn).
            address: The address of the third-party ID (i.e. an email address).
        """
        for callback in self._on_add_user_third_party_identifier_callbacks:
            try:
                await callback(user_id, medium, address)
            except Exception as e:
                logger.exception(
                    "Failed to run module API callback %s: %s", callback, e
                )

    async def on_remove_user_third_party_identifier(
        self, user_id: str, medium: str, address: str
    ) -> None:
        """Called when an association between a user's Matrix ID and a third-party ID
        (email, phone number) has been successfully removed on the homeserver.

        This is called *after* any known bindings on identity servers for this
        association have been removed.

        Args:
            user_id: The User ID included in the removed association.
            medium: The medium of the third-party ID (email, msisdn).
            address: The address of the third-party ID (i.e. an email address).
        """
        for callback in self._on_remove_user_third_party_identifier_callbacks:
            try:
                await callback(user_id, medium, address)
            except Exception as e:
                logger.exception(
                    "Failed to run module API callback %s: %s", callback, e
                )
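For module authors, wiring up the two new hooks might look like the following sketch; the keyword arguments mirror the `register_third_party_rules_callbacks` signature above, while the module scaffolding itself is illustrative:

    class ExampleModule:
        def __init__(self, config, api):
            # Register the non-deprecated identifier hooks added above.
            api.register_third_party_rules_callbacks(
                on_add_user_third_party_identifier=self.on_add,
                on_remove_user_third_party_identifier=self.on_remove,
            )

        async def on_add(self, user_id: str, medium: str, address: str) -> None:
            print(f"bound {medium}:{address} to {user_id}")

        async def on_remove(self, user_id: str, medium: str, address: str) -> None:
            print(f"unbound {medium}:{address} from {user_id}")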
@@ -38,8 +38,7 @@ from synapse.api.constants import (
)
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import RoomVersion
from synapse.types import JsonDict
from synapse.util.frozenutils import unfreeze
from synapse.types import JsonDict, Requester

from . import EventBase

@@ -317,8 +316,9 @@ class SerializeEventConfig:
    as_client_event: bool = True
    # Function to convert from federation format to client format
    event_format: Callable[[JsonDict], JsonDict] = format_event_for_client_v1
    # ID of the user's auth token - used for namespacing of transaction IDs
    token_id: Optional[int] = None
    # The entity that requested the event. This is used to determine whether to include
    # the transaction_id in the unsigned section of the event.
    requester: Optional[Requester] = None
    # List of event fields to include. If empty, all fields will be returned.
    only_event_fields: Optional[List[str]] = None
    # Some events can have stripped room state stored in the `unsigned` field.

@@ -355,7 +355,7 @@ def serialize_event(
    time_now_ms = int(time_now_ms)

    # Should this strip out None's?
    d = {k: v for k, v in e.get_dict().items()}
    d = dict(e.get_dict().items())

    d["event_id"] = e.event_id

@@ -368,11 +368,24 @@ def serialize_event(
            e.unsigned["redacted_because"], time_now_ms, config=config
        )

    if config.token_id is not None:
        if config.token_id == getattr(e.internal_metadata, "token_id", None):
            txn_id = getattr(e.internal_metadata, "txn_id", None)
            if txn_id is not None:
                d["unsigned"]["transaction_id"] = txn_id
    # If we have a txn_id saved in the internal_metadata, we should include it in the
    # unsigned section of the event if it was sent by the same session as the one
    # requesting the event.
    # There is a special case for guests, because they only have one access token
    # without associated access_token_id, so we always include the txn_id for events
    # they sent.
    txn_id = getattr(e.internal_metadata, "txn_id", None)
    if txn_id is not None and config.requester is not None:
        event_token_id = getattr(e.internal_metadata, "token_id", None)
        if config.requester.user.to_string() == e.sender and (
            (
                event_token_id is not None
                and config.requester.access_token_id is not None
                and event_token_id == config.requester.access_token_id
            )
            or config.requester.is_guest
        ):
            d["unsigned"]["transaction_id"] = txn_id
    # invite_room_state and knock_room_state are a list of stripped room state events
    # that are meant to provide metadata about a room to an invitee/knocker. They are

@@ -403,14 +416,6 @@ class EventClientSerializer:
    clients.
    """

    def __init__(self, inhibit_replacement_via_edits: bool = False):
        """
        Args:
            inhibit_replacement_via_edits: If this is set to True, then events are
               never replaced by their edits.
        """
        self._inhibit_replacement_via_edits = inhibit_replacement_via_edits

    def serialize_event(
        self,
        event: Union[JsonDict, EventBase],

@@ -418,7 +423,6 @@ class EventClientSerializer:
        *,
        config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG,
        bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None,
        apply_edits: bool = True,
    ) -> JsonDict:
        """Serializes a single event.

@@ -428,10 +432,7 @@ class EventClientSerializer:
            config: Event serialization config
            bundle_aggregations: A map from event_id to the aggregations to be bundled
               into the event.
            apply_edits: Whether the content of the event should be modified to reflect
               any replacement in `bundle_aggregations[<event_id>].replace`.
               See also the `inhibit_replacement_via_edits` constructor arg: if that is
               set to True, then this argument is ignored.

        Returns:
            The serialized event
        """

@@ -450,38 +451,10 @@ class EventClientSerializer:
                config,
                bundle_aggregations,
                serialized_event,
                apply_edits=apply_edits,
            )

        return serialized_event

    def _apply_edit(
        self, orig_event: EventBase, serialized_event: JsonDict, edit: EventBase
    ) -> None:
        """Replace the content, preserving existing relations of the serialized event.

        Args:
            orig_event: The original event.
            serialized_event: The original event, serialized. This is modified.
            edit: The event which edits the above.
        """

        # Ensure we take copies of the edit content, otherwise we risk modifying
        # the original event.
        edit_content = edit.content.copy()

        # Unfreeze the event content if necessary, so that we may modify it below
        edit_content = unfreeze(edit_content)
        serialized_event["content"] = edit_content.get("m.new_content", {})

        # Check for existing relations
        relates_to = orig_event.content.get("m.relates_to")
        if relates_to:
            # Keep the relations, ensuring we use a dict copy of the original
            serialized_event["content"]["m.relates_to"] = relates_to.copy()
        else:
            serialized_event["content"].pop("m.relates_to", None)

    def _inject_bundled_aggregations(
        self,
        event: EventBase,

@@ -489,7 +462,6 @@ class EventClientSerializer:
        config: SerializeEventConfig,
        bundled_aggregations: Dict[str, "BundledAggregations"],
        serialized_event: JsonDict,
        apply_edits: bool,
    ) -> None:
        """Potentially injects bundled aggregations into the unsigned portion of the serialized event.

@@ -504,9 +476,6 @@ class EventClientSerializer:
                While serializing the bundled aggregations this map may be searched
                again for additional events in a recursive manner.
            serialized_event: The serialized event which may be modified.
            apply_edits: Whether the content of the event should be modified to reflect
                any replacement in `aggregations.replace` (subject to the
                `inhibit_replacement_via_edits` constructor arg).
        """

        # We have already checked that aggregations exist for this event.

@@ -516,22 +485,12 @@ class EventClientSerializer:
        # being serialized.
        serialized_aggregations = {}

        if event_aggregations.annotations:
            serialized_aggregations[
                RelationTypes.ANNOTATION
            ] = event_aggregations.annotations

        if event_aggregations.references:
            serialized_aggregations[
                RelationTypes.REFERENCE
            ] = event_aggregations.references

        if event_aggregations.replace:
            # If there is an edit, optionally apply it to the event.
            edit = event_aggregations.replace
            if apply_edits and not self._inhibit_replacement_via_edits:
                self._apply_edit(event, serialized_event, edit)

            # Include information about it in the relations dict.
            #
            # Matrix spec v1.5 (https://spec.matrix.org/v1.5/client-server-api/#server-side-aggregation-of-mreplace-relationships)

@@ -539,10 +498,7 @@ class EventClientSerializer:
            # `sender` of the edit; however MSC3925 proposes extending it to the whole
            # of the edit, which is what we do here.
            serialized_aggregations[RelationTypes.REPLACE] = self.serialize_event(
                edit,
                time_now,
                config=config,
                apply_edits=False,
                event_aggregations.replace, time_now, config=config
            )

        # Include any threaded replies to this event.

@@ -611,7 +567,7 @@ PowerLevelsContent = Mapping[str, Union[_PowerLevel, Mapping[str, _PowerLevel]]]
def copy_and_fixup_power_levels_contents(
    old_power_levels: PowerLevelsContent,
) -> Dict[str, Union[int, Dict[str, int]]]:
    """Copy the content of a power_levels event, unfreezing frozendicts along the way.
    """Copy the content of a power_levels event, unfreezing immutabledicts along the way.

    We accept as input power level values which are strings, provided they represent an
    integer, e.g. `"100"` instead of 100. Such strings are converted to integers
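A short sketch of the fix-up described in that docstring (output shape inferred from the documented behaviour, not from the elided function body):

    old = {"users_default": "100", "events": {"m.room.name": "50"}}
    new = copy_and_fixup_power_levels_contents(old)
    # new == {"users_default": 100, "events": {"m.room.name": 50}}
    # `new` is a plain, mutable dict copy; `old` is left untouched.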
@@ -12,11 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import collections.abc
from typing import Iterable, Type, Union, cast
from typing import Iterable, List, Type, Union, cast

import jsonschema
from pydantic import Field, StrictBool, StrictStr

from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes, Membership
from synapse.api.constants import (
    MAX_ALIAS_LENGTH,
    EventContentFields,
    EventTypes,
    Membership,
)
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import EventFormatVersions
from synapse.config.homeserver import HomeServerConfig

@@ -28,6 +34,8 @@ from synapse.events.utils import (
    validate_canonicaljson,
)
from synapse.federation.federation_server import server_matches_acl_event
from synapse.http.servlet import validate_json_object
from synapse.rest.models import RequestBodyModel
from synapse.types import EventID, JsonDict, RoomID, UserID


@@ -43,7 +51,7 @@ class EventValidator:
            event: The event to validate.
            config: The homeserver's configuration.
        """
        self.validate_builder(event)
        self.validate_builder(event, config)

        if event.format_version == EventFormatVersions.ROOM_V1_V2:
            EventID.from_string(event.event_id)

@@ -74,6 +82,12 @@ class EventValidator:
        # Note that only the client controlled portion of the event is
        # checked, since we trust the portions of the event we created.
        validate_canonicaljson(event.content)
        if not 0 < event.origin_server_ts < 2**53:
            raise SynapseError(400, "Event timestamp is out of range")

        # meow: allow specific users to send potentially dangerous events.
        if event.sender in config.meow.validation_override:
            return

        if event.type == EventTypes.Aliases:
            if "aliases" in event.content:

@@ -88,27 +102,27 @@ class EventValidator:
                        Codes.INVALID_PARAM,
                    )

        if event.type == EventTypes.Retention:
        elif event.type == EventTypes.Retention:
            self._validate_retention(event)

        if event.type == EventTypes.ServerACL:
        elif event.type == EventTypes.ServerACL:
            if not server_matches_acl_event(config.server.server_name, event):
                raise SynapseError(
                    400, "Can't create an ACL event that denies the local server"
                )

        if event.type == EventTypes.PowerLevels:
        elif event.type == EventTypes.PowerLevels:
            try:
                jsonschema.validate(
                    instance=event.content,
                    schema=POWER_LEVELS_SCHEMA,
                    cls=plValidator,
                    cls=POWER_LEVELS_VALIDATOR,
                )
            except jsonschema.ValidationError as e:
                if e.path:
                    # example: "users_default": '0' is not of type 'integer'
                    # cast safety: path entries can be integers, if we fail to validate
                    # items in an array. However the POWER_LEVELS_SCHEMA doesn't expect
                    # items in an array. However, the POWER_LEVELS_SCHEMA doesn't expect
                    # to see any arrays.
                    message = (
                        '"' + cast(str, e.path[-1]) + '": ' + e.message  # noqa: B306

@@ -125,6 +139,15 @@ class EventValidator:
                    errcode=Codes.BAD_JSON,
                )

        # If the event contains a mentions key, validate it.
        if (
            EventContentFields.MSC3952_MENTIONS in event.content
            and config.experimental.msc3952_intentional_mentions
        ):
            validate_json_object(
                event.content[EventContentFields.MSC3952_MENTIONS], Mentions
            )
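A sketch of a payload that would satisfy this check; the unstable field name `org.matrix.msc3952.mentions` is an assumption for `EventContentFields.MSC3952_MENTIONS`, and the field shapes follow the `Mentions` model defined later in this file:

    content = {
        "org.matrix.msc3952.mentions": {
            "user_ids": ["@alice:example.org"],  # List[StrictStr]
            "room": False,                       # StrictBool
        }
    }
    # A non-boolean "room" value, or a non-string entry in "user_ids", would
    # cause validate_json_object to reject the event content with BAD_JSON.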
    def _validate_retention(self, event: EventBase) -> None:
        """Checks that an event that defines the retention policy for a room respects the
        format enforced by the spec.

@@ -165,7 +188,9 @@ class EventValidator:
                errcode=Codes.BAD_JSON,
            )

    def validate_builder(self, event: Union[EventBase, EventBuilder]) -> None:
    def validate_builder(
        self, event: Union[EventBase, EventBuilder], config: HomeServerConfig
    ) -> None:
        """Validates that the builder/event has roughly the right format. Only
        checks values that we expect a proto event to have, rather than all the
        fields an event would have

@@ -183,6 +208,10 @@ class EventValidator:
        RoomID.from_string(event.room_id)
        UserID.from_string(event.sender)

        # meow: allow specific users to send so-called invalid events
        if event.sender in config.meow.validation_override:
            return

        if event.type == EventTypes.Message:
            strings = ["body", "msgtype"]

@@ -253,12 +282,17 @@ POWER_LEVELS_SCHEMA = {
}


class Mentions(RequestBodyModel):
    user_ids: List[StrictStr] = Field(default_factory=list)
    room: StrictBool = False


# This could return something newer than Draft 7, but that's the current "latest"
# validator.
def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]:
    validator = jsonschema.validators.validator_for(POWER_LEVELS_SCHEMA)
def _create_validator(schema: JsonDict) -> Type[jsonschema.Draft7Validator]:
    validator = jsonschema.validators.validator_for(schema)

    # by default jsonschema does not consider a frozendict to be an object so
    # by default jsonschema does not consider a immutabledict to be an object so
    # we need to use a custom type checker
    # https://python-jsonschema.readthedocs.io/en/stable/validate/?highlight=object#validating-with-additional-types
    type_checker = validator.TYPE_CHECKER.redefine(

@@ -268,4 +302,4 @@ def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]:
    return jsonschema.validators.extend(validator, type_checker=type_checker)


plValidator = _create_power_level_validator()
POWER_LEVELS_VALIDATOR = _create_validator(POWER_LEVELS_SCHEMA)
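The hunk above cuts off mid-call; a minimal sketch of what a `redefine` of this kind looks like, under the assumption that an immutabledict should validate wherever a plain dict would (the lambda body is illustrative, not the elided source):

    type_checker = validator.TYPE_CHECKER.redefine(
        "object",
        # Treat immutabledict like dict for the purposes of "type": "object".
        lambda checker, thing: isinstance(thing, (dict, immutabledict)),
    )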
@@ -61,6 +61,7 @@ from synapse.federation.federation_base import (
    event_from_pdu_json,
)
from synapse.federation.transport.client import SendJoinResponse
from synapse.http.client import is_unknown_endpoint
from synapse.http.types import QueryParams
from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, tag_args, trace
from synapse.types import JsonDict, UserID, get_domain_from_id

@@ -759,43 +760,6 @@ class FederationClient(FederationBase):

        return signed_auth

    def _is_unknown_endpoint(
        self, e: HttpResponseException, synapse_error: Optional[SynapseError] = None
    ) -> bool:
        """
        Returns true if the response was due to an endpoint being unimplemented.

        Args:
            e: The error response received from the remote server.
            synapse_error: The above error converted to a SynapseError. This is
                automatically generated if not provided.

        """
        if synapse_error is None:
            synapse_error = e.to_synapse_error()
        # MSC3743 specifies that servers should return a 404 or 405 with an errcode
        # of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
        # to an unknown method, respectively.
        #
        # Older versions of servers don't properly handle this. This needs to be
        # rather specific as some endpoints truly do return 404 errors.
        return (
            # 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
            (e.code == 404 or e.code == 405)
            and (
                # Older Dendrites returned a text or empty body.
                # Older Conduit returned an empty body.
                not e.response
                or e.response == b"404 page not found"
                # The proper response JSON with M_UNRECOGNIZED errcode.
                or synapse_error.errcode == Codes.UNRECOGNIZED
            )
        ) or (
            # Older Synapses returned a 400 error.
            e.code == 400
            and synapse_error.errcode == Codes.UNRECOGNIZED
        )
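With the helper moved to `synapse.http.client`, the call sites below all follow the same try/fallback shape; a condensed sketch (the transport call shown is illustrative):

    try:
        return await self.transport_layer.send_join_v2(...)  # illustrative v2 call
    except HttpResponseException as e:
        if not is_unknown_endpoint(e):
            raise  # a real error from the remote, not a missing endpoint
    # otherwise: fall back to the older v1 endpoint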
    async def _try_destination_list(
        self,
        description: str,

@@ -887,7 +851,7 @@ class FederationClient(FederationBase):
            elif 400 <= e.code < 500 and synapse_error.errcode in failover_errcodes:
                failover = True

            elif failover_on_unknown_endpoint and self._is_unknown_endpoint(
            elif failover_on_unknown_endpoint and is_unknown_endpoint(
                e, synapse_error
            ):
                failover = True

@@ -1223,7 +1187,7 @@ class FederationClient(FederationBase):
            # If an error is received that is due to an unrecognised endpoint,
            # fallback to the v1 endpoint. Otherwise, consider it a legitimate error
            # and raise.
            if not self._is_unknown_endpoint(e):
            if not is_unknown_endpoint(e):
                raise

        logger.debug("Couldn't send_join with the v2 API, falling back to the v1 API")

@@ -1297,7 +1261,7 @@ class FederationClient(FederationBase):
            # fallback to the v1 endpoint if the room uses old-style event IDs.
            # Otherwise, consider it a legitimate error and raise.
            err = e.to_synapse_error()
            if self._is_unknown_endpoint(e, err):
            if is_unknown_endpoint(e, err):
                if room_version.event_format != EventFormatVersions.ROOM_V1_V2:
                    raise SynapseError(
                        400,

@@ -1358,7 +1322,7 @@ class FederationClient(FederationBase):
            # If an error is received that is due to an unrecognised endpoint,
            # fallback to the v1 endpoint. Otherwise, consider it a legitimate error
            # and raise.
            if not self._is_unknown_endpoint(e):
            if not is_unknown_endpoint(e):
                raise

        logger.debug("Couldn't send_leave with the v2 API, falling back to the v1 API")

@@ -1629,7 +1593,7 @@ class FederationClient(FederationBase):
            # If an error is received that is due to an unrecognised endpoint,
            # fallback to the unstable endpoint. Otherwise, consider it a
            # legitimate error and raise.
            if not self._is_unknown_endpoint(e):
            if not is_unknown_endpoint(e):
                raise

            logger.debug(
@@ -86,7 +86,7 @@ from synapse.storage.databases.main.lock import Lock
from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary
from synapse.storage.roommember import MemberSummary
from synapse.types import JsonDict, StateMap, get_domain_from_id
from synapse.util import json_decoder, unwrapFirstError
from synapse.util import unwrapFirstError
from synapse.util.async_helpers import Linearizer, concurrently_execute, gather_results
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.stringutils import parse_server_name

@@ -135,6 +135,7 @@ class FederationServer(FederationBase):
        self.state = hs.get_state_handler()
        self._event_auth_handler = hs.get_event_auth_handler()
        self._room_member_handler = hs.get_room_member_handler()
        self._e2e_keys_handler = hs.get_e2e_keys_handler()

        self._state_storage_controller = hs.get_storage_controllers().state

@@ -1012,15 +1013,14 @@ class FederationServer(FederationBase):
                query.append((user_id, device_id, algorithm))

        log_kv({"message": "Claiming one time keys.", "user, device pairs": query})
        results = await self.store.claim_e2e_one_time_keys(query)
        results = await self._e2e_keys_handler.claim_local_one_time_keys(query)

        json_result: Dict[str, Dict[str, dict]] = {}
        for user_id, device_keys in results.items():
            for device_id, keys in device_keys.items():
                for key_id, json_str in keys.items():
                    json_result.setdefault(user_id, {})[device_id] = {
                        key_id: json_decoder.decode(json_str)
                    }
        json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
        for result in results:
            for user_id, device_keys in result.items():
                for device_id, keys in device_keys.items():
                    for key_id, key in keys.items():
                        json_result.setdefault(user_id, {})[device_id] = {key_id: key}

        logger.info(
            "Claimed one-time-keys: %s",
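The reshaped `json_result` is a nesting of user ID to device ID to key ID to key JSON; an illustrative sketch of the shape (values invented for the example):

    # json_result after the loop above, roughly:
    # {
    #     "@user:example.org": {
    #         "DEVICEID": {
    #             "signed_curve25519:AAAAHQ": {"key": "...", "signatures": {...}},
    #         },
    #     },
    # }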
@@ -244,7 +244,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):

        self.notifier.on_new_replication_data()

    def send_device_messages(self, destination: str, immediate: bool = False) -> None:
    def send_device_messages(self, destination: str, immediate: bool = True) -> None:
        """As per FederationSender"""
        # We don't need to replicate this as it gets sent down a different
        # stream.

@@ -314,7 +314,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
        # stream position.
        keyed_edus = {v: k for k, v in self.keyed_edu_changed.items()[i:j]}

        for ((destination, edu_key), pos) in keyed_edus.items():
        for (destination, edu_key), pos in keyed_edus.items():
            rows.append(
                (
                    pos,

@@ -329,7 +329,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
        j = self.edus.bisect_right(to_token) + 1
        edus = self.edus.items()[i:j]

        for (pos, edu) in edus:
        for pos, edu in edus:
            rows.append((pos, EduRow(edu)))

        # Sort rows based on pos
@@ -11,6 +11,119 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The Federation Sender is responsible for sending Persistent Data Units (PDUs)
and Ephemeral Data Units (EDUs) to other homeservers using
the `/send` Federation API.


## How do PDUs get sent?

The Federation Sender is made aware of new PDUs due to `FederationSender.notify_new_events`.
When the sender is notified about a newly-persisted PDU that originates from this homeserver
and is not an out-of-band event, we pass the PDU to the `_PerDestinationQueue` for each
remote homeserver that is in the room at that point in the DAG.


### Per-Destination Queues

There is one `PerDestinationQueue` per 'destination' homeserver.
The `PerDestinationQueue` maintains the following information about the destination:

- whether the destination is currently in [catch-up mode (see below)](#catch-up-mode);
- a queue of PDUs to be sent to the destination; and
- a queue of EDUs to be sent to the destination (not considered in this section).

Upon a new PDU being enqueued, `attempt_new_transaction` is called to start a new
transaction if there is not already one in progress.


### Transactions and the Transaction Transmission Loop

Each federation HTTP request to the `/send` endpoint is referred to as a 'transaction'.
The body of the HTTP request contains a list of PDUs and EDUs to send to the destination.

The *Transaction Transmission Loop* (`_transaction_transmission_loop`) is responsible
for emptying the queued PDUs (and EDUs) from a `PerDestinationQueue` by sending
them to the destination.

There can only be one transaction in flight for a given destination at any time.
(Other than preventing us from overloading the destination, this also makes it easier to
reason about because we process events sequentially for each destination.
This is useful for *Catch-Up Mode*, described later.)

The loop continues so long as there is anything to send. At each iteration of the loop, we:

- dequeue up to 50 PDUs (and up to 100 EDUs).
- make the `/send` request to the destination homeserver with the dequeued PDUs and EDUs.
- if successful, make note of the fact that we succeeded in transmitting PDUs up to
  the given `stream_ordering` of the latest PDU by updating
  `last_successful_stream_ordering` for the destination.
- if unsuccessful, back off from the remote homeserver for some time.
  If we have been unsuccessful for too long (when the backoff interval grows to exceed 1 hour),
  the in-memory queues are emptied and we enter [*Catch-Up Mode*, described below](#catch-up-mode).


### Catch-Up Mode

When the `PerDestinationQueue` has the catch-up flag set, the *Catch-Up Transmission Loop*
(`_catch_up_transmission_loop`) is used in lieu of the regular `_transaction_transmission_loop`.
(Only once the catch-up mode has been exited can the regular transaction transmission behaviour
be resumed.)

*Catch-Up Mode*, entered upon Synapse startup or once a homeserver has fallen behind due to
connection problems, is responsible for sending PDUs that have been missed by the destination
homeserver. (PDUs can be missed because the `PerDestinationQueue` is volatile — i.e. resets
on startup — and it does not hold PDUs forever if `/send` requests to the destination fail.)

The catch-up mechanism makes use of the `last_successful_stream_ordering` column in the
`destinations` table (which gives the `stream_ordering` of the most recent successfully
sent PDU) and the `stream_ordering` column in the `destination_rooms` table (which gives,
for each room, the `stream_ordering` of the most recent PDU that needs to be sent to this
destination).

Each iteration of the loop pulls out 50 `destination_rooms` entries with the oldest
`stream_ordering`s that are greater than the `last_successful_stream_ordering`.
In other words, from the set of latest PDUs in each room to be sent to the destination,
the 50 oldest such PDUs are pulled out.

These PDUs could, in principle, now be directly sent to the destination. However, as an
optimisation intended to prevent overloading destination homeservers, we instead attempt
to send the latest forward extremities so long as the destination homeserver is still
eligible to receive those.
This reduces load on the destination **in aggregate** because all Synapse homeservers
will behave according to this principle and therefore avoid sending lots of different PDUs
at different points in the DAG to a recovering homeserver.
*This optimisation is not currently valid in rooms which are partial-state on this homeserver,
since we are unable to determine whether the destination homeserver is eligible to receive
the latest forward extremities unless this homeserver sent those PDUs — in this case, we
just send the latest PDUs originating from this server and skip this optimisation.*

Whilst PDUs are sent through this mechanism, the position of `last_successful_stream_ordering`
is advanced as normal.
Once there are no longer any rooms containing outstanding PDUs to be sent to the destination
*that are not already in the `PerDestinationQueue` because they arrived since Catch-Up Mode
was enabled*, Catch-Up Mode is exited and we return to `_transaction_transmission_loop`.


#### A note on failures and back-offs

If a remote server is unreachable over federation, we back off from that server,
with an exponentially-increasing retry interval.
Whilst we don't automatically retry after the interval, we prevent making new attempts
until such time as the back-off has cleared.
Once the back-off is cleared and a new PDU or EDU arrives for transmission, the transmission
loop resumes and empties the queue by making federation requests.

If the backoff grows too large (> 1 hour), the in-memory queue is emptied (to prevent
unbounded growth) and Catch-Up Mode is entered.

It is worth noting that the back-off for a remote server is cleared once an inbound
request from that remote server is received (see `notify_remote_server_up`).
At this point, the transaction transmission loop is also started up, to proactively
send missed PDUs and EDUs to the destination (i.e. you don't need to wait for a new PDU
or EDU, destined for that destination, to be created in order to send out missed PDUs and
EDUs).
"""
import abc
import logging

@@ -783,7 +896,7 @@ class FederationSender(AbstractFederationSender):
        else:
            queue.send_edu(edu)

    def send_device_messages(self, destination: str, immediate: bool = False) -> None:
    def send_device_messages(self, destination: str, immediate: bool = True) -> None:
        if destination == self.server_name:
            logger.warning("Not sending device update to ourselves")
            return
@@ -497,8 +497,8 @@ class PerDestinationQueue:
        #
        # Note: `catchup_pdus` will have exactly one PDU per room.
        for pdu in catchup_pdus:
            # The PDU from the DB will be the last PDU in the room from
            # *this server* that wasn't sent to the remote. However, other
            # The PDU from the DB will be the newest PDU in the room from
            # *this server* that we tried---but were unable---to send to the remote.
            # servers may have sent lots of events since then, and we want
            # to try and tell the remote only about the *latest* events in
            # the room. This is so that it doesn't get inundated by events

@@ -516,6 +516,11 @@ class PerDestinationQueue:
                # If the event is in the extremities, then great! We can just
                # use that without having to do further checks.
                room_catchup_pdus = [pdu]
            elif await self._store.is_partial_state_room(pdu.room_id):
                # We can't be sure which events the destination should
                # see using only partial state. Avoid doing so, and just retry
                # sending out the newest PDU the remote is missing from us.
                room_catchup_pdus = [pdu]
            else:
                # If not, fetch the extremities and figure out which we can
                # send.

@@ -547,6 +552,8 @@ class PerDestinationQueue:
                    self._server_name,
                    new_pdus,
                    redact=False,
                    filter_out_erased_senders=True,
                    filter_out_remote_partial_state_events=True,
                )

                # If we've filtered out all the extremities, fall back to
@@ -108,6 +108,7 @@ class PublicRoomList(BaseFederationServlet):
    """

    PATH = "/publicRooms"
    CATEGORY = "Federation requests"

    def __init__(
        self,

@@ -212,6 +213,7 @@ class OpenIdUserInfo(BaseFederationServlet):
    """

    PATH = "/openid/userinfo"
    CATEGORY = "Federation requests"

    REQUIRE_AUTH = False


@@ -70,6 +70,7 @@ class BaseFederationServerServlet(BaseFederationServlet):

class FederationSendServlet(BaseFederationServerServlet):
    PATH = "/send/(?P<transaction_id>[^/]*)/?"
    CATEGORY = "Inbound federation transaction request"

    # We ratelimit manually in the handler as we queue up the requests and we
    # don't want to fill up the ratelimiter with blocked requests.

@@ -138,6 +139,7 @@ class FederationSendServlet(BaseFederationServerServlet):

class FederationEventServlet(BaseFederationServerServlet):
    PATH = "/event/(?P<event_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    # This is when someone asks for a data item for a given server data_id pair.
    async def on_GET(

@@ -152,6 +154,7 @@ class FederationEventServlet(BaseFederationServerServlet):

class FederationStateV1Servlet(BaseFederationServerServlet):
    PATH = "/state/(?P<room_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    # This is when someone asks for all data for a given room.
    async def on_GET(

@@ -170,6 +173,7 @@ class FederationStateV1Servlet(BaseFederationServerServlet):

class FederationStateIdsServlet(BaseFederationServerServlet):
    PATH = "/state_ids/(?P<room_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@@ -187,6 +191,7 @@ class FederationStateIdsServlet(BaseFederationServerServlet):

class FederationBackfillServlet(BaseFederationServerServlet):
    PATH = "/backfill/(?P<room_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@@ -225,6 +230,7 @@ class FederationTimestampLookupServlet(BaseFederationServerServlet):
    """

    PATH = "/timestamp_to_event/(?P<room_id>[^/]*)/?"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@@ -246,6 +252,7 @@ class FederationTimestampLookupServlet(BaseFederationServerServlet):

class FederationQueryServlet(BaseFederationServerServlet):
    PATH = "/query/(?P<query_type>[^/]*)"
    CATEGORY = "Federation requests"

    # This is when we receive a server-server Query
    async def on_GET(

@@ -262,6 +269,7 @@ class FederationQueryServlet(BaseFederationServerServlet):

class FederationMakeJoinServlet(BaseFederationServerServlet):
    PATH = "/make_join/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@@ -297,6 +305,7 @@ class FederationMakeJoinServlet(BaseFederationServerServlet):

class FederationMakeLeaveServlet(BaseFederationServerServlet):
    PATH = "/make_leave/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@@ -312,6 +321,7 @@ class FederationMakeLeaveServlet(BaseFederationServerServlet):

class FederationV1SendLeaveServlet(BaseFederationServerServlet):
    PATH = "/send_leave/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@@ -327,6 +337,7 @@ class FederationV1SendLeaveServlet(BaseFederationServerServlet):

class FederationV2SendLeaveServlet(BaseFederationServerServlet):
    PATH = "/send_leave/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    PREFIX = FEDERATION_V2_PREFIX

@@ -344,6 +355,7 @@ class FederationV2SendLeaveServlet(BaseFederationServerServlet):

class FederationMakeKnockServlet(BaseFederationServerServlet):
    PATH = "/make_knock/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@@ -366,6 +378,7 @@ class FederationMakeKnockServlet(BaseFederationServerServlet):

class FederationV1SendKnockServlet(BaseFederationServerServlet):
    PATH = "/send_knock/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@@ -381,6 +394,7 @@ class FederationV1SendKnockServlet(BaseFederationServerServlet):

class FederationEventAuthServlet(BaseFederationServerServlet):
    PATH = "/event_auth/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@@ -395,6 +409,7 @@ class FederationEventAuthServlet(BaseFederationServerServlet):

class FederationV1SendJoinServlet(BaseFederationServerServlet):
    PATH = "/send_join/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@@ -412,6 +427,7 @@ class FederationV1SendJoinServlet(BaseFederationServerServlet):

class FederationV2SendJoinServlet(BaseFederationServerServlet):
    PATH = "/send_join/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    PREFIX = FEDERATION_V2_PREFIX

@@ -455,6 +471,7 @@ class FederationV2SendJoinServlet(BaseFederationServerServlet):

class FederationV1InviteServlet(BaseFederationServerServlet):
    PATH = "/invite/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@@ -479,6 +496,7 @@ class FederationV1InviteServlet(BaseFederationServerServlet):

class FederationV2InviteServlet(BaseFederationServerServlet):
    PATH = "/invite/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
    CATEGORY = "Federation requests"

    PREFIX = FEDERATION_V2_PREFIX

@@ -515,6 +533,7 @@ class FederationV2InviteServlet(BaseFederationServerServlet):

class FederationThirdPartyInviteExchangeServlet(BaseFederationServerServlet):
    PATH = "/exchange_third_party_invite/(?P<room_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_PUT(
        self,

@@ -529,6 +548,7 @@ class FederationThirdPartyInviteExchangeServlet(BaseFederationServerServlet):

class FederationClientKeysQueryServlet(BaseFederationServerServlet):
    PATH = "/user/keys/query"
    CATEGORY = "Federation requests"

    async def on_POST(
        self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]

@@ -538,6 +558,7 @@ class FederationClientKeysQueryServlet(BaseFederationServerServlet):

class FederationUserDevicesQueryServlet(BaseFederationServerServlet):
    PATH = "/user/devices/(?P<user_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_GET(
        self,

@@ -551,6 +572,7 @@ class FederationUserDevicesQueryServlet(BaseFederationServerServlet):

class FederationClientKeysClaimServlet(BaseFederationServerServlet):
    PATH = "/user/keys/claim"
    CATEGORY = "Federation requests"

    async def on_POST(
        self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]

@@ -561,6 +583,7 @@ class FederationClientKeysClaimServlet(BaseFederationServerServlet):

class FederationGetMissingEventsServlet(BaseFederationServerServlet):
    PATH = "/get_missing_events/(?P<room_id>[^/]*)"
    CATEGORY = "Federation requests"

    async def on_POST(
        self,

@@ -586,6 +609,7 @@ class FederationGetMissingEventsServlet(BaseFederationServerServlet):

class On3pidBindServlet(BaseFederationServerServlet):
    PATH = "/3pid/onbind"
    CATEGORY = "Federation requests"

    REQUIRE_AUTH = False

@@ -618,6 +642,7 @@ class On3pidBindServlet(BaseFederationServerServlet):

class FederationVersionServlet(BaseFederationServlet):
    PATH = "/version"
    CATEGORY = "Federation requests"

    REQUIRE_AUTH = False

@@ -640,6 +665,7 @@ class FederationVersionServlet(BaseFederationServlet):

class FederationRoomHierarchyServlet(BaseFederationServlet):
    PATH = "/hierarchy/(?P<room_id>[^/]*)"
    CATEGORY = "Federation requests"

    def __init__(
        self,

@@ -672,6 +698,7 @@ class RoomComplexityServlet(BaseFederationServlet):

    PATH = "/rooms/(?P<room_id>[^/]*)/complexity"
    PREFIX = FEDERATION_UNSTABLE_PREFIX
    CATEGORY = "Federation requests (unstable)"

    def __init__(
        self,
@@ -155,9 +155,6 @@ class AccountDataHandler:
        max_stream_id = await self._store.remove_account_data_for_room(
            user_id, room_id, account_data_type
        )
        if max_stream_id is None:
            # The referenced account data did not exist, so no delete occurred.
            return None

        self._notifier.on_new_event(
            StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]

@@ -230,9 +227,6 @@ class AccountDataHandler:
        max_stream_id = await self._store.remove_account_data_for_user(
            user_id, account_data_type
        )
        if max_stream_id is None:
            # The referenced account data did not exist, so no delete occurred.
            return None

        self._notifier.on_new_event(
            StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]

@@ -248,7 +242,6 @@ class AccountDataHandler:
                instance_name=random.choice(self._account_data_writers),
                user_id=user_id,
                account_data_type=account_data_type,
                content={},
            )
            return response["max_stream_id"]
@ -15,9 +15,7 @@
|
||||
import email.mime.multipart
|
||||
import email.utils
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Tuple
|
||||
|
||||
from twisted.web.http import Request
|
||||
from typing import TYPE_CHECKING, List, Optional, Tuple
|
||||
|
||||
from synapse.api.errors import AuthError, StoreError, SynapseError
|
||||
from synapse.metrics.background_process_metrics import wrap_as_background_process
|
||||
@ -30,25 +28,17 @@ if TYPE_CHECKING:
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Types for callbacks to be registered via the module api
|
||||
IS_USER_EXPIRED_CALLBACK = Callable[[str], Awaitable[Optional[bool]]]
|
||||
ON_USER_REGISTRATION_CALLBACK = Callable[[str], Awaitable]
|
||||
# Temporary hooks to allow for a transition from `/_matrix/client` endpoints
|
||||
# to `/_synapse/client/account_validity`. See `register_account_validity_callbacks`.
|
||||
-ON_LEGACY_SEND_MAIL_CALLBACK = Callable[[str], Awaitable]
-ON_LEGACY_RENEW_CALLBACK = Callable[[str], Awaitable[Tuple[bool, bool, int]]]
-ON_LEGACY_ADMIN_REQUEST = Callable[[Request], Awaitable]


 class AccountValidityHandler:
     def __init__(self, hs: "HomeServer"):
         self.hs = hs
         self.config = hs.config
-        self.store = self.hs.get_datastores().main
-        self.send_email_handler = self.hs.get_send_email_handler()
-        self.clock = self.hs.get_clock()
+        self.store = hs.get_datastores().main
+        self.send_email_handler = hs.get_send_email_handler()
+        self.clock = hs.get_clock()

-        self._app_name = self.hs.config.email.email_app_name
+        self._app_name = hs.config.email.email_app_name
+        self._module_api_callbacks = hs.get_module_api_callbacks().account_validity

         self._account_validity_enabled = (
             hs.config.account_validity.account_validity_enabled
@@ -78,69 +68,6 @@ class AccountValidityHandler:
         if hs.config.worker.run_background_tasks:
             self.clock.looping_call(self._send_renewal_emails, 30 * 60 * 1000)

-        self._is_user_expired_callbacks: List[IS_USER_EXPIRED_CALLBACK] = []
-        self._on_user_registration_callbacks: List[ON_USER_REGISTRATION_CALLBACK] = []
-        self._on_legacy_send_mail_callback: Optional[
-            ON_LEGACY_SEND_MAIL_CALLBACK
-        ] = None
-        self._on_legacy_renew_callback: Optional[ON_LEGACY_RENEW_CALLBACK] = None
-
-        # The legacy admin requests callback isn't a protected attribute because we need
-        # to access it from the admin servlet, which is outside of this handler.
-        self.on_legacy_admin_request_callback: Optional[ON_LEGACY_ADMIN_REQUEST] = None
-
-    def register_account_validity_callbacks(
-        self,
-        is_user_expired: Optional[IS_USER_EXPIRED_CALLBACK] = None,
-        on_user_registration: Optional[ON_USER_REGISTRATION_CALLBACK] = None,
-        on_legacy_send_mail: Optional[ON_LEGACY_SEND_MAIL_CALLBACK] = None,
-        on_legacy_renew: Optional[ON_LEGACY_RENEW_CALLBACK] = None,
-        on_legacy_admin_request: Optional[ON_LEGACY_ADMIN_REQUEST] = None,
-    ) -> None:
-        """Register callbacks from module for each hook."""
-        if is_user_expired is not None:
-            self._is_user_expired_callbacks.append(is_user_expired)
-
-        if on_user_registration is not None:
-            self._on_user_registration_callbacks.append(on_user_registration)
-
-        # The builtin account validity feature exposes 3 endpoints (send_mail, renew, and
-        # an admin one). As part of moving the feature into a module, we need to change
-        # the path from /_matrix/client/unstable/account_validity/... to
-        # /_synapse/client/account_validity, because:
-        #
-        # * the feature isn't part of the Matrix spec thus shouldn't live under /_matrix
-        # * the way we register servlets means that modules can't register resources
-        #   under /_matrix/client
-        #
-        # We need to allow for a transition period between the old and new endpoints
-        # in order to allow for clients to update (and for emails to be processed).
-        #
-        # Once the email-account-validity module is loaded, it will take control of account
-        # validity by moving the rows from our `account_validity` table into its own table.
-        #
-        # Therefore, we need to allow modules (in practice just the one implementing the
-        # email-based account validity) to temporarily hook into the legacy endpoints so we
-        # can route the traffic coming into the old endpoints into the module, which is
-        # why we have the following three temporary hooks.
-        if on_legacy_send_mail is not None:
-            if self._on_legacy_send_mail_callback is not None:
-                raise RuntimeError("Tried to register on_legacy_send_mail twice")
-
-            self._on_legacy_send_mail_callback = on_legacy_send_mail
-
-        if on_legacy_renew is not None:
-            if self._on_legacy_renew_callback is not None:
-                raise RuntimeError("Tried to register on_legacy_renew twice")
-
-            self._on_legacy_renew_callback = on_legacy_renew
-
-        if on_legacy_admin_request is not None:
-            if self.on_legacy_admin_request_callback is not None:
-                raise RuntimeError("Tried to register on_legacy_admin_request twice")
-
-            self.on_legacy_admin_request_callback = on_legacy_admin_request
-
     async def is_user_expired(self, user_id: str) -> bool:
         """Checks if a user has expired against third-party modules.

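For orientation, the removed `register_account_validity_callbacks` method is not lost: modules keep registering the same hooks through the module API, which now stores them on the `account_validity` callbacks object used above. A minimal sketch of a module wiring two of the hooks (the module class and its hook bodies are hypothetical; `register_account_validity_callbacks` is the module-API entry point this code path serves):

```python
from typing import Optional

from synapse.module_api import ModuleApi


class ExampleAccountValidityModule:
    """Hypothetical module registering account validity hooks."""

    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        # Hooks left unregistered fall back to Synapse's built-in behaviour.
        api.register_account_validity_callbacks(
            is_user_expired=self.is_user_expired,
            on_user_registration=self.on_user_registration,
        )

    async def is_user_expired(self, user_id: str) -> Optional[bool]:
        # None means "no opinion"; Synapse then asks the next callback.
        return None

    async def on_user_registration(self, user_id: str) -> None:
        # E.g. record the start of the new user's validity period.
        pass
```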
@@ -150,7 +77,7 @@ class AccountValidityHandler:
         Returns:
             Whether the user has expired.
         """
-        for callback in self._is_user_expired_callbacks:
+        for callback in self._module_api_callbacks.is_user_expired_callbacks:
             expired = await delay_cancellation(callback(user_id))
             if expired is not None:
                 return expired
@@ -168,7 +95,7 @@
         Args:
             user_id: The ID of the newly registered user.
         """
-        for callback in self._on_user_registration_callbacks:
+        for callback in self._module_api_callbacks.on_user_registration_callbacks:
             await callback(user_id)

     @wrap_as_background_process("send_renewals")
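The two rewritten loops implement slightly different contracts: every `on_user_registration` callback runs, while `is_user_expired` stops at the first callback that returns a non-`None` verdict. A standalone sketch of the latter dispatch (illustrative names, not Synapse code):

```python
from typing import Awaitable, Callable, List, Optional

IsUserExpiredCallback = Callable[[str], Awaitable[Optional[bool]]]


async def is_user_expired(
    callbacks: List[IsUserExpiredCallback], user_id: str
) -> bool:
    for callback in callbacks:
        expired = await callback(user_id)
        if expired is not None:
            # The first callback with an opinion decides.
            return expired
    # No module had an opinion: treat the account as not expired.
    return False
```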
@@ -198,8 +125,8 @@
         """
         # If a module supports sending a renewal email from here, do that, otherwise do
         # the legacy dance.
-        if self._on_legacy_send_mail_callback is not None:
-            await self._on_legacy_send_mail_callback(user_id)
+        if self._module_api_callbacks.on_legacy_send_mail_callback is not None:
+            await self._module_api_callbacks.on_legacy_send_mail_callback(user_id)
             return

         if not self._account_validity_renew_by_email_enabled:
@@ -336,8 +263,10 @@
         """
         # If a module supports triggering a renew from here, do that, otherwise do the
         # legacy dance.
-        if self._on_legacy_renew_callback is not None:
-            return await self._on_legacy_renew_callback(renewal_token)
+        if self._module_api_callbacks.on_legacy_renew_callback is not None:
+            return await self._module_api_callbacks.on_legacy_renew_callback(
+                renewal_token
+            )

         try:
             (
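Both hunks above apply the same delegation rule: if a module has claimed the legacy endpoint, hand the request over and return immediately; only otherwise run the built-in path. Reduced to its skeleton (hypothetical standalone names; the tuple shape matches `ON_LEGACY_RENEW_CALLBACK` above):

```python
from typing import Awaitable, Callable, Optional, Tuple

OnLegacyRenewCallback = Callable[[str], Awaitable[Tuple[bool, bool, int]]]


async def renew_account(
    on_legacy_renew: Optional[OnLegacyRenewCallback], renewal_token: str
) -> Tuple[bool, bool, int]:
    if on_legacy_renew is not None:
        # A module owns the legacy endpoint: delegate entirely.
        return await on_legacy_renew(renewal_token)
    # ... otherwise the built-in token handling (elided here) runs.
    raise NotImplementedError("built-in renewal path elided in this sketch")
```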
@@ -252,16 +252,19 @@ class AdminHandler:
         profile = await self.get_user(UserID.from_string(user_id))
         if profile is not None:
             writer.write_profile(profile)
+            logger.info("[%s] Written profile", user_id)

         # Get all devices the user has
         devices = await self._device_handler.get_devices_by_user(user_id)
         writer.write_devices(devices)
+        logger.info("[%s] Written %s devices", user_id, len(devices))

         # Get all connections the user has
         connections = await self.get_whois(UserID.from_string(user_id))
         writer.write_connections(
             connections["devices"][""]["sessions"][0]["connections"]
         )
+        logger.info("[%s] Written %s connections", user_id, len(connections))

         # Get all account data the user has global and in rooms
         global_data = await self._store.get_global_account_data_for_user(user_id)
@@ -269,6 +272,29 @@
         writer.write_account_data("global", global_data)
         for room_id in by_room_data:
             writer.write_account_data(room_id, by_room_data[room_id])
+        logger.info(
+            "[%s] Written account data for %s rooms", user_id, len(by_room_data)
+        )
+
+        # Get all media ids the user has
+        limit = 100
+        start = 0
+        while True:
+            media_ids, total = await self._store.get_local_media_by_user_paginate(
+                start, limit, user_id
+            )
+            for media in media_ids:
+                writer.write_media_id(media["media_id"], media)
+
+            logger.info(
+                "[%s] Written %d media_ids of %s",
+                user_id,
+                (start + len(media_ids)),
+                total,
+            )
+            if (start + limit) >= total:
+                break
+            start += limit

         return writer.finished()

@@ -359,6 +385,18 @@ class ExfiltrationWriter(metaclass=abc.ABCMeta):
         """
         raise NotImplementedError()

+    @abc.abstractmethod
+    def write_media_id(self, media_id: str, media_metadata: JsonDict) -> None:
+        """Write the media's metadata of a user.
+        Exports only the metadata, as this can be fetched from the database via
+        read only. In order to access the files, a connection to the correct
+        media repository would be required.
+
+        Args:
+            media_id: ID of the media.
+            media_metadata: Metadata of one media file.
+        """
+
     @abc.abstractmethod
     def finished(self) -> Any:
         """Called when all data has successfully been exported and written.

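To make the new abstract hook concrete, here is a minimal sketch of a writer that satisfies `write_media_id` by dumping each media item's metadata to a JSON file. It is a hypothetical duck-typed implementation for illustration, not the writer Synapse ships, and it deliberately persists metadata only, matching the docstring: fetching the file bodies would require talking to the media repository.

```python
import json
import os
from typing import Any, Dict

JsonDict = Dict[str, Any]


class JsonDirectoryWriter:
    """Hypothetical ExfiltrationWriter dumping media metadata to a directory."""

    def __init__(self, directory: str):
        self._directory = directory
        os.makedirs(directory, exist_ok=True)

    def write_media_id(self, media_id: str, media_metadata: JsonDict) -> None:
        # One JSON file per media item; file bodies stay in the media repo.
        path = os.path.join(self._directory, f"{media_id}.json")
        with open(path, "w") as f:
            json.dump(media_metadata, f)

    def finished(self) -> str:
        # Hand the caller the directory containing the export.
        return self._directory
```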
@@ -12,7 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from typing import TYPE_CHECKING, Collection, Dict, Iterable, List, Optional, Union
+from typing import (
+    TYPE_CHECKING,
+    Collection,
+    Dict,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Union,
+)

 from prometheus_client import Counter

@@ -737,7 +747,7 @@ class ApplicationServicesHandler:
         )

         ret = []
-        for (success, result) in results:
+        for success, result in results:
             if success:
                 ret.extend(result)

@@ -829,3 +839,126 @@
         if unknown_user:
             return await self.query_user_exists(user_id)
         return True
+
+    async def claim_e2e_one_time_keys(
+        self, query: Iterable[Tuple[str, str, str]]
+    ) -> Tuple[
+        Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]], List[Tuple[str, str, str]]
+    ]:
+        """Claim one time keys from application services.
+
+        Users which are exclusively owned by an application service are sent a
+        key claim request to check if the application service provides keys
+        directly.
+
+        Args:
+            query: An iterable of tuples of (user ID, device ID, algorithm).
+
+        Returns:
+            A tuple of:
+                An iterable of maps of user ID -> a map device ID -> a map of key ID -> JSON bytes.
+
+                A copy of the input which has not been fulfilled (either because
+                they are not appservice users or the appservice does not support
+                providing OTKs).
+        """
+        services = self.store.get_app_services()
+
+        # Partition the users by appservice.
+        query_by_appservice: Dict[str, List[Tuple[str, str, str]]] = {}
+        missing = []
+        for user_id, device, algorithm in query:
+            if not self.store.get_if_app_services_interested_in_user(user_id):
+                missing.append((user_id, device, algorithm))
+                continue
+
+            # Find the associated appservice.
+            for service in services:
+                if service.is_exclusive_user(user_id):
+                    query_by_appservice.setdefault(service.id, []).append(
+                        (user_id, device, algorithm)
+                    )
+                    continue
+
+        # Query each service in parallel.
+        results = await make_deferred_yieldable(
+            defer.DeferredList(
+                [
+                    run_in_background(
+                        self.appservice_api.claim_client_keys,
+                        # We know this must be an app service.
+                        self.store.get_app_service_by_id(service_id),  # type: ignore[arg-type]
+                        service_query,
+                    )
+                    for service_id, service_query in query_by_appservice.items()
+                ],
+                consumeErrors=True,
+            )
+        )
+
+        # Patch together the results -- they are all independent (since they
+        # require exclusive control over the users). They get returned as a list
+        # and the caller combines them.
+        claimed_keys: List[Dict[str, Dict[str, Dict[str, JsonDict]]]] = []
+        for success, result in results:
+            if success:
+                claimed_keys.append(result[0])
+                missing.extend(result[1])
+
+        return claimed_keys, missing
+
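A worked example of the shapes involved: the claim query is a flat iterable of `(user ID, device ID, algorithm)` triples, and the result splits what appservices answered from what must still be served by the local key store. All values below are purely illustrative:

```python
# Input: one triple per one-time key wanted.
query = [
    ("@as_user:example.org", "DEVICE1", "signed_curve25519"),
    ("@local_user:example.org", "DEVICE2", "signed_curve25519"),
]

# A possible result, assuming only @as_user is exclusively owned by an
# appservice that supports claiming OTKs:
claimed_keys = [
    {
        "@as_user:example.org": {
            "DEVICE1": {"signed_curve25519:AAAAAA": {"key": "..."}}
        }
    }
]
# @local_user falls through to the normal device-key store.
missing = [("@local_user:example.org", "DEVICE2", "signed_curve25519")]
```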
+    async def query_keys(
+        self, query: Mapping[str, Optional[List[str]]]
+    ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]:
+        """Query application services for device keys.
+
+        Users which are exclusively owned by an application service are queried
+        for keys to check if the application service provides keys directly.
+
+        Args:
+            query: map from user_id to a list of devices to query
+
+        Returns:
+            A map from user_id -> device_id -> device details
+        """
+        services = self.store.get_app_services()
+
+        # Partition the users by appservice.
+        query_by_appservice: Dict[str, Dict[str, List[str]]] = {}
+        for user_id, device_ids in query.items():
+            if not self.store.get_if_app_services_interested_in_user(user_id):
+                continue
+
+            # Find the associated appservice.
+            for service in services:
+                if service.is_exclusive_user(user_id):
+                    query_by_appservice.setdefault(service.id, {})[user_id] = (
+                        device_ids or []
+                    )
+                    continue
+
+        # Query each service in parallel.
+        results = await make_deferred_yieldable(
+            defer.DeferredList(
+                [
+                    run_in_background(
+                        self.appservice_api.query_keys,
+                        # We know this must be an app service.
+                        self.store.get_app_service_by_id(service_id),  # type: ignore[arg-type]
+                        service_query,
+                    )
+                    for service_id, service_query in query_by_appservice.items()
+                ],
+                consumeErrors=True,
+            )
+        )
+
+        # Patch together the results -- they are all independent (since they
+        # require exclusive control over the users). They get returned as a single
+        # dictionary.
+        key_queries: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
+        for success, result in results:
+            if success:
+                key_queries.update(result)
+
+        return key_queries
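Both new methods share one fan-out pattern: partition the work per appservice, run every request concurrently under `defer.DeferredList(..., consumeErrors=True)`, then merge only the successful results (`make_deferred_yieldable`/`run_in_background` are Synapse's logcontext-aware wrappers around the same idea). A self-contained Twisted sketch of the pattern, with all names hypothetical:

```python
from typing import Dict, List

from twisted.internet import defer
from twisted.internet.task import react


async def query_service(service_id: str, user_ids: List[str]) -> Dict[str, str]:
    # Stand-in for a per-appservice network call.
    return {user_id: f"keys-from-{service_id}" for user_id in user_ids}


def main(reactor) -> defer.Deferred:
    async def run() -> None:
        query_by_service = {"as1": ["@a:hs"], "as2": ["@b:hs"]}

        # One Deferred per service; consumeErrors=True stops one failing
        # service from poisoning the whole batch.
        results = await defer.DeferredList(
            [
                defer.ensureDeferred(query_service(service_id, users))
                for service_id, users in query_by_service.items()
            ],
            consumeErrors=True,
        )

        merged: Dict[str, str] = {}
        for success, result in results:
            if success:  # on failure, result is a Failure and is skipped
                merged.update(result)
        print(merged)

    return defer.ensureDeferred(run())


react(main)
```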