Merge remote-tracking branch 'upstream/release-v1.70'

Tulir Asokan 2022-10-19 17:24:11 +03:00
commit c3b3895da4
142 changed files with 4896 additions and 2015 deletions


@@ -18,6 +18,13 @@
 import json
 import os
+
+
+def set_output(key: str, value: str):
+    # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter
+    with open(os.environ["GITHUB_OUTPUT"], "at") as f:
+        print(f"{key}={value}", file=f)
+
 IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")

 # First calculate the various trial jobs.
@@ -81,7 +88,7 @@ print("::endgroup::")
 test_matrix = json.dumps(
     trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
 )
-print(f"::set-output name=trial_test_matrix::{test_matrix}")
+set_output("trial_test_matrix", test_matrix)

 # First calculate the various sytest jobs.
@@ -125,4 +132,4 @@ print(json.dumps(sytest_tests, indent=4))
 print("::endgroup::")

 test_matrix = json.dumps(sytest_tests)
-print(f"::set-output name=sytest_test_matrix::{test_matrix}")
+set_output("sytest_test_matrix", test_matrix)


@@ -21,7 +21,7 @@ endblock
 block Install Complement Dependencies
   sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
-  go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
+  go get -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
 endblock

 block Install custom gotestfmt template


@@ -15,3 +15,8 @@ updates:
     directory: "/"
     schedule:
       interval: "weekly"
+
+  - package-ecosystem: "cargo"
+    directory: "/"
+    schedule:
+      interval: "weekly"


@@ -54,7 +54,7 @@ jobs:
           esac

           # finally, set the 'branch-version' var.
-          echo "::set-output name=branch-version::$branch"
+          echo "branch-version=$branch" >> "$GITHUB_OUTPUT"

       # Deploy to the target directory.
       - name: Deploy to gh pages


@@ -76,7 +76,7 @@ jobs:
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
        with:
          python-version: "3.x"
      - run: pip install .[all,test]


@@ -26,7 +26,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
+        with:
+          python-version: '3.x'
      - id: set-distros
        run: |
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -34,7 +36,7 @@ jobs:
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
          fi
-          echo "::set-output name=distros::$dists"
+          echo "distros=$dists" >> "$GITHUB_OUTPUT"
    # map the step outputs to job outputs
    outputs:
      distros: ${{ steps.set-distros.outputs.distros }}
@@ -69,7 +71,9 @@ jobs:
            ${{ runner.os }}-buildx-

      - name: Set up python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.x'

      - name: Build the packages
        # see https://github.com/docker/build-push-action/issues/252
@@ -91,11 +95,14 @@ jobs:
          path: debs/*

  build-wheels:
-    name: Build wheels on ${{ matrix.os }}
+    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-20.04, macos-10.15]
+        arch: [x86_64, aarch64]
+        # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
+        # It is not read by the rest of the workflow.
        is_pr:
          - ${{ startsWith(github.ref, 'refs/pull/') }}
@@ -103,20 +110,38 @@ jobs:
          # Don't build macos wheels on PR CI.
          - is_pr: true
            os: "macos-10.15"
+          # Don't build aarch64 wheels on mac.
+          - os: "macos-10.15"
+            arch: aarch64
+          # Don't build aarch64 wheels on PR CI.
+          - is_pr: true
+            arch: aarch64

    steps:
      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v3
+      - uses: actions/setup-python@v4
+        with:
+          # setup-python@v4 doesn't impose a default python version. Need to use 3.x
+          # here, because `python` on osx points to Python 2.7.
+          python-version: "3.x"

      - name: Install cibuildwheel
        run: python -m pip install cibuildwheel==2.9.0 poetry==1.2.0

-      # Only build a single wheel in CI.
-      - name: Set env vars.
-        run: |
-          echo "CIBW_BUILD="cp37-manylinux_x86_64"" >> $GITHUB_ENV
+      - name: Set up QEMU to emulate aarch64
+        if: matrix.arch == 'aarch64'
+        uses: docker/setup-qemu-action@v2
+        with:
+          platforms: arm64
+
+      - name: Build aarch64 wheels
+        if: matrix.arch == 'aarch64'
+        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
+
+      - name: Only build a single wheel on PR
        if: startsWith(github.ref, 'refs/pull/')
+        run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV

      - name: Build wheels
        run: python -m cibuildwheel --output-dir wheelhouse
@@ -124,6 +149,9 @@ jobs:
          # Skip testing for platforms which various libraries don't have wheels
          # for, and so need extra build deps.
          CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*
+          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
+          CARGO_NET_GIT_FETCH_WITH_CLI: true
+          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI

      - uses: actions/upload-artifact@v3
        with:
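For the aarch64 wheels, the job leans on QEMU emulation plus two environment variables passed through to the build containers. A rough local equivalent, as a sketch only: it assumes cibuildwheel 2.9.0 is installed, a Docker daemon with QEMU/binfmt support is available, and it uses an arbitrary output directory:

```python
import os
import subprocess

env = dict(os.environ)
# Build aarch64 Linux wheels under QEMU emulation.
env["CIBW_ARCHS_LINUX"] = "aarch64"
# Work around Rust OOM errors on emulated aarch64 (rust-lang/cargo#10583)...
env["CARGO_NET_GIT_FETCH_WITH_CLI"] = "true"
# ...and make sure that variable survives into the build container.
env["CIBW_ENVIRONMENT_PASS_LINUX"] = "CARGO_NET_GIT_FETCH_WITH_CLI"

subprocess.run(
    ["python", "-m", "cibuildwheel", "--output-dir", "wheelhouse"],
    check=True,
    env=env,
)
```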


@@ -32,7 +32,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
      - uses: matrix-org/setup-python-poetry@v1
        with:
          extras: "all"
@@ -43,7 +43,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
      - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
      - run: scripts-dev/check_schema_delta.py --force-colors
@@ -67,7 +67,7 @@ jobs:
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
      - run: "pip install 'towncrier>=18.6.0rc1'"
      - run: scripts-dev/check-newsfragment.sh
        env:
@@ -142,7 +142,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
      - id: get-matrix
        run: .ci/scripts/calculate_jobs.py
    outputs:


@@ -1,3 +1,99 @@
Synapse 1.70.0rc1 (2022-10-19)
==============================
Features
--------
- Support for [MSC3856](https://github.com/matrix-org/matrix-spec-proposals/pull/3856): threads list API. ([\#13394](https://github.com/matrix-org/synapse/issues/13394), [\#14171](https://github.com/matrix-org/synapse/issues/14171), [\#14175](https://github.com/matrix-org/synapse/issues/14175))
- Support for thread-specific notifications & receipts ([MSC3771](https://github.com/matrix-org/matrix-spec-proposals/pull/3771) and [MSC3773](https://github.com/matrix-org/matrix-spec-proposals/pull/3773)). ([\#13776](https://github.com/matrix-org/synapse/issues/13776), [\#13824](https://github.com/matrix-org/synapse/issues/13824), [\#13877](https://github.com/matrix-org/synapse/issues/13877), [\#13878](https://github.com/matrix-org/synapse/issues/13878), [\#14050](https://github.com/matrix-org/synapse/issues/14050), [\#14140](https://github.com/matrix-org/synapse/issues/14140), [\#14159](https://github.com/matrix-org/synapse/issues/14159), [\#14163](https://github.com/matrix-org/synapse/issues/14163), [\#14174](https://github.com/matrix-org/synapse/issues/14174), [\#14222](https://github.com/matrix-org/synapse/issues/14222))
- Stop fetching missing `prev_events` after we already know their signature is invalid. ([\#13816](https://github.com/matrix-org/synapse/issues/13816))
- Send application service access tokens as a header (and query parameter). Implements [MSC2832](https://github.com/matrix-org/matrix-spec-proposals/pull/2832). ([\#13996](https://github.com/matrix-org/synapse/issues/13996))
- Ignore server ACL changes when generating pushes. Implements [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786). ([\#13997](https://github.com/matrix-org/synapse/issues/13997))
- Experimental support for redirecting to an implementation of a [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) HTTP rendezvous service. ([\#14018](https://github.com/matrix-org/synapse/issues/14018))
- The `/relations` endpoint can now be used on workers. ([\#14028](https://github.com/matrix-org/synapse/issues/14028))
- Advertise support for Matrix 1.3 and 1.4 on `/_matrix/client/versions`. ([\#14032](https://github.com/matrix-org/synapse/issues/14032), [\#14184](https://github.com/matrix-org/synapse/issues/14184))
- Improve validation of request bodies for the [Device Management](https://spec.matrix.org/v1.4/client-server-api/#device-management) and [MSC2697 Device Dehydration](https://github.com/matrix-org/matrix-spec-proposals/pull/2697) client-server API endpoints. ([\#14054](https://github.com/matrix-org/synapse/issues/14054))
- Experimental support for [MSC3874](https://github.com/matrix-org/matrix-spec-proposals/pull/3874). ([\#14148](https://github.com/matrix-org/synapse/issues/14148))
- Improve the validation of the following PUT endpoints: [`/directory/room/{roomAlias}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directoryroomroomalias), [`/directory/list/room/{roomId}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directorylistroomroomid) and [`/directory/list/appservice/{networkId}/{roomId}`](https://spec.matrix.org/v1.4/application-service-api/#put_matrixclientv3directorylistappservicenetworkidroomid). ([\#14179](https://github.com/matrix-org/synapse/issues/14179))
- Build and publish binary wheels for `aarch64` platforms. ([\#14212](https://github.com/matrix-org/synapse/issues/14212))
Bugfixes
--------
- Prevent device names from appearing in device list updates when `allow_device_name_lookup_over_federation` is `false`. ([\#10015](https://github.com/matrix-org/synapse/issues/10015))
- Fix a long-standing bug where redactions were not being sent over federation if we did not have the original event. ([\#13813](https://github.com/matrix-org/synapse/issues/13813))
- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled or have their new content applied. ([\#14034](https://github.com/matrix-org/synapse/issues/14034))
- Fix a bug introduced in Synapse 1.53.0 when querying `/publicRooms` with both a `room_type` filter and a `third_party_instance_id`. ([\#14053](https://github.com/matrix-org/synapse/issues/14053))
- Fix a bug introduced in Synapse 1.35.0 where errors parsing a `/send_join` or `/state` response would produce excessive, low-quality Sentry events. ([\#14065](https://github.com/matrix-org/synapse/issues/14065))
- Fix a long-standing bug where Synapse would error on the optional 'invite_room_state' field not being provided to [`PUT /_matrix/federation/v2/invite/{roomId}/{eventId}`](https://spec.matrix.org/v1.4/server-server-api/#put_matrixfederationv2inviteroomideventid). ([\#14083](https://github.com/matrix-org/synapse/issues/14083))
- Fix a bug where invalid oEmbed fields would cause the entire response to be discarded. Introduced in Synapse 1.18.0. ([\#14089](https://github.com/matrix-org/synapse/issues/14089))
- Fix a bug introduced in Synapse 1.37.0 in which an incorrect key name was used for sending and receiving room metadata when knocking on a room. ([\#14102](https://github.com/matrix-org/synapse/issues/14102))
- Fix a bug introduced in v1.69.0rc1 where the joined hosts for a given event were not being properly cached. ([\#14125](https://github.com/matrix-org/synapse/issues/14125))
- Fix a bug introduced in Synapse 1.30.0 where purging and rejoining a room without restarting in-between would result in a broken room. ([\#14161](https://github.com/matrix-org/synapse/issues/14161), [\#14164](https://github.com/matrix-org/synapse/issues/14164))
- Fix [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint returning potentially inaccurate closest events with `outliers` present. ([\#14215](https://github.com/matrix-org/synapse/issues/14215))
Updates to the Docker image
---------------------------
- Update the version of frozendict in Docker images and Debian packages from 2.3.3 to 2.3.4, which may fix memory leak problems. ([\#13955](https://github.com/matrix-org/synapse/issues/13955))
- Use the `minimal` Rust profile when building Synapse. ([\#14141](https://github.com/matrix-org/synapse/issues/14141))
- Prevent a class of database sharding errors when using `Dockerfile-workers` to spawn multiple instances of the same worker. Contributed by Jason Little. ([\#14165](https://github.com/matrix-org/synapse/issues/14165))
- Set `LD_PRELOAD` to use jemalloc memory allocator in Dockerfile-workers. ([\#14182](https://github.com/matrix-org/synapse/issues/14182))
- Fix pre-startup logging being lost when using the `Dockerfile-workers` image. ([\#14195](https://github.com/matrix-org/synapse/issues/14195))
Improved Documentation
----------------------
- Add sample worker files for `pusher` and `federation_sender`. ([\#14077](https://github.com/matrix-org/synapse/issues/14077))
- Improve the listener example in the metrics documentation. ([\#14078](https://github.com/matrix-org/synapse/issues/14078))
- Expand Google OpenID Connect example config to map email attribute. Contributed by @ptman. ([\#14081](https://github.com/matrix-org/synapse/issues/14081))
- Document that ending a changelog entry with a full stop or exclamation mark is not optional. ([\#14087](https://github.com/matrix-org/synapse/issues/14087))
- Fix links to jemalloc documentation, which were broken in [#13491](https://github.com/matrix-org/synapse/pull/13491). ([\#14093](https://github.com/matrix-org/synapse/issues/14093))
- Remove an unneeded `replication` listener from the docker compose example. ([\#14107](https://github.com/matrix-org/synapse/issues/14107))
- Fix name of `alias_creation_rules` option in the config manual documentation. ([\#14124](https://github.com/matrix-org/synapse/issues/14124))
- Clarify comment on event contexts. ([\#14145](https://github.com/matrix-org/synapse/issues/14145))
- Fix dead link to the [Admin Registration API](https://matrix-org.github.io/synapse/latest/admin_api/register_api.html). ([\#14189](https://github.com/matrix-org/synapse/issues/14189))
Deprecations and Removals
-------------------------
- Remove the experimental implementation of [MSC3772](https://github.com/matrix-org/matrix-spec-proposals/pull/3772). ([\#14094](https://github.com/matrix-org/synapse/issues/14094))
- Remove the unstable identifier for [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#14106](https://github.com/matrix-org/synapse/issues/14106), [\#14146](https://github.com/matrix-org/synapse/issues/14146))
Internal Changes
----------------
- Optimise queries used to get a user's rooms during sync. Contributed by Nick @ Beeper (@fizzadar). ([\#13991](https://github.com/matrix-org/synapse/issues/13991))
- Update authlib from 0.15.5 to 1.1.0. ([\#14006](https://github.com/matrix-org/synapse/issues/14006))
- Make `parse_server_name` consistent in handling invalid server names. ([\#14007](https://github.com/matrix-org/synapse/issues/14007))
- Don't repeatedly wake up the same users for batched events. ([\#14033](https://github.com/matrix-org/synapse/issues/14033))
- Complement test image: capture logs from nginx. ([\#14063](https://github.com/matrix-org/synapse/issues/14063))
- Don't create noisy Sentry events when a requester drops connection to the metrics server mid-request. ([\#14072](https://github.com/matrix-org/synapse/issues/14072))
- Run the integration test suites with the asyncio reactor enabled in CI. ([\#14092](https://github.com/matrix-org/synapse/issues/14092))
- Add debug logs to figure out why an event was filtered out of the client response. ([\#14095](https://github.com/matrix-org/synapse/issues/14095))
- Indicate what endpoint came back with a JSON response we were unable to parse. ([\#14097](https://github.com/matrix-org/synapse/issues/14097))
- Break up calls to fetch rooms for many users. Contributed by Nick @ Beeper (@fizzadar). ([\#14109](https://github.com/matrix-org/synapse/issues/14109))
- Faster joins: prioritise the server we joined by when restarting a partial join resync. ([\#14126](https://github.com/matrix-org/synapse/issues/14126))
- Cache Rust build cache when building docker images. ([\#14130](https://github.com/matrix-org/synapse/issues/14130))
- Enable dependabot for Rust dependencies. ([\#14132](https://github.com/matrix-org/synapse/issues/14132))
- Bump typing-extensions from 4.1.1 to 4.4.0. ([\#14134](https://github.com/matrix-org/synapse/issues/14134))
- Use the `minimal` Rust profile when building Synapse. ([\#14141](https://github.com/matrix-org/synapse/issues/14141))
- Remove unused configuration code. ([\#14142](https://github.com/matrix-org/synapse/issues/14142))
- Prepare for the [`gotestfmt` repository move](https://github.com/GoTestTools/gotestfmt/discussions/46). ([\#14144](https://github.com/matrix-org/synapse/issues/14144))
- Invalidate rooms for user caches on replicated event, fix sync cache race in synapse workers. Contributed by Nick @ Beeper (@fizzadar). ([\#14155](https://github.com/matrix-org/synapse/issues/14155))
- Enable url previews when testing with complement. ([\#14198](https://github.com/matrix-org/synapse/issues/14198))
- When authenticating batched events, check for auth events in batch as well as DB. ([\#14214](https://github.com/matrix-org/synapse/issues/14214))
- Update CI config to avoid GitHub Actions deprecation warnings. ([\#14216](https://github.com/matrix-org/synapse/issues/14216), [\#14224](https://github.com/matrix-org/synapse/issues/14224))
- Update dependency requirements to allow building with poetry-core 1.3.2. ([\#14217](https://github.com/matrix-org/synapse/issues/14217))
- Rename the `cache_memory` extra to `cache-memory`, for compatibility with poetry-core 1.3.0 and [PEP 685](https://peps.python.org/pep-0685/). From-source installations using this extra will need to install using the new name. ([\#14221](https://github.com/matrix-org/synapse/issues/14221))
- Specify dev-dependencies using lower bounds, to reduce the likelihood of a dependabot merge conflict. The lockfile continues to pin to specific versions. ([\#14227](https://github.com/matrix-org/synapse/issues/14227))
Synapse 1.69.0 (2022-10-17)
===========================

Cargo.lock (generated file; 56 lines changed)

@@ -104,9 +104,9 @@ checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"

[[package]]
name = "itoa"
-version = "1.0.3"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
+checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"

[[package]]
name = "lazy_static"
@@ -116,15 +116,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"

[[package]]
name = "libc"
-version = "0.2.132"
+version = "0.2.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5"
+checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"

[[package]]
name = "lock_api"
-version = "0.4.7"
+version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53"
+checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
dependencies = [
 "autocfg",
 "scopeguard",
@@ -156,9 +156,9 @@ dependencies = [

[[package]]
name = "once_cell"
-version = "1.13.1"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e"
+checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"

[[package]]
name = "parking_lot"
@@ -185,18 +185,18 @@ dependencies = [

[[package]]
name = "proc-macro2"
-version = "1.0.43"
+version = "1.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
+checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
dependencies = [
 "unicode-ident",
]

[[package]]
name = "pyo3"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12f72538a0230791398a0986a6518ebd88abc3fded89007b506ed072acc831e1"
+checksum = "201b6887e5576bf2f945fe65172c1fcbf3fcf285b23e4d71eb171d9736e38d32"
dependencies = [
 "anyhow",
 "cfg-if",
@@ -212,9 +212,9 @@ dependencies = [

[[package]]
name = "pyo3-build-config"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc4cf18c20f4f09995f3554e6bcf9b09bd5e4d6b67c562fdfaafa644526ba479"
+checksum = "bf0708c9ed01692635cbf056e286008e5a2927ab1a5e48cdd3aeb1ba5a6fef47"
dependencies = [
 "once_cell",
 "target-lexicon",
@@ -222,9 +222,9 @@ dependencies = [

[[package]]
name = "pyo3-ffi"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a41877f28d8ebd600b6aa21a17b40c3b0fc4dfe73a27b6e81ab3d895e401b0e9"
+checksum = "90352dea4f486932b72ddf776264d293f85b79a1d214de1d023927b41461132d"
dependencies = [
 "libc",
 "pyo3-build-config",
@@ -243,9 +243,9 @@ dependencies = [

[[package]]
name = "pyo3-macros"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e81c8d4bcc2f216dc1b665412df35e46d12ee8d3d046b381aad05f1fcf30547"
+checksum = "7eb24b804a2d9e88bfcc480a5a6dd76f006c1e3edaf064e8250423336e2cd79d"
dependencies = [
 "proc-macro2",
 "pyo3-macros-backend",
@@ -255,9 +255,9 @@ dependencies = [

[[package]]
name = "pyo3-macros-backend"
-version = "0.17.1"
+version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85752a767ee19399a78272cc2ab625cd7d373b2e112b4b13db28de71fa892784"
+checksum = "f22bb49f6a7348c253d7ac67a6875f2dc65f36c2ae64a82c381d528972bea6d6"
dependencies = [
 "proc-macro2",
 "quote",
@@ -343,9 +343,9 @@ dependencies = [

[[package]]
name = "serde_json"
-version = "1.0.85"
+version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
+checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074"
dependencies = [
 "itoa",
 "ryu",
@@ -354,9 +354,9 @@ dependencies = [

[[package]]
name = "smallvec"
-version = "1.9.0"
+version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
+checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"

[[package]]
name = "subtle"
@@ -366,9 +366,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"

[[package]]
name = "syn"
-version = "1.0.99"
+version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
+checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
dependencies = [
 "proc-macro2",
 "quote",
@@ -406,9 +406,9 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"

[[package]]
name = "unicode-ident"
-version = "1.0.3"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf"
+checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"

[[package]]
name = "unindent"


@@ -94,20 +94,6 @@ worker_replication_host: synapse
worker_replication_http_port: 9093
```

-### Add Workers to `instance_map`
-
-Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:
-
-```yaml
-instance_map:
-  synapse-generic-worker-1:        # The worker_name setting in your worker configuration file
-    host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
-    port: 8034                     # The port assigned to the replication listener in your worker config file
-  synapse-federation-sender-1:
-    host: synapse-federation-sender-1
-    port: 8034
-```
-
### Configure Federation Senders

This section is applicable if you are using Federation senders (synapse.app.federation_sender). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:
@@ -122,4 +108,4 @@ federation_sender_instances:

## Other Worker types

Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.


@@ -5,10 +5,4 @@ worker_name: synapse-federation-sender-1
worker_replication_host: synapse
worker_replication_http_port: 9093

-worker_listeners:
-  - type: http
-    port: 8034
-    resources:
-      - names: [replication]
-
worker_log_config: /data/federation_sender.log.config


@@ -6,10 +6,6 @@ worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
-  - type: http
-    port: 8034
-    resources:
-      - names: [replication]
  - type: http
    port: 8081
    x_forwarded: true

debian/changelog (vendored; 6 lines changed)

@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.70.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.70.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 19 Oct 2022 14:11:57 +0100
+
matrix-synapse-py3 (1.69.0) stable; urgency=medium

  * New Synapse release 1.69.0.


@@ -106,7 +106,7 @@ ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo

-RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable
+RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal

# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
@@ -135,7 +135,9 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# Install the synapse package itself.
# If we have populated requirements.txt, we don't install any dependencies
# as we should already have those from the previous `pip install` step.
-RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
+RUN --mount=type=cache,target=/synapse/target,sharing=locked \
+  --mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
+  if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
    pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
  else \
    pip install --prefix="/install" --no-warn-script-location /synapse[all]; \


@@ -92,7 +92,7 @@ ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo

-RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable
+RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal

COPY --from=builder /dh-virtualenv_1.2.2-1_all.deb /


@@ -40,7 +40,11 @@ FROM matrixdotorg/synapse:$SYNAPSE_VERSION
COPY --from=deps_base /etc/nginx /etc/nginx
RUN rm /etc/nginx/sites-enabled/default
RUN mkdir /var/log/nginx /var/lib/nginx
-RUN chown www-data /var/log/nginx /var/lib/nginx
+RUN chown www-data /var/lib/nginx
+
+# have nginx log to stderr/out
+RUN ln -sf /dev/stdout /var/log/nginx/access.log
+RUN ln -sf /dev/stderr /var/log/nginx/error.log

# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/


@@ -241,4 +241,4 @@ healthcheck:

Jemalloc is embedded in the image and will be used instead of the default allocator.
You can read about jemalloc by reading the Synapse
-[README](https://github.com/matrix-org/synapse/blob/HEAD/README.rst#help-synapse-is-slow-and-eats-all-my-ram-cpu).
+[Admin FAQ](https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html#help-synapse-is-slow-and-eats-all-my-ramcpu).


@@ -57,6 +57,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
      federation_reader, \
      federation_sender, \
      synchrotron, \
+      client_reader, \
      appservice, \
      pusher"


@@ -12,6 +12,8 @@ trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true
bcrypt_rounds: 4
+url_preview_enabled: true
+url_preview_ip_range_blacklist: []

## Registration ##


@@ -39,6 +39,7 @@
# continue to work if so.

import os
+import platform
import subprocess
import sys
from pathlib import Path
@@ -107,6 +108,34 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
+    "client_reader": {
+        "app": "synapse.app.generic_worker",
+        "listener_resources": ["client"],
+        "endpoint_patterns": [
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/publicRooms$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/joined_members$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/context/.*$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/members$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state$",
+            "^/_matrix/client/v1/rooms/.*/hierarchy$",
+            "^/_matrix/client/(v1|unstable)/rooms/.*/relations/",
+            "^/_matrix/client/v1/rooms/.*/threads$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
+            "^/_matrix/client/versions$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
+            "^/_matrix/client/(r0|v3|unstable)/register$",
+            "^/_matrix/client/(r0|v3|unstable)/auth/.*/fallback/web$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/joined_rooms",
+            "^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
+        ],
+        "shared_extra_conf": {},
+        "worker_extra_conf": "",
+    },
    "federation_reader": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["federation"],
@@ -201,24 +230,19 @@ upstream {upstream_worker_type} {{
# Utility functions
def log(txt: str) -> None:
-    """Log something to the stdout.
-
-    Args:
-        txt: The text to log.
-    """
    print(txt)


def error(txt: str) -> NoReturn:
-    """Log something and exit with an error code.
-
-    Args:
-        txt: The text to log in error.
-    """
-    log(txt)
+    print(txt, file=sys.stderr)
    sys.exit(2)


+def flush_buffers() -> None:
+    sys.stdout.flush()
+    sys.stderr.flush()
+
+
def convert(src: str, dst: str, **template_vars: object) -> None:
    """Generate a file from a template
@@ -299,7 +323,7 @@ def generate_base_homeserver_config() -> None:
    # start.py already does this for us, so just call that.
    # note that this script is copied in in the official, monolith dockerfile
    os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
-    subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])
+    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)


def generate_worker_files(
@@ -373,8 +397,8 @@ def generate_worker_files(
        # No workers, just the main process
        worker_types = []
    else:
-        # Split type names by comma
-        worker_types = worker_types_env.split(",")
+        # Split type names by comma, ignoring whitespace.
+        worker_types = [x.strip() for x in worker_types_env.split(",")]

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -393,8 +417,6 @@ def generate_worker_files(

    # For each worker type specified by the user, create config values
    for worker_type in worker_types:
-        worker_type = worker_type.strip()
-
        worker_config = WORKERS_CONFIG.get(worker_type)
        if worker_config:
            worker_config = worker_config.copy()
@@ -604,14 +626,24 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
        with open(mark_filepath, "w") as f:
            f.write("")

+    # Lifted right out of start.py
+    jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)
+
+    if os.path.isfile(jemallocpath):
+        environ["LD_PRELOAD"] = jemallocpath
+    else:
+        log("Could not find %s, will not use" % (jemallocpath,))
+
    # Start supervisord, which will start Synapse, all of the configured worker
    # processes, redis, nginx etc. according to the config we created above.
    log("Starting supervisord")
-    os.execl(
+    flush_buffers()
+    os.execle(
        "/usr/local/bin/supervisord",
        "supervisord",
        "-c",
        "/etc/supervisor/supervisord.conf",
+        environ,
    )
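Two details are worth noting in that last hunk: `os.execle` (unlike `os.execl`) takes an environment mapping as its final argument, so the freshly added `LD_PRELOAD` entry survives into supervisord; and `flush_buffers()` guards against buffered log lines vanishing when the process image is replaced. A minimal sketch of the pattern, using a hypothetical `/bin/echo` target purely for illustration:

```python
import os
import sys

print("about to exec; without a flush this line could be lost")

# exec* replaces the process image immediately, so Python never gets a
# chance to flush its stdio buffers -- flush them by hand first.
sys.stdout.flush()
sys.stderr.flush()

# execle passes an explicit environment to the new program as its last argument.
env = dict(os.environ, LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libjemalloc.so.2")
os.execle("/bin/echo", "echo", "exec'd successfully", env)
```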


@@ -13,14 +13,19 @@ import jinja2

# Utility functions
def log(txt: str) -> None:
-    print(txt, file=sys.stderr)
+    print(txt)


def error(txt: str) -> NoReturn:
-    log(txt)
+    print(txt, file=sys.stderr)
    sys.exit(2)


+def flush_buffers() -> None:
+    sys.stdout.flush()
+    sys.stderr.flush()
+
+
def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
    """Generate a file from a template
@@ -131,10 +136,10 @@ def generate_config_from_template(

    if ownership is not None:
        log(f"Setting ownership on /data to {ownership}")
-        subprocess.check_output(["chown", "-R", ownership, "/data"])
+        subprocess.run(["chown", "-R", ownership, "/data"], check=True)
        args = ["gosu", ownership] + args

-    subprocess.check_output(args)
+    subprocess.run(args, check=True)


def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
@@ -158,7 +163,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
    if ownership is not None:
        # make sure that synapse has perms to write to the data dir.
        log(f"Setting ownership on {data_dir} to {ownership}")
-        subprocess.check_output(["chown", ownership, data_dir])
+        subprocess.run(["chown", ownership, data_dir], check=True)

    # create a suitable log config from our template
    log_config_file = "%s/%s.log.config" % (config_dir, server_name)
@@ -185,6 +190,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
        "--open-private-ports",
    ]
    # log("running %s" % (args, ))
+    flush_buffers()
    os.execv(sys.executable, args)
@@ -267,8 +273,10 @@ running with 'migrate_config'. See the README for more details.
        args = [sys.executable] + args
        if ownership is not None:
            args = ["gosu", ownership] + args
+            flush_buffers()
            os.execve("/usr/sbin/gosu", args, environ)
        else:
+            flush_buffers()
            os.execve(sys.executable, args, environ)
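The `subprocess.check_output` to `subprocess.run(..., check=True)` swap is what stops pre-startup logging being lost: `check_output` captures the child's stdout and returns it, whereas `run` leaves stdout and stderr connected to the parent (here, the container's log stream) while still raising on a non-zero exit. A sketch of the difference, using throwaway `echo` commands:

```python
import subprocess

# check_output swallows the child's stdout (returned, not printed), so any
# log lines the child emits never reach `docker logs`.
captured = subprocess.check_output(["echo", "hidden from the log stream"])

# run(..., check=True) still raises CalledProcessError on failure, but the
# child's stdout and stderr flow straight through to ours.
subprocess.run(["echo", "visible in the log stream"], check=True)
```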


@@ -333,7 +333,7 @@ SYNAPSE_TEST_LOG_LEVEL=DEBUG COMPLEMENT_DIR=../complement ./scripts-dev/compleme
### Prettier formatting with `gotestfmt`

If you want to format the output of the tests the same way as it looks in CI,
-install [gotestfmt](https://github.com/haveyoudebuggedit/gotestfmt).
+install [gotestfmt](https://github.com/GoTestTools/gotestfmt).

You can then use this incantation to format the tests appropriately:
@@ -390,7 +390,7 @@ This file will become part of our [changelog](
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
release, so the content of the file should be a short description of your
change in the same style as the rest of the changelog. The file can contain Markdown
-formatting, and should end with a full stop (.) or an exclamation mark (!) for
+formatting, and must end with a full stop (.) or an exclamation mark (!) for
consistency.

Adding credits to the changelog is encouraged, we value your


@@ -16,14 +16,21 @@

There are two methods of enabling the metrics endpoint in Synapse.

The first serves the metrics as a part of the usual web server and
-can be enabled by adding the \"metrics\" resource to the existing
-listener as such:
+can be enabled by adding the `metrics` resource to the existing
+listener as such as in this example:

```yaml
-  resources:
-    - names:
-      - client
-      - metrics
+listeners:
+  - port: 8008
+    tls: false
+    type: http
+    x_forwarded: true
+    bind_addresses: ['::1', '127.0.0.1']
+
+    resources:
+      # added "metrics" in this line
+      - names: [client, federation, metrics]
+
+    compress: false
```

This provides a simple way of adding metrics to your Synapse
@@ -37,14 +44,24 @@
to just internal networks easier. The served metrics are available
over HTTP only, and will be available at `/_synapse/metrics`.

-Add a new listener to homeserver.yaml:
+Add a new listener to homeserver.yaml as in this example:

```yaml
listeners:
-  - type: metrics
-    port: 9000
-    bind_addresses:
-      - '0.0.0.0'
+  - port: 8008
+    tls: false
+    type: http
+    x_forwarded: true
+    bind_addresses: ['::1', '127.0.0.1']
+
+    resources:
+      - names: [client, federation]
+
+    compress: false
+
+  # beginning of the new metrics listener
+  - port: 9000
+    type: metrics
+    bind_addresses: ['::1', '127.0.0.1']
```

1. Restart Synapse.
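Either way, the served metrics are plain text over HTTP at `/_synapse/metrics`. A quick sanity check after restarting, as a sketch that assumes the standalone listener on port 9000 from the example above:

```python
from urllib.request import urlopen

# The metrics listener speaks plain HTTP; Prometheus-format samples are
# exposed at /_synapse/metrics.
with urlopen("http://127.0.0.1:9000/_synapse/metrics") as resp:
    body = resp.read().decode("utf-8")

# Show the first few metric lines.
print("\n".join(body.splitlines()[:5]))
```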


@@ -336,11 +336,12 @@ oidc_providers:
     issuer: "https://accounts.google.com/"
     client_id: "your-client-id" # TO BE FILLED
     client_secret: "your-client-secret" # TO BE FILLED
-     scopes: ["openid", "profile"]
+     scopes: ["openid", "profile", "email"] # email is optional, read below
     user_mapping_provider:
       config:
         localpart_template: "{{ user.given_name|lower }}"
         display_name_template: "{{ user.name }}"
+        email_template: "{{ user.email }}" # needs "email" in scopes above
```
4. Back in the Google console, add this Authorized redirect URI: `[synapse
   public baseurl]/_synapse/client/oidc/callback`.
@@ -423,7 +424,7 @@ Synapse config:
   user_mapping_provider:
     config:
       display_name_template: "{{ user.name }}"
-      email_template: "{{ '{{ user.email }}' }}"
+      email_template: "{{ user.email }}"
```

Relevant documents:


@@ -0,0 +1,8 @@
worker_app: synapse.app.federation_sender
worker_name: federation_sender1
# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093
worker_log_config: /etc/matrix-synapse/federation-sender-log.yaml


@@ -0,0 +1,8 @@
worker_app: synapse.app.pusher
worker_name: pusher_worker1
# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093
worker_log_config: /etc/matrix-synapse/pusher-worker-log.yaml


@@ -1139,7 +1139,7 @@ number of entries that can be stored.

* `cache_autotuning` and its sub-options `max_cache_memory_usage`, `target_cache_memory_usage`, and
  `min_cache_ttl` work in conjunction with each other to maintain a balance between cache memory
-  usage and cache entry availability. You must be using [jemalloc](https://github.com/matrix-org/synapse#help-synapse-is-slow-and-eats-all-my-ramcpu)
+  usage and cache entry availability. You must be using [jemalloc](../administration/admin_faq.md#help-synapse-is-slow-and-eats-all-my-ramcpu)
  to utilize this option, and all three of the options must be specified for this feature to work. This option
  defaults to off, enable it by providing values for the sub-options listed below. Please note that the feature will not work
  and may cause unstable behavior (such as excessive emptying of caches or exceptions) if all of the values are not provided.
@@ -2088,7 +2088,7 @@ set.

This is primarily intended for use with the `register_new_matrix_user` script
(see [Registering a user](../../setup/installation.md#registering-a-user));
-however, the interface is [documented](../admin_api/register_api.html).
+however, the interface is [documented](../../admin_api/register_api.html).

See also [`registration_shared_secret_path`](#registration_shared_secret_path).
@@ -3541,9 +3541,9 @@ Example configuration:
enable_room_list_search: false
```
---
-### `alias_creation`
+### `alias_creation_rules`

-The `alias_creation` option controls who is allowed to create aliases
+The `alias_creation_rules` option controls who is allowed to create aliases
on this server.

The format of this option is a list of rules that contain globs that


@@ -203,6 +203,8 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/members$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state$
    ^/_matrix/client/v1/rooms/.*/hierarchy$
+    ^/_matrix/client/(v1|unstable)/rooms/.*/relations/
+    ^/_matrix/client/v1/rooms/.*/threads$
    ^/_matrix/client/unstable/org.matrix.msc2716/rooms/.*/batch_send$
    ^/_matrix/client/unstable/im.nheko.summary/rooms/.*/summary$
    ^/_matrix/client/(r0|v3|unstable)/account/3pid$
@@ -488,6 +490,12 @@ pusher_instances:
      - pusher_worker2
```

+An example for a pusher instance:
+
+```yaml
+{{#include systemd-with-workers/workers/pusher_worker.yaml}}
+```
+

### `synapse.app.appservice`
@@ -518,6 +526,12 @@ federation_sender_instances:
      - federation_sender2
```

+An example for a federation sender instance:
+
+```yaml
+{{#include systemd-with-workers/workers/federation_sender.yaml}}
+```
+

### `synapse.app.media_repository`

Handles the media repository. It can handle all endpoints starting with:

poetry.lock (generated file; 487 lines changed)

@@ -10,21 +10,18 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
-tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]

[[package]]
-name = "authlib"
-version = "0.15.5"
-description = "The ultimate Python library in building OAuth and OpenID Connect servers."
+name = "Authlib"
+version = "1.1.0"
+description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
category = "main"
optional = true
python-versions = "*"

[package.dependencies]
-cryptography = "*"
-
-[package.extras]
-client = ["requests"]
+cryptography = ">=3.2"

[[package]]
name = "automat"
@@ -43,16 +40,12 @@ visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"]

[[package]]
name = "bcrypt"
-version = "3.2.0"
+version = "4.0.1"
description = "Modern password hashing for your software and your servers"
category = "main"
optional = false
python-versions = ">=3.6"

-[package.dependencies]
-cffi = ">=1.1"
-six = ">=1.4.1"
-
[package.extras]
tests = ["pytest (>=3.2.1,!=3.3.0)"]
typecheck = ["mypy"]
@@ -82,17 +75,20 @@ uvloop = ["uvloop (>=0.15.2)"]

[[package]]
name = "bleach"
-version = "4.1.0"
+version = "5.0.1"
description = "An easy safelist-based HTML-sanitizing tool."
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"

[package.dependencies]
-packaging = "*"
six = ">=1.9.0"
webencodings = "*"

+[package.extras]
+css = ["tinycss2 (>=1.1.0,<1.2)"]
+dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"]
+
[[package]]
name = "canonicaljson"
version = "1.6.3"
@@ -136,11 +132,11 @@ optional = false
python-versions = ">=3.5.0"

[package.extras]
-unicode_backport = ["unicodedata2"]
+unicode-backport = ["unicodedata2"]

[[package]]
name = "click"
-version = "8.1.1"
+version = "8.1.3"
description = "Composable command line interface toolkit"
category = "dev"
optional = false
@@ -291,7 +287,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}

[[package]]
name = "frozendict"
-version = "2.3.3"
+version = "2.3.4"
description = "A simple immutable dictionary"
category = "main"
optional = false
@@ -341,7 +337,7 @@ idna = ">=2.5"

[[package]]
name = "idna"
-version = "3.3"
+version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
@@ -399,16 +395,17 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"]

[[package]]
name = "isort"
-version = "5.7.0"
+version = "5.10.1"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
-python-versions = ">=3.6,<4.0"
+python-versions = ">=3.6.1,<4.0"

[package.extras]
colors = ["colorama (>=0.4.3,<0.5.0)"]
-pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
-requirements_deprecated_finder = ["pip-api", "pipreqs"]
+pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
+plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]

[[package]]
name = "jaeger-client"
@@ -455,7 +452,7 @@ i18n = ["Babel (>=2.7)"]

[[package]]
name = "jsonschema"
-version = "4.4.0"
+version = "4.16.0"
description = "An implementation of JSON Schema validation for Python"
category = "main"
optional = false
@@ -465,12 +462,13 @@ python-versions = ">=3.7"
attrs = ">=17.4.0"
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
+pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
typing-extensions = {version = "*", markers = "python_version < \"3.8\""}

[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
-format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]

[[package]]
name = "keyring"
@@ -565,8 +563,8 @@ python-versions = "*"

[[package]]
name = "msgpack"
-version = "1.0.3"
-description = "MessagePack (de)serializer."
+version = "1.0.4"
+description = "MessagePack serializer"
category = "main"
optional = false
python-versions = "*"
@@ -665,7 +663,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"

[[package]]
name = "phonenumbers"
-version = "8.12.44"
+version = "8.12.56"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
category = "main"
optional = false
@@ -690,6 +688,14 @@ python-versions = "*"
[package.extras]
testing = ["coverage", "nose"]

+[[package]]
+name = "pkgutil_resolve_name"
+version = "1.3.10"
+description = "Resolve a name to an object."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
[[package]]
name = "platformdirs"
version = "2.5.1"
@@ -715,7 +721,7 @@ twisted = ["twisted"]

[[package]]
name = "psycopg2"
-version = "2.9.3"
+version = "2.9.4"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
category = "main" category = "main"
optional = true optional = true
@ -781,14 +787,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]] [[package]]
name = "pydantic" name = "pydantic"
version = "1.9.1" version = "1.10.2"
description = "Data validation and settings management using python type hints" description = "Data validation and settings management using python type hints"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.6.1" python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
typing-extensions = ">=3.7.4.3" typing-extensions = ">=4.1.0"
[package.extras] [package.extras]
dotenv = ["python-dotenv (>=0.10.4)"] dotenv = ["python-dotenv (>=0.10.4)"]
@ -804,7 +810,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]] [[package]]
name = "pygithub" name = "pygithub"
version = "1.55" version = "1.56"
description = "Use the full Github API v3" description = "Use the full Github API v3"
category = "dev" category = "dev"
optional = false optional = false
@ -1000,7 +1006,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras] [package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"]
[[package]] [[package]]
name = "requests-toolbelt" name = "requests-toolbelt"
@ -1070,7 +1076,7 @@ django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"] falcon = ["falcon (>=1.4)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"] flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"] httpx = ["httpx (>=0.16.0)"]
pure_eval = ["asttokens", "executing", "pure-eval"] pure-eval = ["asttokens", "executing", "pure-eval"]
pyspark = ["pyspark (>=2.4.4)"] pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"] rq = ["rq (>=0.6)"]
@ -1302,11 +1308,11 @@ urllib3 = ">=1.26.0"
[[package]] [[package]]
name = "twisted" name = "twisted"
version = "22.4.0" version = "22.8.0"
description = "An asynchronous networking framework written in Python" description = "An asynchronous networking framework written in Python"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.6.7" python-versions = ">=3.7.1"
[package.dependencies] [package.dependencies]
attrs = ">=19.2.0" attrs = ">=19.2.0"
@ -1315,27 +1321,28 @@ constantly = ">=15.1"
hyperlink = ">=17.1.1" hyperlink = ">=17.1.1"
idna = {version = ">=2.4", optional = true, markers = "extra == \"tls\""} idna = {version = ">=2.4", optional = true, markers = "extra == \"tls\""}
incremental = ">=21.3.0" incremental = ">=21.3.0"
pyopenssl = {version = ">=16.0.0", optional = true, markers = "extra == \"tls\""} pyopenssl = {version = ">=21.0.0", optional = true, markers = "extra == \"tls\""}
service-identity = {version = ">=18.1.0", optional = true, markers = "extra == \"tls\""} service-identity = {version = ">=18.1.0", optional = true, markers = "extra == \"tls\""}
twisted-iocpsupport = {version = ">=1.0.2,<2", markers = "platform_system == \"Windows\""} twisted-iocpsupport = {version = ">=1.0.2,<2", markers = "platform_system == \"Windows\""}
typing-extensions = ">=3.6.5" typing-extensions = ">=3.6.5"
"zope.interface" = ">=4.4.2" "zope.interface" = ">=4.4.2"
[package.extras] [package.extras]
all_non_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] all-non-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"]
conch_nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] conch-nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"]
contextvars = ["contextvars (>=2.4,<3)"] contextvars = ["contextvars (>=2.4,<3)"]
dev = ["coverage (>=6b1,<7)", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)"] dev = ["coverage (>=6b1,<7)", "pydoctor (>=22.7.0,<22.8.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)"]
dev_release = ["pydoctor (>=21.9.0,<21.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)"] dev-release = ["pydoctor (>=22.7.0,<22.8.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)"]
gtk-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pygobject", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"]
macos_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] macos-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=22.7.0,<22.8.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"]
osx_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] osx-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"]
test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)"] test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)"]
tls = ["idna (>=2.4)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"]
windows_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
[[package]] [[package]]
name = "twisted-iocpsupport" name = "twisted-iocpsupport"
@ -1367,7 +1374,7 @@ python-versions = ">=3.6"
[[package]] [[package]]
name = "types-bleach" name = "types-bleach"
version = "4.1.4" version = "5.0.3"
description = "Typing stubs for bleach" description = "Typing stubs for bleach"
category = "dev" category = "dev"
optional = false optional = false
@ -1427,7 +1434,7 @@ python-versions = "*"
[[package]] [[package]]
name = "types-pillow" name = "types-pillow"
version = "9.0.15" version = "9.2.2.1"
description = "Typing stubs for Pillow" description = "Typing stubs for Pillow"
category = "dev" category = "dev"
optional = false optional = false
@ -1435,15 +1442,15 @@ python-versions = "*"
[[package]] [[package]]
name = "types-psycopg2" name = "types-psycopg2"
version = "2.9.9" version = "2.9.21.1"
description = "Typing stubs for psycopg2" description = "Typing stubs for psycopg2"
category = "dev" category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "*"
[[package]] [[package]]
name = "types-pyopenssl" name = "types-pyOpenSSL"
version = "22.0.0" version = "22.0.10"
description = "Typing stubs for pyOpenSSL" description = "Typing stubs for pyOpenSSL"
category = "dev" category = "dev"
optional = false optional = false
@ -1453,8 +1460,8 @@ python-versions = "*"
types-cryptography = "*" types-cryptography = "*"
[[package]] [[package]]
name = "types-pyyaml" name = "types-PyYAML"
version = "6.0.4" version = "6.0.12"
description = "Typing stubs for PyYAML" description = "Typing stubs for PyYAML"
category = "dev" category = "dev"
optional = false optional = false
@ -1462,7 +1469,7 @@ python-versions = "*"
[[package]] [[package]]
name = "types-requests" name = "types-requests"
version = "2.27.11" version = "2.28.11"
description = "Typing stubs for requests" description = "Typing stubs for requests"
category = "dev" category = "dev"
optional = false optional = false
@ -1473,7 +1480,7 @@ types-urllib3 = "<1.27"
[[package]] [[package]]
name = "types-setuptools" name = "types-setuptools"
version = "57.4.9" version = "65.5.0.1"
description = "Typing stubs for setuptools" description = "Typing stubs for setuptools"
category = "dev" category = "dev"
optional = false optional = false
@ -1489,11 +1496,11 @@ python-versions = "*"
[[package]] [[package]]
name = "typing-extensions" name = "typing-extensions"
version = "4.1.1" version = "4.4.0"
description = "Backported and Experimental Type Hints for Python 3.6+" description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.7"
[[package]] [[package]]
name = "unpaddedbase64" name = "unpaddedbase64"
@ -1610,7 +1617,7 @@ test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"]
[extras] [extras]
all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler"] all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler"]
cache_memory = ["Pympler"] cache-memory = ["Pympler"]
jwt = ["authlib"] jwt = ["authlib"]
matrix-synapse-ldap3 = ["matrix-synapse-ldap3"] matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]
oidc = ["authlib"] oidc = ["authlib"]
@ -1621,37 +1628,48 @@ saml2 = ["pysaml2"]
sentry = ["sentry-sdk"] sentry = ["sentry-sdk"]
systemd = ["systemd-python"] systemd = ["systemd-python"]
test = ["parameterized", "idna"] test = ["parameterized", "idna"]
url_preview = ["lxml"] url-preview = ["lxml"]
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = "^3.7.1" python-versions = "^3.7.1"
content-hash = "1b14fc274d9e2a495a7f864150f3ffcf4d9f585e09a67e53301ae4ef3c2f3e48" content-hash = "9400cb5c92bb4648238f652f5e7f81df51cdcf9b7c69d645f35beaa4acb2f420"
[metadata.files] [metadata.files]
attrs = [ attrs = [
{file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
] ]
authlib = [ Authlib = [
{file = "Authlib-0.15.5-py2.py3-none-any.whl", hash = "sha256:ecf4a7a9f2508c0bb07e93a752dd3c495cfaffc20e864ef0ffc95e3f40d2abaf"}, {file = "Authlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:be4b6a1dea51122336c210a6945b27a105b9ac572baffd15b07bcff4376c1523"},
{file = "Authlib-0.15.5.tar.gz", hash = "sha256:b83cf6360c8e92b0e9df0d1f32d675790bcc4e3c03977499b1eed24dcdef4252"}, {file = "Authlib-1.1.0.tar.gz", hash = "sha256:0a270c91409fc2b7b0fbee6996e09f2ee3187358762111a9a4225c874b94e891"},
] ]
automat = [ automat = [
{file = "Automat-20.2.0-py2.py3-none-any.whl", hash = "sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111"}, {file = "Automat-20.2.0-py2.py3-none-any.whl", hash = "sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111"},
{file = "Automat-20.2.0.tar.gz", hash = "sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33"}, {file = "Automat-20.2.0.tar.gz", hash = "sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33"},
] ]
bcrypt = [ bcrypt = [
{file = "bcrypt-3.2.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd"}, {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"},
{file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"}, {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"},
{file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"}, {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"},
{file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"}, {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"},
{file = "bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"}, {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"},
{file = "bcrypt-3.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7"}, {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"},
{file = "bcrypt-3.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d"}, {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"},
{file = "bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"}, {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"},
{file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"}, {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"},
{file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"}, {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"},
{file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"},
{file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"},
{file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"},
{file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"},
{file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"},
{file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"},
{file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"},
{file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"},
{file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"},
{file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"},
{file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
] ]
black = [ black = [
{file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"},
@ -1679,8 +1697,8 @@ black = [
{file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"},
] ]
bleach = [ bleach = [
{file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"},
{file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"},
] ]
canonicaljson = [ canonicaljson = [
{file = "canonicaljson-1.6.3-py3-none-any.whl", hash = "sha256:6ba3cf1702fa3d209b3e915a4e9a3e4ef194f1e8fca189c1f0b7a2a7686a27e6"}, {file = "canonicaljson-1.6.3-py3-none-any.whl", hash = "sha256:6ba3cf1702fa3d209b3e915a4e9a3e4ef194f1e8fca189c1f0b7a2a7686a27e6"},
@ -1747,8 +1765,8 @@ charset-normalizer = [
{file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
] ]
click = [ click = [
{file = "click-8.1.1-py3-none-any.whl", hash = "sha256:5e0d195c2067da3136efb897449ec1e9e6c98282fbf30d7f9e164af9be901a6b"}, {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
{file = "click-8.1.1.tar.gz", hash = "sha256:7ab900e38149c9872376e8f9b5986ddcaf68c0f413cf73678a0bca5547e6f976"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
] ]
click-default-group = [ click-default-group = [
{file = "click-default-group-1.2.2.tar.gz", hash = "sha256:d9560e8e8dfa44b3562fbc9425042a0fd6d21956fcc2db0077f63f34253ab904"}, {file = "click-default-group-1.2.2.tar.gz", hash = "sha256:d9560e8e8dfa44b3562fbc9425042a0fd6d21956fcc2db0077f63f34253ab904"},
@ -1816,23 +1834,23 @@ flake8-comprehensions = [
{file = "flake8_comprehensions-3.8.0-py3-none-any.whl", hash = "sha256:9406314803abe1193c064544ab14fdc43c58424c0882f6ff8a581eb73fc9bb58"}, {file = "flake8_comprehensions-3.8.0-py3-none-any.whl", hash = "sha256:9406314803abe1193c064544ab14fdc43c58424c0882f6ff8a581eb73fc9bb58"},
] ]
frozendict = [ frozendict = [
{file = "frozendict-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39942914c1217a5a49c7551495a103b3dbd216e19413687e003b859c6b0ebc12"}, {file = "frozendict-2.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc"},
{file = "frozendict-2.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5589256058b31f2b91419fa30b8dc62dbdefe7710e688a3fd5b43849161eecc9"}, {file = "frozendict-2.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782"},
{file = "frozendict-2.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:35eb7e59e287c41f4f712d4d3d2333354175b155d217b97c99c201d2d8920790"}, {file = "frozendict-2.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:b98a0d65a59af6da03f794f90b0c3085a7ee14e7bf8f0ef36b079ee8aa992439"},
{file = "frozendict-2.3.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:310aaf81793abf4f471895e6fe65e0e74a28a2aaf7b25c2ba6ccd4e35af06842"}, {file = "frozendict-2.3.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d8042b7dab5e992e30889c9b71b781d5feef19b372d47d735e4d7d45846fd4a"},
{file = "frozendict-2.3.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c353c11010a986566a0cb37f9a783c560ffff7d67d5e7fd52221fb03757cdc43"}, {file = "frozendict-2.3.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25a6d2e8b7cf6b6e5677a1a4b53b4073e5d9ec640d1db30dc679627668d25e90"},
{file = "frozendict-2.3.3-cp36-cp36m-win_amd64.whl", hash = "sha256:15b5f82aad108125336593cec1b6420c638bf45f449c57e50949fc7654ea5a41"}, {file = "frozendict-2.3.4-cp36-cp36m-win_amd64.whl", hash = "sha256:dbbe1339ac2646523e0bb00d1896085d1f70de23780e4927ca82b36ab8a044d3"},
{file = "frozendict-2.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a4737e5257756bd6b877504ff50185b705db577b5330d53040a6cf6417bb3cdb"}, {file = "frozendict-2.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95bac22f7f09d81f378f2b3f672b7a50a974ca180feae1507f5e21bc147e8bc8"},
{file = "frozendict-2.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a14c11e33e8b0bc09e07bba3732c77a502c39edb8c3959fd9a0e490e031158"}, {file = "frozendict-2.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae686722c144b333c4dbdc16323a5de11406d26b76d2be1cc175f90afacb5ba"},
{file = "frozendict-2.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:027952d1698ac9c766ef43711226b178cdd49d2acbdff396936639ad1d2a5615"}, {file = "frozendict-2.3.4-cp37-cp37m-win_amd64.whl", hash = "sha256:389f395a74eb16992217ac1521e689c1dea2d70113bcb18714669ace1ed623b9"},
{file = "frozendict-2.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ef818d66c85098a37cf42509545a4ba7dd0c4c679d6262123a8dc14cc474bab7"}, {file = "frozendict-2.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ccb6450a416c9cc9acef7683e637e28356e3ceeabf83521f74cc2718883076b7"},
{file = "frozendict-2.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:812279f2b270c980112dc4e367b168054f937108f8044eced4199e0ab2945a37"}, {file = "frozendict-2.3.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aca59108b77cadc13ba7dfea7e8f50811208c7652a13dc6c7f92d7782a24d299"},
{file = "frozendict-2.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:c1fb7efbfebc2075f781be3d9774e4ba6ce4fc399148b02097f68d4b3c4bc00a"}, {file = "frozendict-2.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:3ec86ebf143dd685184215c27ec416c36e0ba1b80d81b1b9482f7d380c049b4e"},
{file = "frozendict-2.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0b46d4bf95bce843c0151959d54c3e5b8d0ce29cb44794e820b3ec980d63eee"}, {file = "frozendict-2.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5809e6ff6b7257043a486f7a3b73a7da71cf69a38980b4171e4741291d0d9eb3"},
{file = "frozendict-2.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38c4660f37fcc70a32ff997fe58e40b3fcc60b2017b286e33828efaa16b01308"}, {file = "frozendict-2.3.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c550ed7fdf1962984bec21630c584d722b3ee5d5f57a0ae2527a0121dc0414a"},
{file = "frozendict-2.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:919e3609844fece11ab18bcbf28a3ed20f8108ad4149d7927d413687f281c6c9"}, {file = "frozendict-2.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:3e93aebc6e69a8ef329bbe9afb8342bd33c7b5c7a0c480cb9f7e60b0cbe48072"},
{file = "frozendict-2.3.3-py3-none-any.whl", hash = "sha256:f988b482d08972a196664718167a993a61c9e9f6fe7b0ca2443570b5f20ca44a"}, {file = "frozendict-2.3.4-py3-none-any.whl", hash = "sha256:d722f3d89db6ae35ef35ecc243c40c800eb344848c83dba4798353312cd37b15"},
{file = "frozendict-2.3.3.tar.gz", hash = "sha256:398539c52af3c647d103185bbaa1291679f0507ad035fe3bab2a8b0366d52cf1"}, {file = "frozendict-2.3.4.tar.gz", hash = "sha256:15b4b18346259392b0d27598f240e9390fafbff882137a9c48a1e0104fb17f78"},
] ]
gitdb = [ gitdb = [
{file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
@ -1890,8 +1908,8 @@ hyperlink = [
{file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"},
] ]
idna = [ idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
] ]
ijson = [ ijson = [
{file = "ijson-3.1.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6c1a777096be5f75ffebb335c6d2ebc0e489b231496b7f2ca903aa061fe7d381"}, {file = "ijson-3.1.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6c1a777096be5f75ffebb335c6d2ebc0e489b231496b7f2ca903aa061fe7d381"},
@ -1970,8 +1988,8 @@ incremental = [
{file = "incremental-21.3.0.tar.gz", hash = "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57"}, {file = "incremental-21.3.0.tar.gz", hash = "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57"},
] ]
isort = [ isort = [
{file = "isort-5.7.0-py3-none-any.whl", hash = "sha256:fff4f0c04e1825522ce6949973e83110a6e907750cd92d128b0d14aaaadbffdc"}, {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
{file = "isort-5.7.0.tar.gz", hash = "sha256:c729845434366216d320e936b8ad6f9d681aab72dc7cbc2d51bedc3582f3ad1e"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
] ]
jaeger-client = [ jaeger-client = [
{file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
@ -1985,18 +2003,15 @@ jinja2 = [
{file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"},
] ]
jsonschema = [ jsonschema = [
{file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"}, {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"},
{file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"}, {file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"},
] ]
keyring = [ keyring = [
{file = "keyring-23.5.0-py3-none-any.whl", hash = "sha256:b0d28928ac3ec8e42ef4cc227822647a19f1d544f21f96457965dc01cf555261"}, {file = "keyring-23.5.0-py3-none-any.whl", hash = "sha256:b0d28928ac3ec8e42ef4cc227822647a19f1d544f21f96457965dc01cf555261"},
{file = "keyring-23.5.0.tar.gz", hash = "sha256:9012508e141a80bd1c0b6778d5c610dd9f8c464d75ac6774248500503f972fb9"}, {file = "keyring-23.5.0.tar.gz", hash = "sha256:9012508e141a80bd1c0b6778d5c610dd9f8c464d75ac6774248500503f972fb9"},
] ]
ldap3 = [ ldap3 = [
{file = "ldap3-2.9.1-py2.6.egg", hash = "sha256:5ab7febc00689181375de40c396dcad4f2659cd260fc5e94c508b6d77c17e9d5"},
{file = "ldap3-2.9.1-py2.7.egg", hash = "sha256:2bc966556fc4d4fa9f445a1c31dc484ee81d44a51ab0e2d0fd05b62cac75daa6"},
{file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
{file = "ldap3-2.9.1-py3.9.egg", hash = "sha256:5630d1383e09ba94839e253e013f1aa1a2cf7a547628ba1265cb7b9a844b5687"},
{file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
] ]
lxml = [ lxml = [
@ -2126,40 +2141,58 @@ mccabe = [
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
] ]
msgpack = [ msgpack = [
{file = "msgpack-1.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079"}, {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250"},
{file = "msgpack-1.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3"}, {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88"},
{file = "msgpack-1.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73"}, {file = "msgpack-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467"},
{file = "msgpack-1.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c58cdec1cb5fcea8c2f1771d7b5fec79307d056874f746690bd2bdd609ab147"}, {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6"},
{file = "msgpack-1.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f97c0f35b3b096a330bb4a1a9247d0bd7e1f3a2eba7ab69795501504b1c2c39"}, {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa"},
{file = "msgpack-1.0.3-cp310-cp310-win32.whl", hash = "sha256:36a64a10b16c2ab31dcd5f32d9787ed41fe68ab23dd66957ca2826c7f10d0b85"}, {file = "msgpack-1.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb514ad14edf07a1dbe63761fd30f89ae79b42625731e1ccf5e1f1092950eaa6"},
{file = "msgpack-1.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7"}, {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c23080fdeec4716aede32b4e0ef7e213c7b1093eede9ee010949f2a418ced6ba"},
{file = "msgpack-1.0.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d"}, {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e"},
{file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b"}, {file = "msgpack-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aca0f1644d6b5a73eb3e74d4d64d5d8c6c3d577e753a04c9e9c87d07692c58db"},
{file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec"}, {file = "msgpack-1.0.4-cp310-cp310-win32.whl", hash = "sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef"},
{file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770"}, {file = "msgpack-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075"},
{file = "msgpack-1.0.3-cp36-cp36m-win32.whl", hash = "sha256:3d875631ecab42f65f9dce6f55ce6d736696ced240f2634633188de2f5f21af9"}, {file = "msgpack-1.0.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e83f80a7fec1a62cf4e6c9a660e39c7f878f603737a0cdac8c13131d11d97f52"},
{file = "msgpack-1.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:40fb89b4625d12d6027a19f4df18a4de5c64f6f3314325049f219683e07e678a"}, {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9"},
{file = "msgpack-1.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a"}, {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9"},
{file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc"}, {file = "msgpack-1.0.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c9566f2c39ccced0a38d37c26cc3570983b97833c365a6044edef3574a00c08"},
{file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a"}, {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fcb8a47f43acc113e24e910399376f7277cf8508b27e5b88499f053de6b115a8"},
{file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920"}, {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:76ee788122de3a68a02ed6f3a16bbcd97bc7c2e39bd4d94be2f1821e7c4a64e6"},
{file = "msgpack-1.0.3-cp37-cp37m-win32.whl", hash = "sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50"}, {file = "msgpack-1.0.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae"},
{file = "msgpack-1.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba"}, {file = "msgpack-1.0.4-cp36-cp36m-win32.whl", hash = "sha256:85f279d88d8e833ec015650fd15ae5eddce0791e1e8a59165318f371158efec6"},
{file = "msgpack-1.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea"}, {file = "msgpack-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:c1683841cd4fa45ac427c18854c3ec3cd9b681694caf5bff04edb9387602d661"},
{file = "msgpack-1.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a"}, {file = "msgpack-1.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a75dfb03f8b06f4ab093dafe3ddcc2d633259e6c3f74bb1b01996f5d8aa5868c"},
{file = "msgpack-1.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef"}, {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9667bdfdf523c40d2511f0e98a6c9d3603be6b371ae9a238b7ef2dc4e7a427b0"},
{file = "msgpack-1.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611"}, {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227"},
{file = "msgpack-1.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1"}, {file = "msgpack-1.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac5bd7901487c4a1dd51a8c58f2632b15d838d07ceedaa5e4c080f7190925bff"},
{file = "msgpack-1.0.3-cp38-cp38-win32.whl", hash = "sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4"}, {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd"},
{file = "msgpack-1.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a"}, {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e"},
{file = "msgpack-1.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3"}, {file = "msgpack-1.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:545e3cf0cf74f3e48b470f68ed19551ae6f9722814ea969305794645da091236"},
{file = "msgpack-1.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52"}, {file = "msgpack-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44"},
{file = "msgpack-1.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2"}, {file = "msgpack-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eba96145051ccec0ec86611fe9cf693ce55f2a3ce89c06ed307de0e085730ec1"},
{file = "msgpack-1.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996"}, {file = "msgpack-1.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7760f85956c415578c17edb39eed99f9181a48375b0d4a94076d84148cf67b2d"},
{file = "msgpack-1.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2"}, {file = "msgpack-1.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab"},
{file = "msgpack-1.0.3-cp39-cp39-win32.whl", hash = "sha256:494471d65b25a8751d19c83f1a482fd411d7ca7a3b9e17d25980a74075ba0e88"}, {file = "msgpack-1.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d603de2b8d2ea3f3bcb2efe286849aa7a81531abc52d8454da12f46235092bcb"},
{file = "msgpack-1.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d"}, {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9"},
{file = "msgpack-1.0.3.tar.gz", hash = "sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e"}, {file = "msgpack-1.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916c78f33602ecf0509cc40379271ba0f9ab572b066bd4bdafd7434dee4bc6e"},
{file = "msgpack-1.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81fc7ba725464651190b196f3cd848e8553d4d510114a954681fd0b9c479d7e1"},
{file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5b5b962221fa2c5d3a7f8133f9abffc114fe218eb4365e40f17732ade576c8e"},
{file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:77ccd2af37f3db0ea59fb280fa2165bf1b096510ba9fe0cc2bf8fa92a22fdb43"},
{file = "msgpack-1.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b17be2478b622939e39b816e0aa8242611cc8d3583d1cd8ec31b249f04623243"},
{file = "msgpack-1.0.4-cp38-cp38-win32.whl", hash = "sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2"},
{file = "msgpack-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6"},
{file = "msgpack-1.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae"},
{file = "msgpack-1.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55"},
{file = "msgpack-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f0029245c51fd9473dc1aede1160b0a29f4a912e6b1dd353fa6d317085b219da"},
{file = "msgpack-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed6f7b854a823ea44cf94919ba3f727e230da29feb4a99711433f25800cf747f"},
{file = "msgpack-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92"},
{file = "msgpack-1.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a4192b1ab40f8dca3f2877b70e63799d95c62c068c84dc028b40a6cb03ccd0f"},
{file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624"},
{file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8"},
{file = "msgpack-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:63e29d6e8c9ca22b21846234913c3466b7e4ee6e422f205a2988083de3b08cae"},
{file = "msgpack-1.0.4-cp39-cp39-win32.whl", hash = "sha256:fb62ea4b62bfcb0b380d5680f9a4b3f9a2d166d9394e9bbd9666c0ee09a3645c"},
{file = "msgpack-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce"},
{file = "msgpack-1.0.4.tar.gz", hash = "sha256:f5d869c18f030202eb412f08b28d2afeea553d6613aee89e200d7aca7ef01f5f"},
] ]
mypy = [ mypy = [
{file = "mypy-0.981-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4bc460e43b7785f78862dab78674e62ec3cd523485baecfdf81a555ed29ecfa0"}, {file = "mypy-0.981-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4bc460e43b7785f78862dab78674e62ec3cd523485baecfdf81a555ed29ecfa0"},
@ -2215,8 +2248,8 @@ pathspec = [
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
] ]
phonenumbers = [ phonenumbers = [
{file = "phonenumbers-8.12.44-py2.py3-none-any.whl", hash = "sha256:cc1299cf37b309ecab6214297663ab86cb3d64ae37fd5b88e904fe7983a874a6"}, {file = "phonenumbers-8.12.56-py2.py3-none-any.whl", hash = "sha256:80a7422cf0999a6f9b7a2e6cfbdbbfcc56ab5b75414dc3b805bbec91276b64a3"},
{file = "phonenumbers-8.12.44.tar.gz", hash = "sha256:26cfd0257d1704fe2f88caff2caabb70d16a877b1e65b6aae51f9fbbe10aa8ce"}, {file = "phonenumbers-8.12.56.tar.gz", hash = "sha256:82a4f226c930d02dcdf6d4b29e4cfd8678991fe65c2efd5fdd143557186f0868"},
] ]
pillow = [ pillow = [
{file = "Pillow-9.0.1-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5d24e1d674dd9d72c66ad3ea9131322819ff86250b30dc5821cbafcfa0b96b4"}, {file = "Pillow-9.0.1-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5d24e1d674dd9d72c66ad3ea9131322819ff86250b30dc5821cbafcfa0b96b4"},
@ -2259,6 +2292,10 @@ pkginfo = [
{file = "pkginfo-1.8.2-py2.py3-none-any.whl", hash = "sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc"}, {file = "pkginfo-1.8.2-py2.py3-none-any.whl", hash = "sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc"},
{file = "pkginfo-1.8.2.tar.gz", hash = "sha256:542e0d0b6750e2e21c20179803e40ab50598d8066d51097a0e382cba9eb02bff"}, {file = "pkginfo-1.8.2.tar.gz", hash = "sha256:542e0d0b6750e2e21c20179803e40ab50598d8066d51097a0e382cba9eb02bff"},
] ]
pkgutil_resolve_name = [
{file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"},
{file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
]
platformdirs = [ platformdirs = [
{file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"},
{file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"},
@ -2268,17 +2305,17 @@ prometheus-client = [
{file = "prometheus_client-0.14.0.tar.gz", hash = "sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750"}, {file = "prometheus_client-0.14.0.tar.gz", hash = "sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750"},
] ]
psycopg2 = [ psycopg2 = [
{file = "psycopg2-2.9.3-cp310-cp310-win32.whl", hash = "sha256:083707a696e5e1c330af2508d8fab36f9700b26621ccbcb538abe22e15485362"}, {file = "psycopg2-2.9.4-cp310-cp310-win32.whl", hash = "sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797"},
{file = "psycopg2-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:d3ca6421b942f60c008f81a3541e8faf6865a28d5a9b48544b0ee4f40cac7fca"}, {file = "psycopg2-2.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:1da77c061bdaab450581458932ae5e469cc6e36e0d62f988376e9f513f11cb5c"},
{file = "psycopg2-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:9572e08b50aed176ef6d66f15a21d823bb6f6d23152d35e8451d7d2d18fdac56"}, {file = "psycopg2-2.9.4-cp36-cp36m-win32.whl", hash = "sha256:a11946bad3557ca254f17357d5a4ed63bdca45163e7a7d2bfb8e695df069cc3a"},
{file = "psycopg2-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:a81e3866f99382dfe8c15a151f1ca5fde5815fde879348fe5a9884a7c092a305"}, {file = "psycopg2-2.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:46361c054df612c3cc813fdb343733d56543fb93565cff0f8ace422e4da06acb"},
{file = "psycopg2-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:cb10d44e6694d763fa1078a26f7f6137d69f555a78ec85dc2ef716c37447e4b2"}, {file = "psycopg2-2.9.4-cp37-cp37m-win32.whl", hash = "sha256:aafa96f2da0071d6dd0cbb7633406d99f414b40ab0f918c9d9af7df928a1accb"},
{file = "psycopg2-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4295093a6ae3434d33ec6baab4ca5512a5082cc43c0505293087b8a46d108461"}, {file = "psycopg2-2.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:aa184d551a767ad25df3b8d22a0a62ef2962e0e374c04f6cbd1204947f540d61"},
{file = "psycopg2-2.9.3-cp38-cp38-win32.whl", hash = "sha256:34b33e0162cfcaad151f249c2649fd1030010c16f4bbc40a604c1cb77173dcf7"}, {file = "psycopg2-2.9.4-cp38-cp38-win32.whl", hash = "sha256:839f9ea8f6098e39966d97fcb8d08548fbc57c523a1e27a1f0609addf40f777c"},
{file = "psycopg2-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:0762c27d018edbcb2d34d51596e4346c983bd27c330218c56c4dc25ef7e819bf"}, {file = "psycopg2-2.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:c7fa041b4acb913f6968fce10169105af5200f296028251d817ab37847c30184"},
{file = "psycopg2-2.9.3-cp39-cp39-win32.whl", hash = "sha256:8cf3878353cc04b053822896bc4922b194792df9df2f1ad8da01fb3043602126"}, {file = "psycopg2-2.9.4-cp39-cp39-win32.whl", hash = "sha256:07b90a24d5056687781ddaef0ea172fd951f2f7293f6ffdd03d4f5077801f426"},
{file = "psycopg2-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:06f32425949bd5fe8f625c49f17ebb9784e1e4fe928b7cce72edc36fb68e4c0c"}, {file = "psycopg2-2.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308"},
{file = "psycopg2-2.9.3.tar.gz", hash = "sha256:8e841d1bf3434da985cc5ef13e6f75c8981ced601fd70cc6bf33351b91562981"}, {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
] ]
psycopg2cffi = [ psycopg2cffi = [
{file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"}, {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
@ -2287,34 +2324,12 @@ psycopg2cffi-compat = [
{file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"}, {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
] ]
pyasn1 = [ pyasn1 = [
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
] ]
pyasn1-modules = [ pyasn1-modules = [
{file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"},
{file = "pyasn1_modules-0.2.8-py2.4.egg", hash = "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199"},
{file = "pyasn1_modules-0.2.8-py2.5.egg", hash = "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"},
{file = "pyasn1_modules-0.2.8-py2.6.egg", hash = "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb"},
{file = "pyasn1_modules-0.2.8-py2.7.egg", hash = "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8"},
{file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"},
{file = "pyasn1_modules-0.2.8-py3.1.egg", hash = "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d"},
{file = "pyasn1_modules-0.2.8-py3.2.egg", hash = "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45"},
{file = "pyasn1_modules-0.2.8-py3.3.egg", hash = "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4"},
{file = "pyasn1_modules-0.2.8-py3.4.egg", hash = "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811"},
{file = "pyasn1_modules-0.2.8-py3.5.egg", hash = "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed"},
{file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"},
{file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"},
] ]
pycodestyle = [ pycodestyle = [
{file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
@@ -2325,49 +2340,50 @@ pycparser = [
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
] ]
pydantic = [ pydantic = [
{file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"}, {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"},
{file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"}, {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"},
{file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"}, {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"},
{file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"}, {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"},
{file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"}, {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"},
{file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"}, {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"},
{file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"}, {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"},
{file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"}, {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"},
{file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"}, {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"},
{file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"}, {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"},
{file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"}, {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"},
{file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"}, {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"},
{file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"}, {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"},
{file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"}, {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"},
{file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"}, {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"},
{file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"}, {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"},
{file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"}, {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"},
{file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"}, {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"},
{file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"}, {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"},
{file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"}, {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"},
{file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"}, {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"},
{file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"}, {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"},
{file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"}, {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"},
{file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"}, {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"},
{file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"}, {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"},
{file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"}, {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"},
{file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"}, {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"},
{file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"}, {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"},
{file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"}, {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"},
{file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"}, {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"},
{file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"}, {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"},
{file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"}, {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"},
{file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"}, {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"},
{file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"}, {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"},
{file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"}, {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"},
{file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"},
] ]
pyflakes = [ pyflakes = [
{file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
{file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
] ]
pygithub = [ pygithub = [
{file = "PyGithub-1.55-py3-none-any.whl", hash = "sha256:2caf0054ea079b71e539741ae56c5a95e073b81fa472ce222e81667381b9601b"}, {file = "PyGithub-1.56-py3-none-any.whl", hash = "sha256:d15f13d82165306da8a68aefc0f848a6f6432d5febbff13b60a94758ce3ef8b5"},
{file = "PyGithub-1.55.tar.gz", hash = "sha256:1bbfff9372047ff3f21d5cd8e07720f3dbfdaf6462fcaed9d815f528f1ba7283"}, {file = "PyGithub-1.56.tar.gz", hash = "sha256:80c6d85cf0f9418ffeb840fd105840af694c4f17e102970badbaf678251f2a01"},
] ]
pygments = [ pygments = [
{file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"},
@@ -2452,6 +2468,13 @@ pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
{file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
{file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
{file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
{file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
@@ -2672,8 +2695,8 @@ twine = [
{file = "twine-3.8.0.tar.gz", hash = "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19"}, {file = "twine-3.8.0.tar.gz", hash = "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19"},
] ]
twisted = [ twisted = [
{file = "Twisted-22.4.0-py3-none-any.whl", hash = "sha256:f9f7a91f94932477a9fc3b169d57f54f96c6e74a23d78d9ce54039a7f48928a2"}, {file = "Twisted-22.8.0-py3-none-any.whl", hash = "sha256:8d4718d1e48dcc28933f8beb48dc71cfe77a125e37ad1eb7a3d0acc49baf6c99"},
{file = "Twisted-22.4.0.tar.gz", hash = "sha256:a047990f57dfae1e0bd2b7df2526d4f16dcdc843774dc108b78c52f2a5f13680"}, {file = "Twisted-22.8.0.tar.gz", hash = "sha256:e5b60de39f2d1da153fbe1874d885fe3fcbdb21fcc446fa759a53e8fc3513bed"},
] ]
twisted-iocpsupport = [ twisted-iocpsupport = [
{file = "twisted-iocpsupport-1.0.2.tar.gz", hash = "sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9"}, {file = "twisted-iocpsupport-1.0.2.tar.gz", hash = "sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9"},
@@ -2720,8 +2743,8 @@ typed-ast = [
{file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"},
] ]
types-bleach = [ types-bleach = [
{file = "types-bleach-4.1.4.tar.gz", hash = "sha256:2d30c2c4fb6854088ac636471352c9a51bf6c089289800d2a8060820a01cd43a"}, {file = "types-bleach-5.0.3.tar.gz", hash = "sha256:f7b3df8278efe176d9670d0f063a66c866c77577f71f54b9c7a320e31b1a7bbd"},
{file = "types_bleach-4.1.4-py3-none-any.whl", hash = "sha256:edffe173ed6d7b6f3543036a96204a9319c3bf6c3645917b14274e43f000cc9b"}, {file = "types_bleach-5.0.3-py3-none-any.whl", hash = "sha256:5931525d03571f36b2bb40210c34b662c4d26c8fd6f2b1e1e83fe4d2d2fd63c7"},
] ]
types-commonmark = [ types-commonmark = [
{file = "types-commonmark-0.9.2.tar.gz", hash = "sha256:b894b67750c52fd5abc9a40a9ceb9da4652a391d75c1b480bba9cef90f19fc86"}, {file = "types-commonmark-0.9.2.tar.gz", hash = "sha256:b894b67750c52fd5abc9a40a9ceb9da4652a391d75c1b480bba9cef90f19fc86"},
@@ -2748,36 +2771,36 @@ types-opentracing = [
{file = "types_opentracing-2.4.7-py3-none-any.whl", hash = "sha256:861fb8103b07cf717f501dd400cb274ca9992552314d4d6c7a824b11a215e512"}, {file = "types_opentracing-2.4.7-py3-none-any.whl", hash = "sha256:861fb8103b07cf717f501dd400cb274ca9992552314d4d6c7a824b11a215e512"},
] ]
types-pillow = [ types-pillow = [
{file = "types-Pillow-9.0.15.tar.gz", hash = "sha256:d2e385fe5c192e75970f18accce69f5c2a9f186f3feb578a9b91cd6fdf64211d"}, {file = "types-Pillow-9.2.2.1.tar.gz", hash = "sha256:85c139e06e1c46ec5f9c634d5c54a156b0958d5d0e8be024ed353db0c804b426"},
{file = "types_Pillow-9.0.15-py3-none-any.whl", hash = "sha256:c9646595dfafdf8b63d4b1443292ead17ee0fc7b18a143e497b68e0ea2dc1eb6"}, {file = "types_Pillow-9.2.2.1-py3-none-any.whl", hash = "sha256:3a6a871cade8428433a21ef459bb0a65532b87d05f9e836a0664431ce445bdcf"},
] ]
types-psycopg2 = [ types-psycopg2 = [
{file = "types-psycopg2-2.9.9.tar.gz", hash = "sha256:4f9d4d52eeb343dc00fd5ed4f1513a8a5c18efba0a072eb82706d15cf4f20a2e"}, {file = "types-psycopg2-2.9.21.1.tar.gz", hash = "sha256:f5532cf15afdc6b5ebb1e59b7d896617217321f488fd1fbd74e7efb94decfab6"},
{file = "types_psycopg2-2.9.9-py3-none-any.whl", hash = "sha256:cec9291d4318ad70b407310f8304b3d40f6d0358f09870448f7a65e3027c80af"}, {file = "types_psycopg2-2.9.21.1-py3-none-any.whl", hash = "sha256:858838f1972f39da2a6e28274201fed8619a40a235dd86e7f66f4548ec474395"},
] ]
types-pyopenssl = [ types-pyOpenSSL = [
{file = "types-pyOpenSSL-22.0.0.tar.gz", hash = "sha256:d86dde7f6fe2f1ac9fe0b6282e489f649f480364bdaa9d6a4696d52505f4477e"}, {file = "types-pyOpenSSL-22.0.10.tar.gz", hash = "sha256:f943b834f5b97e5e808764c2f6e37be1a2e226c46792296f61558196acfcc3a1"},
{file = "types_pyOpenSSL-22.0.0-py3-none-any.whl", hash = "sha256:da685f57b864979f36df0157895139c8244ad4aad19b551f1678206fbad0108a"}, {file = "types_pyOpenSSL-22.0.10-py3-none-any.whl", hash = "sha256:63baea211768bea580a769ac5c0d637ae8cd3150314aadc5726ca22e4c4f241a"},
] ]
types-pyyaml = [ types-PyYAML = [
{file = "types-PyYAML-6.0.4.tar.gz", hash = "sha256:6252f62d785e730e454dfa0c9f0fb99d8dae254c5c3c686903cf878ea27c04b7"}, {file = "types-PyYAML-6.0.12.tar.gz", hash = "sha256:f6f350418125872f3f0409d96a62a5a5ceb45231af5cc07ee0034ec48a3c82fa"},
{file = "types_PyYAML-6.0.4-py3-none-any.whl", hash = "sha256:693b01c713464a6851f36ff41077f8adbc6e355eda929addfb4a97208aea9b4b"}, {file = "types_PyYAML-6.0.12-py3-none-any.whl", hash = "sha256:29228db9f82df4f1b7febee06bbfb601677882e98a3da98132e31c6874163e15"},
] ]
types-requests = [ types-requests = [
{file = "types-requests-2.27.11.tar.gz", hash = "sha256:6a7ed24b21780af4a5b5e24c310b2cd885fb612df5fd95584d03d87e5f2a195a"}, {file = "types-requests-2.28.11.tar.gz", hash = "sha256:7ee827eb8ce611b02b5117cfec5da6455365b6a575f5e3ff19f655ba603e6b4e"},
{file = "types_requests-2.27.11-py3-none-any.whl", hash = "sha256:506279bad570c7b4b19ac1f22e50146538befbe0c133b2cea66a9b04a533a859"}, {file = "types_requests-2.28.11-py3-none-any.whl", hash = "sha256:af5f55e803cabcfb836dad752bd6d8a0fc8ef1cd84243061c0e27dee04ccf4fd"},
] ]
types-setuptools = [ types-setuptools = [
{file = "types-setuptools-57.4.9.tar.gz", hash = "sha256:536ef74744f8e1e4be4fc719887f886e74e4cf3c792b4a06984320be4df450b5"}, {file = "types-setuptools-65.5.0.1.tar.gz", hash = "sha256:5b297081c8f1fbd992cd8b305a97ed96ee6ffc765e9115124029597dd10b8a71"},
{file = "types_setuptools-57.4.9-py3-none-any.whl", hash = "sha256:948dc6863373750e2cd0b223a84f1fb608414cde5e55cf38ea657b93aeb411d2"}, {file = "types_setuptools-65.5.0.1-py3-none-any.whl", hash = "sha256:601d45b5e9979d2b931de5403aa11153626a1eadd1ce9727b21f24673ced5ceb"},
] ]
types-urllib3 = [ types-urllib3 = [
{file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"}, {file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"},
{file = "types_urllib3-1.26.10-py3-none-any.whl", hash = "sha256:d755278d5ecd7a7a6479a190e54230f241f1a99c19b81518b756b19dc69e518c"}, {file = "types_urllib3-1.26.10-py3-none-any.whl", hash = "sha256:d755278d5ecd7a7a6479a190e54230f241f1a99c19b81518b756b19dc69e518c"},
] ]
typing-extensions = [ typing-extensions = [
{file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
] ]
unpaddedbase64 = [ unpaddedbase64 = [
{file = "unpaddedbase64-2.1.0-py3-none-any.whl", hash = "sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6"}, {file = "unpaddedbase64-2.1.0-py3-none-any.whl", hash = "sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6"},
View file
@@ -57,7 +57,7 @@ manifest-path = "rust/Cargo.toml"
[tool.poetry] [tool.poetry]
name = "matrix-synapse" name = "matrix-synapse"
version = "1.69.0" version = "1.70.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol" description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"] authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0" license = "Apache-2.0"
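A note on the 1.70.0rc1 tag introduced above: under PEP 440, a release candidate sorts after every 1.69.x release but before the final 1.70.0, so upgrades land in order. A minimal sketch of that ordering using packaging.version (the packaging distribution is pinned in the requirements export below):

    from packaging.version import Version

    # PEP 440 ordering: a release candidate precedes its final release.
    assert Version("1.69.0") < Version("1.70.0rc1") < Version("1.70.0")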
@@ -227,7 +227,7 @@ jwt = ["authlib"]
# (if it is not installed, we fall back to slow code.) # (if it is not installed, we fall back to slow code.)
redis = ["txredisapi", "hiredis"] redis = ["txredisapi", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option. # Required to use experimental `caches.track_memory_usage` config option.
cache_memory = ["pympler"] cache-memory = ["pympler"]
test = ["parameterized", "idna"] test = ["parameterized", "idna"]
# The duplication here is awful. I hate hate hate hate hate it. However, for now I want # The duplication here is awful. I hate hate hate hate hate it. However, for now I want
@@ -258,7 +258,7 @@ all = [
"jaeger-client", "opentracing", "jaeger-client", "opentracing",
# redis # redis
"txredisapi", "hiredis", "txredisapi", "hiredis",
# cache_memory # cache-memory
"pympler", "pympler",
# omitted: # omitted:
# - test: it's useful to have this separate from dev deps in the olddeps job # - test: it's useful to have this separate from dev deps in the olddeps job
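The cache_memory to cache-memory rename in the two hunks above matches PEP 685 extra-name normalization, under which "-", "_", and "." are treated as equivalent in extra names. A small sketch of that rule with packaging.utils.canonicalize_name, which implements the same normalization:

    from packaging.utils import canonicalize_name

    # Both spellings collapse to the same canonical name, so a
    # PEP 685-aware installer resolves either form of the extra.
    assert canonicalize_name("cache_memory") == "cache-memory"
    assert canonicalize_name("cache-memory") == "cache-memory"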
@@ -267,10 +267,10 @@ all = [
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
## We pin black so that our tests don't start failing on new releases. ## We pin black so that our tests don't start failing on new releases.
isort = "==5.7.0" isort = ">=5.10.1"
black = "==22.3.0" black = ">=22.3.0"
flake8-comprehensions = "*" flake8-comprehensions = "*"
flake8-bugbear = "==21.3.2" flake8-bugbear = ">=21.3.2"
flake8 = "*" flake8 = "*"
# Typechecking # Typechecking
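The hunk above swaps exact pins (==) for lower bounds (>=) on isort, black, and flake8-bugbear, so newer lint releases satisfy the constraint without further edits to pyproject.toml. The difference, sketched with packaging.specifiers:

    from packaging.specifiers import SpecifierSet

    # An exact pin rejects any newer release; a lower bound accepts it.
    assert not SpecifierSet("==22.3.0").contains("22.6.0")
    assert SpecifierSet(">=22.3.0").contains("22.6.0")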
@@ -296,11 +296,11 @@ parameterized = ">=0.7.4"
idna = ">=2.5" idna = ">=2.5"
# The following are used by the release script # The following are used by the release script
click = "==8.1.1" click = ">=8.1.3"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints. # GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20" GitPython = ">=3.1.20"
commonmark = "==0.9.1" commonmark = ">=0.9.1"
pygithub = "==1.55" pygithub = ">=1.55"
# The following are executed as commands by the release script. # The following are executed as commands by the release script.
twine = "*" twine = "*"
# Towncrier min version comes from #3425. Rationale unclear. # Towncrier min version comes from #3425. Rationale unclear.
@@ -312,7 +312,7 @@ towncrier = ">=18.6.0rc1"
# system changes. # system changes.
# We are happy to raise these upper bounds upon request, # We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes). # provided we check that it's safe to do so (i.e. that CI passes).
requires = ["poetry-core>=1.0.0,<=1.3.1", "setuptools_rust>=1.3,<=1.5.2"] requires = ["poetry-core>=1.0.0,<=1.3.2", "setuptools_rust>=1.3,<=1.5.2"]
build-backend = "poetry.core.masonry.api" build-backend = "poetry.core.masonry.api"
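Unlike the dev pins, the build requirements stay bounded on both sides, matching the comment above about raising upper bounds only once CI passes; this change admits poetry-core 1.3.2 and nothing newer. The effect of such a bounded specifier, sketched with the same packaging API:

    from packaging.specifiers import SpecifierSet

    build_req = SpecifierSet(">=1.0.0,<=1.3.2")
    assert build_req.contains("1.3.2")      # newly allowed by this bump
    assert not build_req.contains("1.4.0")  # still outside the upper bound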
@@ -321,7 +321,7 @@ build-backend = "poetry.core.masonry.api"
skip = "cp36* *-musllinux_i686" skip = "cp36* *-musllinux_i686"
# We need a rust compiler # We need a rust compiler
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y" before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal"
environment= { PATH = "$PATH:$HOME/.cargo/bin" } environment= { PATH = "$PATH:$HOME/.cargo/bin" }
# For some reason if we don't manually clean the build directory we # For some reason if we don't manually clean the build directory we
View file
@@ -1,26 +1,37 @@
attrs==21.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ attrs==21.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \
--hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd
authlib==0.15.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ authlib==1.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:b83cf6360c8e92b0e9df0d1f32d675790bcc4e3c03977499b1eed24dcdef4252 \ --hash=sha256:0a270c91409fc2b7b0fbee6996e09f2ee3187358762111a9a4225c874b94e891 \
--hash=sha256:ecf4a7a9f2508c0bb07e93a752dd3c495cfaffc20e864ef0ffc95e3f40d2abaf --hash=sha256:be4b6a1dea51122336c210a6945b27a105b9ac572baffd15b07bcff4376c1523
automat==20.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ automat==20.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33 \ --hash=sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33 \
--hash=sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111 --hash=sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111
bcrypt==3.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ bcrypt==4.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d \ --hash=sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535 \
--hash=sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29 \ --hash=sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0 \
--hash=sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7 \ --hash=sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410 \
--hash=sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34 \ --hash=sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd \
--hash=sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7 \ --hash=sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665 \
--hash=sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55 \ --hash=sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab \
--hash=sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd \ --hash=sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71 \
--hash=sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6 \ --hash=sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215 \
--hash=sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1 \ --hash=sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b \
--hash=sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d --hash=sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda \
bleach==4.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9 \
--hash=sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da \ --hash=sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a \
--hash=sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994 --hash=sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344 \
--hash=sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f \
--hash=sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d \
--hash=sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c \
--hash=sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c \
--hash=sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2 \
--hash=sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d \
--hash=sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e \
--hash=sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3
bleach==5.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \
--hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c
canonicaljson==1.6.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ canonicaljson==1.6.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:6ba3cf1702fa3d209b3e915a4e9a3e4ef194f1e8fca189c1f0b7a2a7686a27e6 \ --hash=sha256:6ba3cf1702fa3d209b3e915a4e9a3e4ef194f1e8fca189c1f0b7a2a7686a27e6 \
--hash=sha256:ca59760bc274a899a0da75809d6909ae43e5123381fd6ef040a44d1952c0b448 --hash=sha256:ca59760bc274a899a0da75809d6909ae43e5123381fd6ef040a44d1952c0b448
@@ -105,24 +116,24 @@ cryptography==36.0.1 ; python_full_version >= "3.7.1" and python_full_version <
--hash=sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3 \ --hash=sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3 \
--hash=sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1 \ --hash=sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1 \
--hash=sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee --hash=sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee
frozendict==2.3.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ frozendict==2.3.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:027952d1698ac9c766ef43711226b178cdd49d2acbdff396936639ad1d2a5615 \ --hash=sha256:15b4b18346259392b0d27598f240e9390fafbff882137a9c48a1e0104fb17f78 \
--hash=sha256:15b5f82aad108125336593cec1b6420c638bf45f449c57e50949fc7654ea5a41 \ --hash=sha256:25a6d2e8b7cf6b6e5677a1a4b53b4073e5d9ec640d1db30dc679627668d25e90 \
--hash=sha256:310aaf81793abf4f471895e6fe65e0e74a28a2aaf7b25c2ba6ccd4e35af06842 \ --hash=sha256:389f395a74eb16992217ac1521e689c1dea2d70113bcb18714669ace1ed623b9 \
--hash=sha256:35eb7e59e287c41f4f712d4d3d2333354175b155d217b97c99c201d2d8920790 \ --hash=sha256:3d8042b7dab5e992e30889c9b71b781d5feef19b372d47d735e4d7d45846fd4a \
--hash=sha256:38c4660f37fcc70a32ff997fe58e40b3fcc60b2017b286e33828efaa16b01308 \ --hash=sha256:3e93aebc6e69a8ef329bbe9afb8342bd33c7b5c7a0c480cb9f7e60b0cbe48072 \
--hash=sha256:398539c52af3c647d103185bbaa1291679f0507ad035fe3bab2a8b0366d52cf1 \ --hash=sha256:3ec86ebf143dd685184215c27ec416c36e0ba1b80d81b1b9482f7d380c049b4e \
--hash=sha256:39942914c1217a5a49c7551495a103b3dbd216e19413687e003b859c6b0ebc12 \ --hash=sha256:4a3b32d47282ae0098b9239a6d53ec539da720258bd762d62191b46f2f87c5fc \
--hash=sha256:5589256058b31f2b91419fa30b8dc62dbdefe7710e688a3fd5b43849161eecc9 \ --hash=sha256:5809e6ff6b7257043a486f7a3b73a7da71cf69a38980b4171e4741291d0d9eb3 \
--hash=sha256:80a14c11e33e8b0bc09e07bba3732c77a502c39edb8c3959fd9a0e490e031158 \ --hash=sha256:7c550ed7fdf1962984bec21630c584d722b3ee5d5f57a0ae2527a0121dc0414a \
--hash=sha256:812279f2b270c980112dc4e367b168054f937108f8044eced4199e0ab2945a37 \ --hash=sha256:84c9887179a245a66a50f52afa08d4d92ae0f269839fab82285c70a0fa0dd782 \
--hash=sha256:919e3609844fece11ab18bcbf28a3ed20f8108ad4149d7927d413687f281c6c9 \ --hash=sha256:95bac22f7f09d81f378f2b3f672b7a50a974ca180feae1507f5e21bc147e8bc8 \
--hash=sha256:a0b46d4bf95bce843c0151959d54c3e5b8d0ce29cb44794e820b3ec980d63eee \ --hash=sha256:aca59108b77cadc13ba7dfea7e8f50811208c7652a13dc6c7f92d7782a24d299 \
--hash=sha256:a4737e5257756bd6b877504ff50185b705db577b5330d53040a6cf6417bb3cdb \ --hash=sha256:b98a0d65a59af6da03f794f90b0c3085a7ee14e7bf8f0ef36b079ee8aa992439 \
--hash=sha256:c1fb7efbfebc2075f781be3d9774e4ba6ce4fc399148b02097f68d4b3c4bc00a \ --hash=sha256:ccb6450a416c9cc9acef7683e637e28356e3ceeabf83521f74cc2718883076b7 \
--hash=sha256:c353c11010a986566a0cb37f9a783c560ffff7d67d5e7fd52221fb03757cdc43 \ --hash=sha256:d722f3d89db6ae35ef35ecc243c40c800eb344848c83dba4798353312cd37b15 \
--hash=sha256:ef818d66c85098a37cf42509545a4ba7dd0c4c679d6262123a8dc14cc474bab7 \ --hash=sha256:dae686722c144b333c4dbdc16323a5de11406d26b76d2be1cc175f90afacb5ba \
--hash=sha256:f988b482d08972a196664718167a993a61c9e9f6fe7b0ca2443570b5f20ca44a --hash=sha256:dbbe1339ac2646523e0bb00d1896085d1f70de23780e4927ca82b36ab8a044d3
hiredis==2.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ hiredis==2.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e \ --hash=sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e \
--hash=sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27 \ --hash=sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27 \
@@ -168,9 +179,9 @@ hiredis==2.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0
hyperlink==21.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ hyperlink==21.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \ --hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \
--hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4 --hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4
idna==3.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ idna==3.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
ijson==3.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ ijson==3.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:068c692efba9692406b86736dcc6803e4a0b6280d7f0b7534bff3faec677ff38 \ --hash=sha256:068c692efba9692406b86736dcc6803e4a0b6280d7f0b7534bff3faec677ff38 \
--hash=sha256:09c9d7913c88a6059cd054ff854958f34d757402b639cf212ffbec201a705a0d \ --hash=sha256:09c9d7913c88a6059cd054ff854958f34d757402b639cf212ffbec201a705a0d \
@@ -246,9 +257,9 @@ incremental==21.3.0 ; python_full_version >= "3.7.1" and python_full_version < "
jinja2==3.0.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ jinja2==3.0.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \ --hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \
--hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7 --hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7
jsonschema==4.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ jsonschema==4.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83 \ --hash=sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23 \
--hash=sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823 --hash=sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9
lxml==4.9.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ lxml==4.9.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318 \ --hash=sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318 \
--hash=sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c \ --hash=sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c \
@@ -364,41 +375,59 @@ markupsafe==2.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.
matrix-common==1.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ matrix-common==1.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:524e2785b9b03be4d15f3a8a6b857c5b6af68791ffb1b9918f0ad299abc4db20 \ --hash=sha256:524e2785b9b03be4d15f3a8a6b857c5b6af68791ffb1b9918f0ad299abc4db20 \
--hash=sha256:62e121cccd9f243417b57ec37a76dc44aeb198a7a5c67afd6b8275992ff2abd1 --hash=sha256:62e121cccd9f243417b57ec37a76dc44aeb198a7a5c67afd6b8275992ff2abd1
msgpack==1.0.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ msgpack==1.0.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc \ --hash=sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467 \
--hash=sha256:1c58cdec1cb5fcea8c2f1771d7b5fec79307d056874f746690bd2bdd609ab147 \ --hash=sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae \
--hash=sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3 \ --hash=sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92 \
--hash=sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba \ --hash=sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef \
--hash=sha256:2f97c0f35b3b096a330bb4a1a9247d0bd7e1f3a2eba7ab69795501504b1c2c39 \ --hash=sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624 \
--hash=sha256:36a64a10b16c2ab31dcd5f32d9787ed41fe68ab23dd66957ca2826c7f10d0b85 \ --hash=sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227 \
--hash=sha256:3d875631ecab42f65f9dce6f55ce6d736696ced240f2634633188de2f5f21af9 \ --hash=sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88 \
--hash=sha256:40fb89b4625d12d6027a19f4df18a4de5c64f6f3314325049f219683e07e678a \ --hash=sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9 \
--hash=sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec \ --hash=sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8 \
--hash=sha256:494471d65b25a8751d19c83f1a482fd411d7ca7a3b9e17d25980a74075ba0e88 \ --hash=sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd \
--hash=sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e \ --hash=sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6 \
--hash=sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a \ --hash=sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55 \
--hash=sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b \ --hash=sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e \
--hash=sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1 \ --hash=sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2 \
--hash=sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3 \ --hash=sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44 \
--hash=sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef \ --hash=sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6 \
--hash=sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079 \ --hash=sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9 \
--hash=sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52 \ --hash=sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab \
--hash=sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a \ --hash=sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae \
--hash=sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a \ --hash=sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa \
--hash=sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4 \ --hash=sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9 \
--hash=sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996 \ --hash=sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e \
--hash=sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73 \ --hash=sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250 \
--hash=sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a \ --hash=sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce \
--hash=sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920 \ --hash=sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075 \
--hash=sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7 \ --hash=sha256:545e3cf0cf74f3e48b470f68ed19551ae6f9722814ea969305794645da091236 \
--hash=sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d \ --hash=sha256:63e29d6e8c9ca22b21846234913c3466b7e4ee6e422f205a2988083de3b08cae \
--hash=sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770 \ --hash=sha256:6916c78f33602ecf0509cc40379271ba0f9ab572b066bd4bdafd7434dee4bc6e \
--hash=sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50 \ --hash=sha256:6a4192b1ab40f8dca3f2877b70e63799d95c62c068c84dc028b40a6cb03ccd0f \
--hash=sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2 \ --hash=sha256:6c9566f2c39ccced0a38d37c26cc3570983b97833c365a6044edef3574a00c08 \
--hash=sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2 \ --hash=sha256:76ee788122de3a68a02ed6f3a16bbcd97bc7c2e39bd4d94be2f1821e7c4a64e6 \
--hash=sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d \ --hash=sha256:7760f85956c415578c17edb39eed99f9181a48375b0d4a94076d84148cf67b2d \
--hash=sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea \ --hash=sha256:77ccd2af37f3db0ea59fb280fa2165bf1b096510ba9fe0cc2bf8fa92a22fdb43 \
--hash=sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611 --hash=sha256:81fc7ba725464651190b196f3cd848e8553d4d510114a954681fd0b9c479d7e1 \
--hash=sha256:85f279d88d8e833ec015650fd15ae5eddce0791e1e8a59165318f371158efec6 \
--hash=sha256:9667bdfdf523c40d2511f0e98a6c9d3603be6b371ae9a238b7ef2dc4e7a427b0 \
--hash=sha256:a75dfb03f8b06f4ab093dafe3ddcc2d633259e6c3f74bb1b01996f5d8aa5868c \
--hash=sha256:ac5bd7901487c4a1dd51a8c58f2632b15d838d07ceedaa5e4c080f7190925bff \
--hash=sha256:aca0f1644d6b5a73eb3e74d4d64d5d8c6c3d577e753a04c9e9c87d07692c58db \
--hash=sha256:b17be2478b622939e39b816e0aa8242611cc8d3583d1cd8ec31b249f04623243 \
--hash=sha256:c1683841cd4fa45ac427c18854c3ec3cd9b681694caf5bff04edb9387602d661 \
--hash=sha256:c23080fdeec4716aede32b4e0ef7e213c7b1093eede9ee010949f2a418ced6ba \
--hash=sha256:d5b5b962221fa2c5d3a7f8133f9abffc114fe218eb4365e40f17732ade576c8e \
--hash=sha256:d603de2b8d2ea3f3bcb2efe286849aa7a81531abc52d8454da12f46235092bcb \
--hash=sha256:e83f80a7fec1a62cf4e6c9a660e39c7f878f603737a0cdac8c13131d11d97f52 \
--hash=sha256:eb514ad14edf07a1dbe63761fd30f89ae79b42625731e1ccf5e1f1092950eaa6 \
--hash=sha256:eba96145051ccec0ec86611fe9cf693ce55f2a3ce89c06ed307de0e085730ec1 \
--hash=sha256:ed6f7b854a823ea44cf94919ba3f727e230da29feb4a99711433f25800cf747f \
--hash=sha256:f0029245c51fd9473dc1aede1160b0a29f4a912e6b1dd353fa6d317085b219da \
--hash=sha256:f5d869c18f030202eb412f08b28d2afeea553d6613aee89e200d7aca7ef01f5f \
--hash=sha256:fb62ea4b62bfcb0b380d5680f9a4b3f9a2d166d9394e9bbd9666c0ee09a3645c \
--hash=sha256:fcb8a47f43acc113e24e910399376f7277cf8508b27e5b88499f053de6b115a8
netaddr==0.8.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ netaddr==0.8.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac \ --hash=sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac \
--hash=sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243 --hash=sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243
@@ -408,9 +437,9 @@ packaging==21.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.
parameterized==0.8.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ parameterized==0.8.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c \ --hash=sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c \
--hash=sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9 --hash=sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9
phonenumbers==8.12.44 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ phonenumbers==8.12.56 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:26cfd0257d1704fe2f88caff2caabb70d16a877b1e65b6aae51f9fbbe10aa8ce \ --hash=sha256:80a7422cf0999a6f9b7a2e6cfbdbbfcc56ab5b75414dc3b805bbec91276b64a3 \
--hash=sha256:cc1299cf37b309ecab6214297663ab86cb3d64ae37fd5b88e904fe7983a874a6 --hash=sha256:82a4f226c930d02dcdf6d4b29e4cfd8678991fe65c2efd5fdd143557186f0868
pillow==9.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ pillow==9.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:011233e0c42a4a7836498e98c1acf5e744c96a67dd5032a6f666cc1fb97eab97 \ --hash=sha256:011233e0c42a4a7836498e98c1acf5e744c96a67dd5032a6f666cc1fb97eab97 \
--hash=sha256:0f29d831e2151e0b7b39981756d201f7108d3d215896212ffe2e992d06bfe049 \ --hash=sha256:0f29d831e2151e0b7b39981756d201f7108d3d215896212ffe2e992d06bfe049 \
@@ -447,92 +476,74 @@ pillow==9.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
--hash=sha256:effb7749713d5317478bb3acb3f81d9d7c7f86726d41c1facca068a04cf5bb4c \ --hash=sha256:effb7749713d5317478bb3acb3f81d9d7c7f86726d41c1facca068a04cf5bb4c \
--hash=sha256:f154d173286a5d1863637a7dcd8c3437bb557520b01bddb0be0258dcb72696b5 \ --hash=sha256:f154d173286a5d1863637a7dcd8c3437bb557520b01bddb0be0258dcb72696b5 \
--hash=sha256:f25ed6e28ddf50de7e7ea99d7a976d6a9c415f03adcaac9c41ff6ff41b6d86ac --hash=sha256:f25ed6e28ddf50de7e7ea99d7a976d6a9c415f03adcaac9c41ff6ff41b6d86ac
pkgutil-resolve-name==1.3.10 ; python_full_version >= "3.7.1" and python_version < "3.9" \
--hash=sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174 \
--hash=sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e
prometheus-client==0.14.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ prometheus-client==0.14.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750 \ --hash=sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750 \
--hash=sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2 --hash=sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2
psycopg2==2.9.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ psycopg2==2.9.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:06f32425949bd5fe8f625c49f17ebb9784e1e4fe928b7cce72edc36fb68e4c0c \ --hash=sha256:07b90a24d5056687781ddaef0ea172fd951f2f7293f6ffdd03d4f5077801f426 \
--hash=sha256:0762c27d018edbcb2d34d51596e4346c983bd27c330218c56c4dc25ef7e819bf \ --hash=sha256:1da77c061bdaab450581458932ae5e469cc6e36e0d62f988376e9f513f11cb5c \
--hash=sha256:083707a696e5e1c330af2508d8fab36f9700b26621ccbcb538abe22e15485362 \ --hash=sha256:46361c054df612c3cc813fdb343733d56543fb93565cff0f8ace422e4da06acb \
--hash=sha256:34b33e0162cfcaad151f249c2649fd1030010c16f4bbc40a604c1cb77173dcf7 \ --hash=sha256:839f9ea8f6098e39966d97fcb8d08548fbc57c523a1e27a1f0609addf40f777c \
--hash=sha256:4295093a6ae3434d33ec6baab4ca5512a5082cc43c0505293087b8a46d108461 \ --hash=sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308 \
--hash=sha256:8cf3878353cc04b053822896bc4922b194792df9df2f1ad8da01fb3043602126 \ --hash=sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797 \
--hash=sha256:8e841d1bf3434da985cc5ef13e6f75c8981ced601fd70cc6bf33351b91562981 \ --hash=sha256:a11946bad3557ca254f17357d5a4ed63bdca45163e7a7d2bfb8e695df069cc3a \
--hash=sha256:9572e08b50aed176ef6d66f15a21d823bb6f6d23152d35e8451d7d2d18fdac56 \ --hash=sha256:aa184d551a767ad25df3b8d22a0a62ef2962e0e374c04f6cbd1204947f540d61 \
--hash=sha256:a81e3866f99382dfe8c15a151f1ca5fde5815fde879348fe5a9884a7c092a305 \ --hash=sha256:aafa96f2da0071d6dd0cbb7633406d99f414b40ab0f918c9d9af7df928a1accb \
--hash=sha256:cb10d44e6694d763fa1078a26f7f6137d69f555a78ec85dc2ef716c37447e4b2 \ --hash=sha256:c7fa041b4acb913f6968fce10169105af5200f296028251d817ab37847c30184 \
--hash=sha256:d3ca6421b942f60c008f81a3541e8faf6865a28d5a9b48544b0ee4f40cac7fca --hash=sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f
psycopg2cffi-compat==1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \ psycopg2cffi-compat==1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
--hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05 --hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05
psycopg2cffi==2.9.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \ psycopg2cffi==2.9.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
--hash=sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52 --hash=sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52
pyasn1-modules==0.2.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ pyasn1-modules==0.2.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8 \
--hash=sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199 \
--hash=sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811 \
--hash=sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed \
--hash=sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4 \
--hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
--hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 \ --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74
--hash=sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb \
--hash=sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45 \
--hash=sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd \
--hash=sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0 \
--hash=sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d \
--hash=sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405
pyasn1==0.4.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ pyasn1==0.4.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359 \
--hash=sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576 \
--hash=sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf \
--hash=sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7 \
--hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
--hash=sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00 \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
--hash=sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8 \
--hash=sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86 \
--hash=sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12 \
--hash=sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776 \
--hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba \
--hash=sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2 \
--hash=sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3
pycparser==2.21 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ pycparser==2.21 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.9.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ pydantic==1.10.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f \ --hash=sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42 \
--hash=sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74 \ --hash=sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624 \
--hash=sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1 \ --hash=sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e \
--hash=sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b \ --hash=sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559 \
--hash=sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537 \ --hash=sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709 \
--hash=sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310 \ --hash=sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9 \
--hash=sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810 \ --hash=sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d \
--hash=sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a \ --hash=sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52 \
--hash=sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761 \ --hash=sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda \
--hash=sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892 \ --hash=sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912 \
--hash=sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58 \ --hash=sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c \
--hash=sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761 \ --hash=sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525 \
--hash=sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195 \ --hash=sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe \
--hash=sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1 \ --hash=sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41 \
--hash=sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd \ --hash=sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b \
--hash=sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b \ --hash=sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283 \
--hash=sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee \ --hash=sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965 \
--hash=sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580 \ --hash=sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c \
--hash=sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608 \ --hash=sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410 \
--hash=sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918 \ --hash=sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5 \
--hash=sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380 \ --hash=sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116 \
--hash=sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a \ --hash=sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98 \
--hash=sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0 \ --hash=sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f \
--hash=sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd \ --hash=sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644 \
--hash=sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728 \ --hash=sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13 \
--hash=sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49 \ --hash=sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd \
--hash=sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166 \ --hash=sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254 \
--hash=sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6 \ --hash=sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6 \
--hash=sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131 \ --hash=sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488 \
--hash=sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11 \ --hash=sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5 \
--hash=sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193 \ --hash=sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c \
--hash=sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a \ --hash=sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1 \
--hash=sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd \ --hash=sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a \
--hash=sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e \ --hash=sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2 \
--hash=sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6 --hash=sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d \
--hash=sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236
pymacaroons==0.13.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ pymacaroons==0.13.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8 \ --hash=sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8 \
--hash=sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907 --hash=sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907
@@ -576,6 +587,7 @@ pyrsistent==0.18.1 ; python_full_version >= "3.7.1" and python_full_version < "4
--hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \ --hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \
--hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6 --hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6
pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \ --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \ --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \ --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
@@ -587,26 +599,32 @@ pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \ --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \ --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \ --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \ --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \ --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \ --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \ --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \ --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \ --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \ --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \ --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \ --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \ --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \ --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \ --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \ --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \ --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \ --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \ --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \ --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \ --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \ --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \ --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \ --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
requests==2.27.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ requests==2.27.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
@@ -711,18 +729,18 @@ twisted-iocpsupport==1.0.2 ; python_full_version >= "3.7.1" and python_full_vers
--hash=sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878 \ --hash=sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878 \
--hash=sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565 \ --hash=sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565 \
--hash=sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415 --hash=sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415
twisted==22.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ twisted==22.8.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:a047990f57dfae1e0bd2b7df2526d4f16dcdc843774dc108b78c52f2a5f13680 \ --hash=sha256:8d4718d1e48dcc28933f8beb48dc71cfe77a125e37ad1eb7a3d0acc49baf6c99 \
--hash=sha256:f9f7a91f94932477a9fc3b169d57f54f96c6e74a23d78d9ce54039a7f48928a2 --hash=sha256:e5b60de39f2d1da153fbe1874d885fe3fcbdb21fcc446fa759a53e8fc3513bed
twisted[tls]==22.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ twisted[tls]==22.8.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:a047990f57dfae1e0bd2b7df2526d4f16dcdc843774dc108b78c52f2a5f13680 \ --hash=sha256:8d4718d1e48dcc28933f8beb48dc71cfe77a125e37ad1eb7a3d0acc49baf6c99 \
--hash=sha256:f9f7a91f94932477a9fc3b169d57f54f96c6e74a23d78d9ce54039a7f48928a2 --hash=sha256:e5b60de39f2d1da153fbe1874d885fe3fcbdb21fcc446fa759a53e8fc3513bed
txredisapi==1.4.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ txredisapi==1.4.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:34c9eba8d34f452d30661f073b67b8cd42b695e3d31678ec1bbf628a65a0f059 \ --hash=sha256:34c9eba8d34f452d30661f073b67b8cd42b695e3d31678ec1bbf628a65a0f059 \
--hash=sha256:e6cc43f51e35d608abdca8f8c7d20e148fe1d82679f6e584baea613ebec812bb --hash=sha256:e6cc43f51e35d608abdca8f8c7d20e148fe1d82679f6e584baea613ebec812bb
typing-extensions==4.1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ typing-extensions==4.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \
--hash=sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2 --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e
unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \ unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \
--hash=sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6 \ --hash=sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6 \
--hash=sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005 --hash=sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005

View file

@@ -173,7 +173,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
default_enabled: true, default_enabled: true,
}, },
PushRule { PushRule {
rule_id: Cow::Borrowed("global/override/.org.matrix.msc3786.rule.room.server_acl"), rule_id: Cow::Borrowed("global/override/.m.rule.room.server_acl"),
priority_class: 5, priority_class: 5,
conditions: Cow::Borrowed(&[ conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition { Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
@@ -257,19 +257,6 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
default: true, default: true,
default_enabled: true, default_enabled: true,
}, },
PushRule {
rule_id: Cow::Borrowed("global/underride/.org.matrix.msc3772.thread_reply"),
priority_class: 1,
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelationMatch {
rel_type: Cow::Borrowed("m.thread"),
event_type_pattern: None,
sender: None,
sender_type: Some(Cow::Borrowed("user_id")),
})]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
default: true,
default_enabled: true,
},
PushRule { PushRule {
rule_id: Cow::Borrowed("global/underride/.m.rule.message"), rule_id: Cow::Borrowed("global/underride/.m.rule.message"),
priority_class: 1, priority_class: 1,

View file

@@ -12,10 +12,7 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use std::{ use std::collections::BTreeMap;
borrow::Cow,
collections::{BTreeMap, BTreeSet},
};
use anyhow::{Context, Error}; use anyhow::{Context, Error};
use lazy_static::lazy_static; use lazy_static::lazy_static;
@@ -49,13 +46,6 @@ pub struct PushRuleEvaluator {
/// The `notifications` section of the current power levels in the room. /// The `notifications` section of the current power levels in the room.
notification_power_levels: BTreeMap<String, i64>, notification_power_levels: BTreeMap<String, i64>,
/// The relations related to the event as a mapping from relation type to
/// set of sender/event type 2-tuples.
relations: BTreeMap<String, BTreeSet<(String, String)>>,
/// Is running "relation" conditions enabled?
relation_match_enabled: bool,
/// The power level of the sender of the event, or None if event is an /// The power level of the sender of the event, or None if event is an
/// outlier. /// outlier.
sender_power_level: Option<i64>, sender_power_level: Option<i64>,
@@ -70,8 +60,6 @@ impl PushRuleEvaluator {
room_member_count: u64, room_member_count: u64,
sender_power_level: Option<i64>, sender_power_level: Option<i64>,
notification_power_levels: BTreeMap<String, i64>, notification_power_levels: BTreeMap<String, i64>,
relations: BTreeMap<String, BTreeSet<(String, String)>>,
relation_match_enabled: bool,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
let body = flattened_keys let body = flattened_keys
.get("content.body") .get("content.body")
@@ -83,8 +71,6 @@ impl PushRuleEvaluator {
body, body,
room_member_count, room_member_count,
notification_power_levels, notification_power_levels,
relations,
relation_match_enabled,
sender_power_level, sender_power_level,
}) })
} }
@@ -203,89 +189,11 @@ impl PushRuleEvaluator {
false false
} }
} }
KnownCondition::RelationMatch {
rel_type,
event_type_pattern,
sender,
sender_type,
} => {
self.match_relations(rel_type, sender, sender_type, user_id, event_type_pattern)?
}
}; };
Ok(result) Ok(result)
} }
/// Evaluates a relation condition.
fn match_relations(
&self,
rel_type: &str,
sender: &Option<Cow<str>>,
sender_type: &Option<Cow<str>>,
user_id: Option<&str>,
event_type_pattern: &Option<Cow<str>>,
) -> Result<bool, Error> {
// First check if relation matching is enabled...
if !self.relation_match_enabled {
return Ok(false);
}
// ... and if there are any relations to match against.
let relations = if let Some(relations) = self.relations.get(rel_type) {
relations
} else {
return Ok(false);
};
// Extract the sender pattern from the condition
let sender_pattern = if let Some(sender) = sender {
Some(sender.as_ref())
} else if let Some(sender_type) = sender_type {
if sender_type == "user_id" {
if let Some(user_id) = user_id {
Some(user_id)
} else {
return Ok(false);
}
} else {
warn!("Unrecognized sender_type: {sender_type}");
return Ok(false);
}
} else {
None
};
let mut sender_compiled_pattern = if let Some(pattern) = sender_pattern {
Some(get_glob_matcher(pattern, GlobMatchType::Whole)?)
} else {
None
};
let mut type_compiled_pattern = if let Some(pattern) = event_type_pattern {
Some(get_glob_matcher(pattern, GlobMatchType::Whole)?)
} else {
None
};
for (relation_sender, event_type) in relations {
if let Some(pattern) = &mut sender_compiled_pattern {
if !pattern.is_match(relation_sender)? {
continue;
}
}
if let Some(pattern) = &mut type_compiled_pattern {
if !pattern.is_match(event_type)? {
continue;
}
}
return Ok(true);
}
Ok(false)
}
/// Evaluates a `event_match` condition. /// Evaluates a `event_match` condition.
fn match_event_match( fn match_event_match(
&self, &self,
@@ -359,15 +267,8 @@ impl PushRuleEvaluator {
fn push_rule_evaluator() { fn push_rule_evaluator() {
let mut flattened_keys = BTreeMap::new(); let mut flattened_keys = BTreeMap::new();
flattened_keys.insert("content.body".to_string(), "foo bar bob hello".to_string()); flattened_keys.insert("content.body".to_string(), "foo bar bob hello".to_string());
let evaluator = PushRuleEvaluator::py_new( let evaluator =
flattened_keys, PushRuleEvaluator::py_new(flattened_keys, 10, Some(0), BTreeMap::new()).unwrap();
10,
Some(0),
BTreeMap::new(),
BTreeMap::new(),
true,
)
.unwrap();
let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob")); let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"));
assert_eq!(result.len(), 3); assert_eq!(result.len(), 3);

View file

@@ -275,16 +275,6 @@ pub enum KnownCondition {
SenderNotificationPermission { SenderNotificationPermission {
key: Cow<'static, str>, key: Cow<'static, str>,
}, },
#[serde(rename = "org.matrix.msc3772.relation_match")]
RelationMatch {
rel_type: Cow<'static, str>,
#[serde(skip_serializing_if = "Option::is_none", rename = "type")]
event_type_pattern: Option<Cow<'static, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
sender: Option<Cow<'static, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
sender_type: Option<Cow<'static, str>>,
},
} }
impl IntoPy<PyObject> for Condition { impl IntoPy<PyObject> for Condition {
@@ -401,24 +391,15 @@ impl PushRules {
pub struct FilteredPushRules { pub struct FilteredPushRules {
push_rules: PushRules, push_rules: PushRules,
enabled_map: BTreeMap<String, bool>, enabled_map: BTreeMap<String, bool>,
msc3786_enabled: bool,
msc3772_enabled: bool,
} }
#[pymethods] #[pymethods]
impl FilteredPushRules { impl FilteredPushRules {
#[new] #[new]
pub fn py_new( pub fn py_new(push_rules: PushRules, enabled_map: BTreeMap<String, bool>) -> Self {
push_rules: PushRules,
enabled_map: BTreeMap<String, bool>,
msc3786_enabled: bool,
msc3772_enabled: bool,
) -> Self {
Self { Self {
push_rules, push_rules,
enabled_map, enabled_map,
msc3786_enabled,
msc3772_enabled,
} }
} }
@@ -433,31 +414,13 @@ impl FilteredPushRules {
/// Iterates over all the rules and their enabled state, including base /// Iterates over all the rules and their enabled state, including base
/// rules, in the order they should be executed in. /// rules, in the order they should be executed in.
fn iter(&self) -> impl Iterator<Item = (&PushRule, bool)> { fn iter(&self) -> impl Iterator<Item = (&PushRule, bool)> {
self.push_rules self.push_rules.iter().map(|r| {
.iter() let enabled = *self
.filter(|rule| { .enabled_map
// Ignore disabled experimental push rules .get(&*r.rule_id)
if !self.msc3786_enabled .unwrap_or(&r.default_enabled);
&& rule.rule_id == "global/override/.org.matrix.msc3786.rule.room.server_acl" (r, enabled)
{ })
return false;
}
if !self.msc3772_enabled
&& rule.rule_id == "global/underride/.org.matrix.msc3772.thread_reply"
{
return false;
}
true
})
.map(|r| {
let enabled = *self
.enabled_map
.get(&*r.rule_id)
.unwrap_or(&r.default_enabled);
(r, enabled)
})
} }
} }

View file

@@ -126,7 +126,7 @@ export COMPLEMENT_BASE_IMAGE=complement-synapse
extra_test_args=() extra_test_args=()
test_tags="synapse_blacklist,msc2716,msc3030,msc3787" test_tags="synapse_blacklist,msc3787"
# All environment variables starting with PASS_ will be shared. # All environment variables starting with PASS_ will be shared.
# (The prefix is stripped off before reaching the container.) # (The prefix is stripped off before reaching the container.)
@@ -158,7 +158,10 @@ else
# We only test faster room joins on monoliths, because they are purposefully # We only test faster room joins on monoliths, because they are purposefully
# being developed without worker support to start with. # being developed without worker support to start with.
test_tags="$test_tags,faster_joins" #
# The tests for importing historical messages (MSC2716) and jump to date (MSC3030)
# also only pass with monoliths, currently.
test_tags="$test_tags,faster_joins,msc2716,msc3030"
fi fi

View file

@@ -25,13 +25,7 @@ class PushRules:
def rules(self) -> Collection[PushRule]: ... def rules(self) -> Collection[PushRule]: ...
class FilteredPushRules: class FilteredPushRules:
def __init__( def __init__(self, push_rules: PushRules, enabled_map: Dict[str, bool]): ...
self,
push_rules: PushRules,
enabled_map: Dict[str, bool],
msc3786_enabled: bool,
msc3772_enabled: bool,
): ...
def rules(self) -> Collection[Tuple[PushRule, bool]]: ... def rules(self) -> Collection[Tuple[PushRule, bool]]: ...
def get_base_rule_ids() -> Collection[str]: ... def get_base_rule_ids() -> Collection[str]: ...
@@ -43,8 +37,6 @@ class PushRuleEvaluator:
room_member_count: int, room_member_count: int,
sender_power_level: Optional[int], sender_power_level: Optional[int],
notification_power_levels: Mapping[str, int], notification_power_levels: Mapping[str, int],
relations: Mapping[str, Set[Tuple[str, str]]],
relation_match_enabled: bool,
): ... ): ...
def run( def run(
self, self,

View file

@@ -21,6 +21,7 @@ import os
import sys import sys
from synapse.util.rust import check_rust_lib_up_to_date from synapse.util.rust import check_rust_lib_up_to_date
from synapse.util.stringutils import strtobool
# Check that we're not running on an unsupported Python version. # Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 7): if sys.version_info < (3, 7):
@@ -28,25 +29,22 @@ if sys.version_info < (3, 7):
sys.exit(1) sys.exit(1)
# Allow using the asyncio reactor via env var. # Allow using the asyncio reactor via env var.
if bool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", False)): if strtobool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", "0")):
try: from incremental import Version
from incremental import Version
import twisted import twisted
# We need a bugfix that is included in Twisted 21.2.0: # We need a bugfix that is included in Twisted 21.2.0:
# https://twistedmatrix.com/trac/ticket/9787 # https://twistedmatrix.com/trac/ticket/9787
if twisted.version < Version("Twisted", 21, 2, 0): if twisted.version < Version("Twisted", 21, 2, 0):
print("Using asyncio reactor requires Twisted>=21.2.0") print("Using asyncio reactor requires Twisted>=21.2.0")
sys.exit(1) sys.exit(1)
import asyncio import asyncio
from twisted.internet import asyncioreactor from twisted.internet import asyncioreactor
asyncioreactor.install(asyncio.get_event_loop()) asyncioreactor.install(asyncio.get_event_loop())
except ImportError:
pass
# Twisted and canonicaljson will fail to import when this file is executed to # Twisted and canonicaljson will fail to import when this file is executed to
# get the __version__ during a fresh install. That's OK and subsequent calls to # get the __version__ during a fresh install. That's OK and subsequent calls to
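
The swap from bool() to strtobool() above matters: bool() on an environment variable is truthy for any non-empty string, so SYNAPSE_ASYNC_IO_REACTOR=0 previously enabled the asyncio reactor. Below is a minimal sketch of strtobool-style parsing, assuming semantics matching the deprecated distutils.util.strtobool (which synapse.util.stringutils.strtobool is understood to mirror):

    import os

    def strtobool(val: str) -> bool:
        # Accept the usual truthy/falsy spellings; reject anything else loudly.
        val = val.lower()
        if val in ("y", "yes", "t", "true", "on", "1"):
            return True
        if val in ("n", "no", "f", "false", "off", "0"):
            return False
        raise ValueError(f"invalid truth value {val!r}")

    os.environ["SYNAPSE_ASYNC_IO_REACTOR"] = "0"
    # Old behaviour: any non-empty string, including "0", counted as enabled.
    assert bool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", False)) is True
    # New behaviour: "0" parses as off, and typos raise instead of silently enabling.
    assert strtobool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", "0")) is False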

View file

@@ -72,6 +72,7 @@ from synapse.storage.databases.main.registration import (
RegistrationBackgroundUpdateStore, RegistrationBackgroundUpdateStore,
find_max_generated_user_id_localpart, find_max_generated_user_id_localpart,
) )
from synapse.storage.databases.main.relations import RelationsWorkerStore
from synapse.storage.databases.main.room import RoomBackgroundUpdateStore from synapse.storage.databases.main.room import RoomBackgroundUpdateStore
from synapse.storage.databases.main.roommember import RoomMemberBackgroundUpdateStore from synapse.storage.databases.main.roommember import RoomMemberBackgroundUpdateStore
from synapse.storage.databases.main.search import SearchBackgroundUpdateStore from synapse.storage.databases.main.search import SearchBackgroundUpdateStore
@@ -206,6 +207,7 @@ class Store(
PusherWorkerStore, PusherWorkerStore,
PresenceBackgroundUpdateStore, PresenceBackgroundUpdateStore,
ReceiptsBackgroundUpdateStore, ReceiptsBackgroundUpdateStore,
RelationsWorkerStore,
): ):
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]: def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs) return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)

View file

@@ -31,6 +31,9 @@ MAX_ALIAS_LENGTH = 255
# the maximum length for a user id is 255 characters # the maximum length for a user id is 255 characters
MAX_USERID_LENGTH = 255 MAX_USERID_LENGTH = 255
# Constant value used for the pseudo-thread which is the main timeline.
MAIN_TIMELINE: Final = "main"
class Membership: class Membership:

View file

@@ -640,6 +640,27 @@ class FederationError(RuntimeError):
} }
class FederationPullAttemptBackoffError(RuntimeError):
"""
Raised to indicate that we are deliberately not attempting to pull the given
events over federation because we've already done so recently and are backing off.
Attributes:
event_ids: The event IDs which we are refusing to pull
message: A custom error message that gives more context
"""
def __init__(self, event_ids: List[str], message: Optional[str]):
self.event_ids = event_ids
if message:
error_message = message
else:
error_message = f"Not attempting to pull event_ids={self.event_ids} because we already tried to pull them recently (backing off)."
super().__init__(error_message)
class HttpResponseException(CodeMessageException): class HttpResponseException(CodeMessageException):
""" """
Represents an HTTP-level failure of an outbound request Represents an HTTP-level failure of an outbound request
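
As a self-contained toy, the new FederationPullAttemptBackoffError above is intended to be raised by pull logic and caught by callers that want to skip backed-off events; the backoff store and caller names below are invented for illustration, not Synapse code:

    from typing import Dict, List, Optional

    class FederationPullAttemptBackoffError(RuntimeError):
        # Mirrors the class added above.
        def __init__(self, event_ids: List[str], message: Optional[str]):
            self.event_ids = event_ids
            super().__init__(
                message
                or f"Not attempting to pull event_ids={self.event_ids} because we "
                "already tried to pull them recently (backing off)."
            )

    BACKOFF: Dict[str, bool] = {"$recently_failed": True}  # hypothetical backoff state

    def maybe_pull(event_ids: List[str]) -> None:
        backed_off = [e for e in event_ids if BACKOFF.get(e)]
        if backed_off:
            raise FederationPullAttemptBackoffError(backed_off, None)

    try:
        maybe_pull(["$recently_failed", "$fresh"])
    except FederationPullAttemptBackoffError as exc:
        assert exc.event_ids == ["$recently_failed"]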

View file

@@ -36,7 +36,7 @@ from jsonschema import FormatChecker
from synapse.api.constants import EduTypes, EventContentFields from synapse.api.constants import EduTypes, EventContentFields
from synapse.api.errors import SynapseError from synapse.api.errors import SynapseError
from synapse.api.presence import UserPresenceState from synapse.api.presence import UserPresenceState
from synapse.events import EventBase from synapse.events import EventBase, relation_from_event
from synapse.types import JsonDict, RoomID, UserID from synapse.types import JsonDict, RoomID, UserID
if TYPE_CHECKING: if TYPE_CHECKING:
@@ -53,6 +53,12 @@ FILTER_SCHEMA = {
# check types are valid event types # check types are valid event types
"types": {"type": "array", "items": {"type": "string"}}, "types": {"type": "array", "items": {"type": "string"}},
"not_types": {"type": "array", "items": {"type": "string"}}, "not_types": {"type": "array", "items": {"type": "string"}},
# MSC3874, filtering /messages.
"org.matrix.msc3874.rel_types": {"type": "array", "items": {"type": "string"}},
"org.matrix.msc3874.not_rel_types": {
"type": "array",
"items": {"type": "string"},
},
}, },
} }
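
For context, a room event filter using the new fields might look like the following sketch; the two org.matrix.msc3874.* keys come from the schema above, and the values are illustrative:

    # Only include events that are not part of a thread:
    main_timeline_filter = {
        "limit": 20,
        "org.matrix.msc3874.not_rel_types": ["m.thread"],
    }

    # Conversely, only events that relate to something via m.thread:
    thread_only_filter = {
        "org.matrix.msc3874.rel_types": ["m.thread"],
    }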
@@ -84,6 +90,8 @@ ROOM_EVENT_FILTER_SCHEMA = {
"contains_url": {"type": "boolean"}, "contains_url": {"type": "boolean"},
"lazy_load_members": {"type": "boolean"}, "lazy_load_members": {"type": "boolean"},
"include_redundant_members": {"type": "boolean"}, "include_redundant_members": {"type": "boolean"},
"unread_thread_notifications": {"type": "boolean"},
"org.matrix.msc3773.unread_thread_notifications": {"type": "boolean"},
# Include or exclude events with the provided labels. # Include or exclude events with the provided labels.
# cf https://github.com/matrix-org/matrix-doc/pull/2326 # cf https://github.com/matrix-org/matrix-doc/pull/2326
"org.matrix.labels": {"type": "array", "items": {"type": "string"}}, "org.matrix.labels": {"type": "array", "items": {"type": "string"}},
@@ -240,6 +248,9 @@ class FilterCollection:
def include_redundant_members(self) -> bool: def include_redundant_members(self) -> bool:
return self._room_state_filter.include_redundant_members return self._room_state_filter.include_redundant_members
def unread_thread_notifications(self) -> bool:
return self._room_timeline_filter.unread_thread_notifications
async def filter_presence( async def filter_presence(
self, events: Iterable[UserPresenceState] self, events: Iterable[UserPresenceState]
) -> List[UserPresenceState]: ) -> List[UserPresenceState]:
@@ -304,6 +315,16 @@ class Filter:
self.include_redundant_members = filter_json.get( self.include_redundant_members = filter_json.get(
"include_redundant_members", False "include_redundant_members", False
) )
self.unread_thread_notifications: bool = filter_json.get(
"unread_thread_notifications", False
)
if (
not self.unread_thread_notifications
and hs.config.experimental.msc3773_enabled
):
self.unread_thread_notifications = filter_json.get(
"org.matrix.msc3773.unread_thread_notifications", False
)
self.types = filter_json.get("types", None) self.types = filter_json.get("types", None)
self.not_types = filter_json.get("not_types", []) self.not_types = filter_json.get("not_types", [])
@@ -319,8 +340,15 @@ class Filter:
self.labels = filter_json.get("org.matrix.labels", None) self.labels = filter_json.get("org.matrix.labels", None)
self.not_labels = filter_json.get("org.matrix.not_labels", []) self.not_labels = filter_json.get("org.matrix.not_labels", [])
self.related_by_senders = self.filter_json.get("related_by_senders", None) self.related_by_senders = filter_json.get("related_by_senders", None)
self.related_by_rel_types = self.filter_json.get("related_by_rel_types", None) self.related_by_rel_types = filter_json.get("related_by_rel_types", None)
# For compatibility with _check_fields.
self.rel_types = None
self.not_rel_types = []
if hs.config.experimental.msc3874_enabled:
self.rel_types = filter_json.get("org.matrix.msc3874.rel_types", None)
self.not_rel_types = filter_json.get("org.matrix.msc3874.not_rel_types", [])
def filters_all_types(self) -> bool: def filters_all_types(self) -> bool:
return "*" in self.not_types return "*" in self.not_types
@@ -371,11 +399,19 @@ class Filter:
# check if there is a string url field in the content for filtering purposes # check if there is a string url field in the content for filtering purposes
labels = content.get(EventContentFields.LABELS, []) labels = content.get(EventContentFields.LABELS, [])
# Check if the event has a relation.
rel_type = None
if isinstance(event, EventBase):
relation = relation_from_event(event)
if relation:
rel_type = relation.rel_type
field_matchers = { field_matchers = {
"rooms": lambda v: room_id == v, "rooms": lambda v: room_id == v,
"senders": lambda v: sender == v, "senders": lambda v: sender == v,
"types": lambda v: _matches_wildcard(ev_type, v), "types": lambda v: _matches_wildcard(ev_type, v),
"labels": lambda v: v in labels, "labels": lambda v: v in labels,
"rel_types": lambda v: rel_type == v,
} }
result = self._check_fields(field_matchers) result = self._check_fields(field_matchers)
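
A minimal sketch, not Synapse's actual _check_fields, of the field-matcher pattern used above: each filter field supplies a predicate, and an event passes if it matches no disallowed value and, where an allow-list is set, at least one allowed value:

    from typing import Callable, Dict, List, Optional, Tuple

    FieldFilter = Tuple[Optional[List[str]], List[str]]  # (allowed, disallowed)

    def check_fields(
        filters: Dict[str, FieldFilter],
        matchers: Dict[str, Callable[[str], bool]],
    ) -> bool:
        for field, (allowed, disallowed) in filters.items():
            match = matchers[field]
            if any(match(v) for v in disallowed):
                return False
            if allowed is not None and not any(match(v) for v in allowed):
                return False
        return True

    rel_type = "m.thread"  # extracted from the event, as in the code above
    matchers = {"rel_types": lambda v: rel_type == v}
    assert check_fields({"rel_types": (["m.thread"], [])}, matchers)
    assert not check_fields({"rel_types": (None, ["m.thread"])}, matchers)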

View file

@@ -65,6 +65,7 @@ from synapse.rest.client import (
push_rule, push_rule,
read_marker, read_marker,
receipts, receipts,
relations,
room, room,
room_batch, room_batch,
room_keys, room_keys,
@@ -308,6 +309,7 @@ class GenericWorkerServer(HomeServer):
sync.register_servlets(self, resource) sync.register_servlets(self, resource)
events.register_servlets(self, resource) events.register_servlets(self, resource)
room.register_servlets(self, resource, is_worker=True) room.register_servlets(self, resource, is_worker=True)
relations.register_servlets(self, resource)
room.register_deprecated_servlets(self, resource) room.register_deprecated_servlets(self, resource)
initial_sync.register_servlets(self, resource) initial_sync.register_servlets(self, resource)
room_batch.register_servlets(self, resource) room_batch.register_servlets(self, resource)

View file

@@ -120,7 +120,11 @@ class ApplicationServiceApi(SimpleHttpClient):
uri = service.url + ("/users/%s" % urllib.parse.quote(user_id)) uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
try: try:
response = await self.get_json(uri, {"access_token": service.hs_token}) response = await self.get_json(
uri,
{"access_token": service.hs_token},
headers={"Authorization": f"Bearer {service.hs_token}"},
)
if response is not None: # just an empty json object if response is not None: # just an empty json object
return True return True
except CodeMessageException as e: except CodeMessageException as e:
@@ -140,7 +144,11 @@ class ApplicationServiceApi(SimpleHttpClient):
uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias)) uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
try: try:
response = await self.get_json(uri, {"access_token": service.hs_token}) response = await self.get_json(
uri,
{"access_token": service.hs_token},
headers={"Authorization": f"Bearer {service.hs_token}"},
)
if response is not None: # just an empty json object if response is not None: # just an empty json object
return True return True
except CodeMessageException as e: except CodeMessageException as e:
@@ -181,7 +189,9 @@ class ApplicationServiceApi(SimpleHttpClient):
**fields, **fields,
b"access_token": service.hs_token, b"access_token": service.hs_token,
} }
response = await self.get_json(uri, args=args) response = await self.get_json(
uri, args=args, headers={"Authorization": f"Bearer {service.hs_token}"}
)
if not isinstance(response, list): if not isinstance(response, list):
logger.warning( logger.warning(
"query_3pe to %s returned an invalid response %r", uri, response "query_3pe to %s returned an invalid response %r", uri, response
@@ -217,7 +227,11 @@ class ApplicationServiceApi(SimpleHttpClient):
urllib.parse.quote(protocol), urllib.parse.quote(protocol),
) )
try: try:
info = await self.get_json(uri, {"access_token": service.hs_token}) info = await self.get_json(
uri,
{"access_token": service.hs_token},
headers={"Authorization": f"Bearer {service.hs_token}"},
)
if not _is_valid_3pe_metadata(info): if not _is_valid_3pe_metadata(info):
logger.warning( logger.warning(
@@ -313,6 +327,7 @@ class ApplicationServiceApi(SimpleHttpClient):
uri=uri, uri=uri,
json_body=body, json_body=body,
args={"access_token": service.hs_token}, args={"access_token": service.hs_token},
headers={"Authorization": f"Bearer {service.hs_token}"},
) )
if logger.isEnabledFor(logging.DEBUG): if logger.isEnabledFor(logging.DEBUG):
logger.debug( logger.debug(
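
All four call sites above make the same change: during the transition, the appservice hs_token is sent both as the legacy ?access_token= query parameter and as an Authorization: Bearer header. Schematically (the URL and helper name are invented for the sketch):

    import urllib.parse

    def build_appservice_request(base_url: str, user_id: str, hs_token: str):
        uri = base_url + "/users/" + urllib.parse.quote(user_id)
        args = {"access_token": hs_token}                    # legacy query parameter
        headers = {"Authorization": f"Bearer {hs_token}"}    # preferred header form
        return uri, args, headers

    uri, args, headers = build_appservice_request(
        "https://bridge.example.com", "@alice:example.com", "hs_token_value"
    )
    assert headers["Authorization"] == "Bearer hs_token_value"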

View file

@@ -159,7 +159,7 @@ class CacheConfig(Config):
self.track_memory_usage = cache_config.get("track_memory_usage", False) self.track_memory_usage = cache_config.get("track_memory_usage", False)
if self.track_memory_usage: if self.track_memory_usage:
check_requirements("cache_memory") check_requirements("cache-memory")
expire_caches = cache_config.get("expire_caches", True) expire_caches = cache_config.get("expire_caches", True)
cache_entry_ttl = cache_config.get("cache_entry_ttl", "30m") cache_entry_ttl = cache_config.get("cache_entry_ttl", "30m")

View file

@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from typing import Any from typing import Any, Optional
import attr import attr
@@ -95,16 +95,8 @@ class ExperimentalConfig(Config):
# MSC2815 (allow room moderators to view redacted event content) # MSC2815 (allow room moderators to view redacted event content)
self.msc2815_enabled: bool = experimental.get("msc2815_enabled", False) self.msc2815_enabled: bool = experimental.get("msc2815_enabled", False)
# MSC3786 (Add a default push rule to ignore m.room.server_acl events) # MSC3773: Thread notifications
self.msc3786_enabled: bool = experimental.get("msc3786_enabled", False) self.msc3773_enabled: bool = experimental.get("msc3773_enabled", False)
# MSC3771: Thread read receipts
self.msc3771_enabled: bool = experimental.get("msc3771_enabled", False)
# MSC3772: A push rule for mutual relations.
self.msc3772_enabled: bool = experimental.get("msc3772_enabled", False)
# MSC3715: dir param on /relations.
self.msc3715_enabled: bool = experimental.get("msc3715_enabled", False)
# MSC3848: Introduce errcodes for specific event sending failures # MSC3848: Introduce errcodes for specific event sending failures
self.msc3848_enabled: bool = experimental.get("msc3848_enabled", False) self.msc3848_enabled: bool = experimental.get("msc3848_enabled", False)
@@ -125,3 +117,11 @@ class ExperimentalConfig(Config):
self.msc3882_token_timeout = self.parse_duration( self.msc3882_token_timeout = self.parse_duration(
experimental.get("msc3882_token_timeout", "5m") experimental.get("msc3882_token_timeout", "5m")
) )
# MSC3874: Filtering /messages with rel_types / not_rel_types.
self.msc3874_enabled: bool = experimental.get("msc3874_enabled", False)
# MSC3886: Simple client rendezvous capability
self.msc3886_endpoint: Optional[str] = experimental.get(
"msc3886_endpoint", None
)

View file

@@ -1,27 +0,0 @@
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any
from synapse.types import JsonDict
from ._base import Config
class GroupsConfig(Config):
section = "groups"
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.enable_group_creation = config.get("enable_group_creation", False)
self.group_creation_prefix = config.get("group_creation_prefix", "")

View file

@@ -326,6 +326,8 @@ def setup_logging(
logBeginner: The Twisted logBeginner to use. logBeginner: The Twisted logBeginner to use.
""" """
from twisted.internet import reactor
log_config_path = ( log_config_path = (
config.worker.worker_log_config config.worker.worker_log_config
if use_worker_options if use_worker_options
@@ -348,3 +350,4 @@
) )
logging.info("Server hostname: %s", config.server.server_name) logging.info("Server hostname: %s", config.server.server_name)
logging.info("Instance name: %s", hs.get_instance_name()) logging.info("Instance name: %s", hs.get_instance_name())
logging.info("Twisted reactor: %s", type(reactor).__name__)

View file

@@ -207,6 +207,9 @@ class HttpListenerConfig:
additional_resources: Dict[str, dict] = attr.Factory(dict) additional_resources: Dict[str, dict] = attr.Factory(dict)
tag: Optional[str] = None tag: Optional[str] = None
request_id_header: Optional[str] = None request_id_header: Optional[str] = None
# If true, the listener will return CORS response headers compatible with MSC3886:
# https://github.com/matrix-org/matrix-spec-proposals/pull/3886
experimental_cors_msc3886: bool = False
@attr.s(slots=True, frozen=True, auto_attribs=True) @attr.s(slots=True, frozen=True, auto_attribs=True)
@@ -935,6 +938,7 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
additional_resources=listener.get("additional_resources", {}), additional_resources=listener.get("additional_resources", {}),
tag=listener.get("tag"), tag=listener.get("tag"),
request_id_header=listener.get("request_id_header"), request_id_header=listener.get("request_id_header"),
experimental_cors_msc3886=listener.get("experimental_cors_msc3886", False),
) )
return ListenerConfig(port, bind_addresses, listener_type, tls, http_config) return ListenerConfig(port, bind_addresses, listener_type, tls, http_config)
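
A sketch of a listener definition with the new flag enabled, written as the dict that parse_listener_def() receives (in homeserver.yaml this would be one entry under listeners:); the port and resource values are illustrative:

    listener = {
        "port": 8008,
        "type": "http",
        "tls": False,
        "resources": [{"names": ["client"]}],
        # Ask this listener to return MSC3886-compatible CORS response headers:
        "experimental_cors_msc3886": True,
    }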

View file

@@ -15,7 +15,18 @@
import logging import logging
import typing import typing
from typing import Any, Collection, Dict, Iterable, List, Optional, Set, Tuple, Union from typing import (
Any,
Collection,
Dict,
Iterable,
List,
Mapping,
Optional,
Set,
Tuple,
Union,
)
from canonicaljson import encode_canonical_json from canonicaljson import encode_canonical_json
from signedjson.key import decode_verify_key_bytes from signedjson.key import decode_verify_key_bytes
@@ -134,6 +145,7 @@ def validate_event_for_room_version(event: "EventBase") -> None:
async def check_state_independent_auth_rules( async def check_state_independent_auth_rules(
store: _EventSourceStore, store: _EventSourceStore,
event: "EventBase", event: "EventBase",
batched_auth_events: Optional[Mapping[str, "EventBase"]] = None,
) -> None: ) -> None:
"""Check that an event complies with auth rules that are independent of room state """Check that an event complies with auth rules that are independent of room state
@@ -143,6 +155,8 @@ async def check_state_independent_auth_rules(
Args: Args:
store: the datastore; used to fetch the auth events for validation store: the datastore; used to fetch the auth events for validation
event: the event being checked. event: the event being checked.
batched_auth_events: if the event being authed is part of a batch, any events
from the same batch that may be necessary to auth the current event
Raises: Raises:
AuthError if the checks fail AuthError if the checks fail
@@ -162,6 +176,9 @@ async def check_state_independent_auth_rules(
redact_behaviour=EventRedactBehaviour.as_is, redact_behaviour=EventRedactBehaviour.as_is,
allow_rejected=True, allow_rejected=True,
) )
if batched_auth_events:
auth_events.update(batched_auth_events)
room_id = event.room_id room_id = event.room_id
auth_dict: MutableStateMap[str] = {} auth_dict: MutableStateMap[str] = {}
expected_auth_types = auth_types_for_event(event.room_version, event) expected_auth_types = auth_types_for_event(event.room_version, event)
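
A toy, self-contained illustration of why batched_auth_events exists: when events are authed as a batch, later events may cite earlier ones as auth events before anything has reached the store. All names and types below are stubs, not Synapse's:

    import asyncio
    from typing import Dict, List, Mapping, Optional

    class Event:
        def __init__(self, event_id: str, auth_ids: List[str]):
            self.event_id, self.auth_ids = event_id, auth_ids

    STORE: Dict[str, Event] = {}  # stands in for the datastore

    async def check_auth(event: Event, batched: Optional[Mapping[str, Event]] = None) -> None:
        auth_events = {eid: STORE[eid] for eid in event.auth_ids if eid in STORE}
        if batched:  # fall back to in-flight batch members, mirroring the new parameter
            auth_events.update({k: v for k, v in batched.items() if k in event.auth_ids})
        missing = set(event.auth_ids) - auth_events.keys()
        if missing:
            raise ValueError(f"missing auth events: {missing}")

    async def main() -> None:
        a, b = Event("$a", []), Event("$b", ["$a"])
        await check_auth(b, batched={"$a": a})  # passes though "$a" is not yet stored

    asyncio.run(main())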

View file

@@ -65,7 +65,8 @@ class EventContext:
None does not necessarily mean that ``state_group`` does not have None does not necessarily mean that ``state_group`` does not have
a prev_group! a prev_group!
If the event is a state event, this is normally the same as ``prev_group``. If the event is a state event, this is normally the same as
``state_group_before_event``.
If ``state_group`` is None (ie, the event is an outlier), ``prev_group`` If ``state_group`` is None (ie, the event is an outlier), ``prev_group``
will always also be ``None``. will always also be ``None``.

View file

@@ -1294,7 +1294,7 @@ class FederationClient(FederationBase):
return resp[1] return resp[1]
async def send_knock(self, destinations: List[str], pdu: EventBase) -> JsonDict: async def send_knock(self, destinations: List[str], pdu: EventBase) -> JsonDict:
"""Attempts to send a knock event to given a list of servers. Iterates """Attempts to send a knock event to a given list of servers. Iterates
through the list until one attempt succeeds. through the list until one attempt succeeds.
Doing so will cause the remote server to add the event to the graph, Doing so will cause the remote server to add the event to the graph,

View file

@@ -824,7 +824,14 @@ class FederationServer(FederationBase):
context, self._room_prejoin_state_types context, self._room_prejoin_state_types
) )
) )
return {"knock_state_events": stripped_room_state} return {
"knock_room_state": stripped_room_state,
# Since v1.37, Synapse incorrectly used "knock_state_events" for this field.
# Thus, we also populate a 'knock_state_events' with the same content to
# support old instances.
# See https://github.com/matrix-org/synapse/issues/14088.
"knock_state_events": stripped_room_state,
}
async def _on_send_membership_event( async def _on_send_membership_event(
self, origin: str, content: JsonDict, membership_type: str, room_id: str self, origin: str, content: JsonDict, membership_type: str, room_id: str

View file

@@ -353,21 +353,25 @@ class FederationSender(AbstractFederationSender):
last_token = await self.store.get_federation_out_pos("events") last_token = await self.store.get_federation_out_pos("events")
( (
next_token, next_token,
events,
event_to_received_ts, event_to_received_ts,
) = await self.store.get_all_new_events_stream( ) = await self.store.get_all_new_event_ids_stream(
last_token, self._last_poked_id, limit=100 last_token, self._last_poked_id, limit=100
) )
event_ids = event_to_received_ts.keys()
event_entries = await self.store.get_unredacted_events_from_cache_or_db(
event_ids
)
logger.debug( logger.debug(
"Handling %i -> %i: %i events to send (current id %i)", "Handling %i -> %i: %i events to send (current id %i)",
last_token, last_token,
next_token, next_token,
len(events), len(event_entries),
self._last_poked_id, self._last_poked_id,
) )
if not events and next_token >= self._last_poked_id: if not event_entries and next_token >= self._last_poked_id:
logger.debug("All events processed") logger.debug("All events processed")
break break
@@ -508,8 +512,14 @@ class FederationSender(AbstractFederationSender):
await handle_event(event) await handle_event(event)
events_by_room: Dict[str, List[EventBase]] = {} events_by_room: Dict[str, List[EventBase]] = {}
for event in events:
events_by_room.setdefault(event.room_id, []).append(event) for event_id in event_ids:
# `event_entries` is unsorted, so we have to iterate over `event_ids`
# to ensure the events are in the right order
event_cache = event_entries.get(event_id)
if event_cache:
event = event_cache.event
events_by_room.setdefault(event.room_id, []).append(event)
await make_deferred_yieldable( await make_deferred_yieldable(
defer.gatherResults( defer.gatherResults(
@@ -524,9 +534,10 @@ class FederationSender(AbstractFederationSender):
logger.debug("Successfully handled up to %i", next_token) logger.debug("Successfully handled up to %i", next_token)
await self.store.update_federation_out_pos("events", next_token) await self.store.update_federation_out_pos("events", next_token)
if events: if event_entries:
now = self.clock.time_msec() now = self.clock.time_msec()
ts = event_to_received_ts[events[-1].event_id] last_id = next(reversed(event_ids))
ts = event_to_received_ts[last_id]
assert ts is not None assert ts is not None
synapse.metrics.event_processing_lag.labels( synapse.metrics.event_processing_lag.labels(
@@ -536,7 +547,7 @@ class FederationSender(AbstractFederationSender):
"federation_sender" "federation_sender"
).set(ts) ).set(ts)
events_processed_counter.inc(len(events)) events_processed_counter.inc(len(event_entries))
event_processing_loop_room_count.labels("federation_sender").inc( event_processing_loop_room_count.labels("federation_sender").inc(
len(events_by_room) len(events_by_room)
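
The ordering comment above is the crux of this refactor: the cache lookup returns entries unordered, while event_to_received_ts preserves stream order (Python dicts keep insertion order). A toy illustration, including the next(reversed(...)) idiom used to find the newest id:

    event_to_received_ts = {"$1": 100, "$2": 200, "$3": 300}   # stream order
    event_ids = event_to_received_ts.keys()
    event_entries = {"$3": "entry3", "$1": "entry1"}           # unordered cache hits; "$2" missed

    # Iterate event_ids (ordered), not event_entries (unordered):
    ordered = [event_entries[eid] for eid in event_ids if eid in event_entries]
    assert ordered == ["entry1", "entry3"]

    # Newest event id, without materialising a list:
    last_id = next(reversed(event_ids))
    assert last_id == "$3"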

View file

@@ -45,6 +45,7 @@ from synapse.federation.units import Transaction
from synapse.http.matrixfederationclient import ByteParser from synapse.http.matrixfederationclient import ByteParser
from synapse.http.types import QueryParams from synapse.http.types import QueryParams
from synapse.types import JsonDict from synapse.types import JsonDict
from synapse.util import ExceptionBundle
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -926,8 +927,7 @@ class SendJoinParser(ByteParser[SendJoinResponse]):
return len(data) return len(data)
def finish(self) -> SendJoinResponse: def finish(self) -> SendJoinResponse:
for c in self._coros: _close_coros(self._coros)
c.close()
if self._response.event_dict: if self._response.event_dict:
self._response.event = make_event_from_dict( self._response.event = make_event_from_dict(
@@ -970,6 +970,27 @@ class _StateParser(ByteParser[StateRequestResponse]):
return len(data) return len(data)
def finish(self) -> StateRequestResponse: def finish(self) -> StateRequestResponse:
for c in self._coros: _close_coros(self._coros)
c.close()
return self._response return self._response
def _close_coros(coros: Iterable[Generator[None, bytes, None]]) -> None:
"""Close each of the given coroutines.
Always calls .close() on each coroutine, even if doing so raises an exception.
Any exceptions raised are aggregated into an ExceptionBundle.
:raises ExceptionBundle: if at least one coroutine fails to close.
"""
exceptions = []
for c in coros:
try:
c.close()
except Exception as e:
exceptions.append(e)
if exceptions:
# raise from the first exception so that the traceback has slightly more context
raise ExceptionBundle(
f"There were {len(exceptions)} errors closing coroutines", exceptions
) from exceptions[0]
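
A toy demonstration of the aggregate-then-raise pattern that _close_coros implements, with a stand-in for synapse.util.ExceptionBundle:

    from typing import Callable, List

    class ExceptionBundle(Exception):  # stand-in for synapse.util.ExceptionBundle
        def __init__(self, message: str, exceptions: List[Exception]):
            super().__init__(message)
            self.exceptions = exceptions

    def close_all(closers: List[Callable[[], None]]) -> None:
        exceptions = []
        for close in closers:
            try:
                close()  # keep going even if one close() blows up
            except Exception as e:
                exceptions.append(e)
        if exceptions:
            # raise from the first exception so the traceback has some context
            raise ExceptionBundle(
                f"There were {len(exceptions)} errors closing coroutines", exceptions
            ) from exceptions[0]

    def ok() -> None: ...
    def boom() -> None: raise RuntimeError("boom")

    try:
        close_all([boom, ok, boom])
    except ExceptionBundle as e:
        assert len(e.exceptions) == 2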

View file

@@ -489,7 +489,7 @@ class FederationV2InviteServlet(BaseFederationServerServlet):
room_version = content["room_version"] room_version = content["room_version"]
event = content["event"] event = content["event"]
invite_room_state = content["invite_room_state"] invite_room_state = content.get("invite_room_state", [])
# Synapse expects invite_room_state to be in unsigned, as it is in v1 # Synapse expects invite_room_state to be in unsigned, as it is in v1
# API # API

View file

@@ -225,7 +225,7 @@ class AccountDataEventSource(EventSource[int, JsonDict]):
self, self,
user: UserID, user: UserID,
from_key: int, from_key: int,
limit: Optional[int], limit: int,
room_ids: Collection[str], room_ids: Collection[str],
is_guest: bool, is_guest: bool,
explicit_room_id: Optional[str] = None, explicit_room_id: Optional[str] = None,

View file

@@ -109,10 +109,13 @@ class ApplicationServicesHandler:
last_token = await self.store.get_appservice_last_pos() last_token = await self.store.get_appservice_last_pos()
( (
upper_bound, upper_bound,
events,
event_to_received_ts, event_to_received_ts,
) = await self.store.get_all_new_events_stream( ) = await self.store.get_all_new_event_ids_stream(
last_token, self.current_max, limit=100, get_prev_content=True last_token, self.current_max, limit=100
)
events = await self.store.get_events_as_list(
event_to_received_ts.keys(), get_prev_content=True
) )
events_by_room: Dict[str, List[EventBase]] = {} events_by_room: Dict[str, List[EventBase]] = {}

View file

@@ -937,7 +937,10 @@ class DeviceListUpdater:
# Check if we are partially joining any rooms. If so we need to store # Check if we are partially joining any rooms. If so we need to store
# all device list updates so that we can handle them correctly once we # all device list updates so that we can handle them correctly once we
# know who is in the room. # know who is in the room.
partial_rooms = await self.store.get_partial_state_rooms_and_servers() # TODO(faster joins): this fetches and processes a bunch of data that we don't
# use. Could be replaced by a tighter query e.g.
# SELECT EXISTS(SELECT 1 FROM partial_state_rooms)
partial_rooms = await self.store.get_partial_state_room_resync_info()
if partial_rooms: if partial_rooms:
await self.store.add_remote_device_list_to_pending( await self.store.add_remote_device_list_to_pending(
user_id, user_id,

View file

@ -16,6 +16,8 @@ import logging
import string import string
from typing import TYPE_CHECKING, Iterable, List, Optional from typing import TYPE_CHECKING, Iterable, List, Optional
from typing_extensions import Literal
from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes
from synapse.api.errors import ( from synapse.api.errors import (
AuthError, AuthError,
@ -434,7 +436,10 @@ class DirectoryHandler:
return await self.auth.check_can_change_room_list(room_id, requester) return await self.auth.check_can_change_room_list(room_id, requester)
async def edit_published_room_list( async def edit_published_room_list(
self, requester: Requester, room_id: str, visibility: str self,
requester: Requester,
room_id: str,
visibility: Literal["public", "private"],
) -> None: ) -> None:
"""Edit the entry of the room in the published room list. """Edit the entry of the room in the published room list.
@ -456,9 +461,6 @@ class DirectoryHandler:
if requester.is_guest: if requester.is_guest:
raise AuthError(403, "Guests cannot edit the published room list") raise AuthError(403, "Guests cannot edit the published room list")
if visibility not in ["public", "private"]:
raise SynapseError(400, "Invalid visibility setting")
if visibility == "public" and not self.enable_room_list_search: if visibility == "public" and not self.enable_room_list_search:
# The room list has been disabled. # The room list has been disabled.
raise AuthError( raise AuthError(
@ -510,7 +512,11 @@ class DirectoryHandler:
await self.store.set_room_is_public(room_id, making_public) await self.store.set_room_is_public(room_id, making_public)
async def edit_published_appservice_room_list( async def edit_published_appservice_room_list(
self, appservice_id: str, network_id: str, room_id: str, visibility: str self,
appservice_id: str,
network_id: str,
room_id: str,
visibility: Literal["public", "private"],
) -> None: ) -> None:
"""Add or remove a room from the appservice/network specific public """Add or remove a room from the appservice/network specific public
room list. room list.
@ -521,9 +527,6 @@ class DirectoryHandler:
room_id room_id
visibility: either "public" or "private" visibility: either "public" or "private"
""" """
if visibility not in ["public", "private"]:
raise SynapseError(400, "Invalid visibility setting")
await self.store.set_room_is_public_appservice( await self.store.set_room_is_public_appservice(
room_id, appservice_id, network_id, visibility == "public" room_id, appservice_id, network_id, visibility == "public"
) )

View file

@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import logging import logging
from typing import TYPE_CHECKING, Collection, List, Optional, Union from typing import TYPE_CHECKING, Collection, List, Mapping, Optional, Union
from synapse import event_auth from synapse import event_auth
from synapse.api.constants import ( from synapse.api.constants import (
@ -29,7 +29,6 @@ from synapse.event_auth import (
) )
from synapse.events import EventBase from synapse.events import EventBase
from synapse.events.builder import EventBuilder from synapse.events.builder import EventBuilder
from synapse.events.snapshot import EventContext
from synapse.types import StateMap, get_domain_from_id from synapse.types import StateMap, get_domain_from_id
if TYPE_CHECKING: if TYPE_CHECKING:
@ -51,12 +50,21 @@ class EventAuthHandler:
async def check_auth_rules_from_context( async def check_auth_rules_from_context(
self, self,
event: EventBase, event: EventBase,
context: EventContext, batched_auth_events: Optional[Mapping[str, EventBase]] = None,
) -> None: ) -> None:
"""Check an event passes the auth rules at its own auth events""" """Check an event passes the auth rules at its own auth events
await check_state_independent_auth_rules(self._store, event) Args:
event: event to be authed
batched_auth_events: if the event being authed is part of a batch, any events
from the same batch that may be necessary to auth the current event
"""
await check_state_independent_auth_rules(
self._store, event, batched_auth_events
)
auth_event_ids = event.auth_event_ids() auth_event_ids = event.auth_event_ids()
auth_events_by_id = await self._store.get_events(auth_event_ids) auth_events_by_id = await self._store.get_events(auth_event_ids)
if batched_auth_events:
auth_events_by_id.update(batched_auth_events)
check_state_dependent_auth_rules(event, auth_events_by_id.values()) check_state_dependent_auth_rules(event, auth_events_by_id.values())
def compute_auth_events( def compute_auth_events(

View file

@ -45,6 +45,7 @@ from synapse.api.errors import (
Codes, Codes,
FederationDeniedError, FederationDeniedError,
FederationError, FederationError,
FederationPullAttemptBackoffError,
HttpResponseException, HttpResponseException,
LimitExceededError, LimitExceededError,
NotFoundError, NotFoundError,
@ -631,6 +632,7 @@ class FederationHandler:
room_id=room_id, room_id=room_id,
servers=ret.servers_in_room, servers=ret.servers_in_room,
device_lists_stream_id=self.store.get_device_stream_token(), device_lists_stream_id=self.store.get_device_stream_token(),
joined_via=origin,
) )
try: try:
@ -781,15 +783,27 @@ class FederationHandler:
# Send the signed event back to the room, and potentially receive some # Send the signed event back to the room, and potentially receive some
# further information about the room in the form of partial state events # further information about the room in the form of partial state events
stripped_room_state = await self.federation_client.send_knock( knock_response = await self.federation_client.send_knock(target_hosts, event)
target_hosts, event
)
# Store any stripped room state events in the "unsigned" key of the event. # Store any stripped room state events in the "unsigned" key of the event.
# This is a bit of a hack and is cribbing off of invites. Basically we # This is a bit of a hack and is cribbing off of invites. Basically we
# store the room state here and retrieve it again when this event appears # store the room state here and retrieve it again when this event appears
# in the invitee's sync stream. It is stripped out for all other local users. # in the invitee's sync stream. It is stripped out for all other local users.
event.unsigned["knock_room_state"] = stripped_room_state["knock_state_events"] stripped_room_state = (
knock_response.get("knock_room_state")
# Since v1.37, Synapse incorrectly used "knock_state_events" for this field.
# Thus, we also check for a 'knock_state_events' to support old instances.
# See https://github.com/matrix-org/synapse/issues/14088.
or knock_response.get("knock_state_events")
)
if stripped_room_state is None:
raise KeyError(
"Missing 'knock_room_state' (or legacy 'knock_state_events') field in "
"send_knock response"
)
event.unsigned["knock_room_state"] = stripped_room_state
context = EventContext.for_outlier(self._storage_controllers) context = EventContext.for_outlier(self._storage_controllers)
stream_id = await self._federation_event_handler.persist_events_and_notify( stream_id = await self._federation_event_handler.persist_events_and_notify(
@ -928,7 +942,7 @@ class FederationHandler:
# The remote hasn't signed it yet, obviously. We'll do the full checks # The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_join_request` # when we get the event back in `on_send_join_request`
await self._event_auth_handler.check_auth_rules_from_context(event, context) await self._event_auth_handler.check_auth_rules_from_context(event)
return event return event
async def on_invite_request( async def on_invite_request(
@ -1109,7 +1123,7 @@ class FederationHandler:
try: try:
# The remote hasn't signed it yet, obviously. We'll do the full checks # The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_leave_request` # when we get the event back in `on_send_leave_request`
await self._event_auth_handler.check_auth_rules_from_context(event, context) await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e: except AuthError as e:
logger.warning("Failed to create new leave %r because %s", event, e) logger.warning("Failed to create new leave %r because %s", event, e)
raise e raise e
@ -1168,7 +1182,7 @@ class FederationHandler:
try: try:
# The remote hasn't signed it yet, obviously. We'll do the full checks # The remote hasn't signed it yet, obviously. We'll do the full checks
# when we get the event back in `on_send_knock_request` # when we get the event back in `on_send_knock_request`
await self._event_auth_handler.check_auth_rules_from_context(event, context) await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e: except AuthError as e:
logger.warning("Failed to create new knock %r because %s", event, e) logger.warning("Failed to create new knock %r because %s", event, e)
raise e raise e
@ -1334,9 +1348,7 @@ class FederationHandler:
try: try:
validate_event_for_room_version(event) validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context( await self._event_auth_handler.check_auth_rules_from_context(event)
event, context
)
except AuthError as e: except AuthError as e:
logger.warning("Denying new third party invite %r because %s", event, e) logger.warning("Denying new third party invite %r because %s", event, e)
raise e raise e
@ -1386,7 +1398,7 @@ class FederationHandler:
try: try:
validate_event_for_room_version(event) validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(event, context) await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e: except AuthError as e:
logger.warning("Denying third party invite %r because %s", event, e) logger.warning("Denying third party invite %r because %s", event, e)
raise e raise e
@ -1602,13 +1614,13 @@ class FederationHandler:
"""Resumes resyncing of all partial-state rooms after a restart.""" """Resumes resyncing of all partial-state rooms after a restart."""
assert not self.config.worker.worker_app assert not self.config.worker.worker_app
partial_state_rooms = await self.store.get_partial_state_rooms_and_servers() partial_state_rooms = await self.store.get_partial_state_room_resync_info()
for room_id, servers_in_room in partial_state_rooms.items(): for room_id, resync_info in partial_state_rooms.items():
run_as_background_process( run_as_background_process(
desc="sync_partial_state_room", desc="sync_partial_state_room",
func=self._sync_partial_state_room, func=self._sync_partial_state_room,
initial_destination=None, initial_destination=resync_info.joined_via,
other_destinations=servers_in_room, other_destinations=resync_info.servers_in_room,
room_id=room_id, room_id=room_id,
) )
@ -1637,28 +1649,12 @@ class FederationHandler:
# really leave, that might mean we have difficulty getting the room state over # really leave, that might mean we have difficulty getting the room state over
# federation. # federation.
# https://github.com/matrix-org/synapse/issues/12802 # https://github.com/matrix-org/synapse/issues/12802
#
# TODO(faster_joins): we need some way of prioritising which homeservers in
# `other_destinations` to try first, otherwise we'll spend ages trying dead
# homeservers for large rooms.
# https://github.com/matrix-org/synapse/issues/12999
if initial_destination is None and len(other_destinations) == 0:
raise ValueError(
f"Cannot resync state of {room_id}: no destinations provided"
)
# Make an infinite iterator of destinations to try. Once we find a working # Make an infinite iterator of destinations to try. Once we find a working
# destination, we'll stick with it until it flakes. # destination, we'll stick with it until it flakes.
destinations: Collection[str] destinations = _prioritise_destinations_for_partial_state_resync(
if initial_destination is not None: initial_destination, other_destinations, room_id
# Move `initial_destination` to the front of the list. )
destinations = list(other_destinations)
if initial_destination in destinations:
destinations.remove(initial_destination)
destinations = [initial_destination] + destinations
else:
destinations = other_destinations
destination_iter = itertools.cycle(destinations) destination_iter = itertools.cycle(destinations)
# `destination` is the current remote homeserver we're pulling from. # `destination` is the current remote homeserver we're pulling from.
@ -1708,7 +1704,22 @@ class FederationHandler:
destination, event destination, event
) )
break break
except FederationPullAttemptBackoffError as exc:
# Log a warning about why we failed to process the event (the error message
# for `FederationPullAttemptBackoffError` is pretty good)
logger.warning("_sync_partial_state_room: %s", exc)
# We do not record a failed pull attempt when we back off fetching a missing
# `prev_event` because not being able to fetch the `prev_events` just means
# we won't be able to de-outlier the pulled event. But we can still use an
# `outlier` in the state/auth chain for another event. So we shouldn't stop
# a downstream event from trying to pull it.
#
# This avoids a cascade of backoffs for all events in the DAG downstream of
# one event that is backing off upstream.
except FederationError as e: except FederationError as e:
# TODO: We should `record_event_failed_pull_attempt` here,
# see https://github.com/matrix-org/synapse/issues/13700
if attempt == len(destinations) - 1: if attempt == len(destinations) - 1:
# We have tried every remote server for this event. Give up. # We have tried every remote server for this event. Give up.
# TODO(faster_joins) giving up isn't the right thing to do # TODO(faster_joins) giving up isn't the right thing to do
@ -1741,3 +1752,29 @@ class FederationHandler:
room_id, room_id,
destination, destination,
) )
def _prioritise_destinations_for_partial_state_resync(
initial_destination: Optional[str],
other_destinations: Collection[str],
room_id: str,
) -> Collection[str]:
"""Work out the order in which we should ask servers to resync events.
If an `initial_destination` is given, it takes top priority. Otherwise
all servers are treated equally.
:raises ValueError: if no destination is provided at all.
"""
if initial_destination is None and len(other_destinations) == 0:
raise ValueError(f"Cannot resync state of {room_id}: no destinations provided")
if initial_destination is None:
return other_destinations
# Move `initial_destination` to the front of the list.
destinations = list(other_destinations)
if initial_destination in destinations:
destinations.remove(initial_destination)
destinations = [initial_destination] + destinations
return destinations
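A quick usage sketch of the helper above, with invented server names, showing how its output feeds the itertools.cycle loop in _sync_partial_state_room:

import itertools

destinations = _prioritise_destinations_for_partial_state_resync(
    initial_destination="first.example.org",
    other_destinations=["other.example.org", "first.example.org"],
    room_id="!room:example.org",
)
assert list(destinations) == ["first.example.org", "other.example.org"]

# Cycle through the prioritised destinations, sticking with one until it flakes.
destination_iter = itertools.cycle(destinations)
destination = next(destination_iter)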

View file

@ -44,6 +44,7 @@ from synapse.api.errors import (
AuthError, AuthError,
Codes, Codes,
FederationError, FederationError,
FederationPullAttemptBackoffError,
HttpResponseException, HttpResponseException,
RequestSendFailed, RequestSendFailed,
SynapseError, SynapseError,
@ -414,7 +415,9 @@ class FederationEventHandler:
# First, precalculate the joined hosts so that the federation sender doesn't # First, precalculate the joined hosts so that the federation sender doesn't
# need to. # need to.
await self._event_creation_handler.cache_joined_hosts_for_event(event, context) await self._event_creation_handler.cache_joined_hosts_for_events(
[(event, context)]
)
await self._check_for_soft_fail(event, context=context, origin=origin) await self._check_for_soft_fail(event, context=context, origin=origin)
await self._run_push_actions_and_persist_event(event, context) await self._run_push_actions_and_persist_event(event, context)
@ -565,6 +568,9 @@ class FederationEventHandler:
event: partial-state event to be de-partial-stated event: partial-state event to be de-partial-stated
Raises: Raises:
FederationPullAttemptBackoffError if we are deliberately not attempting
to pull the given event over federation because we've already done so
recently and are backing off.
FederationError if we fail to request state from the remote server. FederationError if we fail to request state from the remote server.
""" """
logger.info("Updating state for %s", event.event_id) logger.info("Updating state for %s", event.event_id)
@ -792,9 +798,42 @@ class FederationEventHandler:
], ],
) )
# Check if we already have any of these events.
# Note: we currently make a lookup in the database directly here rather than
# checking the event cache, due to:
# https://github.com/matrix-org/synapse/issues/13476
existing_events_map = await self._store._get_events_from_db(
[event.event_id for event in events]
)
new_events = []
for event in events:
event_id = event.event_id
# If we've already seen this event ID...
if event_id in existing_events_map:
existing_event = existing_events_map[event_id]
# ...and the event itself was not previously stored as an outlier...
if not existing_event.event.internal_metadata.is_outlier():
# ...then there's no need to persist it. We have it already.
logger.info(
"_process_pulled_event: Ignoring received event %s which we "
"have already seen",
event.event_id,
)
continue
# While we have seen this event before, it was stored as an outlier.
# We'll now persist it as a non-outlier.
logger.info("De-outliering event %s", event_id)
# Continue on with the events that are new to us.
new_events.append(event)
# We want to sort these by depth so we process them and # We want to sort these by depth so we process them and
# tell clients about them in order. # tell clients about them in order.
sorted_events = sorted(events, key=lambda x: x.depth) sorted_events = sorted(new_events, key=lambda x: x.depth)
for ev in sorted_events: for ev in sorted_events:
with nested_logging_context(ev.event_id): with nested_logging_context(ev.event_id):
await self._process_pulled_event(origin, ev, backfilled=backfilled) await self._process_pulled_event(origin, ev, backfilled=backfilled)
@ -846,18 +885,6 @@ class FederationEventHandler:
event_id = event.event_id event_id = event.event_id
existing = await self._store.get_event(
event_id, allow_none=True, allow_rejected=True
)
if existing:
if not existing.internal_metadata.is_outlier():
logger.info(
"_process_pulled_event: Ignoring received event %s which we have already seen",
event_id,
)
return
logger.info("De-outliering event %s", event_id)
try: try:
self._sanity_check_event(event) self._sanity_check_event(event)
except SynapseError as err: except SynapseError as err:
@ -899,6 +926,18 @@ class FederationEventHandler:
context, context,
backfilled=backfilled, backfilled=backfilled,
) )
except FederationPullAttemptBackoffError as exc:
# Log a warning about why we failed to process the event (the error message
# for `FederationPullAttemptBackoffError` is pretty good)
logger.warning("_process_pulled_event: %s", exc)
# We do not record a failed pull attempt when we back off fetching a missing
# `prev_event` because not being able to fetch the `prev_events` just means
# we won't be able to de-outlier the pulled event. But we can still use an
# `outlier` in the state/auth chain for another event. So we shouldn't stop
# a downstream event from trying to pull it.
#
# This avoids a cascade of backoffs for all events in the DAG downstream of
# one event that is backing off upstream.
except FederationError as e: except FederationError as e:
await self._store.record_event_failed_pull_attempt( await self._store.record_event_failed_pull_attempt(
event.room_id, event_id, str(e) event.room_id, event_id, str(e)
@ -945,6 +984,9 @@ class FederationEventHandler:
The event context. The event context.
Raises: Raises:
FederationPullAttemptBackoffError if we are deliberately not attempting
to pull the given event over federation because we've already done so
recently and are backing off.
FederationError if we fail to get the state from the remote server after any FederationError if we fail to get the state from the remote server after any
missing `prev_event`s. missing `prev_event`s.
""" """
@ -955,6 +997,18 @@ class FederationEventHandler:
seen = await self._store.have_events_in_timeline(prevs) seen = await self._store.have_events_in_timeline(prevs)
missing_prevs = prevs - seen missing_prevs = prevs - seen
# If we've already recently attempted to pull this missing event, don't
# try it again so soon. Since we have to fetch all of the prev_events, we can
# bail early here if we find any to ignore.
prevs_to_ignore = await self._store.get_event_ids_to_not_pull_from_backoff(
room_id, missing_prevs
)
if len(prevs_to_ignore) > 0:
raise FederationPullAttemptBackoffError(
event_ids=prevs_to_ignore,
message=f"While computing context for event={event_id}, not attempting to pull missing prev_event={prevs_to_ignore[0]} because we already tried to pull recently (backing off).",
)
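The filtering inside get_event_ids_to_not_pull_from_backoff is not shown in this diff; a rough sketch of the exponential-backoff window it implies (the in-memory table and timings are assumptions, not Synapse's actual schema):

import time
from typing import Dict, List, Set, Tuple

# event_id -> (timestamp of last failed pull attempt, number of failures)
FailedAttempts = Dict[str, Tuple[float, int]]

def event_ids_to_skip(
    failed: FailedAttempts, event_ids: Set[str], base_secs: float = 60.0
) -> List[str]:
    now = time.time()
    skip = []
    for event_id in event_ids:
        attempt = failed.get(event_id)
        if attempt is None:
            continue
        last_ts, num_failures = attempt
        # Skip while we are still inside the exponentially growing window.
        if now < last_ts + base_secs * (2 ** num_failures):
            skip.append(event_id)
    return skip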
if not missing_prevs: if not missing_prevs:
return await self._state_handler.compute_event_context(event) return await self._state_handler.compute_event_context(event)
@ -2240,8 +2294,8 @@ class FederationEventHandler:
event_pos = PersistedEventPosition( event_pos = PersistedEventPosition(
self._instance_name, event.internal_metadata.stream_ordering self._instance_name, event.internal_metadata.stream_ordering
) )
await self._notifier.on_new_room_event( await self._notifier.on_new_room_events(
event, event_pos, max_stream_token, extra_users=extra_users [(event, event_pos)], max_stream_token, extra_users=extra_users
) )
if event.type == EventTypes.Member and event.membership == Membership.JOIN: if event.type == EventTypes.Member and event.membership == Membership.JOIN:

View file

@ -57,13 +57,7 @@ class InitialSyncHandler:
self.validator = EventValidator() self.validator = EventValidator()
self.snapshot_cache: ResponseCache[ self.snapshot_cache: ResponseCache[
Tuple[ Tuple[
str, str, Optional[StreamToken], Optional[StreamToken], str, int, bool, bool
Optional[StreamToken],
Optional[StreamToken],
str,
Optional[int],
bool,
bool,
] ]
] = ResponseCache(hs.get_clock(), "initial_sync_cache") ] = ResponseCache(hs.get_clock(), "initial_sync_cache")
self._event_serializer = hs.get_event_client_serializer() self._event_serializer = hs.get_event_client_serializer()
@ -154,11 +148,6 @@ class InitialSyncHandler:
public_room_ids = await self.store.get_public_room_ids() public_room_ids = await self.store.get_public_room_ids()
if pagin_config.limit is not None:
limit = pagin_config.limit
else:
limit = 10
serializer_options = SerializeEventConfig(as_client_event=as_client_event) serializer_options = SerializeEventConfig(as_client_event=as_client_event)
async def handle_room(event: RoomsForUser) -> None: async def handle_room(event: RoomsForUser) -> None:
@ -210,7 +199,7 @@ class InitialSyncHandler:
run_in_background( run_in_background(
self.store.get_recent_events_for_room, self.store.get_recent_events_for_room,
event.room_id, event.room_id,
limit=limit, limit=pagin_config.limit,
end_token=room_end_token, end_token=room_end_token,
), ),
deferred_room_state, deferred_room_state,
@ -360,15 +349,11 @@ class InitialSyncHandler:
member_event_id member_event_id
) )
limit = pagin_config.limit if pagin_config else None
if limit is None:
limit = 10
leave_position = await self.store.get_position_for_event(member_event_id) leave_position = await self.store.get_position_for_event(member_event_id)
stream_token = leave_position.to_room_stream_token() stream_token = leave_position.to_room_stream_token()
messages, token = await self.store.get_recent_events_for_room( messages, token = await self.store.get_recent_events_for_room(
room_id, limit=limit, end_token=stream_token room_id, limit=pagin_config.limit, end_token=stream_token
) )
messages = await filter_events_for_client( messages = await filter_events_for_client(
@ -420,10 +405,6 @@ class InitialSyncHandler:
now_token = self.hs.get_event_sources().get_current_token() now_token = self.hs.get_event_sources().get_current_token()
limit = pagin_config.limit if pagin_config else None
if limit is None:
limit = 10
room_members = [ room_members = [
m m
for m in current_state.values() for m in current_state.values()
@ -467,7 +448,7 @@ class InitialSyncHandler:
run_in_background( run_in_background(
self.store.get_recent_events_for_room, self.store.get_recent_events_for_room,
room_id, room_id,
limit=limit, limit=pagin_config.limit,
end_token=now_token.room_key, end_token=now_token.room_key,
), ),
), ),

View file

@ -1367,8 +1367,16 @@ class EventCreationHandler:
else: else:
try: try:
validate_event_for_room_version(event) validate_event_for_room_version(event)
# If we are persisting a batch of events, the event(s) needed to auth the
# current event may be part of the batch and will not be in the DB yet
event_id_to_event = {e.event_id: e for e, _ in events_and_context}
batched_auth_events = {}
for event_id in event.auth_event_ids():
auth_event = event_id_to_event.get(event_id)
if auth_event:
batched_auth_events[event_id] = auth_event
await self._event_auth_handler.check_auth_rules_from_context( await self._event_auth_handler.check_auth_rules_from_context(
event, context event, batched_auth_events
) )
except AuthError as err: except AuthError as err:
logger.warning("Denying new event %r because %s", event, err) logger.warning("Denying new event %r because %s", event, err)
@ -1398,7 +1406,7 @@ class EventCreationHandler:
dont_notify=dont_notify, dont_notify=dont_notify,
), ),
run_in_background( run_in_background(
self.cache_joined_hosts_for_event, event, context self.cache_joined_hosts_for_events, events_and_context
).addErrback( ).addErrback(
log_failure, "cache_joined_hosts_for_event failed" log_failure, "cache_joined_hosts_for_event failed"
), ),
@ -1502,62 +1510,65 @@ class EventCreationHandler:
await self.store.remove_push_actions_from_staging(event.event_id) await self.store.remove_push_actions_from_staging(event.event_id)
raise raise
async def cache_joined_hosts_for_event( async def cache_joined_hosts_for_events(
self, event: EventBase, context: EventContext self, events_and_context: List[Tuple[EventBase, EventContext]]
) -> None: ) -> None:
"""Precalculate the joined hosts at the event, when using Redis, so that """Precalculate the joined hosts at each of the given events, when using Redis, so that
external federation senders don't have to recalculate it themselves. external federation senders don't have to recalculate it themselves.
""" """
if not self._external_cache.is_enabled(): for event, _ in events_and_context:
return if not self._external_cache.is_enabled():
# If external cache is enabled we should always have this.
assert self._external_cache_joined_hosts_updates is not None
# We actually store two mappings, event ID -> prev state group,
# state group -> joined hosts, which is much more space efficient
# than event ID -> joined hosts.
#
# Note: We have to cache event ID -> prev state group, as we don't
# store that in the DB.
#
# Note: We set the state group -> joined hosts cache if it hasn't been
# set for a while, so that the expiry time is reset.
state_entry = await self.state.resolve_state_groups_for_events(
event.room_id, event_ids=event.prev_event_ids()
)
if state_entry.state_group:
await self._external_cache.set(
"event_to_prev_state_group",
event.event_id,
state_entry.state_group,
expiry_ms=60 * 60 * 1000,
)
if state_entry.state_group in self._external_cache_joined_hosts_updates:
return return
state = await state_entry.get_state( # If external cache is enabled we should always have this.
self._storage_controllers.state, StateFilter.all() assert self._external_cache_joined_hosts_updates is not None
# We actually store two mappings, event ID -> prev state group,
# state group -> joined hosts, which is much more space efficient
# than event ID -> joined hosts.
#
# Note: We have to cache event ID -> prev state group, as we don't
# store that in the DB.
#
# Note: We set the state group -> joined hosts cache if it hasn't been
# set for a while, so that the expiry time is reset.
state_entry = await self.state.resolve_state_groups_for_events(
event.room_id, event_ids=event.prev_event_ids()
) )
with opentracing.start_active_span("get_joined_hosts"):
joined_hosts = await self.store.get_joined_hosts( if state_entry.state_group:
event.room_id, state, state_entry await self._external_cache.set(
"event_to_prev_state_group",
event.event_id,
state_entry.state_group,
expiry_ms=60 * 60 * 1000,
) )
# Note that the expiry times must be larger than the expiry time in if state_entry.state_group in self._external_cache_joined_hosts_updates:
# _external_cache_joined_hosts_updates. return
await self._external_cache.set(
"get_joined_hosts",
str(state_entry.state_group),
list(joined_hosts),
expiry_ms=60 * 60 * 1000,
)
self._external_cache_joined_hosts_updates[state_entry.state_group] = None state = await state_entry.get_state(
self._storage_controllers.state, StateFilter.all()
)
with opentracing.start_active_span("get_joined_hosts"):
joined_hosts = await self.store.get_joined_hosts(
event.room_id, state, state_entry
)
# Note that the expiry times must be larger than the expiry time in
# _external_cache_joined_hosts_updates.
await self._external_cache.set(
"get_joined_hosts",
str(state_entry.state_group),
list(joined_hosts),
expiry_ms=60 * 60 * 1000,
)
self._external_cache_joined_hosts_updates[
state_entry.state_group
] = None
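The space saving described in the comments above comes from indirecting through the state group. A toy illustration, with plain dicts standing in for the external Redis cache (names invented):

from typing import Dict, List, Optional

event_to_prev_state_group: Dict[str, int] = {}
state_group_to_joined_hosts: Dict[int, List[str]] = {}

def cache_hosts(event_id: str, state_group: int, joined_hosts: List[str]) -> None:
    # Many events share a state group, so the (large) host list is stored once.
    event_to_prev_state_group[event_id] = state_group
    state_group_to_joined_hosts[state_group] = joined_hosts

def lookup_hosts(event_id: str) -> Optional[List[str]]:
    state_group = event_to_prev_state_group.get(event_id)
    if state_group is None:
        return None
    return state_group_to_joined_hosts.get(state_group)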
async def _validate_canonical_alias( async def _validate_canonical_alias(
self, self,
@ -1885,6 +1896,7 @@ class EventCreationHandler:
events_and_context, backfilled=backfilled events_and_context, backfilled=backfilled
) )
events_and_pos = []
for event in persisted_events: for event in persisted_events:
if self._ephemeral_events_enabled: if self._ephemeral_events_enabled:
# If there's an expiry timestamp on the event, schedule its expiry. # If there's an expiry timestamp on the event, schedule its expiry.
@ -1893,27 +1905,25 @@ class EventCreationHandler:
stream_ordering = event.internal_metadata.stream_ordering stream_ordering = event.internal_metadata.stream_ordering
assert stream_ordering is not None assert stream_ordering is not None
pos = PersistedEventPosition(self._instance_name, stream_ordering) pos = PersistedEventPosition(self._instance_name, stream_ordering)
events_and_pos.append((event, pos))
async def _notify() -> None: if not dont_notify and event.type == EventTypes.Message:
try: # We don't want to block sending messages on any presence code. This
await self.notifier.on_new_room_event( # matters as sometimes presence code can take a while.
event, pos, max_stream_token, extra_users=extra_users run_in_background(self._bump_active_time, requester.user)
)
except Exception:
logger.exception(
"Error notifying about new room event %s",
event.event_id,
)
if not dont_notify: async def _notify() -> None:
# Skip notifying clients, this is used for Beeper's custom try:
# batch sending of non-historical messages. await self.notifier.on_new_room_events(
run_in_background(_notify) events_and_pos, max_stream_token, extra_users=extra_users
)
except Exception:
logger.exception("Error notifying about new room events")
if event.type == EventTypes.Message: if not dont_notify:
# We don't want to block sending messages on any presence code. This # Skip notifying clients, this is used for Beeper's custom
# matters as sometimes presence code can take a while. # batch sending of non-historical messages.
run_in_background(self._bump_active_time, requester.user) run_in_background(_notify)
return persisted_events[-1] return persisted_events[-1]

View file

@ -458,11 +458,6 @@ class PaginationHandler:
# `/messages` should still work with live tokens when manually provided. # `/messages` should still work with live tokens when manually provided.
assert from_token.room_key.topological is not None assert from_token.room_key.topological is not None
if pagin_config.limit is None:
# This shouldn't happen as we've set a default limit before this
# gets called.
raise Exception("limit not set")
room_token = from_token.room_key room_token = from_token.room_key
async with self.pagination_lock.read(room_id): async with self.pagination_lock.read(room_id):

View file

@ -1596,7 +1596,9 @@ class PresenceEventSource(EventSource[int, UserPresenceState]):
self, self,
user: UserID, user: UserID,
from_key: Optional[int], from_key: Optional[int],
limit: Optional[int] = None, # Having a default limit doesn't match the EventSource API, but some
# callers do not provide it. It is unused in this class.
limit: int = 0,
room_ids: Optional[Collection[str]] = None, room_ids: Optional[Collection[str]] = None,
is_guest: bool = False, is_guest: bool = False,
explicit_room_id: Optional[str] = None, explicit_room_id: Optional[str] = None,

View file

@ -63,8 +63,6 @@ class ReceiptsHandler:
self.clock = self.hs.get_clock() self.clock = self.hs.get_clock()
self.state = hs.get_state_handler() self.state = hs.get_state_handler()
self._msc3771_enabled = hs.config.experimental.msc3771_enabled
async def _received_remote_receipt(self, origin: str, content: JsonDict) -> None: async def _received_remote_receipt(self, origin: str, content: JsonDict) -> None:
"""Called when we receive an EDU of type m.receipt from a remote HS.""" """Called when we receive an EDU of type m.receipt from a remote HS."""
receipts = [] receipts = []
@ -96,11 +94,10 @@ class ReceiptsHandler:
# Check if these receipts apply to a thread. # Check if these receipts apply to a thread.
thread_id = None thread_id = None
data = user_values.get("data", {}) data = user_values.get("data", {})
if self._msc3771_enabled and isinstance(data, dict): thread_id = data.get("thread_id")
thread_id = data.get("thread_id") # If the thread ID is invalid, consider it missing.
# If the thread ID is invalid, consider it missing. if not isinstance(thread_id, str):
if not isinstance(thread_id, str): thread_id = None
thread_id = None
receipts.append( receipts.append(
ReadReceipt( ReadReceipt(
@ -261,7 +258,7 @@ class ReceiptEventSource(EventSource[int, JsonDict]):
self, self,
user: UserID, user: UserID,
from_key: int, from_key: int,
limit: Optional[int], limit: int,
room_ids: Iterable[str], room_ids: Iterable[str],
is_guest: bool, is_guest: bool,
explicit_room_id: Optional[str] = None, explicit_room_id: Optional[str] = None,

View file

@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import enum
import logging import logging
from typing import TYPE_CHECKING, Dict, FrozenSet, Iterable, List, Optional, Tuple from typing import TYPE_CHECKING, Dict, FrozenSet, Iterable, List, Optional, Tuple
@ -20,7 +21,8 @@ from synapse.api.constants import RelationTypes
from synapse.api.errors import SynapseError from synapse.api.errors import SynapseError
from synapse.events import EventBase, relation_from_event from synapse.events import EventBase, relation_from_event
from synapse.logging.opentracing import trace from synapse.logging.opentracing import trace
from synapse.storage.databases.main.relations import _RelatedEvent from synapse.storage.databases.main.relations import ThreadsNextBatch, _RelatedEvent
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, Requester, StreamToken, UserID from synapse.types import JsonDict, Requester, StreamToken, UserID
from synapse.visibility import filter_events_for_client from synapse.visibility import filter_events_for_client
@ -31,6 +33,13 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class ThreadsListInclude(str, enum.Enum):
"""Valid values for the 'include' flag of /threads."""
all = "all"
participated = "participated"
@attr.s(slots=True, frozen=True, auto_attribs=True) @attr.s(slots=True, frozen=True, auto_attribs=True)
class _ThreadAggregation: class _ThreadAggregation:
# The latest event in the thread. # The latest event in the thread.
@ -72,13 +81,10 @@ class RelationsHandler:
requester: Requester, requester: Requester,
event_id: str, event_id: str,
room_id: str, room_id: str,
pagin_config: PaginationConfig,
include_original_event: bool,
relation_type: Optional[str] = None, relation_type: Optional[str] = None,
event_type: Optional[str] = None, event_type: Optional[str] = None,
limit: int = 5,
direction: str = "b",
from_token: Optional[StreamToken] = None,
to_token: Optional[StreamToken] = None,
include_original_event: bool = False,
) -> JsonDict: ) -> JsonDict:
"""Get related events of a event, ordered by topological ordering. """Get related events of a event, ordered by topological ordering.
@ -88,14 +94,10 @@ class RelationsHandler:
requester: The user requesting the relations. requester: The user requesting the relations.
event_id: Fetch events that relate to this event ID. event_id: Fetch events that relate to this event ID.
room_id: The room the event belongs to. room_id: The room the event belongs to.
pagin_config: The pagination config rules to apply, if any.
include_original_event: Whether to include the parent event.
relation_type: Only fetch events with this relation type, if given. relation_type: Only fetch events with this relation type, if given.
event_type: Only fetch events with this event type, if given. event_type: Only fetch events with this event type, if given.
limit: Only fetch the most recent `limit` events.
direction: Whether to fetch the most recent first (`"b"`) or the
oldest first (`"f"`).
from_token: Fetch rows from the given token, or from the start if None.
to_token: Fetch rows up to the given token, or up to the end if None.
include_original_event: Whether to include the parent event.
Returns: Returns:
The pagination chunk. The pagination chunk.
@ -123,10 +125,10 @@ class RelationsHandler:
room_id=room_id, room_id=room_id,
relation_type=relation_type, relation_type=relation_type,
event_type=event_type, event_type=event_type,
limit=limit, limit=pagin_config.limit,
direction=direction, direction=pagin_config.direction,
from_token=from_token, from_token=pagin_config.from_token,
to_token=to_token, to_token=pagin_config.to_token,
) )
events = await self._main_store.get_events_as_list( events = await self._main_store.get_events_as_list(
@ -162,8 +164,10 @@ class RelationsHandler:
if next_token: if next_token:
return_value["next_batch"] = await next_token.to_string(self._main_store) return_value["next_batch"] = await next_token.to_string(self._main_store)
if from_token: if pagin_config.from_token:
return_value["prev_batch"] = await from_token.to_string(self._main_store) return_value["prev_batch"] = await pagin_config.from_token.to_string(
self._main_store
)
return return_value return return_value
@ -483,3 +487,79 @@ class RelationsHandler:
results.setdefault(event_id, BundledAggregations()).replace = edit results.setdefault(event_id, BundledAggregations()).replace = edit
return results return results
async def get_threads(
self,
requester: Requester,
room_id: str,
include: ThreadsListInclude,
limit: int = 5,
from_token: Optional[ThreadsNextBatch] = None,
) -> JsonDict:
"""Get related events of a event, ordered by topological ordering.
Args:
requester: The user requesting the relations.
room_id: The room the event belongs to.
include: One of "all" or "participated" to indicate which threads should
be returned.
limit: Only fetch the most recent `limit` events.
from_token: Fetch rows from the given token, or from the start if None.
Returns:
The pagination chunk.
"""
user_id = requester.user.to_string()
# TODO Properly handle a user leaving a room.
(_, member_event_id) = await self._auth.check_user_in_room_or_world_readable(
room_id, requester, allow_departed_users=True
)
# Note that ignored users are not passed into get_relations_for_event
# below. Ignored users are handled in filter_events_for_client (and by
# not passing them in here we should get a better cache hit rate).
thread_roots, next_batch = await self._main_store.get_threads(
room_id=room_id, limit=limit, from_token=from_token
)
events = await self._main_store.get_events_as_list(thread_roots)
if include == ThreadsListInclude.participated:
# Pre-seed thread participation with whether the requester sent the event.
participated = {event.event_id: event.sender == user_id for event in events}
# For events the requester did not send, check the database for whether
# the requester sent a threaded reply.
participated.update(
await self._main_store.get_threads_participated(
[eid for eid, p in participated.items() if not p],
user_id,
)
)
# Limit the returned threads to those the user has participated in.
events = [event for event in events if participated[event.event_id]]
events = await filter_events_for_client(
self._storage_controllers,
user_id,
events,
is_peeking=(member_event_id is None),
)
aggregations = await self.get_bundled_aggregations(
events, requester.user.to_string()
)
now = self._clock.time_msec()
serialized_events = self._event_serializer.serialize_events(
events, now, bundle_aggregations=aggregations
)
return_value: JsonDict = {"chunk": serialized_events}
if next_batch:
return_value["next_batch"] = str(next_batch)
return return_value
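For reference, the pagination chunk assembled above might look roughly like this (shape only; event IDs and batch token invented):

{
    "chunk": [
        # Serialized thread-root events, each with bundled thread aggregations.
        {"event_id": "$threadroot:example.org", "type": "m.room.message"},
    ],
    "next_batch": "1234_5678",
}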

View file

@ -229,9 +229,7 @@ class RoomCreationHandler:
}, },
) )
validate_event_for_room_version(tombstone_event) validate_event_for_room_version(tombstone_event)
await self._event_auth_handler.check_auth_rules_from_context( await self._event_auth_handler.check_auth_rules_from_context(tombstone_event)
tombstone_event, tombstone_context
)
# Upgrade the room # Upgrade the room
# #
@ -1658,7 +1656,7 @@ class RoomEventSource(EventSource[RoomStreamToken, EventBase]):
self, self,
user: UserID, user: UserID,
from_key: RoomStreamToken, from_key: RoomStreamToken,
limit: Optional[int], limit: int,
room_ids: Collection[str], room_ids: Collection[str],
is_guest: bool, is_guest: bool,
explicit_room_id: Optional[str] = None, explicit_room_id: Optional[str] = None,

View file

@ -874,7 +874,7 @@ class SsoHandler:
) )
async def handle_terms_accepted( async def handle_terms_accepted(
self, request: Request, session_id: str, terms_version: str self, request: SynapseRequest, session_id: str, terms_version: str
) -> None: ) -> None:
"""Handle a request to the new-user 'consent' endpoint """Handle a request to the new-user 'consent' endpoint

View file

@ -40,7 +40,7 @@ from synapse.handlers.relations import BundledAggregations
from synapse.logging.context import current_context from synapse.logging.context import current_context
from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span
from synapse.push.clientformat import format_push_rules_for_user from synapse.push.clientformat import format_push_rules_for_user
from synapse.storage.databases.main.event_push_actions import NotifCounts from synapse.storage.databases.main.event_push_actions import RoomNotifCounts
from synapse.storage.roommember import MemberSummary from synapse.storage.roommember import MemberSummary
from synapse.storage.state import StateFilter from synapse.storage.state import StateFilter
from synapse.types import ( from synapse.types import (
@ -128,6 +128,7 @@ class JoinedSyncResult:
ephemeral: List[JsonDict] ephemeral: List[JsonDict]
account_data: List[JsonDict] account_data: List[JsonDict]
unread_notifications: JsonDict unread_notifications: JsonDict
unread_thread_notifications: JsonDict
summary: Optional[JsonDict] summary: Optional[JsonDict]
unread_count: int unread_count: int
@ -1287,7 +1288,7 @@ class SyncHandler:
async def unread_notifs_for_room_id( async def unread_notifs_for_room_id(
self, room_id: str, sync_config: SyncConfig self, room_id: str, sync_config: SyncConfig
) -> NotifCounts: ) -> RoomNotifCounts:
with Measure(self.clock, "unread_notifs_for_room_id"): with Measure(self.clock, "unread_notifs_for_room_id"):
return await self.store.get_unread_event_push_actions_by_room_for_user( return await self.store.get_unread_event_push_actions_by_room_for_user(
@ -1313,6 +1314,19 @@ class SyncHandler:
At the end, we transfer data from the `sync_result_builder` to a new `SyncResult` At the end, we transfer data from the `sync_result_builder` to a new `SyncResult`
instance to signify that the sync calculation is complete. instance to signify that the sync calculation is complete.
""" """
user_id = sync_config.user.to_string()
app_service = self.store.get_app_service_by_user_id(user_id)
if app_service:
# We no longer support AS users using /sync directly.
# See https://github.com/matrix-org/matrix-doc/issues/1144
raise NotImplementedError()
# Note: we get the user's room list *before* we get the current token, which
# avoids checking back in history if rooms are joined after the token is fetched.
token_before_rooms = self.event_sources.get_current_token()
mutable_joined_room_ids = set(await self.store.get_rooms_for_user(user_id))
# NB: The now_token gets changed by some of the generate_sync_* methods, # NB: The now_token gets changed by some of the generate_sync_* methods,
# this is due to some of the underlying streams not supporting the ability # this is due to some of the underlying streams not supporting the ability
# to query up to a given point. # to query up to a given point.
@ -1320,6 +1334,57 @@ class SyncHandler:
now_token = self.event_sources.get_current_token() now_token = self.event_sources.get_current_token()
log_kv({"now_token": now_token}) log_kv({"now_token": now_token})
# Since we fetched the user's room list before the token, there's a small window
# during which membership events may have been persisted, so we fetch these now
# and modify the joined room list for any changes between the get_rooms_for_user
# call and the get_current_token call.
membership_change_events = []
if since_token:
membership_change_events = await self.store.get_membership_changes_for_user(
user_id, since_token.room_key, now_token.room_key, self.rooms_to_exclude
)
mem_last_change_by_room_id: Dict[str, EventBase] = {}
for event in membership_change_events:
mem_last_change_by_room_id[event.room_id] = event
# For the latest membership event in each room found, add/remove the room ID
# from the joined room list accordingly. In this case we only care if the
# latest change is JOIN.
for room_id, event in mem_last_change_by_room_id.items():
assert event.internal_metadata.stream_ordering
if (
event.internal_metadata.stream_ordering
< token_before_rooms.room_key.stream
):
continue
logger.info(
"User membership change between getting rooms and current token: %s %s %s",
user_id,
event.membership,
room_id,
)
# User joined a room - we have to then check the room state to ensure we
# respect any bans if there's a race between the join and ban events.
if event.membership == Membership.JOIN:
user_ids_in_room = await self.store.get_users_in_room(room_id)
if user_id in user_ids_in_room:
mutable_joined_room_ids.add(room_id)
# The user left the room, or left and was re-invited but not joined yet
else:
mutable_joined_room_ids.discard(room_id)
# Now we have our list of joined room IDs, exclude as configured and freeze
joined_room_ids = frozenset(
(
room_id
for room_id in mutable_joined_room_ids
if room_id not in self.rooms_to_exclude
)
)
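In outline, the snapshot-then-reconcile pattern introduced above looks like this; a simplified sketch reusing the store method names from the diff (the real code also checks stream orderings and re-checks room membership on joins, as shown):

async def joined_rooms_at_token(store, event_sources, user_id: str, since_key):
    # 1. Snapshot the joined rooms *before* taking the stream token.
    joined = set(await store.get_rooms_for_user(user_id))
    token = event_sources.get_current_token()
    # 2. Replay membership changes that raced between the two calls.
    changes = await store.get_membership_changes_for_user(
        user_id, since_key, token.room_key
    )
    for change in changes:
        if change.membership == "join":
            joined.add(change.room_id)
        else:
            joined.discard(change.room_id)
    return frozenset(joined)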
logger.debug( logger.debug(
"Calculating sync response for %r between %s and %s", "Calculating sync response for %r between %s and %s",
sync_config.user, sync_config.user,
@ -1327,22 +1392,13 @@ class SyncHandler:
now_token, now_token,
) )
user_id = sync_config.user.to_string()
app_service = self.store.get_app_service_by_user_id(user_id)
if app_service:
# We no longer support AS users using /sync directly.
# See https://github.com/matrix-org/matrix-doc/issues/1144
raise NotImplementedError()
else:
joined_room_ids = await self.get_rooms_for_user_at(
user_id, now_token.room_key
)
sync_result_builder = SyncResultBuilder( sync_result_builder = SyncResultBuilder(
sync_config, sync_config,
full_state, full_state,
since_token=since_token, since_token=since_token,
now_token=now_token, now_token=now_token,
joined_room_ids=joined_room_ids, joined_room_ids=joined_room_ids,
membership_change_events=membership_change_events,
) )
logger.debug("Fetching account data") logger.debug("Fetching account data")
@ -1823,19 +1879,12 @@ class SyncHandler:
Does not modify the `sync_result_builder`. Does not modify the `sync_result_builder`.
""" """
user_id = sync_result_builder.sync_config.user.to_string()
since_token = sync_result_builder.since_token since_token = sync_result_builder.since_token
now_token = sync_result_builder.now_token membership_change_events = sync_result_builder.membership_change_events
assert since_token assert since_token
# Get a list of membership change events that have happened to the user if membership_change_events:
# requesting the sync.
membership_changes = await self.store.get_membership_changes_for_user(
user_id, since_token.room_key, now_token.room_key
)
if membership_changes:
return True return True
stream_id = since_token.room_key.stream stream_id = since_token.room_key.stream
@ -1874,16 +1923,10 @@ class SyncHandler:
since_token = sync_result_builder.since_token since_token = sync_result_builder.since_token
now_token = sync_result_builder.now_token now_token = sync_result_builder.now_token
sync_config = sync_result_builder.sync_config sync_config = sync_result_builder.sync_config
membership_change_events = sync_result_builder.membership_change_events
assert since_token assert since_token
# TODO: we've already called this function and ran this query in
# _have_rooms_changed. We could keep the results in memory to avoid a
# second query, at the cost of more complicated source code.
membership_change_events = await self.store.get_membership_changes_for_user(
user_id, since_token.room_key, now_token.room_key, self.rooms_to_exclude
)
mem_change_events_by_room_id: Dict[str, List[EventBase]] = {} mem_change_events_by_room_id: Dict[str, List[EventBase]] = {}
for event in membership_change_events: for event in membership_change_events:
mem_change_events_by_room_id.setdefault(event.room_id, []).append(event) mem_change_events_by_room_id.setdefault(event.room_id, []).append(event)
@ -2352,6 +2395,7 @@ class SyncHandler:
ephemeral=ephemeral, ephemeral=ephemeral,
account_data=account_data_events, account_data=account_data_events,
unread_notifications=unread_notifications, unread_notifications=unread_notifications,
unread_thread_notifications={},
summary=summary, summary=summary,
unread_count=0, unread_count=0,
) )
@ -2359,10 +2403,33 @@ class SyncHandler:
if room_sync or always_include: if room_sync or always_include:
notifs = await self.unread_notifs_for_room_id(room_id, sync_config) notifs = await self.unread_notifs_for_room_id(room_id, sync_config)
unread_notifications["notification_count"] = notifs.notify_count # Notifications for the main timeline.
unread_notifications["highlight_count"] = notifs.highlight_count notify_count = notifs.main_timeline.notify_count
highlight_count = notifs.main_timeline.highlight_count
unread_count = notifs.main_timeline.unread_count
room_sync.unread_count = notifs.unread_count # Check the sync configuration.
if sync_config.filter_collection.unread_thread_notifications():
# And add info for each thread.
room_sync.unread_thread_notifications = {
thread_id: {
"notification_count": thread_notifs.notify_count,
"highlight_count": thread_notifs.highlight_count,
}
for thread_id, thread_notifs in notifs.threads.items()
if thread_id is not None
}
else:
# Combine the unread counts for all threads and main timeline.
for thread_notifs in notifs.threads.values():
notify_count += thread_notifs.notify_count
highlight_count += thread_notifs.highlight_count
unread_count += thread_notifs.unread_count
unread_notifications["notification_count"] = notify_count
unread_notifications["highlight_count"] = highlight_count
room_sync.unread_count = unread_count
sync_result_builder.joined.append(room_sync) sync_result_builder.joined.append(room_sync)
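For reference, the per-room counts emitted above might serialize like this when the client's filter opts into threaded notifications (values invented; field names as in the diff / MSC3773):

{
    "unread_notifications": {"notification_count": 2, "highlight_count": 1},
    "unread_thread_notifications": {
        "$threadroot:example.org": {"notification_count": 3, "highlight_count": 0}
    },
}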
@ -2384,60 +2451,6 @@ class SyncHandler:
else: else:
raise Exception("Unrecognized rtype: %r", room_builder.rtype) raise Exception("Unrecognized rtype: %r", room_builder.rtype)
async def get_rooms_for_user_at(
self,
user_id: str,
room_key: RoomStreamToken,
) -> FrozenSet[str]:
"""Get set of joined rooms for a user at the given stream ordering.
The stream ordering *must* be recent, otherwise this may throw an
exception if older than a month. (This function is called with the
current token, which should be perfectly fine).
Args:
user_id
stream_ordering
ReturnValue:
Set of room_ids the user is in at given stream_ordering.
"""
joined_rooms = await self.store.get_rooms_for_user_with_stream_ordering(user_id)
joined_room_ids = set()
# We need to check that the stream ordering of the join for each room
# is before the stream_ordering asked for. This might not be the case
# if the user joins a room between us getting the current token and
# calling `get_rooms_for_user_with_stream_ordering`.
# If the membership's stream ordering is after the given stream
# ordering, we need to go and work out if the user was in the room
# before.
# We also need to check whether the room should be excluded from sync
# responses as per the homeserver config.
for joined_room in joined_rooms:
if joined_room.room_id in self.rooms_to_exclude:
continue
if not joined_room.event_pos.persisted_after(room_key):
joined_room_ids.add(joined_room.room_id)
continue
logger.info("User joined room after current token: %s", joined_room.room_id)
extrems = (
await self.store.get_forward_extremities_for_room_at_stream_ordering(
joined_room.room_id, joined_room.event_pos.stream
)
)
user_ids_in_room = await self.state.get_current_user_ids_in_room(
joined_room.room_id, extrems
)
if user_id in user_ids_in_room:
joined_room_ids.add(joined_room.room_id)
return frozenset(joined_room_ids)
def _action_has_highlight(actions: List[JsonDict]) -> bool: def _action_has_highlight(actions: List[JsonDict]) -> bool:
for action in actions: for action in actions:
@ -2534,6 +2547,7 @@ class SyncResultBuilder:
since_token: Optional[StreamToken] since_token: Optional[StreamToken]
now_token: StreamToken now_token: StreamToken
joined_room_ids: FrozenSet[str] joined_room_ids: FrozenSet[str]
membership_change_events: List[EventBase]
presence: List[UserPresenceState] = attr.Factory(list) presence: List[UserPresenceState] = attr.Factory(list)
account_data: List[JsonDict] = attr.Factory(list) account_data: List[JsonDict] = attr.Factory(list)

View file

@ -513,7 +513,7 @@ class TypingNotificationEventSource(EventSource[int, JsonDict]):
self, self,
user: UserID, user: UserID,
from_key: int, from_key: int,
limit: Optional[int], limit: int,
room_ids: Iterable[str], room_ids: Iterable[str],
is_guest: bool, is_guest: bool,
explicit_room_id: Optional[str] = None, explicit_room_id: Optional[str] = None,

View file

@ -19,6 +19,7 @@ import logging
import types import types
import urllib import urllib
from http import HTTPStatus from http import HTTPStatus
from http.client import FOUND
from inspect import isawaitable from inspect import isawaitable
from typing import ( from typing import (
TYPE_CHECKING, TYPE_CHECKING,
@ -339,7 +340,7 @@ class _AsyncResource(resource.Resource, metaclass=abc.ABCMeta):
return callback_return return callback_return
_unrecognised_request_handler(request) return _unrecognised_request_handler(request)
@abc.abstractmethod @abc.abstractmethod
def _send_response( def _send_response(
@ -598,7 +599,7 @@ class RootRedirect(resource.Resource):
class OptionsResource(resource.Resource): class OptionsResource(resource.Resource):
"""Responds to OPTION requests for itself and all children.""" """Responds to OPTION requests for itself and all children."""
def render_OPTIONS(self, request: Request) -> bytes: def render_OPTIONS(self, request: SynapseRequest) -> bytes:
request.setResponseCode(204) request.setResponseCode(204)
request.setHeader(b"Content-Length", b"0") request.setHeader(b"Content-Length", b"0")
@ -763,7 +764,7 @@ def respond_with_json(
def respond_with_json_bytes( def respond_with_json_bytes(
request: Request, request: SynapseRequest,
code: int, code: int,
json_bytes: bytes, json_bytes: bytes,
send_cors: bool = False, send_cors: bool = False,
@ -859,7 +860,7 @@ def _write_bytes_to_request(request: Request, bytes_to_write: bytes) -> None:
_ByteProducer(request, bytes_generator) _ByteProducer(request, bytes_generator)
def set_cors_headers(request: Request) -> None: def set_cors_headers(request: SynapseRequest) -> None:
"""Set the CORS headers so that javascript running in a web browsers can """Set the CORS headers so that javascript running in a web browsers can
use this API use this API
@ -870,10 +871,20 @@ def set_cors_headers(request: Request) -> None:
request.setHeader( request.setHeader(
b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS" b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS"
) )
request.setHeader( if request.experimental_cors_msc3886:
b"Access-Control-Allow-Headers", request.setHeader(
b"X-Requested-With, Content-Type, Authorization, Date", b"Access-Control-Allow-Headers",
) b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match",
)
request.setHeader(
b"Access-Control-Expose-Headers",
b"ETag, Location, X-Max-Bytes",
)
else:
request.setHeader(
b"Access-Control-Allow-Headers",
b"X-Requested-With, Content-Type, Authorization, Date",
)
def set_corp_headers(request: Request) -> None: def set_corp_headers(request: Request) -> None:
@ -942,10 +953,25 @@ def set_clickjacking_protection_headers(request: Request) -> None:
request.setHeader(b"Content-Security-Policy", b"frame-ancestors 'none';") request.setHeader(b"Content-Security-Policy", b"frame-ancestors 'none';")
def respond_with_redirect(request: Request, url: bytes) -> None: def respond_with_redirect(
"""Write a 302 response to the request, if it is still alive.""" request: SynapseRequest, url: bytes, statusCode: int = FOUND, cors: bool = False
) -> None:
"""
Write a 302 (or other specified status code) response to the request, if it is still alive.
Args:
request: The http request to respond to.
url: The URL to redirect to.
statusCode: The HTTP status code to use for the redirect (defaults to 302).
cors: Whether to set CORS headers on the response.
"""
logger.debug("Redirect to %s", url.decode("utf-8")) logger.debug("Redirect to %s", url.decode("utf-8"))
request.redirect(url)
if cors:
set_cors_headers(request)
request.setResponseCode(statusCode)
request.setHeader(b"location", url)
finish_request(request) finish_request(request)
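As a usage illustration, a hedged sketch of calling the extended helper from a handler (the URL and the handler itself are hypothetical; TEMPORARY_REDIRECT is the stdlib constant for 307):

    from http.client import TEMPORARY_REDIRECT

    async def on_POST(self, request: SynapseRequest) -> None:
        # Issue a CORS-friendly 307 instead of the default 302 so that the
        # request method is preserved across the redirect.
        respond_with_redirect(
            request,
            b"https://example.com/rendezvous",
            statusCode=TEMPORARY_REDIRECT,
            cors=True,
        )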

View file

@ -35,6 +35,7 @@ from typing_extensions import Literal
from twisted.web.server import Request from twisted.web.server import Request
from synapse.api.errors import Codes, SynapseError from synapse.api.errors import Codes, SynapseError
from synapse.http import redact_uri
from synapse.http.server import HttpServer from synapse.http.server import HttpServer
from synapse.types import JsonDict, RoomAlias, RoomID from synapse.types import JsonDict, RoomAlias, RoomID
from synapse.util import json_decoder from synapse.util import json_decoder
@ -664,7 +665,13 @@ def parse_json_value_from_request(
try: try:
content = json_decoder.decode(content_bytes.decode("utf-8")) content = json_decoder.decode(content_bytes.decode("utf-8"))
except Exception as e: except Exception as e:
logger.warning("Unable to parse JSON: %s (%s)", e, content_bytes) logger.warning(
"Unable to parse JSON from %s %s response: %s (%s)",
request.method.decode("ascii", errors="replace"),
redact_uri(request.uri.decode("ascii", errors="replace")),
e,
content_bytes,
)
raise SynapseError( raise SynapseError(
HTTPStatus.BAD_REQUEST, "Content not JSON.", errcode=Codes.NOT_JSON HTTPStatus.BAD_REQUEST, "Content not JSON.", errcode=Codes.NOT_JSON
) )
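A rough sketch of what the redaction buys in the log line (the exact redacted form is an assumption, not taken from this diff):

    from synapse.http import redact_uri

    # Hypothetical request URI; the access token must not reach the logs.
    uri = "/_matrix/client/v3/sync?access_token=syt_abc123&timeout=30000"
    logger.warning("Unable to parse JSON from GET %s", redact_uri(uri))
    # The access_token value is replaced with a placeholder such as <redacted>.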

View file

@ -82,6 +82,7 @@ class SynapseRequest(Request):
self.reactor = site.reactor self.reactor = site.reactor
self._channel = channel # this is used by the tests self._channel = channel # this is used by the tests
self.start_time = 0.0 self.start_time = 0.0
self.experimental_cors_msc3886 = site.experimental_cors_msc3886
# The requester, if authenticated. For federation requests this is the # The requester, if authenticated. For federation requests this is the
# server name, for client requests this is the Requester object. # server name, for client requests this is the Requester object.
@ -622,6 +623,8 @@ class SynapseSite(Site):
request_id_header = config.http_options.request_id_header request_id_header = config.http_options.request_id_header
self.experimental_cors_msc3886 = config.http_options.experimental_cors_msc3886
def request_factory(channel: HTTPChannel, queued: bool) -> Request: def request_factory(channel: HTTPChannel, queued: bool) -> Request:
return request_class( return request_class(
channel, channel,

View file

@ -20,7 +20,7 @@ Due to the renaming of metrics in prometheus_client 0.4.0, this customised
vendoring of the code will emit both the old versions that Synapse dashboards vendoring of the code will emit both the old versions that Synapse dashboards
expect, and the newer "best practice" version of the up-to-date official client. expect, and the newer "best practice" version of the up-to-date official client.
""" """
import logging
import math import math
import threading import threading
from http.server import BaseHTTPRequestHandler, HTTPServer from http.server import BaseHTTPRequestHandler, HTTPServer
@ -34,6 +34,7 @@ from prometheus_client.core import Sample
from twisted.web.resource import Resource from twisted.web.resource import Resource
from twisted.web.server import Request from twisted.web.server import Request
logger = logging.getLogger(__name__)
CONTENT_TYPE_LATEST = "text/plain; version=0.0.4; charset=utf-8" CONTENT_TYPE_LATEST = "text/plain; version=0.0.4; charset=utf-8"
@ -219,11 +220,16 @@ class MetricsHandler(BaseHTTPRequestHandler):
except Exception: except Exception:
self.send_error(500, "error generating metric output") self.send_error(500, "error generating metric output")
raise raise
self.send_response(200) try:
self.send_header("Content-Type", CONTENT_TYPE_LATEST) self.send_response(200)
self.send_header("Content-Length", str(len(output))) self.send_header("Content-Type", CONTENT_TYPE_LATEST)
self.end_headers() self.send_header("Content-Length", str(len(output)))
self.wfile.write(output) self.end_headers()
self.wfile.write(output)
except BrokenPipeError as e:
logger.warning(
"BrokenPipeError when serving metrics (%s). Did Prometheus restart?", e
)
def log_message(self, format: str, *args: Any) -> None: def log_message(self, format: str, *args: Any) -> None:
"""Log nothing.""" """Log nothing."""

View file

@ -294,35 +294,31 @@ class Notifier:
""" """
self._new_join_in_room_callbacks.append(cb) self._new_join_in_room_callbacks.append(cb)
async def on_new_room_event( async def on_new_room_events(
self, self,
event: EventBase, events_and_pos: List[Tuple[EventBase, PersistedEventPosition]],
event_pos: PersistedEventPosition,
max_room_stream_token: RoomStreamToken, max_room_stream_token: RoomStreamToken,
extra_users: Optional[Collection[UserID]] = None, extra_users: Optional[Collection[UserID]] = None,
) -> None: ) -> None:
"""Unwraps event and calls `on_new_room_event_args`.""" """Creates a _PendingRoomEventEntry for each of the listed events and calls
await self.on_new_room_event_args( notify_new_room_events with the results."""
event_pos=event_pos, event_entries = []
room_id=event.room_id, for event, pos in events_and_pos:
event_id=event.event_id, entry = self.create_pending_room_event_entry(
event_type=event.type, pos,
state_key=event.get("state_key"), extra_users,
membership=event.content.get("membership"), event.room_id,
max_room_stream_token=max_room_stream_token, event.type,
extra_users=extra_users or [], event.get("state_key"),
) event.content.get("membership"),
)
event_entries.append((entry, event.event_id))
await self.notify_new_room_events(event_entries, max_room_stream_token)
async def on_new_room_event_args( async def notify_new_room_events(
self, self,
room_id: str, event_entries: List[Tuple[_PendingRoomEventEntry, str]],
event_id: str,
event_type: str,
state_key: Optional[str],
membership: Optional[str],
event_pos: PersistedEventPosition,
max_room_stream_token: RoomStreamToken, max_room_stream_token: RoomStreamToken,
extra_users: Optional[Collection[UserID]] = None,
) -> None: ) -> None:
"""Used by handlers to inform the notifier something has happened """Used by handlers to inform the notifier something has happened
in the room, room event wise. in the room, room event wise.
@ -338,22 +334,33 @@ class Notifier:
until all previous events have been persisted before notifying until all previous events have been persisted before notifying
the client streams. the client streams.
""" """
self.pending_new_room_events.append( for event_entry, event_id in event_entries:
_PendingRoomEventEntry( self.pending_new_room_events.append(event_entry)
event_pos=event_pos, await self._third_party_rules.on_new_event(event_id)
extra_users=extra_users or [],
room_id=room_id,
type=event_type,
state_key=state_key,
membership=membership,
)
)
self._notify_pending_new_room_events(max_room_stream_token) self._notify_pending_new_room_events(max_room_stream_token)
await self._third_party_rules.on_new_event(event_id)
self.notify_replication() self.notify_replication()
def create_pending_room_event_entry(
self,
event_pos: PersistedEventPosition,
extra_users: Optional[Collection[UserID]],
room_id: str,
event_type: str,
state_key: Optional[str],
membership: Optional[str],
) -> _PendingRoomEventEntry:
"""Creates and returns a _PendingRoomEventEntry"""
return _PendingRoomEventEntry(
event_pos=event_pos,
extra_users=extra_users or [],
room_id=room_id,
type=event_type,
state_key=state_key,
membership=membership,
)
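A hedged sketch of the new batched call shape from a persister's point of view (the `persisted` collection is hypothetical):

    # One notifier call for a whole batch of persisted events, rather than
    # one on_new_room_event call per event.
    events_and_pos = [(event, pos) for event, pos in persisted]
    await notifier.on_new_room_events(
        events_and_pos,
        max_room_stream_token,
        extra_users=[],
    )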
def _notify_pending_new_room_events( def _notify_pending_new_room_events(
self, max_room_stream_token: RoomStreamToken self, max_room_stream_token: RoomStreamToken
) -> None: ) -> None:

View file

@ -13,32 +13,29 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import itertools
import logging import logging
from typing import ( from typing import (
TYPE_CHECKING, TYPE_CHECKING,
Any, Any,
Collection, Collection,
Dict, Dict,
Iterable,
List, List,
Mapping, Mapping,
Optional, Optional,
Set,
Tuple, Tuple,
Union, Union,
) )
from prometheus_client import Counter from prometheus_client import Counter
from synapse.api.constants import EventTypes, Membership, RelationTypes from synapse.api.constants import MAIN_TIMELINE, EventTypes, Membership, RelationTypes
from synapse.event_auth import auth_types_for_event, get_user_power_level from synapse.event_auth import auth_types_for_event, get_user_power_level
from synapse.events import EventBase, relation_from_event from synapse.events import EventBase, relation_from_event
from synapse.events.snapshot import EventContext from synapse.events.snapshot import EventContext
from synapse.state import POWER_KEY from synapse.state import POWER_KEY
from synapse.storage.databases.main.roommember import EventIdMembership from synapse.storage.databases.main.roommember import EventIdMembership
from synapse.storage.state import StateFilter from synapse.storage.state import StateFilter
from synapse.synapse_rust.push import FilteredPushRules, PushRule, PushRuleEvaluator from synapse.synapse_rust.push import FilteredPushRules, PushRuleEvaluator
from synapse.util.caches import register_cache from synapse.util.caches import register_cache
from synapse.util.metrics import measure_func from synapse.util.metrics import measure_func
from synapse.visibility import filter_event_for_clients_with_state from synapse.visibility import filter_event_for_clients_with_state
@ -117,9 +114,6 @@ class BulkPushRuleEvaluator:
resizable=False, resizable=False,
) )
# Whether MSC3772 is supported.
self._relations_match_enabled = self.hs.config.experimental.msc3772_enabled
async def _get_rules_for_event( async def _get_rules_for_event(
self, self,
event: EventBase, event: EventBase,
@ -200,51 +194,6 @@ class BulkPushRuleEvaluator:
return pl_event.content if pl_event else {}, sender_level return pl_event.content if pl_event else {}, sender_level
async def _get_mutual_relations(
self, parent_id: str, rules: Iterable[Tuple[PushRule, bool]]
) -> Dict[str, Set[Tuple[str, str]]]:
"""
Fetch event metadata for events which relate to the same event as the given event.
If the given event has no relation information, returns an empty dictionary.
Args:
parent_id: The event ID which is targeted by relations.
rules: The push rules which will be processed for this event.
Returns:
A dictionary of relation type to:
A set of tuples of:
The sender
The event type
"""
# If the experimental feature is not enabled, skip fetching relations.
if not self._relations_match_enabled:
return {}
# Pre-filter to figure out which relation types are interesting.
rel_types = set()
for rule, enabled in rules:
if not enabled:
continue
for condition in rule.conditions:
if condition["kind"] != "org.matrix.msc3772.relation_match":
continue
# rel_type is required.
rel_type = condition.get("rel_type")
if rel_type:
rel_types.add(rel_type)
# If no valid rules were found, no mutual relations.
if not rel_types:
return {}
# If any valid rules were found, fetch the mutual relations.
return await self.store.get_mutual_event_relations(parent_id, rel_types)
@measure_func("action_for_event_by_user") @measure_func("action_for_event_by_user")
async def action_for_event_by_user( async def action_for_event_by_user(
self, event: EventBase, context: EventContext self, event: EventBase, context: EventContext
@ -276,18 +225,18 @@ class BulkPushRuleEvaluator:
sender_power_level, sender_power_level,
) = await self._get_power_levels_and_sender_level(event, context) ) = await self._get_power_levels_and_sender_level(event, context)
# Find the event's thread ID.
relation = relation_from_event(event) relation = relation_from_event(event)
# If the event does not have a relation, then cannot have any mutual # If the event does not have a relation, then it cannot have a thread ID.
# relations or thread ID. thread_id = MAIN_TIMELINE
relations = {}
thread_id = "main"
if relation: if relation:
relations = await self._get_mutual_relations( # Recursively attempt to find the thread this event relates to.
relation.parent_id,
itertools.chain(*(r.rules() for r in rules_by_user.values())),
)
if relation.rel_type == RelationTypes.THREAD: if relation.rel_type == RelationTypes.THREAD:
thread_id = relation.parent_id thread_id = relation.parent_id
else:
# Since the event has not yet been persisted we check whether
# the parent is part of a thread.
thread_id = await self.store.get_thread_id(relation.parent_id)
# It's possible that old room versions have non-integer power levels (floats or # It's possible that old room versions have non-integer power levels (floats or
# strings). Workaround this by explicitly converting to int. # strings). Workaround this by explicitly converting to int.
@ -301,8 +250,6 @@ class BulkPushRuleEvaluator:
room_member_count, room_member_count,
sender_power_level, sender_power_level,
notification_levels, notification_levels,
relations,
self._relations_match_enabled,
) )
users = rules_by_user.keys() users = rules_by_user.keys()

View file

@ -39,7 +39,12 @@ async def get_badge_count(store: DataStore, user_id: str, group_by_room: bool) -
await concurrently_execute(get_room_unread_count, joins, 10) await concurrently_execute(get_room_unread_count, joins, 10)
for notifs in room_notifs: for notifs in room_notifs:
if notifs.notify_count == 0: # Combine the counts from all the threads.
notify_count = notifs.main_timeline.notify_count + sum(
n.notify_count for n in notifs.threads.values()
)
if notify_count == 0:
continue continue
if group_by_room: if group_by_room:
@ -47,7 +52,7 @@ async def get_badge_count(store: DataStore, user_id: str, group_by_room: bool) -
badge += 1 badge += 1
else: else:
# increment the badge count by the number of unread messages in the room # increment the badge count by the number of unread messages in the room
badge += notifs.notify_count badge += notify_count
return badge return badge
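Worked through with invented numbers: a room whose main timeline has 2 notifying events and whose two threads have 1 and 3 contributes 6 to the badge, or exactly 1 when grouping by room:

    group_by_room = True                      # as passed to get_badge_count
    main_notify_count = 2
    thread_notify_counts = {"$threadA": 1, "$threadB": 3}
    notify_count = main_notify_count + sum(thread_notify_counts.values())  # 6
    badge_increment = 1 if group_by_room else notify_count                 # 1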

View file

@ -210,15 +210,16 @@ class ReplicationDataHandler:
max_token = self.store.get_room_max_token() max_token = self.store.get_room_max_token()
event_pos = PersistedEventPosition(instance_name, token) event_pos = PersistedEventPosition(instance_name, token)
await self.notifier.on_new_room_event_args( event_entry = self.notifier.create_pending_room_event_entry(
event_pos=event_pos, event_pos,
max_room_stream_token=max_token, extra_users,
extra_users=extra_users, row.data.room_id,
room_id=row.data.room_id, row.data.type,
event_id=row.data.event_id, row.data.state_key,
event_type=row.data.type, row.data.membership,
state_key=row.data.state_key, )
membership=row.data.membership, await self.notifier.notify_new_room_events(
[(event_entry, row.data.event_id)], max_token
) )
# If this event is a join, make a note of it so we have an accurate # If this event is a join, make a note of it so we have an accurate

View file

@ -44,6 +44,7 @@ from synapse.rest.client import (
receipts, receipts,
register, register,
relations, relations,
rendezvous,
report_event, report_event,
room, room,
room_batch, room_batch,
@ -132,3 +133,4 @@ class ClientRestResource(JsonResource):
# unstable # unstable
mutual_rooms.register_servlets(hs, client_resource) mutual_rooms.register_servlets(hs, client_resource)
login_token_request.register_servlets(hs, client_resource) login_token_request.register_servlets(hs, client_resource)
rendezvous.register_servlets(hs, client_resource)

View file

@ -14,18 +14,21 @@
# limitations under the License. # limitations under the License.
import logging import logging
from typing import TYPE_CHECKING, Tuple from typing import TYPE_CHECKING, List, Optional, Tuple
from pydantic import Extra, StrictStr
from synapse.api import errors from synapse.api import errors
from synapse.api.errors import NotFoundError from synapse.api.errors import NotFoundError
from synapse.http.server import HttpServer from synapse.http.server import HttpServer
from synapse.http.servlet import ( from synapse.http.servlet import (
RestServlet, RestServlet,
assert_params_in_dict, parse_and_validate_json_object_from_request,
parse_json_object_from_request,
) )
from synapse.http.site import SynapseRequest from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns, interactive_auth_handler from synapse.rest.client._base import client_patterns, interactive_auth_handler
from synapse.rest.client.models import AuthenticationData
from synapse.rest.models import RequestBodyModel
from synapse.types import JsonDict from synapse.types import JsonDict
if TYPE_CHECKING: if TYPE_CHECKING:
@ -80,27 +83,29 @@ class DeleteDevicesRestServlet(RestServlet):
self.device_handler = hs.get_device_handler() self.device_handler = hs.get_device_handler()
self.auth_handler = hs.get_auth_handler() self.auth_handler = hs.get_auth_handler()
class PostBody(RequestBodyModel):
auth: Optional[AuthenticationData]
devices: List[StrictStr]
@interactive_auth_handler @interactive_auth_handler
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request) requester = await self.auth.get_user_by_req(request)
try: try:
body = parse_json_object_from_request(request) body = parse_and_validate_json_object_from_request(request, self.PostBody)
except errors.SynapseError as e: except errors.SynapseError as e:
if e.errcode == errors.Codes.NOT_JSON: if e.errcode == errors.Codes.NOT_JSON:
# DELETE # TODO: Can/should we remove this fallback now?
# deal with older clients which didn't pass a JSON dict # deal with older clients which didn't pass a JSON dict
# the same as those that pass an empty dict # the same as those that pass an empty dict
body = {} body = self.PostBody.parse_obj({})
else: else:
raise e raise e
assert_params_in_dict(body, ["devices"])
await self.auth_handler.validate_user_via_ui_auth( await self.auth_handler.validate_user_via_ui_auth(
requester, requester,
request, request,
body, body.dict(exclude_unset=True),
"remove device(s) from your account", "remove device(s) from your account",
# Users might call this multiple times in a row while cleaning up # Users might call this multiple times in a row while cleaning up
# devices, allow a single UI auth session to be re-used. # devices, allow a single UI auth session to be re-used.
@ -108,7 +113,7 @@ class DeleteDevicesRestServlet(RestServlet):
) )
await self.device_handler.delete_devices( await self.device_handler.delete_devices(
requester.user.to_string(), body["devices"] requester.user.to_string(), body.devices
) )
return 200, {} return 200, {}
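The same parse-and-validate pattern in isolation, as a hedged sketch (the model and fields are illustrative, not part of this commit):

    from typing import List, Optional

    from pydantic import StrictStr

    class ExamplePostBody(RequestBodyModel):
        # StrictStr rejects non-string values rather than coercing them.
        devices: List[StrictStr]
        reason: Optional[StrictStr] = None

    # Inside a servlet handler; a body that fails validation is rejected
    # with a SynapseError rather than reaching the handler logic.
    body = parse_and_validate_json_object_from_request(request, ExamplePostBody)
    do_something(body.devices, body.reason)

Here `do_something` is a placeholder; the point is that handlers downstream receive typed attributes instead of raw dict lookups.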
@ -147,6 +152,9 @@ class DeviceRestServlet(RestServlet):
return 200, device return 200, device
class DeleteBody(RequestBodyModel):
auth: Optional[AuthenticationData]
@interactive_auth_handler @interactive_auth_handler
async def on_DELETE( async def on_DELETE(
self, request: SynapseRequest, device_id: str self, request: SynapseRequest, device_id: str
@ -154,20 +162,21 @@ class DeviceRestServlet(RestServlet):
requester = await self.auth.get_user_by_req(request) requester = await self.auth.get_user_by_req(request)
try: try:
body = parse_json_object_from_request(request) body = parse_and_validate_json_object_from_request(request, self.DeleteBody)
except errors.SynapseError as e: except errors.SynapseError as e:
if e.errcode == errors.Codes.NOT_JSON: if e.errcode == errors.Codes.NOT_JSON:
# TODO: can/should we remove this fallback now?
# deal with older clients which didn't pass a JSON dict # deal with older clients which didn't pass a JSON dict
# the same as those that pass an empty dict # the same as those that pass an empty dict
body = {} body = self.DeleteBody.parse_obj({})
else: else:
raise raise
await self.auth_handler.validate_user_via_ui_auth( await self.auth_handler.validate_user_via_ui_auth(
requester, requester,
request, request,
body, body.dict(exclude_unset=True),
"remove a device from your account", "remove a device from your account",
# Users might call this multiple times in a row while cleaning up # Users might call this multiple times in a row while cleaning up
# devices, allow a single UI auth session to be re-used. # devices, allow a single UI auth session to be re-used.
@ -179,18 +188,33 @@ class DeviceRestServlet(RestServlet):
) )
return 200, {} return 200, {}
class PutBody(RequestBodyModel):
display_name: Optional[StrictStr]
async def on_PUT( async def on_PUT(
self, request: SynapseRequest, device_id: str self, request: SynapseRequest, device_id: str
) -> Tuple[int, JsonDict]: ) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request, allow_guest=True) requester = await self.auth.get_user_by_req(request, allow_guest=True)
body = parse_json_object_from_request(request) body = parse_and_validate_json_object_from_request(request, self.PutBody)
await self.device_handler.update_device( await self.device_handler.update_device(
requester.user.to_string(), device_id, body requester.user.to_string(), device_id, body.dict()
) )
return 200, {} return 200, {}
class DehydratedDeviceDataModel(RequestBodyModel):
"""JSON blob describing a dehydrated device to be stored.
Expects other freeform fields. Use .dict() to access them.
"""
class Config:
extra = Extra.allow
algorithm: StrictStr
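A hedged sketch of how the freeform fields behave under `Extra.allow` (values invented):

    blob = {"algorithm": "org.example.dehydration.v1", "other_field": 42}
    model = DehydratedDeviceDataModel.parse_obj(blob)
    # `algorithm` is validated as a strict string; unknown keys are kept
    # and come back out of .dict() untouched.
    assert model.dict() == blob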
class DehydratedDeviceServlet(RestServlet): class DehydratedDeviceServlet(RestServlet):
"""Retrieve or store a dehydrated device. """Retrieve or store a dehydrated device.
@ -246,27 +270,19 @@ class DehydratedDeviceServlet(RestServlet):
else: else:
raise errors.NotFoundError("No dehydrated device available") raise errors.NotFoundError("No dehydrated device available")
async def on_PUT(self, request: SynapseRequest) -> Tuple[int, JsonDict]: class PutBody(RequestBodyModel):
submission = parse_json_object_from_request(request) device_id: StrictStr
requester = await self.auth.get_user_by_req(request) device_data: DehydratedDeviceDataModel
initial_device_display_name: Optional[StrictStr]
if "device_data" not in submission: async def on_PUT(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
raise errors.SynapseError( submission = parse_and_validate_json_object_from_request(request, self.PutBody)
400, requester = await self.auth.get_user_by_req(request)
"device_data missing",
errcode=errors.Codes.MISSING_PARAM,
)
elif not isinstance(submission["device_data"], dict):
raise errors.SynapseError(
400,
"device_data must be an object",
errcode=errors.Codes.INVALID_PARAM,
)
device_id = await self.device_handler.store_dehydrated_device( device_id = await self.device_handler.store_dehydrated_device(
requester.user.to_string(), requester.user.to_string(),
submission["device_data"], submission.device_data,
submission.get("initial_device_display_name", None), submission.initial_device_display_name,
) )
return 200, {"device_id": device_id} return 200, {"device_id": device_id}
@ -300,28 +316,18 @@ class ClaimDehydratedDeviceServlet(RestServlet):
self.auth = hs.get_auth() self.auth = hs.get_auth()
self.device_handler = hs.get_device_handler() self.device_handler = hs.get_device_handler()
class PostBody(RequestBodyModel):
device_id: StrictStr
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request) requester = await self.auth.get_user_by_req(request)
submission = parse_json_object_from_request(request) submission = parse_and_validate_json_object_from_request(request, self.PostBody)
if "device_id" not in submission:
raise errors.SynapseError(
400,
"device_id missing",
errcode=errors.Codes.MISSING_PARAM,
)
elif not isinstance(submission["device_id"], str):
raise errors.SynapseError(
400,
"device_id must be a string",
errcode=errors.Codes.INVALID_PARAM,
)
result = await self.device_handler.rehydrate_device( result = await self.device_handler.rehydrate_device(
requester.user.to_string(), requester.user.to_string(),
self.auth.get_access_token_from_request(request), self.auth.get_access_token_from_request(request),
submission["device_id"], submission.device_id,
) )
return 200, result return 200, result

View file

@ -13,15 +13,22 @@
# limitations under the License. # limitations under the License.
import logging import logging
from typing import TYPE_CHECKING, Tuple from typing import TYPE_CHECKING, List, Optional, Tuple
from pydantic import StrictStr
from typing_extensions import Literal
from twisted.web.server import Request from twisted.web.server import Request
from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
from synapse.http.server import HttpServer from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.servlet import (
RestServlet,
parse_and_validate_json_object_from_request,
)
from synapse.http.site import SynapseRequest from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns from synapse.rest.client._base import client_patterns
from synapse.rest.models import RequestBodyModel
from synapse.types import JsonDict, RoomAlias from synapse.types import JsonDict, RoomAlias
if TYPE_CHECKING: if TYPE_CHECKING:
@ -54,6 +61,12 @@ class ClientDirectoryServer(RestServlet):
return 200, res return 200, res
class PutBody(RequestBodyModel):
# TODO: get Pydantic to validate that this is a valid room id?
room_id: StrictStr
# `servers` is unspecced
servers: Optional[List[StrictStr]] = None
async def on_PUT( async def on_PUT(
self, request: SynapseRequest, room_alias: str self, request: SynapseRequest, room_alias: str
) -> Tuple[int, JsonDict]: ) -> Tuple[int, JsonDict]:
@ -61,31 +74,22 @@ class ClientDirectoryServer(RestServlet):
raise SynapseError(400, "Room alias invalid", errcode=Codes.INVALID_PARAM) raise SynapseError(400, "Room alias invalid", errcode=Codes.INVALID_PARAM)
room_alias_obj = RoomAlias.from_string(room_alias) room_alias_obj = RoomAlias.from_string(room_alias)
content = parse_json_object_from_request(request) content = parse_and_validate_json_object_from_request(request, self.PutBody)
if "room_id" not in content:
raise SynapseError(
400, 'Missing params: ["room_id"]', errcode=Codes.BAD_JSON
)
logger.debug("Got content: %s", content) logger.debug("Got content: %s", content)
logger.debug("Got room name: %s", room_alias_obj.to_string()) logger.debug("Got room name: %s", room_alias_obj.to_string())
room_id = content["room_id"] logger.debug("Got room_id: %s", content.room_id)
servers = content["servers"] if "servers" in content else None logger.debug("Got servers: %s", content.servers)
logger.debug("Got room_id: %s", room_id) room = await self.store.get_room(content.room_id)
logger.debug("Got servers: %s", servers)
# TODO(erikj): Check types.
room = await self.store.get_room(room_id)
if room is None: if room is None:
raise SynapseError(400, "Room does not exist") raise SynapseError(400, "Room does not exist")
requester = await self.auth.get_user_by_req(request) requester = await self.auth.get_user_by_req(request)
await self.directory_handler.create_association( await self.directory_handler.create_association(
requester, room_alias_obj, room_id, servers requester, room_alias_obj, content.room_id, content.servers
) )
return 200, {} return 200, {}
@ -137,16 +141,18 @@ class ClientDirectoryListServer(RestServlet):
return 200, {"visibility": "public" if room["is_public"] else "private"} return 200, {"visibility": "public" if room["is_public"] else "private"}
class PutBody(RequestBodyModel):
visibility: Literal["public", "private"] = "public"
async def on_PUT( async def on_PUT(
self, request: SynapseRequest, room_id: str self, request: SynapseRequest, room_id: str
) -> Tuple[int, JsonDict]: ) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request) requester = await self.auth.get_user_by_req(request)
content = parse_json_object_from_request(request) content = parse_and_validate_json_object_from_request(request, self.PutBody)
visibility = content.get("visibility", "public")
await self.directory_handler.edit_published_room_list( await self.directory_handler.edit_published_room_list(
requester, room_id, visibility requester, room_id, content.visibility
) )
return 200, {} return 200, {}
@ -163,12 +169,14 @@ class ClientAppserviceDirectoryListServer(RestServlet):
self.directory_handler = hs.get_directory_handler() self.directory_handler = hs.get_directory_handler()
self.auth = hs.get_auth() self.auth = hs.get_auth()
class PutBody(RequestBodyModel):
visibility: Literal["public", "private"] = "public"
async def on_PUT( async def on_PUT(
self, request: SynapseRequest, network_id: str, room_id: str self, request: SynapseRequest, network_id: str, room_id: str
) -> Tuple[int, JsonDict]: ) -> Tuple[int, JsonDict]:
content = parse_json_object_from_request(request) content = parse_and_validate_json_object_from_request(request, self.PutBody)
visibility = content.get("visibility", "public") return await self._edit(request, network_id, room_id, content.visibility)
return await self._edit(request, network_id, room_id, visibility)
async def on_DELETE( async def on_DELETE(
self, request: SynapseRequest, network_id: str, room_id: str self, request: SynapseRequest, network_id: str, room_id: str
@ -176,7 +184,11 @@ class ClientAppserviceDirectoryListServer(RestServlet):
return await self._edit(request, network_id, room_id, "private") return await self._edit(request, network_id, room_id, "private")
async def _edit( async def _edit(
self, request: SynapseRequest, network_id: str, room_id: str, visibility: str self,
request: SynapseRequest,
network_id: str,
room_id: str,
visibility: Literal["public", "private"],
) -> Tuple[int, JsonDict]: ) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request) requester = await self.auth.get_user_by_req(request)
if not requester.app_service: if not requester.app_service:

View file

@ -50,7 +50,9 @@ class EventStreamRestServlet(RestServlet):
raise SynapseError(400, "Guest users must specify room_id param") raise SynapseError(400, "Guest users must specify room_id param")
room_id = parse_string(request, "room_id") room_id = parse_string(request, "room_id")
pagin_config = await PaginationConfig.from_request(self.store, request) pagin_config = await PaginationConfig.from_request(
self.store, request, default_limit=10
)
timeout = EventStreamRestServlet.DEFAULT_LONGPOLL_TIME_MS timeout = EventStreamRestServlet.DEFAULT_LONGPOLL_TIME_MS
if b"timeout" in args: if b"timeout" in args:
try: try:

View file

@ -39,7 +39,9 @@ class InitialSyncRestServlet(RestServlet):
requester = await self.auth.get_user_by_req(request) requester = await self.auth.get_user_by_req(request)
args: Dict[bytes, List[bytes]] = request.args # type: ignore args: Dict[bytes, List[bytes]] = request.args # type: ignore
as_client_event = b"raw" not in args as_client_event = b"raw" not in args
pagination_config = await PaginationConfig.from_request(self.store, request) pagination_config = await PaginationConfig.from_request(
self.store, request, default_limit=10
)
include_archived = parse_boolean(request, "archived", default=False) include_archived = parse_boolean(request, "archived", default=False)
content = await self.initial_sync_handler.snapshot_all_rooms( content = await self.initial_sync_handler.snapshot_all_rooms(
user_id=requester.user.to_string(), user_id=requester.user.to_string(),

View file

@ -15,8 +15,8 @@
import logging import logging
from typing import TYPE_CHECKING, Tuple from typing import TYPE_CHECKING, Tuple
from synapse.api.constants import ReceiptTypes from synapse.api.constants import MAIN_TIMELINE, ReceiptTypes
from synapse.api.errors import SynapseError from synapse.api.errors import Codes, SynapseError
from synapse.http.server import HttpServer from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest from synapse.http.site import SynapseRequest
@ -43,13 +43,13 @@ class ReceiptRestServlet(RestServlet):
self.receipts_handler = hs.get_receipts_handler() self.receipts_handler = hs.get_receipts_handler()
self.read_marker_handler = hs.get_read_marker_handler() self.read_marker_handler = hs.get_read_marker_handler()
self.presence_handler = hs.get_presence_handler() self.presence_handler = hs.get_presence_handler()
self._main_store = hs.get_datastores().main
self._known_receipt_types = { self._known_receipt_types = {
ReceiptTypes.READ, ReceiptTypes.READ,
ReceiptTypes.READ_PRIVATE, ReceiptTypes.READ_PRIVATE,
ReceiptTypes.FULLY_READ, ReceiptTypes.FULLY_READ,
} }
self._msc3771_enabled = hs.config.experimental.msc3771_enabled
async def on_POST( async def on_POST(
self, request: SynapseRequest, room_id: str, receipt_type: str, event_id: str self, request: SynapseRequest, room_id: str, receipt_type: str, event_id: str
@ -66,13 +66,29 @@ class ReceiptRestServlet(RestServlet):
# Pull the thread ID, if one exists. # Pull the thread ID, if one exists.
thread_id = None thread_id = None
if self._msc3771_enabled: if "thread_id" in body:
if "thread_id" in body: thread_id = body.get("thread_id")
thread_id = body.get("thread_id") if not thread_id or not isinstance(thread_id, str):
if not thread_id or not isinstance(thread_id, str): raise SynapseError(
raise SynapseError( 400,
400, "thread_id field must be a non-empty string" "thread_id field must be a non-empty string",
) Codes.INVALID_PARAM,
)
if receipt_type == ReceiptTypes.FULLY_READ:
raise SynapseError(
400,
f"thread_id is not compatible with {ReceiptTypes.FULLY_READ} receipts.",
Codes.INVALID_PARAM,
)
# Ensure the event ID roughly correlates to the thread ID.
if not await self._is_event_in_thread(event_id, thread_id):
raise SynapseError(
400,
f"event_id {event_id} is not related to thread {thread_id}",
Codes.INVALID_PARAM,
)
await self.presence_handler.bump_presence_active_time(requester.user) await self.presence_handler.bump_presence_active_time(requester.user)
@ -95,6 +111,46 @@ class ReceiptRestServlet(RestServlet):
return 200, {} return 200, {}
async def _is_event_in_thread(self, event_id: str, thread_id: str) -> bool:
"""
The event must be related to the thread ID (in a vague sense) to ensure
clients aren't sending bogus receipts.
A thread ID is considered valid for a given event E if:
1. E has a thread relation which matches the thread ID;
2. E has another event which has a thread relation to E matching the
thread ID; or
3. E is recursively related (via any rel_type) to an event which
satisfies 1 or 2.
Given the following DAG:
A <---[m.thread]-- B <--[m.annotation]-- C
^
|--[m.reference]-- D <--[m.annotation]-- E
It is valid to send a receipt for thread A on A, B, C, D, or E.
It is valid to send a receipt for the main timeline on A, D, and E.
Args:
event_id: The event ID to check.
thread_id: The thread ID the event is potentially part of.
Returns:
True if the event belongs to the given thread, otherwise False.
"""
# If the receipt is on the main timeline, it is enough to check whether
# the event is directly related to a thread.
if thread_id == MAIN_TIMELINE:
return MAIN_TIMELINE == await self._main_store.get_thread_id(event_id)
# Otherwise, check if the event is directly part of a thread, or is the
# root message (or related to the root message) of a thread.
return thread_id == await self._main_store.get_thread_id_for_receipts(event_id)
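The rule can be exercised against the docstring's DAG with a toy relation map (purely illustrative; the real check goes through the two store methods above):

    # Toy relation map: event -> (parent, rel_type).
    relations = {
        "B": ("A", "m.thread"),
        "C": ("B", "m.annotation"),
        "D": ("A", "m.reference"),
        "E": ("D", "m.annotation"),
    }
    thread_roots = {p for p, rel in relations.values() if rel == "m.thread"}

    def valid_for_thread(event_id: str, thread_id: str) -> bool:
        # Walk towards the DAG root; the receipt is valid if we hit the
        # thread relation itself (rule 1) or its root event (rule 2),
        # possibly after following other relation types (rule 3).
        while True:
            if event_id == thread_id and event_id in thread_roots:
                return True
            rel = relations.get(event_id)
            if rel is None:
                return False
            parent, rel_type = rel
            if rel_type == "m.thread":
                return parent == thread_id
            event_id = parent

    assert all(valid_for_thread(e, "A") for e in "ABCDE")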
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
ReceiptRestServlet(hs).register(http_server) ReceiptRestServlet(hs).register(http_server)

View file

@ -13,13 +13,17 @@
# limitations under the License. # limitations under the License.
import logging import logging
import re
from typing import TYPE_CHECKING, Optional, Tuple from typing import TYPE_CHECKING, Optional, Tuple
from synapse.handlers.relations import ThreadsListInclude
from synapse.http.server import HttpServer from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_integer, parse_string from synapse.http.servlet import RestServlet, parse_integer, parse_string
from synapse.http.site import SynapseRequest from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns from synapse.rest.client._base import client_patterns
from synapse.types import JsonDict, StreamToken from synapse.storage.databases.main.relations import ThreadsNextBatch
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.server import HomeServer from synapse.server import HomeServer
@ -41,9 +45,8 @@ class RelationPaginationServlet(RestServlet):
def __init__(self, hs: "HomeServer"): def __init__(self, hs: "HomeServer"):
super().__init__() super().__init__()
self.auth = hs.get_auth() self.auth = hs.get_auth()
self.store = hs.get_datastores().main self._store = hs.get_datastores().main
self._relations_handler = hs.get_relations_handler() self._relations_handler = hs.get_relations_handler()
self._msc3715_enabled = hs.config.experimental.msc3715_enabled
async def on_GET( async def on_GET(
self, self,
@ -55,49 +58,63 @@ class RelationPaginationServlet(RestServlet):
) -> Tuple[int, JsonDict]: ) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request, allow_guest=True) requester = await self.auth.get_user_by_req(request, allow_guest=True)
limit = parse_integer(request, "limit", default=5) pagination_config = await PaginationConfig.from_request(
# Fetch the direction parameter, if provided. self._store, request, default_limit=5, default_dir="b"
# )
# TODO Use PaginationConfig.from_request when the unstable parameter is
# no longer needed.
direction = parse_string(request, "dir", allowed_values=["f", "b"])
if direction is None:
if self._msc3715_enabled:
direction = parse_string(
request,
"org.matrix.msc3715.dir",
default="b",
allowed_values=["f", "b"],
)
else:
direction = "b"
from_token_str = parse_string(request, "from")
to_token_str = parse_string(request, "to")
# Return the relations
from_token = None
if from_token_str:
from_token = await StreamToken.from_string(self.store, from_token_str)
to_token = None
if to_token_str:
to_token = await StreamToken.from_string(self.store, to_token_str)
# The unstable version of this API returns an extra field for client # The unstable version of this API returns an extra field for client
# compatibility, see https://github.com/matrix-org/synapse/issues/12930. # compatibility, see https://github.com/matrix-org/synapse/issues/12930.
assert request.path is not None assert request.path is not None
include_original_event = request.path.startswith(b"/_matrix/client/unstable/") include_original_event = request.path.startswith(b"/_matrix/client/unstable/")
# Return the relations
result = await self._relations_handler.get_relations( result = await self._relations_handler.get_relations(
requester=requester, requester=requester,
event_id=parent_id, event_id=parent_id,
room_id=room_id, room_id=room_id,
pagin_config=pagination_config,
include_original_event=include_original_event,
relation_type=relation_type, relation_type=relation_type,
event_type=event_type, event_type=event_type,
)
return 200, result
class ThreadsServlet(RestServlet):
PATTERNS = (re.compile("^/_matrix/client/v1/rooms/(?P<room_id>[^/]*)/threads"),)
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.store = hs.get_datastores().main
self._relations_handler = hs.get_relations_handler()
async def on_GET(
self, request: SynapseRequest, room_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
limit = parse_integer(request, "limit", default=5)
from_token_str = parse_string(request, "from")
include = parse_string(
request,
"include",
default=ThreadsListInclude.all.value,
allowed_values=[v.value for v in ThreadsListInclude],
)
# Return the relations
from_token = None
if from_token_str:
from_token = ThreadsNextBatch.from_string(from_token_str)
result = await self._relations_handler.get_threads(
requester=requester,
room_id=room_id,
include=ThreadsListInclude(include),
limit=limit, limit=limit,
direction=direction,
from_token=from_token, from_token=from_token,
to_token=to_token,
include_original_event=include_original_event,
) )
return 200, result return 200, result
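A hedged client-side sketch of paginating the new endpoint (the homeserver, token, and the chunk/next_batch response shape are assumptions in line with the threads proposal, not taken from this diff):

    import json
    import urllib.parse
    import urllib.request

    room = urllib.parse.quote("!room:example.org")
    base = f"https://example.org/_matrix/client/v1/rooms/{room}/threads"
    params = {"limit": "5", "include": "participated"}  # or "all"
    while True:
        url = base + "?" + urllib.parse.urlencode(params)
        req = urllib.request.Request(url, headers={"Authorization": "Bearer <token>"})
        with urllib.request.urlopen(req) as resp:
            page = json.load(resp)
        for thread_root in page.get("chunk", []):
            print(thread_root["event_id"])
        if "next_batch" not in page:
            break
        params["from"] = page["next_batch"]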
@ -105,3 +122,4 @@ class RelationPaginationServlet(RestServlet):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
RelationPaginationServlet(hs).register(http_server) RelationPaginationServlet(hs).register(http_server)
ThreadsServlet(hs).register(http_server)

View file

@ -0,0 +1,74 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from http.client import TEMPORARY_REDIRECT
from typing import TYPE_CHECKING, Optional
from synapse.http.server import HttpServer, respond_with_redirect
from synapse.http.servlet import RestServlet
from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class RendezvousServlet(RestServlet):
"""
This is a placeholder implementation of the [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886)
simple client rendezvous capability, used by the "Sign in with QR" functionality.
Rather than being a full implementation, this servlet only serves a 307 redirect to a configured server.
A module that implements the full functionality is available at: https://pypi.org/project/matrix-http-rendezvous-synapse/.
Request:
POST /rendezvous HTTP/1.1
Content-Type: ...
...
Response:
HTTP/1.1 307
Location: <configured endpoint>
"""
PATTERNS = client_patterns(
"/org.matrix.msc3886/rendezvous$", releases=[], v1=False, unstable=True
)
def __init__(self, hs: "HomeServer"):
super().__init__()
redirection_target: Optional[str] = hs.config.experimental.msc3886_endpoint
assert (
redirection_target is not None
), "Servlet is only registered if there is a redirection target"
self.endpoint = redirection_target.encode("utf-8")
async def on_POST(self, request: SynapseRequest) -> None:
respond_with_redirect(
request, self.endpoint, statusCode=TEMPORARY_REDIRECT, cors=True
)
# PUT, GET and DELETE are not implemented as they should be fulfilled by the redirect target.
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
if hs.config.experimental.msc3886_endpoint is not None:
RendezvousServlet(hs).register(http_server)

View file

@ -733,7 +733,9 @@ class RoomInitialSyncRestServlet(RestServlet):
self, request: SynapseRequest, room_id: str self, request: SynapseRequest, room_id: str
) -> Tuple[int, JsonDict]: ) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request, allow_guest=True) requester = await self.auth.get_user_by_req(request, allow_guest=True)
pagination_config = await PaginationConfig.from_request(self.store, request) pagination_config = await PaginationConfig.from_request(
self.store, request, default_limit=10
)
content = await self.initial_sync_handler.room_initial_sync( content = await self.initial_sync_handler.room_initial_sync(
room_id=room_id, requester=requester, pagin_config=pagination_config room_id=room_id, requester=requester, pagin_config=pagination_config
) )

View file

@ -100,6 +100,7 @@ class SyncRestServlet(RestServlet):
self._server_notices_sender = hs.get_server_notices_sender() self._server_notices_sender = hs.get_server_notices_sender()
self._event_serializer = hs.get_event_client_serializer() self._event_serializer = hs.get_event_client_serializer()
self._msc2654_enabled = hs.config.experimental.msc2654_enabled self._msc2654_enabled = hs.config.experimental.msc2654_enabled
self._msc3773_enabled = hs.config.experimental.msc3773_enabled
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
# This will always be set by the time Twisted calls us. # This will always be set by the time Twisted calls us.
@ -509,6 +510,12 @@ class SyncRestServlet(RestServlet):
ephemeral_events = room.ephemeral ephemeral_events = room.ephemeral
result["ephemeral"] = {"events": ephemeral_events} result["ephemeral"] = {"events": ephemeral_events}
result["unread_notifications"] = room.unread_notifications result["unread_notifications"] = room.unread_notifications
if room.unread_thread_notifications:
result["unread_thread_notifications"] = room.unread_thread_notifications
if self._msc3773_enabled:
result[
"org.matrix.msc3773.unread_thread_notifications"
] = room.unread_thread_notifications
result["summary"] = room.summary result["summary"] = room.summary
if self._msc2654_enabled: if self._msc2654_enabled:
result["org.matrix.msc2654.unread_count"] = room.unread_count result["org.matrix.msc2654.unread_count"] = room.unread_count

View file

@ -75,6 +75,8 @@ class VersionsRestServlet(RestServlet):
"r0.6.1", "r0.6.1",
"v1.1", "v1.1",
"v1.2", "v1.2",
"v1.3",
"v1.4",
], ],
# as per MSC1497: # as per MSC1497:
"unstable_features": { "unstable_features": {
@ -103,14 +105,20 @@ class VersionsRestServlet(RestServlet):
"org.matrix.msc3030": self.config.experimental.msc3030_enabled, "org.matrix.msc3030": self.config.experimental.msc3030_enabled,
# Adds support for thread relations, per MSC3440. # Adds support for thread relations, per MSC3440.
"org.matrix.msc3440.stable": True, # TODO: remove when "v1.3" is added above "org.matrix.msc3440.stable": True, # TODO: remove when "v1.3" is added above
# Support for thread read receipts. # Support for thread read receipts & notification counts.
"org.matrix.msc3771": self.config.experimental.msc3771_enabled, "org.matrix.msc3771": True,
"org.matrix.msc3773": self.config.experimental.msc3773_enabled,
# Allows moderators to fetch redacted event content as described in MSC2815 # Allows moderators to fetch redacted event content as described in MSC2815
"fi.mau.msc2815": self.config.experimental.msc2815_enabled, "fi.mau.msc2815": self.config.experimental.msc2815_enabled,
# Adds support for login token requests as per MSC3882 # Adds support for login token requests as per MSC3882
"org.matrix.msc3882": self.config.experimental.msc3882_enabled, "org.matrix.msc3882": self.config.experimental.msc3882_enabled,
# Adds support for remotely enabling/disabling pushers, as per MSC3881 # Adds support for remotely enabling/disabling pushers, as per MSC3881
"org.matrix.msc3881": self.config.experimental.msc3881_enabled, "org.matrix.msc3881": self.config.experimental.msc3881_enabled,
# Adds support for filtering /messages by event relation.
"org.matrix.msc3874": self.config.experimental.msc3874_enabled,
# Adds support for simple HTTP rendezvous as per MSC3886
"org.matrix.msc3886": self.config.experimental.msc3886_endpoint
is not None,
}, },
}, },
) )

View file

@ -20,9 +20,9 @@ from signedjson.sign import sign_json
from unpaddedbase64 import encode_base64 from unpaddedbase64 import encode_base64
from twisted.web.resource import Resource from twisted.web.resource import Resource
from twisted.web.server import Request
from synapse.http.server import respond_with_json_bytes from synapse.http.server import respond_with_json_bytes
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict from synapse.types import JsonDict
if TYPE_CHECKING: if TYPE_CHECKING:
@ -99,7 +99,7 @@ class LocalKey(Resource):
json_object = sign_json(json_object, self.config.server.server_name, key) json_object = sign_json(json_object, self.config.server.server_name, key)
return json_object return json_object
def render_GET(self, request: Request) -> Optional[int]: def render_GET(self, request: SynapseRequest) -> Optional[int]:
time_now = self.clock.time_msec() time_now = self.clock.time_msec()
# Update the expiry time if less than half the interval remains. # Update the expiry time if less than half the interval remains.
if time_now + self.config.key.key_refresh_interval / 2 > self.valid_until_ts: if time_now + self.config.key.key_refresh_interval / 2 > self.valid_until_ts:

View file

@ -139,66 +139,73 @@ class OEmbedProvider:
try: try:
# oEmbed responses *must* be UTF-8 according to the spec. # oEmbed responses *must* be UTF-8 according to the spec.
oembed = json_decoder.decode(raw_body.decode("utf-8")) oembed = json_decoder.decode(raw_body.decode("utf-8"))
except ValueError:
return OEmbedResult({}, None, None)
# The version is a required string field, but not always provided, # The version is a required string field, but not always provided,
# or sometimes provided as a float. Be lenient. # or sometimes provided as a float. Be lenient.
oembed_version = oembed.get("version", "1.0") oembed_version = oembed.get("version", "1.0")
if oembed_version != "1.0" and oembed_version != 1: if oembed_version != "1.0" and oembed_version != 1:
raise RuntimeError(f"Invalid oEmbed version: {oembed_version}") return OEmbedResult({}, None, None)
# Ensure the cache age is None or an int. # Attempt to parse the cache age, if possible.
cache_age = oembed.get("cache_age") try:
if cache_age: cache_age = int(oembed.get("cache_age")) * 1000
cache_age = int(cache_age) * 1000 except (TypeError, ValueError):
# If the cache age cannot be parsed (e.g. wrong type or invalid
# The results. # string), ignore it.
open_graph_response = {
"og:url": url,
}
title = oembed.get("title")
if title:
open_graph_response["og:title"] = title
author_name = oembed.get("author_name")
# Use the provider name as the site name.
provider_name = oembed.get("provider_name")
if provider_name:
open_graph_response["og:site_name"] = provider_name
# If a thumbnail exists, use it. Note that dimensions will be calculated later.
if "thumbnail_url" in oembed:
open_graph_response["og:image"] = oembed["thumbnail_url"]
# Process each type separately.
oembed_type = oembed["type"]
if oembed_type == "rich":
calc_description_and_urls(open_graph_response, oembed["html"])
elif oembed_type == "photo":
# If this is a photo, use the full image, not the thumbnail.
open_graph_response["og:image"] = oembed["url"]
elif oembed_type == "video":
open_graph_response["og:type"] = "video.other"
calc_description_and_urls(open_graph_response, oembed["html"])
open_graph_response["og:video:width"] = oembed["width"]
open_graph_response["og:video:height"] = oembed["height"]
elif oembed_type == "link":
open_graph_response["og:type"] = "website"
else:
raise RuntimeError(f"Unknown oEmbed type: {oembed_type}")
except Exception as e:
# Trap any exception and let the code follow as usual.
logger.warning("Error parsing oEmbed metadata from %s: %r", url, e)
open_graph_response = {}
author_name = None
cache_age = None cache_age = None
# The oEmbed response converted to Open Graph.
open_graph_response: JsonDict = {"og:url": url}
title = oembed.get("title")
if title and isinstance(title, str):
open_graph_response["og:title"] = title
author_name = oembed.get("author_name")
if not isinstance(author_name, str):
author_name = None
# Use the provider name as the site name.
provider_name = oembed.get("provider_name")
if provider_name and isinstance(provider_name, str):
open_graph_response["og:site_name"] = provider_name
# If a thumbnail exists, use it. Note that dimensions will be calculated later.
thumbnail_url = oembed.get("thumbnail_url")
if thumbnail_url and isinstance(thumbnail_url, str):
open_graph_response["og:image"] = thumbnail_url
# Process each type separately.
oembed_type = oembed.get("type")
if oembed_type == "rich":
html = oembed.get("html")
if isinstance(html, str):
calc_description_and_urls(open_graph_response, html)
elif oembed_type == "photo":
# If this is a photo, use the full image, not the thumbnail.
url = oembed.get("url")
if url and isinstance(url, str):
open_graph_response["og:image"] = url
elif oembed_type == "video":
open_graph_response["og:type"] = "video.other"
html = oembed.get("html")
if html and isinstance(html, str):
calc_description_and_urls(open_graph_response, oembed["html"])
for size in ("width", "height"):
val = oembed.get(size)
if val is not None and isinstance(val, int):
open_graph_response[f"og:video:{size}"] = val
elif oembed_type == "link":
open_graph_response["og:type"] = "website"
else:
logger.warning("Unknown oEmbed type: %s", oembed_type)
return OEmbedResult(open_graph_response, author_name, cache_age) return OEmbedResult(open_graph_response, author_name, cache_age)
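To make the leniency concrete, a hedged example input and the Open Graph mapping the code above would derive from it (all values invented):

    oembed = {
        "version": 1,           # tolerated: some providers send a number
        "type": "video",
        "title": "Some clip",
        "provider_name": "Example",
        "html": "<iframe src='...'></iframe>",
        "width": 640,
        "height": "480",        # wrong type: og:video:height is dropped
        "cache_age": "soon",    # unparsable: cache_age stays None
    }
    # Resulting Open Graph response (description derived from the html):
    # {"og:url": url, "og:title": "Some clip", "og:site_name": "Example",
    #  "og:type": "video.other", "og:video:width": 640, ...}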

View file

@ -20,6 +20,7 @@ from synapse.api.errors import SynapseError
from synapse.handlers.sso import get_username_mapping_session_cookie_from_request from synapse.handlers.sso import get_username_mapping_session_cookie_from_request
from synapse.http.server import DirectServeHtmlResource, respond_with_html from synapse.http.server import DirectServeHtmlResource, respond_with_html
from synapse.http.servlet import parse_string from synapse.http.servlet import parse_string
from synapse.http.site import SynapseRequest
from synapse.types import UserID from synapse.types import UserID
from synapse.util.templates import build_jinja_env from synapse.util.templates import build_jinja_env
@ -88,7 +89,7 @@ class NewUserConsentResource(DirectServeHtmlResource):
html = template.render(template_params) html = template.render(template_params)
respond_with_html(request, 200, html) respond_with_html(request, 200, html)
async def _async_render_POST(self, request: Request) -> None: async def _async_render_POST(self, request: SynapseRequest) -> None:
try: try:
session_id = get_username_mapping_session_cookie_from_request(request) session_id = get_username_mapping_session_cookie_from_request(request)
except SynapseError as e: except SynapseError as e:

View file

@ -18,6 +18,7 @@ from twisted.web.resource import Resource
from twisted.web.server import Request from twisted.web.server import Request
from synapse.http.server import set_cors_headers from synapse.http.server import set_cors_headers
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict from synapse.types import JsonDict
from synapse.util import json_encoder from synapse.util import json_encoder
from synapse.util.stringutils import parse_server_name from synapse.util.stringutils import parse_server_name
@ -63,7 +64,7 @@ class ClientWellKnownResource(Resource):
Resource.__init__(self) Resource.__init__(self)
self._well_known_builder = WellKnownBuilder(hs) self._well_known_builder = WellKnownBuilder(hs)
def render_GET(self, request: Request) -> bytes: def render_GET(self, request: SynapseRequest) -> bytes:
set_cors_headers(request) set_cors_headers(request)
r = self._well_known_builder.get_well_known() r = self._well_known_builder.get_well_known()
if not r: if not r:

View file

@ -94,7 +94,7 @@ UNIQUE_INDEX_BACKGROUND_UPDATES = {
"event_search": "event_search_event_id_idx", "event_search": "event_search_event_id_idx",
"local_media_repository_thumbnails": "local_media_repository_thumbnails_method_idx", "local_media_repository_thumbnails": "local_media_repository_thumbnails_method_idx",
"remote_media_cache_thumbnails": "remote_media_repository_thumbnails_method_idx", "remote_media_cache_thumbnails": "remote_media_repository_thumbnails_method_idx",
"event_push_summary": "event_push_summary_unique_index", "event_push_summary": "event_push_summary_unique_index2",
"receipts_linearized": "receipts_linearized_unique_index", "receipts_linearized": "receipts_linearized_unique_index",
"receipts_graph": "receipts_graph_unique_index", "receipts_graph": "receipts_graph_unique_index",
} }
@ -1658,7 +1658,7 @@ class DatabasePool:
table: string giving the table name table: string giving the table name
keyvalues: dict of column names and values to select the row with keyvalues: dict of column names and values to select the row with
retcol: string giving the name of the column to return retcol: string giving the name of the column to return
allow_none: If true, return None instead of failing if the SELECT allow_none: If true, return None instead of raising StoreError if the SELECT
statement returns no rows statement returns no rows
desc: description of the transaction, for logging and metrics desc: description of the transaction, for logging and metrics
""" """

View file

@ -244,12 +244,18 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
# redacted. # redacted.
self._attempt_to_invalidate_cache("get_relations_for_event", (redacts,)) self._attempt_to_invalidate_cache("get_relations_for_event", (redacts,))
self._attempt_to_invalidate_cache("get_applicable_edit", (redacts,)) self._attempt_to_invalidate_cache("get_applicable_edit", (redacts,))
self._attempt_to_invalidate_cache("get_thread_id", (redacts,))
self._attempt_to_invalidate_cache("get_thread_id_for_receipts", (redacts,))
if etype == EventTypes.Member: if etype == EventTypes.Member:
self._membership_stream_cache.entity_has_changed(state_key, stream_ordering) self._membership_stream_cache.entity_has_changed(state_key, stream_ordering)
self._attempt_to_invalidate_cache( self._attempt_to_invalidate_cache(
"get_invited_rooms_for_local_user", (state_key,) "get_invited_rooms_for_local_user", (state_key,)
) )
self._attempt_to_invalidate_cache(
"get_rooms_for_user_with_stream_ordering", (state_key,)
)
self._attempt_to_invalidate_cache("get_rooms_for_user", (state_key,))
if relates_to: if relates_to:
self._attempt_to_invalidate_cache("get_relations_for_event", (relates_to,)) self._attempt_to_invalidate_cache("get_relations_for_event", (relates_to,))
@ -259,9 +265,7 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
self._attempt_to_invalidate_cache("get_applicable_edit", (relates_to,)) self._attempt_to_invalidate_cache("get_applicable_edit", (relates_to,))
self._attempt_to_invalidate_cache("get_thread_summary", (relates_to,)) self._attempt_to_invalidate_cache("get_thread_summary", (relates_to,))
self._attempt_to_invalidate_cache("get_thread_participated", (relates_to,)) self._attempt_to_invalidate_cache("get_thread_participated", (relates_to,))
self._attempt_to_invalidate_cache( self._attempt_to_invalidate_cache("get_threads", (room_id,))
"get_mutual_event_relations_for_rel_type", (relates_to,)
)
async def invalidate_cache_and_stream( async def invalidate_cache_and_stream(
self, cache_name: str, keys: Tuple[Any, ...] self, cache_name: str, keys: Tuple[Any, ...]

View file

@ -539,9 +539,11 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
"device_id": device_id, "device_id": device_id,
"prev_id": [prev_id] if prev_id else [], "prev_id": [prev_id] if prev_id else [],
"stream_id": stream_id, "stream_id": stream_id,
"org.matrix.opentracing_context": opentracing_context,
} }
if opentracing_context != "{}":
result["org.matrix.opentracing_context"] = opentracing_context
prev_id = stream_id prev_id = stream_id
if device is not None: if device is not None:
@ -549,7 +551,11 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
if keys: if keys:
result["keys"] = keys result["keys"] = keys
device_display_name = device.display_name device_display_name = None
if (
self.hs.config.federation.allow_device_name_lookup_over_federation
):
device_display_name = device.display_name
if device_display_name: if device_display_name:
result["device_display_name"] = device_display_name result["device_display_name"] = device_display_name
else: else:

View file

@ -1501,6 +1501,12 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
event_id: The event that failed to be fetched or processed event_id: The event that failed to be fetched or processed
cause: The error message or reason that we failed to pull the event cause: The error message or reason that we failed to pull the event
""" """
logger.debug(
"record_event_failed_pull_attempt room_id=%s, event_id=%s, cause=%s",
room_id,
event_id,
cause,
)
await self.db_pool.runInteraction( await self.db_pool.runInteraction(
"record_event_failed_pull_attempt", "record_event_failed_pull_attempt",
self._record_event_failed_pull_attempt_upsert_txn, self._record_event_failed_pull_attempt_upsert_txn,
@ -1530,6 +1536,54 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
txn.execute(sql, (room_id, event_id, 1, self._clock.time_msec(), cause)) txn.execute(sql, (room_id, event_id, 1, self._clock.time_msec(), cause))
@trace
async def get_event_ids_to_not_pull_from_backoff(
self,
room_id: str,
event_ids: Collection[str],
) -> List[str]:
"""
Filter the given events down to those we have recently failed to pull and
which are still within their exponential backoff window.
Args:
room_id: The room that the events belong to
event_ids: A list of events to filter down
Returns:
List of event_ids that should not be attempted to be pulled
"""
event_failed_pull_attempts = await self.db_pool.simple_select_many_batch(
table="event_failed_pull_attempts",
column="event_id",
iterable=event_ids,
keyvalues={},
retcols=(
"event_id",
"last_attempt_ts",
"num_attempts",
),
desc="get_event_ids_to_not_pull_from_backoff",
)
current_time = self._clock.time_msec()
return [
event_failed_pull_attempt["event_id"]
for event_failed_pull_attempt in event_failed_pull_attempts
# Exponential back-off (up to the upper bound) so we don't try to
# pull the same event over and over. ex. 2hr, 4hr, 8hr, 16hr, etc.
if current_time
< event_failed_pull_attempt["last_attempt_ts"]
+ (
2
** min(
event_failed_pull_attempt["num_attempts"],
BACKFILL_EVENT_EXPONENTIAL_BACKOFF_MAXIMUM_DOUBLING_STEPS,
)
)
* BACKFILL_EVENT_EXPONENTIAL_BACKOFF_STEP_MILLISECONDS
]
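A standalone sketch of the backoff predicate used in the comprehension above; the step and cap values here are assumptions standing in for the two BACKFILL_EVENT_EXPONENTIAL_BACKOFF_* constants:

ASSUMED_STEP_MS = 60 * 60 * 1000  # assumed: one hour per backoff step
ASSUMED_MAX_DOUBLING_STEPS = 8    # assumed: cap on the exponent

def still_backing_off(last_attempt_ts: int, num_attempts: int, now_ms: int) -> bool:
    # After n failed attempts, wait 2**n steps (2h, 4h, 8h, ...), capped.
    wait_ms = (2 ** min(num_attempts, ASSUMED_MAX_DOUBLING_STEPS)) * ASSUMED_STEP_MS
    return now_ms < last_attempt_ts + wait_ms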
async def get_missing_events( async def get_missing_events(
self, self,
room_id: str, room_id: str,

View file

@ -88,7 +88,7 @@ from typing import (
import attr import attr
from synapse.api.constants import ReceiptTypes from synapse.api.constants import MAIN_TIMELINE, ReceiptTypes
from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
from synapse.storage.database import ( from synapse.storage.database import (
@ -119,6 +119,32 @@ DEFAULT_HIGHLIGHT_ACTION: List[Union[dict, str]] = [
] ]
@attr.s(slots=True, auto_attribs=True)
class _RoomReceipt:
"""
The latest receipts a user has in a room: the room's unthreaded receipt plus
any per-thread receipts, each recorded by stream ordering.
"""
unthreaded_stream_ordering: int = 0
# threaded_stream_ordering includes the main pseudo-thread.
threaded_stream_ordering: Dict[str, int] = attr.Factory(dict)
def is_unread(self, thread_id: str, stream_ordering: int) -> bool:
"""Returns True if the stream ordering is unread according to the receipt information."""
# Only include push actions with a stream ordering after both the unthreaded
# and threaded receipt. Properly handles a user without any receipts present.
return (
self.unthreaded_stream_ordering < stream_ordering
and self.threaded_stream_ordering.get(thread_id, 0) < stream_ordering
)
# A _RoomReceipt with no receipts in it.
MISSING_ROOM_RECEIPT = _RoomReceipt()
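A quick illustration of the semantics, using invented stream orderings:

receipt = _RoomReceipt(
    unthreaded_stream_ordering=10,
    threaded_stream_ordering={"$thread1": 15},
)
assert receipt.is_unread("$thread1", 16)            # after both receipts
assert not receipt.is_unread("$thread1", 12)        # before the threaded receipt
assert not receipt.is_unread("$other", 9)           # before the unthreaded receipt
assert MISSING_ROOM_RECEIPT.is_unread("$other", 1)  # no receipts at all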
@attr.s(slots=True, frozen=True, auto_attribs=True) @attr.s(slots=True, frozen=True, auto_attribs=True)
class HttpPushAction: class HttpPushAction:
""" """
@ -157,7 +183,7 @@ class UserPushAction(EmailPushAction):
@attr.s(slots=True, auto_attribs=True) @attr.s(slots=True, auto_attribs=True)
class NotifCounts: class NotifCounts:
""" """
The per-user, per-room count of notifications. Used by sync and push. The per-user, per-room, per-thread count of notifications. Used by sync and push.
""" """
notify_count: int = 0 notify_count: int = 0
@ -165,6 +191,21 @@ class NotifCounts:
highlight_count: int = 0 highlight_count: int = 0
@attr.s(slots=True, auto_attribs=True)
class RoomNotifCounts:
"""
The per-user, per-room count of notifications, covering the main timeline and each thread. Used by sync and push.
"""
main_timeline: NotifCounts
# Map of thread ID to the notification counts.
threads: Dict[str, NotifCounts]
def __len__(self) -> int:
# To properly account for the amount of space in any caches.
return len(self.threads) + 1
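The __len__ above matters because the cache holding these objects is switched to iterable=True below, which sizes cache entries by len() rather than counting each entry as 1. With invented counts:

counts = RoomNotifCounts(
    main_timeline=NotifCounts(notify_count=2),
    threads={"$thread1": NotifCounts(notify_count=1)},
)
assert len(counts) == 2  # one thread, plus one for the main timeline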
def _serialize_action( def _serialize_action(
actions: Collection[Union[Mapping, str]], is_highlight: bool actions: Collection[Union[Mapping, str]], is_highlight: bool
) -> str: ) -> str:
@ -253,6 +294,44 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
self._background_backfill_thread_id, self._background_backfill_thread_id,
) )
# Indexes which will be used to quickly make the thread_id column non-null.
self.db_pool.updates.register_background_index_update(
"event_push_actions_thread_id_null",
index_name="event_push_actions_thread_id_null",
table="event_push_actions",
columns=["thread_id"],
where_clause="thread_id IS NULL",
)
self.db_pool.updates.register_background_index_update(
"event_push_summary_thread_id_null",
index_name="event_push_summary_thread_id_null",
table="event_push_summary",
columns=["thread_id"],
where_clause="thread_id IS NULL",
)
# Check ASAP (and then every 15s) to see if we have finished the
# background updates for the event_push_actions and event_push_summary tables.
self._clock.call_later(0.0, self._check_event_push_backfill_thread_id)
self._event_push_backfill_thread_id_done = False
@wrap_as_background_process("check_event_push_backfill_thread_id")
async def _check_event_push_backfill_thread_id(self) -> None:
"""
Has thread_id finished backfilling?
If not, we need to just-in-time update it so the queries work.
"""
done = await self.db_pool.updates.has_completed_background_update(
"event_push_backfill_thread_id"
)
if done:
self._event_push_backfill_thread_id_done = True
else:
# Reschedule to run.
self._clock.call_later(15.0, self._check_event_push_backfill_thread_id)
async def _background_backfill_thread_id( async def _background_backfill_thread_id(
self, progress: JsonDict, batch_size: int self, progress: JsonDict, batch_size: int
) -> int: ) -> int:
@ -384,12 +463,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
return result return result
@cached(tree=True, max_entries=5000) @cached(tree=True, max_entries=5000, iterable=True)
async def get_unread_event_push_actions_by_room_for_user( async def get_unread_event_push_actions_by_room_for_user(
self, self,
room_id: str, room_id: str,
user_id: str, user_id: str,
) -> NotifCounts: ) -> RoomNotifCounts:
"""Get the notification count, the highlight count and the unread message count """Get the notification count, the highlight count and the unread message count
for a given user in a given room after their latest read receipt. for a given user in a given room after their latest read receipt.
@ -402,8 +481,9 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
user_id: The user to retrieve the counts for. user_id: The user to retrieve the counts for.
Returns Returns
A NotifCounts object containing the notification count, the highlight count A RoomNotifCounts object containing the notification count, the
and the unread message count. highlight count and the unread message count for both the main timeline
and threads.
""" """
return await self.db_pool.runInteraction( return await self.db_pool.runInteraction(
"get_unread_event_push_actions_by_room", "get_unread_event_push_actions_by_room",
@ -417,7 +497,7 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
txn: LoggingTransaction, txn: LoggingTransaction,
room_id: str, room_id: str,
user_id: str, user_id: str,
) -> NotifCounts: ) -> RoomNotifCounts:
# Get the stream ordering of the user's latest receipt in the room. # Get the stream ordering of the user's latest receipt in the room.
result = self.get_last_unthreaded_receipt_for_user_txn( result = self.get_last_unthreaded_receipt_for_user_txn(
txn, txn,
@ -451,8 +531,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
txn: LoggingTransaction, txn: LoggingTransaction,
room_id: str, room_id: str,
user_id: str, user_id: str,
receipt_stream_ordering: int, unthreaded_receipt_stream_ordering: int,
) -> NotifCounts: ) -> RoomNotifCounts:
"""Get the number of unread messages for a user/room that have happened """Get the number of unread messages for a user/room that have happened
since the given stream ordering. since the given stream ordering.
@ -460,78 +540,223 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
txn: The database transaction. txn: The database transaction.
room_id: The room ID to get unread counts for. room_id: The room ID to get unread counts for.
user_id: The user ID to get unread counts for. user_id: The user ID to get unread counts for.
receipt_stream_ordering: The stream ordering of the user's latest unthreaded_receipt_stream_ordering: The stream ordering of the user's latest
receipt in the room. If there are no receipts, the stream ordering unthreaded receipt in the room. If there are no unthreaded receipts,
of the user's join event. the stream ordering of the user's join event.
Returns Returns:
A NotifCounts object containing the notification count, the highlight count A RoomNotifCounts object containing the notification count, the
and the unread message count. highlight count and the unread message count for both the main timeline
and threads.
""" """
counts = NotifCounts() main_counts = NotifCounts()
thread_counts: Dict[str, NotifCounts] = {}
def _get_thread(thread_id: str) -> NotifCounts:
if thread_id == MAIN_TIMELINE:
return main_counts
return thread_counts.setdefault(thread_id, NotifCounts())
receipt_types_clause, receipts_args = make_in_list_sql_clause(
self.database_engine,
"receipt_type",
(ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE),
)
# First ensure that the existing rows have an updated thread_id field.
if not self._event_push_backfill_thread_id_done:
txn.execute(
"""
UPDATE event_push_summary
SET thread_id = ?
WHERE room_id = ? AND user_id = ? AND thread_id is NULL
""",
(MAIN_TIMELINE, room_id, user_id),
)
txn.execute(
"""
UPDATE event_push_actions
SET thread_id = ?
WHERE room_id = ? AND user_id = ? AND thread_id is NULL
""",
(MAIN_TIMELINE, room_id, user_id),
)
# First we pull the counts from the summary table. # First we pull the counts from the summary table.
# #
# We check that `last_receipt_stream_ordering` matches the stream # We check that `last_receipt_stream_ordering` matches the stream ordering of the
# ordering given. If it doesn't match then a new read receipt has arrived and # latest receipt for the thread (which may be either the unthreaded read receipt
# we haven't yet updated the counts in `event_push_summary` to reflect # or the threaded read receipt).
# that; in that case we simply ignore `event_push_summary` counts
# and do a manual count of all of the rows in the `event_push_actions` table
# for this user/room.
# #
# If `last_receipt_stream_ordering` is null then that means it's up to # If it doesn't match then a new read receipt has arrived and we haven't yet
# date (as the row was written by an older version of Synapse that # updated the counts in `event_push_summary` to reflect that; in that case we
# simply ignore `event_push_summary` counts.
#
# We then do a manual count of all the rows in the `event_push_actions` table
# for any user/room/thread which did not have a valid summary found.
#
# If `last_receipt_stream_ordering` is null then that means it's up-to-date
# (as the row was written by an older version of Synapse that
# updated `event_push_summary` synchronously when persisting a new read # updated `event_push_summary` synchronously when persisting a new read
# receipt). # receipt).
txn.execute( txn.execute(
""" f"""
SELECT stream_ordering, notif_count, COALESCE(unread_count, 0) SELECT notif_count, COALESCE(unread_count, 0), thread_id
FROM event_push_summary FROM event_push_summary
LEFT JOIN (
SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
FROM receipts_linearized
LEFT JOIN events USING (room_id, event_id)
WHERE
user_id = ?
AND room_id = ?
AND stream_ordering > ?
AND {receipt_types_clause}
GROUP BY thread_id
) AS receipts USING (thread_id)
WHERE room_id = ? AND user_id = ? WHERE room_id = ? AND user_id = ?
AND ( AND (
(last_receipt_stream_ordering IS NULL AND stream_ordering > ?) (last_receipt_stream_ordering IS NULL AND stream_ordering > COALESCE(threaded_receipt_stream_ordering, ?))
OR last_receipt_stream_ordering = ? OR last_receipt_stream_ordering = COALESCE(threaded_receipt_stream_ordering, ?)
) ) AND (notif_count != 0 OR COALESCE(unread_count, 0) != 0)
""", """,
(room_id, user_id, receipt_stream_ordering, receipt_stream_ordering), (
user_id,
room_id,
unthreaded_receipt_stream_ordering,
*receipts_args,
room_id,
user_id,
unthreaded_receipt_stream_ordering,
unthreaded_receipt_stream_ordering,
),
) )
row = txn.fetchone() summarised_threads = set()
for notif_count, unread_count, thread_id in txn:
summary_stream_ordering = 0 summarised_threads.add(thread_id)
if row: counts = _get_thread(thread_id)
summary_stream_ordering = row[0] counts.notify_count += notif_count
counts.notify_count += row[1] counts.unread_count += unread_count
counts.unread_count += row[2]
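This and the two queries after it join the per-thread receipts against event_push_summary / event_push_actions and use COALESCE(threaded_receipt_stream_ordering, ?) to pick a receipt per thread. A one-function sketch of that fallback:

from typing import Optional

def effective_receipt_ordering(threaded: Optional[int], unthreaded: int) -> int:
    # Use the threaded receipt where one exists (the subquery only surfaces
    # threaded receipts newer than the unthreaded one); otherwise fall back
    # to the unthreaded receipt, mirroring COALESCE(..., ?) in the SQL.
    return threaded if threaded is not None else unthreaded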
# Next we need to count highlights, which aren't summarised # Next we need to count highlights, which aren't summarised
sql = """ sql = f"""
SELECT COUNT(*) FROM event_push_actions SELECT COUNT(*), thread_id FROM event_push_actions
LEFT JOIN (
SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
FROM receipts_linearized
LEFT JOIN events USING (room_id, event_id)
WHERE
user_id = ?
AND room_id = ?
AND stream_ordering > ?
AND {receipt_types_clause}
GROUP BY thread_id
) AS receipts USING (thread_id)
WHERE user_id = ? WHERE user_id = ?
AND room_id = ? AND room_id = ?
AND stream_ordering > ? AND stream_ordering > COALESCE(threaded_receipt_stream_ordering, ?)
AND highlight = 1 AND highlight = 1
GROUP BY thread_id
""" """
txn.execute(sql, (user_id, room_id, receipt_stream_ordering)) txn.execute(
row = txn.fetchone() sql,
if row: (
counts.highlight_count += row[0] user_id,
room_id,
unthreaded_receipt_stream_ordering,
*receipts_args,
user_id,
room_id,
unthreaded_receipt_stream_ordering,
),
)
for highlight_count, thread_id in txn:
_get_thread(thread_id).highlight_count += highlight_count
# For threads which were summarised we need to count actions since the last
# rotation.
thread_id_clause, thread_id_args = make_in_list_sql_clause(
self.database_engine, "thread_id", summarised_threads
)
# The (inclusive) event stream ordering that was previously summarised.
rotated_upto_stream_ordering = self.db_pool.simple_select_one_onecol_txn(
txn,
table="event_push_summary_stream_ordering",
keyvalues={},
retcol="stream_ordering",
)
unread_counts = self._get_notif_unread_count_for_user_room(
txn, room_id, user_id, rotated_upto_stream_ordering
)
for notif_count, unread_count, thread_id in unread_counts:
if thread_id not in summarised_threads:
continue
if thread_id == MAIN_TIMELINE:
counts.notify_count += notif_count
counts.unread_count += unread_count
elif thread_id in thread_counts:
thread_counts[thread_id].notify_count += notif_count
thread_counts[thread_id].unread_count += unread_count
else:
# Previous thread summaries of 0 are discarded above.
#
# TODO If empty summaries are deleted this can be removed.
thread_counts[thread_id] = NotifCounts(
notify_count=notif_count,
unread_count=unread_count,
highlight_count=0,
)
# Finally we need to count push actions that aren't included in the # Finally we need to count push actions that aren't included in the
# summary returned above. This might be due to recent events that haven't # summary returned above. This might be due to recent events that haven't
# been summarised yet or the summary is out of date due to a recent read # been summarised yet or the summary is out of date due to a recent read
# receipt. # receipt.
start_unread_stream_ordering = max( sql = f"""
receipt_stream_ordering, summary_stream_ordering SELECT
) COUNT(CASE WHEN notif = 1 THEN 1 END),
notify_count, unread_count = self._get_notif_unread_count_for_user_room( COUNT(CASE WHEN unread = 1 THEN 1 END),
txn, room_id, user_id, start_unread_stream_ordering thread_id
FROM event_push_actions
LEFT JOIN (
SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
FROM receipts_linearized
LEFT JOIN events USING (room_id, event_id)
WHERE
user_id = ?
AND room_id = ?
AND stream_ordering > ?
AND {receipt_types_clause}
GROUP BY thread_id
) AS receipts USING (thread_id)
WHERE user_id = ?
AND room_id = ?
AND stream_ordering > COALESCE(threaded_receipt_stream_ordering, ?)
AND NOT {thread_id_clause}
GROUP BY thread_id
"""
txn.execute(
sql,
(
user_id,
room_id,
unthreaded_receipt_stream_ordering,
*receipts_args,
user_id,
room_id,
unthreaded_receipt_stream_ordering,
*thread_id_args,
),
) )
for notif_count, unread_count, thread_id in txn:
counts = _get_thread(thread_id)
counts.notify_count += notif_count
counts.unread_count += unread_count
counts.notify_count += notify_count return RoomNotifCounts(main_counts, thread_counts)
counts.unread_count += unread_count
return counts
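Putting the three passes together (summary rows, highlights, and unsummarised actions), the transaction ends up returning something of this shape, with invented values:

result = RoomNotifCounts(
    main_timeline=NotifCounts(notify_count=2, unread_count=5, highlight_count=1),
    threads={"$thread1": NotifCounts(notify_count=1, unread_count=1)},
)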
def _get_notif_unread_count_for_user_room( def _get_notif_unread_count_for_user_room(
self, self,
@ -540,7 +765,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
user_id: str, user_id: str,
stream_ordering: int, stream_ordering: int,
max_stream_ordering: Optional[int] = None, max_stream_ordering: Optional[int] = None,
) -> Tuple[int, int]: thread_id: Optional[str] = None,
) -> List[Tuple[int, int, str]]:
"""Returns the notify and unread counts from `event_push_actions` for """Returns the notify and unread counts from `event_push_actions` for
the given user/room in the given range. the given user/room in the given range.
@ -554,45 +780,55 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
stream_ordering: The (exclusive) minimum stream ordering to consider. stream_ordering: The (exclusive) minimum stream ordering to consider.
max_stream_ordering: The (inclusive) maximum stream ordering to consider. max_stream_ordering: The (inclusive) maximum stream ordering to consider.
If this is not given, then no maximum is applied. If this is not given, then no maximum is applied.
thread_id: The thread ID to fetch unread counts for. If this is not provided
then the results for *all* threads are returned.
Note that if this is provided the resulting list will only have 0 or
1 tuples in it.
Return: Return:
A tuple of the notif count and unread count in the given range. A tuple of the notif count and unread count in the given range for
each thread.
""" """
# If there have been no events in the room since the stream ordering, # If there have been no events in the room since the stream ordering,
# there can't be any push actions either. # there can't be any push actions either.
if not self._events_stream_cache.has_entity_changed(room_id, stream_ordering): if not self._events_stream_cache.has_entity_changed(room_id, stream_ordering):
return 0, 0 return []
clause = "" stream_ordering_clause = ""
args = [user_id, room_id, stream_ordering] args = [user_id, room_id, stream_ordering]
if max_stream_ordering is not None: if max_stream_ordering is not None:
clause = "AND ea.stream_ordering <= ?" stream_ordering_clause = "AND ea.stream_ordering <= ?"
args.append(max_stream_ordering) args.append(max_stream_ordering)
# If the max stream ordering is less than the min stream ordering, # If the max stream ordering is less than the min stream ordering,
# then obviously there are zero push actions in that range. # then obviously there are zero push actions in that range.
if max_stream_ordering <= stream_ordering: if max_stream_ordering <= stream_ordering:
return 0, 0 return []
# Either limit the results to a specific thread or fetch all threads.
thread_id_clause = ""
if thread_id is not None:
thread_id_clause = "AND thread_id = ?"
args.append(thread_id)
sql = f""" sql = f"""
SELECT SELECT
COUNT(CASE WHEN notif = 1 THEN 1 END), COUNT(CASE WHEN notif = 1 THEN 1 END),
COUNT(CASE WHEN unread = 1 THEN 1 END) COUNT(CASE WHEN unread = 1 THEN 1 END),
FROM event_push_actions ea thread_id
WHERE user_id = ? FROM event_push_actions ea
WHERE user_id = ?
AND room_id = ? AND room_id = ?
AND ea.stream_ordering > ? AND ea.stream_ordering > ?
{clause} {stream_ordering_clause}
{thread_id_clause}
GROUP BY thread_id
""" """
txn.execute(sql, args) txn.execute(sql, args)
row = txn.fetchone() return cast(List[Tuple[int, int, str]], txn.fetchall())
if row:
return cast(Tuple[int, int], row)
return 0, 0
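The return shape changes here from a single (notif, unread) pair to one row per thread; for example (values invented):

rows: List[Tuple[int, int, str]] = [
    (2, 3, MAIN_TIMELINE),  # main timeline: 2 notifying, 3 unread actions
    (1, 0, "$thread1"),     # a thread with a single notifying action
]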
async def get_push_action_users_in_range( async def get_push_action_users_in_range(
self, min_stream_ordering: int, max_stream_ordering: int self, min_stream_ordering: int, max_stream_ordering: int
@ -609,7 +845,7 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
def _get_receipts_by_room_txn( def _get_receipts_by_room_txn(
self, txn: LoggingTransaction, user_id: str self, txn: LoggingTransaction, user_id: str
) -> Dict[str, int]: ) -> Dict[str, _RoomReceipt]:
""" """
Generate a map of room ID to the latest stream ordering that has been Generate a map of room ID to the latest stream ordering that has been
read by the given user. read by the given user.
@ -619,7 +855,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
user_id: The user to fetch receipts for. user_id: The user to fetch receipts for.
Returns: Returns:
A map of room ID to stream ordering for all rooms the user has a receipt in. A map including all rooms the user is in with a receipt. It maps
room IDs to _RoomReceipt instances.
""" """
receipt_types_clause, args = make_in_list_sql_clause( receipt_types_clause, args = make_in_list_sql_clause(
self.database_engine, self.database_engine,
@ -628,20 +865,26 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
) )
sql = f""" sql = f"""
SELECT room_id, MAX(stream_ordering) SELECT room_id, thread_id, MAX(stream_ordering)
FROM receipts_linearized FROM receipts_linearized
INNER JOIN events USING (room_id, event_id) INNER JOIN events USING (room_id, event_id)
WHERE {receipt_types_clause} WHERE {receipt_types_clause}
AND user_id = ? AND user_id = ?
GROUP BY room_id GROUP BY room_id, thread_id
""" """
args.extend((user_id,)) args.extend((user_id,))
txn.execute(sql, args) txn.execute(sql, args)
return {
room_id: latest_stream_ordering result: Dict[str, _RoomReceipt] = {}
for room_id, latest_stream_ordering in txn.fetchall() for room_id, thread_id, stream_ordering in txn:
} room_receipt = result.setdefault(room_id, _RoomReceipt())
if thread_id is None:
room_receipt.unthreaded_stream_ordering = stream_ordering
else:
room_receipt.threaded_stream_ordering[thread_id] = stream_ordering
return result
async def get_unread_push_actions_for_user_in_range_for_http( async def get_unread_push_actions_for_user_in_range_for_http(
self, self,
@ -674,9 +917,10 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
def get_push_actions_txn( def get_push_actions_txn(
txn: LoggingTransaction, txn: LoggingTransaction,
) -> List[Tuple[str, str, int, str, bool]]: ) -> List[Tuple[str, str, str, int, str, bool]]:
sql = """ sql = """
SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, ep.highlight SELECT ep.event_id, ep.room_id, ep.thread_id, ep.stream_ordering,
ep.actions, ep.highlight
FROM event_push_actions AS ep FROM event_push_actions AS ep
WHERE WHERE
ep.user_id = ? ep.user_id = ?
@ -686,7 +930,7 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
ORDER BY ep.stream_ordering ASC LIMIT ? ORDER BY ep.stream_ordering ASC LIMIT ?
""" """
txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit)) txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit))
return cast(List[Tuple[str, str, int, str, bool]], txn.fetchall()) return cast(List[Tuple[str, str, str, int, str, bool]], txn.fetchall())
push_actions = await self.db_pool.runInteraction( push_actions = await self.db_pool.runInteraction(
"get_unread_push_actions_for_user_in_range_http", get_push_actions_txn "get_unread_push_actions_for_user_in_range_http", get_push_actions_txn
@ -699,10 +943,10 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
stream_ordering=stream_ordering, stream_ordering=stream_ordering,
actions=_deserialize_action(actions, highlight), actions=_deserialize_action(actions, highlight),
) )
for event_id, room_id, stream_ordering, actions, highlight in push_actions for event_id, room_id, thread_id, stream_ordering, actions, highlight in push_actions
# Only include push actions with a stream ordering after any receipt, or without any if receipts_by_room.get(room_id, MISSING_ROOM_RECEIPT).is_unread(
# receipt present (invited to but never read rooms). thread_id, stream_ordering
if stream_ordering > receipts_by_room.get(room_id, 0) )
] ]
# Now sort it so it's ordered correctly, since currently it will # Now sort it so it's ordered correctly, since currently it will
@ -746,10 +990,10 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
def get_push_actions_txn( def get_push_actions_txn(
txn: LoggingTransaction, txn: LoggingTransaction,
) -> List[Tuple[str, str, int, str, bool, int]]: ) -> List[Tuple[str, str, str, int, str, bool, int]]:
sql = """ sql = """
SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, SELECT ep.event_id, ep.room_id, ep.thread_id, ep.stream_ordering,
ep.highlight, e.received_ts ep.actions, ep.highlight, e.received_ts
FROM event_push_actions AS ep FROM event_push_actions AS ep
INNER JOIN events AS e USING (room_id, event_id) INNER JOIN events AS e USING (room_id, event_id)
WHERE WHERE
@ -760,7 +1004,7 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
ORDER BY ep.stream_ordering DESC LIMIT ? ORDER BY ep.stream_ordering DESC LIMIT ?
""" """
txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit)) txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit))
return cast(List[Tuple[str, str, int, str, bool, int]], txn.fetchall()) return cast(List[Tuple[str, str, str, int, str, bool, int]], txn.fetchall())
push_actions = await self.db_pool.runInteraction( push_actions = await self.db_pool.runInteraction(
"get_unread_push_actions_for_user_in_range_email", get_push_actions_txn "get_unread_push_actions_for_user_in_range_email", get_push_actions_txn
@ -775,10 +1019,10 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
actions=_deserialize_action(actions, highlight), actions=_deserialize_action(actions, highlight),
received_ts=received_ts, received_ts=received_ts,
) )
for event_id, room_id, stream_ordering, actions, highlight, received_ts in push_actions for event_id, room_id, thread_id, stream_ordering, actions, highlight, received_ts in push_actions
# Only include push actions with a stream ordering after any receipt, or without any if receipts_by_room.get(room_id, MISSING_ROOM_RECEIPT).is_unread(
# receipt present (invited to but never read rooms). thread_id, stream_ordering
if stream_ordering > receipts_by_room.get(room_id, 0) )
] ]
# Now sort it so it's ordered correctly, since currently it will # Now sort it so it's ordered correctly, since currently it will
@ -1102,7 +1346,7 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
) )
sql = """ sql = """
SELECT r.stream_id, r.room_id, r.user_id, e.stream_ordering SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, e.stream_ordering
FROM receipts_linearized AS r FROM receipts_linearized AS r
INNER JOIN events AS e USING (event_id) INNER JOIN events AS e USING (event_id)
WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ? WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ?
@ -1123,55 +1367,105 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
limit, limit,
), ),
) )
rows = cast(List[Tuple[int, str, str, int]], txn.fetchall()) rows = cast(List[Tuple[int, str, str, Optional[str], int]], txn.fetchall())
# For each new read receipt we delete push actions from before it and # For each new read receipt we delete push actions from before it and
# recalculate the summary. # recalculate the summary.
for _, room_id, user_id, stream_ordering in rows: #
# Care must be taken of whether it is a threaded or unthreaded receipt.
for _, room_id, user_id, thread_id, stream_ordering in rows:
# Only handle our own read receipts. # Only handle our own read receipts.
if not self.hs.is_mine_id(user_id): if not self.hs.is_mine_id(user_id):
continue continue
thread_clause = ""
thread_args: Tuple = ()
if thread_id is not None:
thread_clause = "AND thread_id = ?"
thread_args = (thread_id,)
# For each new read receipt we delete push actions from before it and
# recalculate the summary.
txn.execute( txn.execute(
""" f"""
DELETE FROM event_push_actions DELETE FROM event_push_actions
WHERE room_id = ? WHERE room_id = ?
AND user_id = ? AND user_id = ?
AND stream_ordering <= ? AND stream_ordering <= ?
AND highlight = 0 AND highlight = 0
{thread_clause}
""", """,
(room_id, user_id, stream_ordering), (room_id, user_id, stream_ordering, *thread_args),
)
# Fetch the notification counts between the stream ordering of the
# latest receipt and what was previously summarised.
notif_count, unread_count = self._get_notif_unread_count_for_user_room(
txn, room_id, user_id, stream_ordering, old_rotate_stream_ordering
) )
# First ensure that the existing rows have an updated thread_id field. # First ensure that the existing rows have an updated thread_id field.
txn.execute( if not self._event_push_backfill_thread_id_done:
""" txn.execute(
UPDATE event_push_summary """
SET thread_id = ? UPDATE event_push_summary
WHERE room_id = ? AND user_id = ? AND thread_id is NULL SET thread_id = ?
""", WHERE room_id = ? AND user_id = ? AND thread_id is NULL
("main", room_id, user_id), """,
(MAIN_TIMELINE, room_id, user_id),
)
txn.execute(
"""
UPDATE event_push_actions
SET thread_id = ?
WHERE room_id = ? AND user_id = ? AND thread_id is NULL
""",
(MAIN_TIMELINE, room_id, user_id),
)
# Fetch the notification counts between the stream ordering of the
# latest receipt and what was previously summarised.
unread_counts = self._get_notif_unread_count_for_user_room(
txn,
room_id,
user_id,
stream_ordering,
old_rotate_stream_ordering,
thread_id,
) )
# Replace the previous summary with the new counts. # For an unthreaded receipt, mark the summary for all threads in the room
# # as cleared.
# TODO(threads): Upsert per-thread instead of setting them all to main. if thread_id is None:
self.db_pool.simple_upsert_txn( self.db_pool.simple_update_txn(
txn,
table="event_push_summary",
keyvalues={"user_id": user_id, "room_id": room_id},
updatevalues={
"notif_count": 0,
"unread_count": 0,
"stream_ordering": old_rotate_stream_ordering,
"last_receipt_stream_ordering": stream_ordering,
},
)
# For a threaded receipt, we *always* want to update that receipt,
# even if there are no new notifications in that thread. This ensures
# the stream_ordering & last_receipt_stream_ordering are updated.
elif not unread_counts:
unread_counts = [(0, 0, thread_id)]
# Then any updated threads get their notification count and unread
# count updated.
self.db_pool.simple_update_many_txn(
txn, txn,
table="event_push_summary", table="event_push_summary",
keyvalues={"room_id": room_id, "user_id": user_id, "thread_id": "main"}, key_names=("room_id", "user_id", "thread_id"),
values={ key_values=[(room_id, user_id, row[2]) for row in unread_counts],
"notif_count": notif_count, value_names=(
"unread_count": unread_count, "notif_count",
"stream_ordering": old_rotate_stream_ordering, "unread_count",
"last_receipt_stream_ordering": stream_ordering, "stream_ordering",
}, "last_receipt_stream_ordering",
),
value_values=[
(row[0], row[1], old_rotate_stream_ordering, stream_ordering)
for row in unread_counts
],
) )
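A compact restatement of the receipt handling above, illustrative only (not the storage code itself):

def summary_rows_for_receipt(
    thread_id: Optional[str], unread_counts: List[Tuple[int, int, str]]
) -> List[Tuple[int, int, str]]:
    if thread_id is None:
        # Unthreaded receipt: every thread's summary for the room is
        # cleared by the blanket UPDATE, so no per-thread rows are needed.
        return []
    if not unread_counts:
        # Threaded receipt with nothing new: still write a zero row so the
        # stream_ordering / last_receipt_stream_ordering columns advance.
        return [(0, 0, thread_id)]
    return unread_counts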
# We always update `event_push_summary_last_receipt_stream_id` to # We always update `event_push_summary_last_receipt_stream_id` to
@ -1257,25 +1551,38 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
rotate_to_stream_ordering: The new maximum event stream ordering to summarise. rotate_to_stream_ordering: The new maximum event stream ordering to summarise.
""" """
# Ensure that any new actions have an updated thread_id.
if not self._event_push_backfill_thread_id_done:
txn.execute(
"""
UPDATE event_push_actions
SET thread_id = ?
WHERE ? < stream_ordering AND stream_ordering <= ? AND thread_id IS NULL
""",
(MAIN_TIMELINE, old_rotate_stream_ordering, rotate_to_stream_ordering),
)
# XXX Do we need to update summaries here too?
# Calculate the new counts that should be upserted into event_push_summary # Calculate the new counts that should be upserted into event_push_summary
sql = """ sql = """
SELECT user_id, room_id, SELECT user_id, room_id, thread_id,
coalesce(old.%s, 0) + upd.cnt, coalesce(old.%s, 0) + upd.cnt,
upd.stream_ordering upd.stream_ordering
FROM ( FROM (
SELECT user_id, room_id, count(*) as cnt, SELECT user_id, room_id, thread_id, count(*) as cnt,
max(ea.stream_ordering) as stream_ordering max(ea.stream_ordering) as stream_ordering
FROM event_push_actions AS ea FROM event_push_actions AS ea
LEFT JOIN event_push_summary AS old USING (user_id, room_id) LEFT JOIN event_push_summary AS old USING (user_id, room_id, thread_id)
WHERE ? < ea.stream_ordering AND ea.stream_ordering <= ? WHERE ? < ea.stream_ordering AND ea.stream_ordering <= ?
AND ( AND (
old.last_receipt_stream_ordering IS NULL old.last_receipt_stream_ordering IS NULL
OR old.last_receipt_stream_ordering < ea.stream_ordering OR old.last_receipt_stream_ordering < ea.stream_ordering
) )
AND %s = 1 AND %s = 1
GROUP BY user_id, room_id GROUP BY user_id, room_id, thread_id
) AS upd ) AS upd
LEFT JOIN event_push_summary AS old USING (user_id, room_id) LEFT JOIN event_push_summary AS old USING (user_id, room_id, thread_id)
""" """
# First get the count of unread messages. # First get the count of unread messages.
@ -1289,11 +1596,11 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
# object because we might not have the same amount of rows in each of them. To do # object because we might not have the same amount of rows in each of them. To do
# this, we use a dict indexed on the user ID and room ID to make it easier to # this, we use a dict indexed on the user ID and room ID to make it easier to
# populate. # populate.
summaries: Dict[Tuple[str, str], _EventPushSummary] = {} summaries: Dict[Tuple[str, str, str], _EventPushSummary] = {}
for row in txn: for row in txn:
summaries[(row[0], row[1])] = _EventPushSummary( summaries[(row[0], row[1], row[2])] = _EventPushSummary(
unread_count=row[2], unread_count=row[3],
stream_ordering=row[3], stream_ordering=row[4],
notif_count=0, notif_count=0,
) )
@ -1304,48 +1611,50 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
) )
for row in txn: for row in txn:
if (row[0], row[1]) in summaries: if (row[0], row[1], row[2]) in summaries:
summaries[(row[0], row[1])].notif_count = row[2] summaries[(row[0], row[1], row[2])].notif_count = row[3]
else: else:
# Because the rules on notifying are different than the rules on marking # Because the rules on notifying are different than the rules on marking
# a message unread, we might end up with messages that notify but aren't # a message unread, we might end up with messages that notify but aren't
# marked unread, so we might not have a summary for this (user, room) # marked unread, so we might not have a summary for this (user, room)
# tuple to complete. # tuple to complete.
summaries[(row[0], row[1])] = _EventPushSummary( summaries[(row[0], row[1], row[2])] = _EventPushSummary(
unread_count=0, unread_count=0,
stream_ordering=row[3], stream_ordering=row[4],
notif_count=row[2], notif_count=row[3],
) )
logger.info("Rotating notifications, handling %d rows", len(summaries)) logger.info("Rotating notifications, handling %d rows", len(summaries))
# Ensure that any updated threads have an updated thread_id. # Ensure that any updated threads have the proper thread_id.
txn.execute_batch( if not self._event_push_backfill_thread_id_done:
""" txn.execute_batch(
UPDATE event_push_summary """
SET thread_id = ? UPDATE event_push_summary
WHERE room_id = ? AND user_id = ? AND thread_id is NULL SET thread_id = ?
""", WHERE room_id = ? AND user_id = ? AND thread_id is NULL
[("main", room_id, user_id) for user_id, room_id in summaries], """,
) [
self.db_pool.simple_update_many_txn( (MAIN_TIMELINE, room_id, user_id)
txn, for user_id, room_id, _ in summaries
table="event_push_summary", ],
key_names=("user_id", "room_id", "thread_id"), )
key_values=[(user_id, room_id, None) for user_id, room_id in summaries],
value_names=("thread_id",),
value_values=[("main",) for _ in summaries],
)
# TODO(threads): Update on a per-thread basis.
self.db_pool.simple_upsert_many_txn( self.db_pool.simple_upsert_many_txn(
txn, txn,
table="event_push_summary", table="event_push_summary",
key_names=("user_id", "room_id", "thread_id"), key_names=("user_id", "room_id", "thread_id"),
key_values=[(user_id, room_id, "main") for user_id, room_id in summaries], key_values=[
(user_id, room_id, thread_id)
for user_id, room_id, thread_id in summaries
],
value_names=("notif_count", "unread_count", "stream_ordering"), value_names=("notif_count", "unread_count", "stream_ordering"),
value_values=[ value_values=[
(summary.notif_count, summary.unread_count, summary.stream_ordering) (
summary.notif_count,
summary.unread_count,
summary.stream_ordering,
)
for summary in summaries.values() for summary in summaries.values()
], ],
) )
@ -1356,7 +1665,10 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
) )
async def _remove_old_push_actions_that_have_rotated(self) -> None: async def _remove_old_push_actions_that_have_rotated(self) -> None:
"""Clear out old push actions that have been summarised.""" """
Clear out old push actions that have been summarised (and are older than
1 day ago).
"""
# We want to clear out anything that is older than a day that *has* already # We want to clear out anything that is older than a day that *has* already
# been rotated. # been rotated.
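A minimal sketch of the cutoff implied by that comment, assuming a helper value giving the stream ordering from a day ago:

def deletion_cutoff(rotated_upto_stream_ordering: int, stream_ordering_day_ago: int) -> int:
    # Only delete what is both already summarised *and* at least a day
    # old: the smaller of the two bounds is the safe cutoff.
    return min(rotated_upto_stream_ordering, stream_ordering_day_ago)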

Some files were not shown because too many files have changed in this diff.