Mirror of https://git.anonymousland.org/anonymousland/synapse.git, synced 2024-10-01 11:49:51 -04:00

Merge remote-tracking branch 'upstream/release-v1.42'

commit c75eed92c9

8  .ci/patch_for_twisted_trunk.sh  (new executable file)
@@ -0,0 +1,8 @@
#!/bin/sh

# replaces the dependency on Twisted in `python_dependencies` with trunk.

set -e
cd "$(dirname "$0")"/..

sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
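If you want to see locally what this patch does before CI runs it, one possible check (purely illustrative, run from a scratch clone) is:

```sh
# Apply the patch, then inspect the resulting change to the pinned dependency
.ci/patch_for_twisted_trunk.sh
git diff synapse/python_dependencies.py   # the Twisted requirement should now point at git trunk
```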
4  .ci/twisted_trunk_build_failed_issue_template.md  (new file)
@@ -0,0 +1,4 @@
---
title: CI run against Twisted trunk is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
90  .github/workflows/twisted_trunk.yml  (vendored, new file)
@@ -0,0 +1,90 @@
name: Twisted Trunk

on:
  schedule:
    - cron: 0 8 * * *

  workflow_dispatch:

jobs:
  mypy:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: .ci/patch_for_twisted_trunk.sh
      - run: pip install tox
      - run: tox -e mypy

  trial:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: actions/setup-python@v2
        with:
          python-version: 3.6
      - run: .ci/patch_for_twisted_trunk.sh
      - run: pip install tox
      - run: tox -e py
        env:
          TRIAL_FLAGS: "--jobs=2"

      - name: Dump logs
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        # This keeps logs colocated with failing jobs
        # It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  sytest:
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:buster
      volumes:
        - ${{ github.workspace }}:/src

    steps:
      - uses: actions/checkout@v2
      - name: Patch dependencies
        run: .ci/patch_for_twisted_trunk.sh
        working-directory: /src
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*

  # open an issue if the build fails, so we know about it.
  open-issue:
    if: failure()
    needs:
      - mypy
      - trial
      - sytest

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/twisted_trunk_build_failed_issue_template.md
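Since the workflow also declares a `workflow_dispatch:` trigger, it can be started on demand as well as on the nightly schedule. One way to do that, assuming the GitHub CLI (`gh`) is installed and authenticated (the CLI is not part of this changeset), is:

```sh
# Trigger the nightly Twisted trunk workflow manually instead of waiting for the cron schedule
gh workflow run twisted_trunk.yml
```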
69  CHANGES.md
@@ -1,3 +1,71 @@
Synapse 1.42.0rc1 (2021-09-01)
==============================

Server administrators are reminded to read [the upgrade notes](docs/upgrade.md#upgrading-to-v1420).


Features
--------

- Add support for [MSC3231](https://github.com/matrix-org/matrix-doc/pull/3231): Token authenticated registration. Users can be required to submit a token during registration to authenticate themselves. Contributed by Callum Brown. ([\#10142](https://github.com/matrix-org/synapse/issues/10142))
- Add support for [MSC3283](https://github.com/matrix-org/matrix-doc/pull/3283): Expose `enable_set_displayname` in capabilities. ([\#10452](https://github.com/matrix-org/synapse/issues/10452))
- Port the `PresenceRouter` module interface to the new generic interface. ([\#10524](https://github.com/matrix-org/synapse/issues/10524))
- Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946). ([\#10613](https://github.com/matrix-org/synapse/issues/10613), [\#10725](https://github.com/matrix-org/synapse/issues/10725))


Bugfixes
--------

- Validate new `m.room.power_levels` events. Contributed by @aaronraimist. ([\#10232](https://github.com/matrix-org/synapse/issues/10232))
- Display an error on User-Interactive Authentication fallback pages when authentication fails. Contributed by Callum Brown. ([\#10561](https://github.com/matrix-org/synapse/issues/10561))
- Remove pushers when deleting an e-mail address from an account. Pushers for old unlinked emails will also be deleted. ([\#10581](https://github.com/matrix-org/synapse/issues/10581), [\#10734](https://github.com/matrix-org/synapse/issues/10734))
- Reject Client-Server `/keys/query` requests which provide `device_ids` incorrectly. ([\#10593](https://github.com/matrix-org/synapse/issues/10593))
- Rooms with unsupported room versions are no longer returned via `/sync`. ([\#10644](https://github.com/matrix-org/synapse/issues/10644))
- Enforce the maximum length for per-room display names and avatar URLs. ([\#10654](https://github.com/matrix-org/synapse/issues/10654))
- Fix a bug which caused the `synapse_user_logins_total` Prometheus metric not to be correctly initialised on restart. ([\#10677](https://github.com/matrix-org/synapse/issues/10677))
- Improve `ServerNoticeServlet` to avoid duplicate requests and add unit tests. ([\#10679](https://github.com/matrix-org/synapse/issues/10679))
- Fix long-standing issue which caused an error when a thumbnail is requested and there are multiple thumbnails with the same quality rating. ([\#10684](https://github.com/matrix-org/synapse/issues/10684))
- Fix a regression introduced in v1.41.0 which affected the performance of concurrent fetches of large sets of events, in extreme cases causing the process to hang. ([\#10703](https://github.com/matrix-org/synapse/issues/10703))
- Fix a regression introduced in Synapse 1.41 which broke email transmission on systems using older versions of the Twisted library. ([\#10713](https://github.com/matrix-org/synapse/issues/10713))


Improved Documentation
----------------------

- Add documentation on how to connect Django with Synapse using OpenID Connect and django-oauth-toolkit. Contributed by @HugoDelval. ([\#10192](https://github.com/matrix-org/synapse/issues/10192))
- Advertise https://matrix-org.github.io/synapse documentation in the `README` and `CONTRIBUTING` files. ([\#10595](https://github.com/matrix-org/synapse/issues/10595))
- Fix some of the titles not rendering in the OpenID Connect documentation. ([\#10639](https://github.com/matrix-org/synapse/issues/10639))
- Minor clarifications to the documentation for reverse proxies. ([\#10708](https://github.com/matrix-org/synapse/issues/10708))
- Remove table of contents from the top of installation and contributing documentation pages. ([\#10711](https://github.com/matrix-org/synapse/issues/10711))


Deprecations and Removals
-------------------------

- Remove deprecated Shutdown Room and Purge Room Admin API. ([\#8830](https://github.com/matrix-org/synapse/issues/8830))


Internal Changes
----------------

- Improve type hints for the proxy agent and SRV resolver modules. Contributed by @dklimpel. ([\#10608](https://github.com/matrix-org/synapse/issues/10608))
- Clean up some of the federation event authentication code for clarity. ([\#10614](https://github.com/matrix-org/synapse/issues/10614), [\#10615](https://github.com/matrix-org/synapse/issues/10615), [\#10624](https://github.com/matrix-org/synapse/issues/10624), [\#10640](https://github.com/matrix-org/synapse/issues/10640))
- Add a comment asking developers to leave a reason when bumping the database schema version. ([\#10621](https://github.com/matrix-org/synapse/issues/10621))
- Remove not needed database updates in modify user admin API. ([\#10627](https://github.com/matrix-org/synapse/issues/10627))
- Convert room member storage tuples to `attrs` classes. ([\#10629](https://github.com/matrix-org/synapse/issues/10629), [\#10642](https://github.com/matrix-org/synapse/issues/10642))
- Use auto-attribs for the attrs classes used in sync. ([\#10630](https://github.com/matrix-org/synapse/issues/10630))
- Make `backfill` and `get_missing_events` use the same codepath. ([\#10645](https://github.com/matrix-org/synapse/issues/10645))
- Improve the performance of the `/hierarchy` API (from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946)) by caching responses received over federation. ([\#10647](https://github.com/matrix-org/synapse/issues/10647))
- Run a nightly CI build against Twisted trunk. ([\#10651](https://github.com/matrix-org/synapse/issues/10651), [\#10672](https://github.com/matrix-org/synapse/issues/10672))
- Do not print out stack traces for network errors when fetching data over federation. ([\#10662](https://github.com/matrix-org/synapse/issues/10662))
- Simplify tests for device admin rest API. ([\#10664](https://github.com/matrix-org/synapse/issues/10664))
- Add missing type hints to REST servlets. ([\#10665](https://github.com/matrix-org/synapse/issues/10665), [\#10666](https://github.com/matrix-org/synapse/issues/10666), [\#10674](https://github.com/matrix-org/synapse/issues/10674))
- Flatten the `tests.synapse.rests` package by moving the contents of `v1` and `v2_alpha` into the parent. ([\#10667](https://github.com/matrix-org/synapse/issues/10667))
- Update `complement.sh` to rebuild the base Docker image when run with workers. ([\#10686](https://github.com/matrix-org/synapse/issues/10686))
- Split the event-processing methods in `FederationHandler` into a separate `FederationEventHandler`. ([\#10692](https://github.com/matrix-org/synapse/issues/10692))
- Remove unused `compare_digest` function. ([\#10706](https://github.com/matrix-org/synapse/issues/10706))


Synapse 1.41.1 (2021-08-31)
===========================

@@ -29,7 +97,6 @@ Bugfixes

- Fix a regression introduced in Synapse 1.41 which broke email transmission on systems using older versions of the Twisted library. ([\#10713](https://github.com/matrix-org/synapse/issues/10713))


Synapse 1.41.0 (2021-08-24)
===========================

444  CONTRIBUTING.md
@@ -1,443 +1,3 @@
|
||||
Welcome to Synapse
|
||||
# Welcome to Synapse
|
||||
|
||||
This document aims to get you started with contributing to this repo!
|
||||
|
||||
- [1. Who can contribute to Synapse?](#1-who-can-contribute-to-synapse)
|
||||
- [2. What do I need?](#2-what-do-i-need)
|
||||
- [3. Get the source.](#3-get-the-source)
|
||||
- [4. Install the dependencies](#4-install-the-dependencies)
|
||||
* [Under Unix (macOS, Linux, BSD, ...)](#under-unix-macos-linux-bsd-)
|
||||
* [Under Windows](#under-windows)
|
||||
- [5. Get in touch.](#5-get-in-touch)
|
||||
- [6. Pick an issue.](#6-pick-an-issue)
|
||||
- [7. Turn coffee and documentation into code and documentation!](#7-turn-coffee-and-documentation-into-code-and-documentation)
|
||||
- [8. Test, test, test!](#8-test-test-test)
|
||||
* [Run the linters.](#run-the-linters)
|
||||
* [Run the unit tests.](#run-the-unit-tests-twisted-trial)
|
||||
* [Run the integration tests (SyTest).](#run-the-integration-tests-sytest)
|
||||
* [Run the integration tests (Complement).](#run-the-integration-tests-complement)
|
||||
- [9. Submit your patch.](#9-submit-your-patch)
|
||||
* [Changelog](#changelog)
|
||||
+ [How do I know what to call the changelog file before I create the PR?](#how-do-i-know-what-to-call-the-changelog-file-before-i-create-the-pr)
|
||||
+ [Debian changelog](#debian-changelog)
|
||||
* [Sign off](#sign-off)
|
||||
- [10. Turn feedback into better code.](#10-turn-feedback-into-better-code)
|
||||
- [11. Find a new issue.](#11-find-a-new-issue)
|
||||
- [Notes for maintainers on merging PRs etc](#notes-for-maintainers-on-merging-prs-etc)
|
||||
- [Conclusion](#conclusion)
|
||||
|
||||
# 1. Who can contribute to Synapse?
|
||||
|
||||
Everyone is welcome to contribute code to [matrix.org
|
||||
projects](https://github.com/matrix-org), provided that they are willing to
|
||||
license their contributions under the same license as the project itself. We
|
||||
follow a simple 'inbound=outbound' model for contributions: the act of
|
||||
submitting an 'inbound' contribution means that the contributor agrees to
|
||||
license the code under the same terms as the project's overall 'outbound'
|
||||
license - in our case, this is almost always Apache Software License v2 (see
|
||||
[LICENSE](LICENSE)).
|
||||
|
||||
# 2. What do I need?
|
||||
|
||||
The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://wiki.python.org/moin/BeginnersGuide/Download).
|
||||
|
||||
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
|
||||
|
||||
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
|
||||
|
||||
|
||||
# 3. Get the source.
|
||||
|
||||
The preferred and easiest way to contribute changes is to fork the relevant
|
||||
project on GitHub, and then [create a pull request](
|
||||
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
|
||||
changes into our repo.
|
||||
|
||||
Please base your changes on the `develop` branch.
|
||||
|
||||
```sh
|
||||
git clone git@github.com:YOUR_GITHUB_USER_NAME/synapse.git
|
||||
git checkout develop
|
||||
```
|
||||
|
||||
If you need help getting started with git, this is beyond the scope of the document, but you
|
||||
can find many good git tutorials on the web.
|
||||
|
||||
# 4. Install the dependencies
|
||||
|
||||
## Under Unix (macOS, Linux, BSD, ...)
|
||||
|
||||
Once you have installed Python 3 and added the source, please open a terminal and
|
||||
setup a *virtualenv*, as follows:
|
||||
|
||||
```sh
|
||||
cd path/where/you/have/cloned/the/repository
|
||||
python3 -m venv ./env
|
||||
source ./env/bin/activate
|
||||
pip install -e ".[all,lint,mypy,test]"
|
||||
pip install tox
|
||||
```
|
||||
|
||||
This will install the developer dependencies for the project.
|
||||
|
||||
## Under Windows
|
||||
|
||||
TBD
|
||||
|
||||
|
||||
# 5. Get in touch.
|
||||
|
||||
Join our developer community on Matrix: #synapse-dev:matrix.org !
|
||||
|
||||
|
||||
# 6. Pick an issue.
|
||||
|
||||
Fix your favorite problem or perhaps find a [Good First Issue](https://github.com/matrix-org/synapse/issues?q=is%3Aopen+is%3Aissue+label%3A%22Good+First+Issue%22)
|
||||
to work on.
|
||||
|
||||
|
||||
# 7. Turn coffee and documentation into code and documentation!
|
||||
|
||||
Synapse's code style is documented [here](docs/code_style.md). Please follow
|
||||
it, including the conventions for the [sample configuration
|
||||
file](docs/code_style.md#configuration-file-format).
|
||||
|
||||
There is a growing amount of documentation located in the [docs](docs)
|
||||
directory. This documentation is intended primarily for sysadmins running their
|
||||
own Synapse instance, as well as developers interacting externally with
|
||||
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
|
||||
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
|
||||
regarding Synapse's Admin API, which is used mostly by sysadmins and external
|
||||
service developers.
|
||||
|
||||
If you add new files to either of these folders, please use [GitHub-Flavoured
Markdown](https://guides.github.com/features/mastering-markdown/).
|
||||
|
||||
Some documentation also exists in [Synapse's GitHub
|
||||
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
|
||||
contributed to by community authors.
|
||||
|
||||
|
||||
# 8. Test, test, test!
|
||||
<a name="test-test-test"></a>
|
||||
|
||||
While you're developing and before submitting a patch, you'll
|
||||
want to test your code.
|
||||
|
||||
## Run the linters.
|
||||
|
||||
The linters look at your code and do two things:
|
||||
|
||||
- ensure that your code follows the coding style adopted by the project;
|
||||
- catch a number of errors in your code.
|
||||
|
||||
They're pretty fast, don't hesitate!
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh
|
||||
```
|
||||
|
||||
Note that this script *will modify your files* to fix styling errors.
|
||||
Make sure that you have saved all your files.
|
||||
|
||||
If you wish to restrict the linters to only the files changed since the last commit
|
||||
(much faster!), you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh -d
|
||||
```
|
||||
|
||||
Or if you know exactly which files you wish to lint, you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
||||
```
|
||||
|
||||
## Run the unit tests (Twisted trial).
|
||||
|
||||
The unit tests run parts of Synapse, including your changes, to see if anything
|
||||
was broken. They are slower than the linters but will typically catch more errors.
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests
|
||||
```
|
||||
|
||||
If you wish to only run *some* unit tests, you may specify
|
||||
another module instead of `tests` - or a test class or a method:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
|
||||
```
|
||||
|
||||
If your tests fail, you may wish to look at the logs (the default log level is `ERROR`):
|
||||
|
||||
```sh
|
||||
less _trial_temp/test.log
|
||||
```
|
||||
|
||||
To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`:
|
||||
|
||||
```sh
|
||||
SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests
|
||||
```
|
||||
|
||||
|
||||
## Run the integration tests ([Sytest](https://github.com/matrix-org/sytest)).
|
||||
|
||||
The integration tests are a more comprehensive suite of tests. They
|
||||
run a full version of Synapse, including your changes, to check if
|
||||
anything was broken. They are slower than the unit tests but will
|
||||
typically catch more errors.
|
||||
|
||||
The following command will let you run the integration test with the most common
|
||||
configuration:
|
||||
|
||||
```sh
|
||||
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster
|
||||
```
|
||||
|
||||
This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
|
||||
|
||||
|
||||
## Run the integration tests ([Complement](https://github.com/matrix-org/complement)).
|
||||
|
||||
[Complement](https://github.com/matrix-org/complement) is a suite of black box tests that can be run on any homeserver implementation. It can also be thought of as end-to-end (e2e) tests.
|
||||
|
||||
It's often nice to develop on Synapse and write Complement tests at the same time.
|
||||
Here is how to run your local Synapse checkout against your local Complement checkout.
|
||||
|
||||
(checkout [`complement`](https://github.com/matrix-org/complement) alongside your `synapse` checkout)
|
||||
```sh
|
||||
COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh
|
||||
```
|
||||
|
||||
To run a specific test file, you can pass the test name at the end of the command. The name passed comes from the naming structure in your Complement tests. If you're unsure of the name, you can do a full run and copy it from the test output:
|
||||
|
||||
```sh
|
||||
COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh TestBackfillingHistory
|
||||
```
|
||||
|
||||
To run a specific test, you can specify the whole name structure:
|
||||
|
||||
```sh
|
||||
COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh TestBackfillingHistory/parallel/Backfilled_historical_events_resolve_with_proper_state_in_correct_order
|
||||
```
|
||||
|
||||
|
||||
### Access database for homeserver after Complement test runs.
|
||||
|
||||
If you're curious what the database looks like after you run some tests, here are some steps to get you going in Synapse:
|
||||
|
||||
1. In your Complement test comment out `defer deployment.Destroy(t)` and replace with `defer time.Sleep(2 * time.Hour)` to keep the homeserver running after the tests complete
|
||||
1. Start the Complement tests
|
||||
1. Find the name of the container, `docker ps -f name=complement_` (this will filter for just the Complement related Docker containers)
|
||||
1. Access the container replacing the name with what you found in the previous step: `docker exec -it complement_1_hs_with_application_service.hs1_2 /bin/bash`
|
||||
1. Install sqlite (database driver), `apt-get update && apt-get install -y sqlite3`
|
||||
1. Then run `sqlite3` and open the database `.open /conf/homeserver.db` (this db path comes from the Synapse homeserver.yaml)
|
||||
|
||||
|
||||
# 9. Submit your patch.
|
||||
|
||||
Once you're happy with your patch, it's time to prepare a Pull Request.
|
||||
|
||||
To prepare a Pull Request, please:
|
||||
|
||||
1. verify that [all the tests pass](#test-test-test), including the coding style;
|
||||
2. [sign off](#sign-off) your contribution;
|
||||
3. `git push` your commit to your fork of Synapse;
|
||||
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
|
||||
5. add a [changelog entry](#changelog) and push it to your Pull Request;
|
||||
6. for most contributors, that's all - however, if you are a member of the organization `matrix-org`, on GitHub, please request a review from `matrix.org / Synapse Core`.
|
||||
7. if you need to update your PR, please avoid rebasing and just add new commits to your branch.
|
||||
|
||||
|
||||
## Changelog
|
||||
|
||||
All changes, even minor ones, need a corresponding changelog / newsfragment
|
||||
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
|
||||
|
||||
To create a changelog entry, make a new file in the `changelog.d` directory named
|
||||
in the format of `PRnumber.type`. The type can be one of the following:
|
||||
|
||||
* `feature`
|
||||
* `bugfix`
|
||||
* `docker` (for updates to the Docker image)
|
||||
* `doc` (for updates to the documentation)
|
||||
* `removal` (also used for deprecations)
|
||||
* `misc` (for internal-only changes)
|
||||
|
||||
This file will become part of our [changelog](
|
||||
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
|
||||
release, so the content of the file should be a short description of your
|
||||
change in the same style as the rest of the changelog. The file can contain Markdown
|
||||
formatting, and should end with a full stop (.) or an exclamation mark (!) for
|
||||
consistency.
|
||||
|
||||
Adding credits to the changelog is encouraged: we value your
contributions and would like to have you shouted out in the release notes!
|
||||
|
||||
For example, a fix in PR #1234 would have its changelog entry in
|
||||
`changelog.d/1234.bugfix`, and contain content like:
|
||||
|
||||
> The security levels of Florbs are now validated when received
|
||||
> via the `/federation/florb` endpoint. Contributed by Jane Matrix.
|
||||
|
||||
If there are multiple pull requests involved in a single bugfix/feature/etc,
|
||||
then the content for each `changelog.d` file should be the same. Towncrier will
|
||||
merge the matching files together into a single changelog entry when we come to
|
||||
release.
|
||||
|
||||
### How do I know what to call the changelog file before I create the PR?
|
||||
|
||||
Obviously, you don't know if you should call your newsfile
|
||||
`1234.bugfix` or `5678.bugfix` until you create the PR, which leads to a
|
||||
chicken-and-egg problem.
|
||||
|
||||
There are two options for solving this:
|
||||
|
||||
1. Open the PR without a changelog file, see what number you got, and *then*
|
||||
add the changelog file to your branch (see [Updating your pull
|
||||
request](#updating-your-pull-request)), or:
|
||||
|
||||
1. Look at the [list of all
|
||||
issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the
|
||||
highest number you see, and quickly open the PR before somebody else claims
|
||||
your number.
|
||||
|
||||
[This
|
||||
script](https://github.com/richvdh/scripts/blob/master/next_github_number.sh)
|
||||
might be helpful if you find yourself doing this a lot.
|
||||
|
||||
Sorry, we know it's a bit fiddly, but it's *really* helpful for us when we come
|
||||
to put together a release!
|
||||
|
||||
### Debian changelog
|
||||
|
||||
Changes which affect the debian packaging files (in `debian`) are an
|
||||
exception to the rule that all changes require a `changelog.d` file.
|
||||
|
||||
In this case, you will need to add an entry to the debian changelog for the
|
||||
next release. For this, run the following command:
|
||||
|
||||
```
|
||||
dch
|
||||
```
|
||||
|
||||
This will make up a new version number (if there isn't already an unreleased
|
||||
version in flight), and open an editor where you can add a new changelog entry.
|
||||
(Our release process will ensure that the version number and maintainer name is
|
||||
corrected for the release.)
|
||||
|
||||
If your change affects both the debian packaging *and* files outside the debian
|
||||
directory, you will need both a regular newsfragment *and* an entry in the
|
||||
debian changelog. (Though typically such changes should be submitted as two
|
||||
separate pull requests.)
|
||||
|
||||
## Sign off
|
||||
|
||||
In order to have a concrete record that your contribution is intentional
|
||||
and you agree to license it under the same terms as the project's license, we've adopted the
|
||||
same lightweight approach that the Linux Kernel
|
||||
[submitting patches process](
|
||||
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin),
|
||||
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
|
||||
projects use: the DCO (Developer Certificate of Origin:
|
||||
http://developercertificate.org/). This is a simple declaration that you wrote
|
||||
the contribution or otherwise have the right to contribute it to Matrix:
|
||||
|
||||
```
|
||||
Developer Certificate of Origin
|
||||
Version 1.1
|
||||
|
||||
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
||||
660 York Street, Suite 102,
|
||||
San Francisco, CA 94110 USA
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
(a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
(b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
(c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
(d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
```
|
||||
|
||||
If you agree to this for your contribution, then all that's needed is to
|
||||
include the line in your commit or pull request comment:
|
||||
|
||||
```
|
||||
Signed-off-by: Your Name <your@email.example.org>
|
||||
```
|
||||
|
||||
We accept contributions under a legally identifiable name, such as
|
||||
your name on government documentation or common-law names (names
|
||||
claimed by legitimate usage or repute). Unfortunately, we cannot
|
||||
accept anonymous contributions at this time.
|
||||
|
||||
Git allows you to add this signoff automatically when using the `-s`
|
||||
flag to `git commit`, which uses the name and email set in your
|
||||
`user.name` and `user.email` git configs.
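As a minimal sketch (the name and email below are placeholders, and the commit message reuses the Florb example from the changelog section):

```sh
# One-time setup of the identity used in the Signed-off-by line (placeholders)
git config user.name "Your Name"
git config user.email "your@email.example.org"

# -s appends the Signed-off-by line to the commit message automatically
git commit -s -m "Fix the Florb validation."
```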
|
||||
|
||||
|
||||
# 10. Turn feedback into better code.
|
||||
|
||||
Once the Pull Request is opened, you will see a few things:
|
||||
|
||||
1. our automated CI (Continuous Integration) pipeline will run (again) the linters, the unit tests, the integration tests and more;
|
||||
2. one or more of the developers will take a look at your Pull Request and offer feedback.
|
||||
|
||||
From this point, you should:
|
||||
|
||||
1. Look at the results of the CI pipeline.
|
||||
- If there is any error, fix the error.
|
||||
2. If a developer has requested changes, make these changes and let us know if it is ready for a developer to review again.
|
||||
3. Create a new commit with the changes.
|
||||
- Please do NOT overwrite the history. New commits make the reviewer's life easier.
|
||||
- Push these commits to your Pull Request.
|
||||
4. Back to 1.
|
||||
|
||||
Once both the CI and the developers are happy, the patch will be merged into Synapse and released shortly!
|
||||
|
||||
# 11. Find a new issue.
|
||||
|
||||
By now, you know the drill!
|
||||
|
||||
# Notes for maintainers on merging PRs etc
|
||||
|
||||
There are some notes for those with commit access to the project on how we
|
||||
manage git [here](docs/development/git.md).
|
||||
|
||||
# Conclusion
|
||||
|
||||
That's it! Matrix is a very open and collaborative project as you might expect
|
||||
given our obsession with open communication. If we're going to successfully
|
||||
matrix together all the fragmented communication technologies out there we are
|
||||
reliant on contributions and collaboration from the community to do so. So
|
||||
please get involved - and we hope you have as much fun hacking on Matrix as we
|
||||
do!
|
||||
Please see the [contributors' guide](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html) in our rendered documentation.
|
||||
|
37  README.rst
@@ -1,6 +1,6 @@
=========================================================
Synapse |support| |development| |license| |pypi| |python|
=========================================================
==========================================================================
Synapse |support| |development| |documentation| |license| |pypi| |python|
==========================================================================

.. contents::

@@ -85,9 +85,14 @@ For support installing or managing Synapse, please join |room|_ (from a matrix.o
account if necessary) and ask questions there. We do not use GitHub issues for
support requests, only for bug reports and feature requests.

Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
with its source available in |docs|_.

.. |room| replace:: ``#synapse:matrix.org``
.. _room: https://matrix.to/#/#synapse:matrix.org

.. |docs| replace:: ``docs``
.. _docs: docs

Synapse Installation
====================
@@ -263,7 +268,23 @@ Then update the ``users`` table in the database::

Synapse Development
===================

Join our developer community on Matrix: `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_
The best place to get started is our
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
information for synapse developers as well as synapse administrators.

Developers might be particularly interested in:

* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
* `notes on Synapse's implementation details <https://matrix-org.github.io/synapse/latest/development/internal_documentation/index.html>`_, and
* `how we use git <https://matrix-org.github.io/synapse/latest/development/git.html>`_.

Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!


Quick start
-----------

Before setting up a development environment for synapse, make sure you have the
system dependencies (such as the python header files) installed - see
@@ -308,7 +329,7 @@ If you just want to start a single instance of the app and run it directly::


Running the unit tests
======================
----------------------

After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::
@@ -327,7 +348,7 @@ to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-un


Running the Integration Tests
=============================
-----------------------------

Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
a Matrix homeserver integration testing suite, which uses HTTP requests to
@@ -445,6 +466,10 @@ This is normally caused by a misconfiguration in your reverse-proxy. See
  :alt: (discuss development on #synapse-dev:matrix.org)
  :target: https://matrix.to/#/#synapse-dev:matrix.org

.. |documentation| image:: https://img.shields.io/badge/documentation-%E2%9C%93-success
  :alt: (Rendered documentation on GitHub Pages)
  :target: https://matrix-org.github.io/synapse/latest/

.. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
  :alt: (check license in LICENSE file)
  :target: LICENSE
@@ -1 +0,0 @@
Fix a regression introduced in Synapse 1.41 which broke email transmission on Systems using older versions of the Twisted library.
@@ -1 +0,0 @@
Fix unauthorised exposure of room metadata to communities.
6  debian/changelog  (vendored)
@@ -1,3 +1,9 @@
matrix-synapse-py3 (1.42.0~rc1) stable; urgency=medium

  * New synapse release 1.42.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 01 Sep 2021 11:37:48 +0100

matrix-synapse-py3 (1.41.1) stable; urgency=high

  * New synapse release 1.41.1.
@@ -52,12 +52,11 @@
- [Event Reports](admin_api/event_reports.md)
- [Media](admin_api/media_admin_api.md)
- [Purge History](admin_api/purge_history_api.md)
- [Purge Rooms](admin_api/purge_room.md)
- [Register Users](admin_api/register_api.md)
- [Registration Tokens](usage/administration/admin_api/registration_tokens.md)
- [Manipulate Room Membership](admin_api/room_membership.md)
- [Rooms](admin_api/rooms.md)
- [Server Notices](admin_api/server_notices.md)
- [Shutdown Room](admin_api/shutdown_room.md)
- [Statistics](admin_api/statistics.md)
- [Users](admin_api/user_admin_api.md)
- [Server Version](admin_api/version_api.md)
@@ -1,21 +0,0 @@
Deprecated: Purge room API
==========================

**The old Purge room API is deprecated and will be removed in a future release.
See the new [Delete Room API](rooms.md#delete-room-api) for more details.**

This API will remove all trace of a room from your database.

All local users must have left the room before it can be removed.

The API is:

```
POST /_synapse/admin/v1/purge_room

{
    "room_id": "!room:id"
}
```

You must authenticate using the access token of an admin user.
@@ -1,102 +0,0 @@
# Deprecated: Shutdown room API

**The old Shutdown room API is deprecated and will be removed in a future release.
See the new [Delete Room API](rooms.md#delete-room-api) for more details.**

Shuts down a room, preventing new joins, and moves local users and room aliases automatically
to a new room. The new room will be created with the user specified by the
`new_room_user_id` parameter as room administrator and will contain a message
explaining what happened. Users invited to the new room will have power level
-10 by default, and thus be unable to speak. The old room's power levels will be changed to
disallow any further invites or joins.

The local server will only have the power to move local user and room aliases to
the new room. Users on other servers will be unaffected.

## API

You will need to authenticate with an access token for an admin user.

### URL

`POST /_synapse/admin/v1/shutdown_room/{room_id}`

### URL Parameters

* `room_id` - The ID of the room (e.g. `!someroom:example.com`)

### JSON Body Parameters

* `new_room_user_id` - Required. A string representing the user ID of the user that will admin
  the new room that all users in the old room will be moved to.
* `room_name` - Optional. A string representing the name of the room that new users will be
  invited to.
* `message` - Optional. A string containing the first message that will be sent as
  `new_room_user_id` in the new room. Ideally this will clearly convey why the
  original room was shut down.

If not specified, the default value of `room_name` is "Content Violation
Notification". The default value of `message` is "Sharing illegal content on
this server is not permitted and rooms in violation will be blocked."

### Response Parameters

* `kicked_users` - An integer number representing the number of users that
  were kicked.
* `failed_to_kick_users` - An integer number representing the number of users
  that were not kicked.
* `local_aliases` - An array of strings representing the local aliases that were migrated from
  the old room to the new.
* `new_room_id` - A string representing the room ID of the new room.

## Example

Request:

```
POST /_synapse/admin/v1/shutdown_room/!somebadroom%3Aexample.com

{
    "new_room_user_id": "@someuser:example.com",
    "room_name": "Content Violation Notification",
    "message": "Bad Room has been shutdown due to content violations on this server. Please review our Terms of Service."
}
```

Response:

```
{
    "kicked_users": 5,
    "failed_to_kick_users": 0,
    "local_aliases": ["#badroom:example.com", "#evilsaloon:example.com"],
    "new_room_id": "!newroomid:example.com"
}
```

## Undoing room shutdowns

*Note*: This guide may be outdated by the time you read it. By nature of room shutdowns being performed at the database level,
the structure can and does change without notice.

First, it's important to understand that a room shutdown is very destructive. Undoing a shutdown is not as simple as pretending it
never happened - work has to be done to move forward instead of resetting the past. In fact, in some cases it might not be possible
to recover at all:

* If the room was invite-only, your users will need to be re-invited.
* If the room no longer has any members at all, it'll be impossible to rejoin.
* The first user to rejoin will have to do so via an alias on a different server.

With all that being said, if you still want to try and recover the room:

1. For safety reasons, shut down Synapse.
2. In the database, run `DELETE FROM blocked_rooms WHERE room_id = '!example:example.org';`
   * For caution: it's recommended to run this in a transaction: `BEGIN; DELETE ...;`, verify you got 1 result, then `COMMIT;`.
   * The room ID is the same one supplied to the shutdown room API, not the Content Violation room.
3. Restart Synapse.

You will have to manually handle, if you so choose, the following:

* Aliases that would have been redirected to the Content Violation room.
* Users that would have been booted from the room (and will have been force-joined to the Content Violation room).
* Removal of the Content Violation room if desired.
@@ -21,11 +21,15 @@ It returns a JSON body like the following:
    "threepids": [
        {
            "medium": "email",
            "address": "<user_mail_1>"
            "address": "<user_mail_1>",
            "added_at": 1586458409743,
            "validated_at": 1586458409743
        },
        {
            "medium": "email",
            "address": "<user_mail_2>"
            "address": "<user_mail_2>",
            "added_at": 1586458409743,
            "validated_at": 1586458409743
        }
    ],
    "avatar_url": "<avatar_url>",
@@ -1,7 +1,427 @@
<!--
Include the contents of CONTRIBUTING.md from the project root (where GitHub likes it
to be)
-->
# Contributing

{{#include ../../CONTRIBUTING.md}}
This document aims to get you started with contributing to Synapse!

# 1. Who can contribute to Synapse?
|
||||
|
||||
Everyone is welcome to contribute code to [matrix.org
|
||||
projects](https://github.com/matrix-org), provided that they are willing to
|
||||
license their contributions under the same license as the project itself. We
|
||||
follow a simple 'inbound=outbound' model for contributions: the act of
|
||||
submitting an 'inbound' contribution means that the contributor agrees to
|
||||
license the code under the same terms as the project's overall 'outbound'
|
||||
license - in our case, this is almost always Apache Software License v2 (see
|
||||
[LICENSE](https://github.com/matrix-org/synapse/blob/develop/LICENSE)).
|
||||
|
||||
# 2. What do I need?
|
||||
|
||||
The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://wiki.python.org/moin/BeginnersGuide/Download).
|
||||
|
||||
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
|
||||
|
||||
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
|
||||
|
||||
|
||||
# 3. Get the source.
|
||||
|
||||
The preferred and easiest way to contribute changes is to fork the relevant
|
||||
project on GitHub, and then [create a pull request](
|
||||
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
|
||||
changes into our repo.
|
||||
|
||||
Please base your changes on the `develop` branch.
|
||||
|
||||
```sh
|
||||
git clone git@github.com:YOUR_GITHUB_USER_NAME/synapse.git
|
||||
git checkout develop
|
||||
```
|
||||
|
||||
If you need help getting started with git, this is beyond the scope of the document, but you
|
||||
can find many good git tutorials on the web.
|
||||
|
||||
# 4. Install the dependencies
|
||||
|
||||
## Under Unix (macOS, Linux, BSD, ...)
|
||||
|
||||
Once you have installed Python 3 and added the source, please open a terminal and
|
||||
setup a *virtualenv*, as follows:
|
||||
|
||||
```sh
|
||||
cd path/where/you/have/cloned/the/repository
|
||||
python3 -m venv ./env
|
||||
source ./env/bin/activate
|
||||
pip install -e ".[all,lint,mypy,test]"
|
||||
pip install tox
|
||||
```
|
||||
|
||||
This will install the developer dependencies for the project.
|
||||
|
||||
## Under Windows
|
||||
|
||||
TBD
|
||||
|
||||
|
||||
# 5. Get in touch.
|
||||
|
||||
Join our developer community on Matrix: #synapse-dev:matrix.org !
|
||||
|
||||
|
||||
# 6. Pick an issue.
|
||||
|
||||
Fix your favorite problem or perhaps find a [Good First Issue](https://github.com/matrix-org/synapse/issues?q=is%3Aopen+is%3Aissue+label%3A%22Good+First+Issue%22)
|
||||
to work on.
|
||||
|
||||
|
||||
# 7. Turn coffee into code and documentation!
|
||||
|
||||
There is a growing amount of documentation located in the
|
||||
[`docs`](https://github.com/matrix-org/synapse/tree/develop/docs)
|
||||
directory, with a rendered version [available online](https://matrix-org.github.io/synapse).
|
||||
This documentation is intended primarily for sysadmins running their
|
||||
own Synapse instance, as well as developers interacting externally with
|
||||
Synapse.
|
||||
[`docs/development`](https://github.com/matrix-org/synapse/tree/develop/docs/development)
|
||||
exists primarily to house documentation for
|
||||
Synapse developers.
|
||||
[`docs/admin_api`](https://github.com/matrix-org/synapse/tree/develop/docs/admin_api) houses documentation
|
||||
regarding Synapse's Admin API, which is used mostly by sysadmins and external
|
||||
service developers.
|
||||
|
||||
Synapse's code style is documented [here](../code_style.md). Please follow
|
||||
it, including the conventions for the [sample configuration
|
||||
file](../code_style.md#configuration-file-format).
|
||||
|
||||
We welcome improvements and additions to our documentation itself! When
|
||||
writing new pages, please
|
||||
[build `docs` to a book](https://github.com/matrix-org/synapse/tree/develop/docs#adding-to-the-documentation)
|
||||
to check that your contributions render correctly. The docs are written in
|
||||
[GitHub-Flavoured Markdown](https://guides.github.com/features/mastering-markdown/).
|
||||
|
||||
Some documentation also exists in [Synapse's GitHub
|
||||
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
|
||||
contributed to by community authors.
|
||||
|
||||
|
||||
# 8. Test, test, test!
|
||||
<a name="test-test-test"></a>
|
||||
|
||||
While you're developing and before submitting a patch, you'll
|
||||
want to test your code.
|
||||
|
||||
## Run the linters.
|
||||
|
||||
The linters look at your code and do two things:
|
||||
|
||||
- ensure that your code follows the coding style adopted by the project;
|
||||
- catch a number of errors in your code.
|
||||
|
||||
They're pretty fast, don't hesitate!
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh
|
||||
```
|
||||
|
||||
Note that this script *will modify your files* to fix styling errors.
|
||||
Make sure that you have saved all your files.
|
||||
|
||||
If you wish to restrict the linters to only the files changed since the last commit
|
||||
(much faster!), you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh -d
|
||||
```
|
||||
|
||||
Or if you know exactly which files you wish to lint, you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
||||
```
|
||||
|
||||
## Run the unit tests (Twisted trial).
|
||||
|
||||
The unit tests run parts of Synapse, including your changes, to see if anything
|
||||
was broken. They are slower than the linters but will typically catch more errors.
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests
|
||||
```
|
||||
|
||||
If you wish to only run *some* unit tests, you may specify
|
||||
another module instead of `tests` - or a test class or a method:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
|
||||
```
|
||||
|
||||
If your tests fail, you may wish to look at the logs (the default log level is `ERROR`):
|
||||
|
||||
```sh
|
||||
less _trial_temp/test.log
|
||||
```
|
||||
|
||||
To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`:
|
||||
|
||||
```sh
|
||||
SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests
|
||||
```
|
||||
|
||||
|
||||
## Run the integration tests ([Sytest](https://github.com/matrix-org/sytest)).
|
||||
|
||||
The integration tests are a more comprehensive suite of tests. They
|
||||
run a full version of Synapse, including your changes, to check if
|
||||
anything was broken. They are slower than the unit tests but will
|
||||
typically catch more errors.
|
||||
|
||||
The following command will let you run the integration test with the most common
|
||||
configuration:
|
||||
|
||||
```sh
|
||||
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster
|
||||
```
|
||||
|
||||
This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
|
||||
|
||||
|
||||
## Run the integration tests ([Complement](https://github.com/matrix-org/complement)).
|
||||
|
||||
[Complement](https://github.com/matrix-org/complement) is a suite of black box tests that can be run on any homeserver implementation. It can also be thought of as end-to-end (e2e) tests.
|
||||
|
||||
It's often nice to develop on Synapse and write Complement tests at the same time.
|
||||
Here is how to run your local Synapse checkout against your local Complement checkout.
|
||||
|
||||
(checkout [`complement`](https://github.com/matrix-org/complement) alongside your `synapse` checkout)
|
||||
```sh
|
||||
COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh
|
||||
```
|
||||
|
||||
To run a specific test file, you can pass the test name at the end of the command. The name passed comes from the naming structure in your Complement tests. If you're unsure of the name, you can do a full run and copy it from the test output:
|
||||
|
||||
```sh
|
||||
COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh TestBackfillingHistory
|
||||
```
|
||||
|
||||
To run a specific test, you can specify the whole name structure:
|
||||
|
||||
```sh
|
||||
COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh TestBackfillingHistory/parallel/Backfilled_historical_events_resolve_with_proper_state_in_correct_order
|
||||
```
|
||||
|
||||
|
||||
### Access database for homeserver after Complement test runs.
|
||||
|
||||
If you're curious what the database looks like after you run some tests, here are some steps to get you going in Synapse:
|
||||
|
||||
1. In your Complement test comment out `defer deployment.Destroy(t)` and replace with `defer time.Sleep(2 * time.Hour)` to keep the homeserver running after the tests complete
|
||||
1. Start the Complement tests
|
||||
1. Find the name of the container, `docker ps -f name=complement_` (this will filter for just the Complement related Docker containers)
|
||||
1. Access the container replacing the name with what you found in the previous step: `docker exec -it complement_1_hs_with_application_service.hs1_2 /bin/bash`
|
||||
1. Install sqlite (database driver), `apt-get update && apt-get install -y sqlite3`
|
||||
1. Then run `sqlite3` and open the database `.open /conf/homeserver.db` (this db path comes from the Synapse homeserver.yaml)
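Strung together as shell commands, those steps might look like the following (the container name shown is the example from the step above and will differ on your machine):

```sh
# Find the Complement-managed homeserver container
docker ps -f name=complement_

# Open a shell inside it (substitute the container name you actually found)
docker exec -it complement_1_hs_with_application_service.hs1_2 /bin/bash

# Inside the container: install the sqlite3 client and open the homeserver database
apt-get update && apt-get install -y sqlite3
sqlite3 /conf/homeserver.db
```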
|
||||
|
||||
|
||||
# 9. Submit your patch.
|
||||
|
||||
Once you're happy with your patch, it's time to prepare a Pull Request.
|
||||
|
||||
To prepare a Pull Request, please:
|
||||
|
||||
1. verify that [all the tests pass](#test-test-test), including the coding style;
|
||||
2. [sign off](#sign-off) your contribution;
|
||||
3. `git push` your commit to your fork of Synapse;
|
||||
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
|
||||
5. add a [changelog entry](#changelog) and push it to your Pull Request;
|
||||
6. for most contributors, that's all - however, if you are a member of the organization `matrix-org`, on GitHub, please request a review from `matrix.org / Synapse Core`.
|
||||
7. if you need to update your PR, please avoid rebasing and just add new commits to your branch.
|
||||
|
||||
|
||||
## Changelog
|
||||
|
||||
All changes, even minor ones, need a corresponding changelog / newsfragment
|
||||
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
|
||||
|
||||
To create a changelog entry, make a new file in the `changelog.d` directory named
|
||||
in the format of `PRnumber.type`. The type can be one of the following:
|
||||
|
||||
* `feature`
|
||||
* `bugfix`
|
||||
* `docker` (for updates to the Docker image)
|
||||
* `doc` (for updates to the documentation)
|
||||
* `removal` (also used for deprecations)
|
||||
* `misc` (for internal-only changes)
|
||||
|
||||
This file will become part of our [changelog](
|
||||
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
|
||||
release, so the content of the file should be a short description of your
|
||||
change in the same style as the rest of the changelog. The file can contain Markdown
|
||||
formatting, and should end with a full stop (.) or an exclamation mark (!) for
|
||||
consistency.
|
||||
|
||||
Adding credits to the changelog is encouraged: we value your
contributions and would like to have you shouted out in the release notes!
|
||||
|
||||
For example, a fix in PR #1234 would have its changelog entry in
|
||||
`changelog.d/1234.bugfix`, and contain content like:
|
||||
|
||||
> The security levels of Florbs are now validated when received
|
||||
> via the `/federation/florb` endpoint. Contributed by Jane Matrix.
|
||||
|
||||
If there are multiple pull requests involved in a single bugfix/feature/etc,
|
||||
then the content for each `changelog.d` file should be the same. Towncrier will
|
||||
merge the matching files together into a single changelog entry when we come to
|
||||
release.
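As an illustration (the PR number 1234 below is hypothetical; use the number of your own PR), creating such a newsfragment can be as simple as:

```sh
# Create a newsfragment for a hypothetical bugfix PR #1234, reusing the Florb example above
echo 'The security levels of Florbs are now validated when received via the `/federation/florb` endpoint. Contributed by Jane Matrix.' > changelog.d/1234.bugfix
```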
|
||||
|
||||
### How do I know what to call the changelog file before I create the PR?
|
||||
|
||||
Obviously, you don't know if you should call your newsfile
|
||||
`1234.bugfix` or `5678.bugfix` until you create the PR, which leads to a
|
||||
chicken-and-egg problem.
|
||||
|
||||
There are two options for solving this:
|
||||
|
||||
1. Open the PR without a changelog file, see what number you got, and *then*
|
||||
add the changelog file to your branch (see [Updating your pull
|
||||
request](#updating-your-pull-request)), or:
|
||||
|
||||
1. Look at the [list of all
|
||||
issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the
|
||||
highest number you see, and quickly open the PR before somebody else claims
|
||||
your number.
|
||||
|
||||
[This
|
||||
script](https://github.com/richvdh/scripts/blob/master/next_github_number.sh)
|
||||
might be helpful if you find yourself doing this a lot.
|
||||
|
||||
Sorry, we know it's a bit fiddly, but it's *really* helpful for us when we come
|
||||
to put together a release!
|
||||
|
||||
### Debian changelog

Changes which affect the debian packaging files (in `debian`) are an
exception to the rule that all changes require a `changelog.d` file.

In this case, you will need to add an entry to the debian changelog for the
next release. For this, run the following command:

```
dch
```

This will make up a new version number (if there isn't already an unreleased
version in flight), and open an editor where you can add a new changelog entry.
(Our release process will ensure that the version number and maintainer name
are corrected for the release.)

If your change affects both the debian packaging *and* files outside the debian
directory, you will need both a regular newsfragment *and* an entry in the
debian changelog. (Though typically such changes should be submitted as two
separate pull requests.)

## Sign off

In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license, we've adopted the
same lightweight approach that the Linux Kernel
[submitting patches process](
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin),
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix:

```
Developer Certificate of Origin
Version 1.1

Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA

Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.

Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

(a) The contribution was created in whole or in part by me and I
    have the right to submit it under the open source license
    indicated in the file; or

(b) The contribution is based upon previous work that, to the best
    of my knowledge, is covered under an appropriate open source
    license and I have the right under that license to submit that
    work with modifications, whether created in whole or in part
    by me, under the same open source license (unless I am
    permitted to submit under a different license), as indicated
    in the file; or

(c) The contribution was provided directly to me by some other
    person who certified (a), (b) or (c) and I have not modified
    it.

(d) I understand and agree that this project and the contribution
    are public and that a record of the contribution (including all
    personal information I submit with it, including my sign-off) is
    maintained indefinitely and may be redistributed consistent with
    this project or the open source license(s) involved.
```

If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment:

```
Signed-off-by: Your Name <your@email.example.org>
```

We accept contributions under a legally identifiable name, such as
your name on government documentation or common-law names (names
claimed by legitimate usage or repute). Unfortunately, we cannot
accept anonymous contributions at this time.

Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.

# 10. Turn feedback into better code.

Once the Pull Request is opened, you will see a few things:

1. our automated CI (Continuous Integration) pipeline will run (again) the linters, the unit tests, the integration tests and more;
2. one or more of the developers will take a look at your Pull Request and offer feedback.

From this point, you should:

1. Look at the results of the CI pipeline.
   - If there is any error, fix the error.
2. If a developer has requested changes, make these changes and let us know if it is ready for a developer to review again.
3. Create a new commit with the changes.
   - Please do NOT overwrite the history. New commits make the reviewer's life easier.
   - Push these commits to your Pull Request.
4. Go back to step 1.

Once both the CI and the developers are happy, the patch will be merged into Synapse and released shortly!

# 11. Find a new issue.

By now, you know the drill!

# Notes for maintainers on merging PRs etc

There are some notes for those with commit access to the project on how we
manage git [here](git.md).

# Conclusion

That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully
matrix together all the fragmented communication technologies out there we are
reliant on contributions and collaboration from the community to do so. So
please get involved - and we hope you have as much fun hacking on Matrix as we
do!

|
@ -282,6 +282,52 @@ the request is a server admin.
|
||||
Modules can modify the `request_content` (by e.g. adding events to its `initial_state`),
|
||||
or deny the room's creation by raising a `module_api.errors.SynapseError`.
|
||||
|
||||
#### Presence router callbacks

Presence router callbacks allow module developers to specify additional users (local or remote)
to receive certain presence updates from local users. Presence router callbacks can be
registered using the module API's `register_presence_router_callbacks` method.

The available presence router callbacks are:

```python
async def get_users_for_states(
    self,
    state_updates: Iterable["synapse.api.UserPresenceState"],
) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
```
**Requires** `get_interested_users` to also be registered

Called when processing updates to the presence state of one or more users. This callback can
be used to instruct the server to forward that presence state to specific users. The module
must return a dictionary that maps from Matrix user IDs (which can be local or remote) to the
`UserPresenceState` changes that should be forwarded to them.

Synapse will then attempt to send the specified presence updates to each user when possible.
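
As a rough sketch (not taken from the module API documentation itself; the class name and the `@interested_user:example.org` ID are invented for illustration), a callback that forwards every update it sees to one extra user might look like this:

```python
from typing import Dict, Iterable, Set


class ExamplePresenceRouter:
    async def get_users_for_states(
        self,
        state_updates: Iterable["synapse.api.UserPresenceState"],
    ) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
        # Forward every presence update we are told about to one extra
        # (hypothetical) user, on top of Synapse's normal routing.
        return {"@interested_user:example.org": set(state_updates)}
```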

```python
async def get_interested_users(
    self,
    user_id: str
) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]
```
**Requires** `get_users_for_states` to also be registered

Called when determining which users someone should be able to see the presence state of. This
callback should return complementary results to `get_users_for_states` or the presence information
may not be properly forwarded.

The callback is given the Matrix user ID for a local user that is requesting presence data and
should return the Matrix user IDs of the users whose presence state they are allowed to
query. The returned users can be local or remote.

Alternatively the callback can return `synapse.module_api.PRESENCE_ALL_USERS`
to indicate that the user should receive updates from all known users.

For example, if the user `@alice:example.org` is passed to this method, and the set
`{"@bob:example.com", "@charlie:somewhere.org"}` is returned, this signifies that Alice
should receive presence updates sent by Bob and Charlie, regardless of whether these users
share a room.
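
Since the two callbacks must be registered together, a module would typically wire both up in its constructor. The following sketch mirrors the Alice/Bob/Charlie example above; the class name is invented, and the `(config, api)` constructor signature follows the usual module convention rather than anything mandated by this page:

```python
from typing import Dict, Iterable, Set


class MirroredPresenceRouter:
    def __init__(self, config: dict, api):
        self._api = api
        # Both callbacks are registered together, as required.
        api.register_presence_router_callbacks(
            get_users_for_states=self.get_users_for_states,
            get_interested_users=self.get_interested_users,
        )

    async def get_users_for_states(
        self,
        state_updates: Iterable["synapse.api.UserPresenceState"],
    ) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
        # Route any updates about Bob or Charlie to Alice as well.
        watched = {"@bob:example.com", "@charlie:somewhere.org"}
        forwarded = {u for u in state_updates if u.user_id in watched}
        return {"@alice:example.org": forwarded} if forwarded else {}

    async def get_interested_users(self, user_id: str) -> Set[str]:
        # Complementary result: Alice is allowed to query Bob's and Charlie's
        # presence. (`synapse.module_api.PRESENCE_ALL_USERS` could be returned
        # instead to allow a user to see everyone.)
        if user_id == "@alice:example.org":
            return {"@bob:example.com", "@charlie:somewhere.org"}
        return set()
```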
|
||||
|
||||
### Porting an existing module that uses the old interface
|
||||
|
||||
|
@ -79,7 +79,7 @@ oidc_providers:
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
|
||||
### [Dex][dex-idp]
|
||||
### Dex
|
||||
|
||||
[Dex][dex-idp] is a simple, open-source, certified OpenID Connect Provider.
|
||||
Although it is designed to help building a full-blown provider with an
|
||||
@ -117,7 +117,7 @@ oidc_providers:
|
||||
localpart_template: "{{ user.name }}"
|
||||
display_name_template: "{{ user.name|capitalize }}"
|
||||
```
|
||||
### [Keycloak][keycloak-idp]
|
||||
### Keycloak
|
||||
|
||||
[Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat.
|
||||
|
||||
@ -166,7 +166,9 @@ oidc_providers:
|
||||
localpart_template: "{{ user.preferred_username }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
### [Auth0][auth0]
|
||||
### Auth0
|
||||
|
||||
[Auth0][auth0] is a hosted SaaS IdP solution.
|
||||
|
||||
1. Create a regular web application for Synapse
|
||||
2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback`
|
||||
@ -209,7 +211,7 @@ oidc_providers:
|
||||
|
||||
### GitHub
|
||||
|
||||
GitHub is a bit special as it is not an OpenID Connect compliant provider, but
|
||||
[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but
|
||||
just a regular OAuth2 provider.
|
||||
|
||||
The [`/user` API endpoint](https://developer.github.com/v3/users/#get-the-authenticated-user)
|
||||
@ -242,11 +244,13 @@ oidc_providers:
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
|
||||
### [Google][google-idp]
|
||||
### Google
|
||||
|
||||
[Google][google-idp] is an OpenID certified authentication and authorisation provider.
|
||||
|
||||
1. Set up a project in the Google API Console (see
|
||||
https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup).
|
||||
2. add an "OAuth Client ID" for a Web Application under "Credentials".
|
||||
2. Add an "OAuth Client ID" for a Web Application under "Credentials".
|
||||
3. Copy the Client ID and Client Secret, and add the following to your synapse config:
|
||||
```yaml
|
||||
oidc_providers:
|
||||
@ -446,3 +450,51 @@ The synapse config will look like this:
|
||||
config:
|
||||
email_template: "{{ user.email }}"
|
||||
```
|
||||
|
||||
## Django OAuth Toolkit
|
||||
|
||||
[django-oauth-toolkit](https://github.com/jazzband/django-oauth-toolkit) is a
|
||||
Django application providing out of the box all the endpoints, data and logic
|
||||
needed to add OAuth2 capabilities to your Django projects. It supports
|
||||
[OpenID Connect too](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html).
|
||||
|
||||
Configuration on Django's side:
|
||||
|
||||
1. Add an application: https://example.com/admin/oauth2_provider/application/add/ and choose parameters like this:
|
||||
* `Redirect uris`: https://synapse.example.com/_synapse/client/oidc/callback
|
||||
* `Client type`: `Confidential`
|
||||
* `Authorization grant type`: `Authorization code`
|
||||
* `Algorithm`: `HMAC with SHA-2 256`
|
||||
2. You can [customize the claims](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html#customizing-the-oidc-responses) Django gives to synapse (optional):
|
||||
<details>
|
||||
<summary>Code sample</summary>
|
||||
|
||||
```python
|
||||
class CustomOAuth2Validator(OAuth2Validator):
|
||||
|
||||
def get_additional_claims(self, request):
|
||||
return {
|
||||
"sub": request.user.email,
|
||||
"email": request.user.email,
|
||||
"first_name": request.user.first_name,
|
||||
"last_name": request.user.last_name,
|
||||
}
|
||||
```
|
||||
</details>
|
||||
Your synapse config is then:
|
||||
|
||||
```yaml
|
||||
oidc_providers:
|
||||
- idp_id: django_example
|
||||
idp_name: "Django Example"
|
||||
issuer: "https://example.com/o/"
|
||||
client_id: "your-client-id" # CHANGE ME
|
||||
client_secret: "your-client-secret" # CHANGE ME
|
||||
scopes: ["openid"]
|
||||
user_profile_method: "userinfo_endpoint" # needed because oauth-toolkit does not include user information in the authorization response
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.email.split('@')[0] }}"
|
||||
display_name_template: "{{ user.first_name }} {{ user.last_name }}"
|
||||
email_template: "{{ user.email }}"
|
||||
```
|
||||
|
@ -1,3 +1,9 @@
|
||||
<h2 style="color:red">
|
||||
This page of the Synapse documentation is now deprecated. For up to date
|
||||
documentation on setting up or writing a presence router module, please see
|
||||
<a href="modules.md">this page</a>.
|
||||
</h2>
|
||||
|
||||
# Presence Router Module
|
||||
|
||||
Synapse supports configuring a module that can specify additional users
|
||||
|
@ -64,6 +64,9 @@ server {
|
||||
server_name matrix.example.com;
|
||||
|
||||
location ~* ^(\/_matrix|\/_synapse\/client) {
|
||||
# note: do not add a path (even a single /) after the port in `proxy_pass`,
|
||||
# otherwise nginx will canonicalise the URI and cause signature verification
|
||||
# errors.
|
||||
proxy_pass http://localhost:8008;
|
||||
proxy_set_header X-Forwarded-For $remote_addr;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
@ -76,10 +79,7 @@ server {
|
||||
}
|
||||
```
|
||||
|
||||
**NOTE**: Do not add a path after the port in `proxy_pass`, otherwise nginx will
|
||||
canonicalise/normalise the URI.
|
||||
|
||||
### Caddy 1
|
||||
### Caddy v1
|
||||
|
||||
```
|
||||
matrix.example.com {
|
||||
@ -99,7 +99,7 @@ example.com:8448 {
|
||||
}
|
||||
```
|
||||
|
||||
### Caddy 2
|
||||
### Caddy v2
|
||||
|
||||
```
|
||||
matrix.example.com {
|
||||
|
@ -108,20 +108,6 @@ presence:
|
||||
#
|
||||
#enabled: false
|
||||
|
||||
# Presence routers are third-party modules that can specify additional logic
|
||||
# to where presence updates from users are routed.
|
||||
#
|
||||
presence_router:
|
||||
# The custom module's class. Uncomment to use a custom presence router module.
|
||||
#
|
||||
#module: "my_custom_router.PresenceRouter"
|
||||
|
||||
# Configuration options of the custom module. Refer to your module's
|
||||
# documentation for available options.
|
||||
#
|
||||
#config:
|
||||
# example_option: 'something'
|
||||
|
||||
# Whether to require authentication to retrieve profile data (avatars,
|
||||
# display names) of other users through the client API. Defaults to
|
||||
# 'false'. Note that profile data is also available via the federation
|
||||
@ -807,6 +793,8 @@ log_config: "CONFDIR/SERVERNAME.log.config"
|
||||
# is using
|
||||
# - one for registration that ratelimits registration requests based on the
|
||||
# client's IP address.
|
||||
# - one for checking the validity of registration tokens that ratelimits
|
||||
# requests based on the client's IP address.
|
||||
# - one for login that ratelimits login requests based on the client's IP
|
||||
# address.
|
||||
# - one for login that ratelimits login requests based on the account the
|
||||
@ -835,6 +823,10 @@ log_config: "CONFDIR/SERVERNAME.log.config"
|
||||
# per_second: 0.17
|
||||
# burst_count: 3
|
||||
#
|
||||
#rc_registration_token_validity:
|
||||
# per_second: 0.1
|
||||
# burst_count: 5
|
||||
#
|
||||
#rc_login:
|
||||
# address:
|
||||
# per_second: 0.17
|
||||
@ -1183,6 +1175,15 @@ url_preview_accept_language:
|
||||
#
|
||||
#enable_3pid_lookup: true
|
||||
|
||||
# Require users to submit a token during registration.
|
||||
# Tokens can be managed using the admin API:
|
||||
# https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/registration_tokens.html
|
||||
# Note that `enable_registration` must be set to `true`.
|
||||
# Disabling this option will not delete any tokens previously generated.
|
||||
# Defaults to false. Uncomment the following to require tokens:
|
||||
#
|
||||
#registration_requires_token: true
|
||||
|
||||
# If set, allows registration of standard or admin accounts by anyone who
|
||||
# has the shared secret, even if registration is otherwise disabled.
|
||||
#
|
||||
|
@ -1,44 +1,5 @@
|
||||
# Installation Instructions
|
||||
|
||||
There are 3 steps to follow under **Installation Instructions**.
|
||||
|
||||
- [Installation Instructions](#installation-instructions)
|
||||
- [Choosing your server name](#choosing-your-server-name)
|
||||
- [Installing Synapse](#installing-synapse)
|
||||
- [Installing from source](#installing-from-source)
|
||||
- [Platform-specific prerequisites](#platform-specific-prerequisites)
|
||||
- [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
|
||||
- [ArchLinux](#archlinux)
|
||||
- [CentOS/Fedora](#centosfedora)
|
||||
- [macOS](#macos)
|
||||
- [OpenSUSE](#opensuse)
|
||||
- [OpenBSD](#openbsd)
|
||||
- [Windows](#windows)
|
||||
- [Prebuilt packages](#prebuilt-packages)
|
||||
- [Docker images and Ansible playbooks](#docker-images-and-ansible-playbooks)
|
||||
- [Debian/Ubuntu](#debianubuntu)
|
||||
- [Matrix.org packages](#matrixorg-packages)
|
||||
- [Downstream Debian packages](#downstream-debian-packages)
|
||||
- [Downstream Ubuntu packages](#downstream-ubuntu-packages)
|
||||
- [Fedora](#fedora)
|
||||
- [OpenSUSE](#opensuse-1)
|
||||
- [SUSE Linux Enterprise Server](#suse-linux-enterprise-server)
|
||||
- [ArchLinux](#archlinux-1)
|
||||
- [Void Linux](#void-linux)
|
||||
- [FreeBSD](#freebsd)
|
||||
- [OpenBSD](#openbsd-1)
|
||||
- [NixOS](#nixos)
|
||||
- [Setting up Synapse](#setting-up-synapse)
|
||||
- [Using PostgreSQL](#using-postgresql)
|
||||
- [TLS certificates](#tls-certificates)
|
||||
- [Client Well-Known URI](#client-well-known-uri)
|
||||
- [Email](#email)
|
||||
- [Registering a user](#registering-a-user)
|
||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
- [URL previews](#url-previews)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
|
||||
|
||||
## Choosing your server name
|
||||
|
||||
It is important to choose the name for your server before you install Synapse,
|
||||
|
@ -85,6 +85,33 @@ process, for example:
|
||||
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||
```
|
||||
|
||||
# Upgrading to v1.42.0

## Removal of old Room Admin API

The following admin APIs were deprecated in [Synapse 1.25](https://github.com/matrix-org/synapse/blob/v1.25.0/CHANGES.md#removal-warning)
(released on 2021-01-13) and have now been removed:

- `POST /_synapse/admin/v1/purge_room`
- `POST /_synapse/admin/v1/shutdown_room/<room_id>`

Any scripts still using the above APIs should be converted to use the
[Delete Room API](https://matrix-org.github.io/synapse/latest/admin_api/rooms.html#delete-room-api).

## User-interactive authentication fallback templates can now display errors

This may affect you if you make use of custom HTML templates for the
[reCAPTCHA](../synapse/res/templates/recaptcha.html) or
[terms](../synapse/res/templates/terms.html) fallback pages.

The template is now provided an `error` variable if the authentication
process failed. See the default templates linked above for an example.

## Removal of out-of-date email pushers

Users will stop receiving message updates via email for addresses that were
once, but are no longer, linked to their account.
|
||||
|
||||
|
||||
# Upgrading to v1.41.0
|
||||
|
||||
|
295
docs/usage/administration/admin_api/registration_tokens.md
Normal file
295
docs/usage/administration/admin_api/registration_tokens.md
Normal file
@ -0,0 +1,295 @@
|
||||
# Registration Tokens

This API allows you to manage tokens which can be used to authenticate
registration requests, as proposed in [MSC3231](https://github.com/govynnus/matrix-doc/blob/token-registration/proposals/3231-token-authenticated-registration.md).
To use it, you will need to enable the `registration_requires_token` config
option, and authenticate by providing an `access_token` for a server admin:
see [Admin API](../../usage/administration/admin_api).
Note that this API is still experimental; not all clients may support it yet.


## Registration token objects

Most endpoints make use of JSON objects that contain details about tokens.
These objects have the following fields:
- `token`: The token which can be used to authenticate registration.
- `uses_allowed`: The number of times the token can be used to complete a
  registration before it becomes invalid.
- `pending`: The number of pending uses the token has. When someone uses
  the token to authenticate themselves, the pending counter is incremented
  so that the token is not used more than the permitted number of times.
  When the person completes registration the pending counter is decremented,
  and the completed counter is incremented.
- `completed`: The number of times the token has been used to successfully
  complete a registration.
- `expiry_time`: The latest time the token is valid. Given as the number of
  milliseconds since 1970-01-01 00:00:00 UTC (the start of the Unix epoch).
  To convert this into a human-readable form you can remove the milliseconds
  and use the `date` command. For example, `date -d '@1625394937'` (see the
  sketch below for a Python equivalent).
||||
|
||||
|
||||
## List all tokens
|
||||
|
||||
Lists all tokens and details about them. If the request is successful, the top
|
||||
level JSON object will have a `registration_tokens` key which is an array of
|
||||
registration token objects.
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/registration_tokens
|
||||
```
|
||||
|
||||
Optional query parameters:
|
||||
- `valid`: `true` or `false`. If `true`, only valid tokens are returned.
|
||||
If `false`, only tokens that have expired or have had all uses exhausted are
|
||||
returned. If omitted, all tokens are returned regardless of validity.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/registration_tokens
|
||||
```
|
||||
```
|
||||
200 OK
|
||||
|
||||
{
|
||||
"registration_tokens": [
|
||||
{
|
||||
"token": "abcd",
|
||||
"uses_allowed": 3,
|
||||
"pending": 0,
|
||||
"completed": 1,
|
||||
"expiry_time": null
|
||||
},
|
||||
{
|
||||
"token": "pqrs",
|
||||
"uses_allowed": 2,
|
||||
"pending": 1,
|
||||
"completed": 1,
|
||||
"expiry_time": null
|
||||
},
|
||||
{
|
||||
"token": "wxyz",
|
||||
"uses_allowed": null,
|
||||
"pending": 0,
|
||||
"completed": 9,
|
||||
"expiry_time": 1625394937000 // 2021-07-04 10:35:37 UTC
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Example using the `valid` query parameter:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/registration_tokens?valid=false
|
||||
```
|
||||
```
|
||||
200 OK
|
||||
|
||||
{
|
||||
"registration_tokens": [
|
||||
{
|
||||
"token": "pqrs",
|
||||
"uses_allowed": 2,
|
||||
"pending": 1,
|
||||
"completed": 1,
|
||||
"expiry_time": null
|
||||
},
|
||||
{
|
||||
"token": "wxyz",
|
||||
"uses_allowed": null,
|
||||
"pending": 0,
|
||||
"completed": 9,
|
||||
"expiry_time": 1625394937000 // 2021-07-04 10:35:37 UTC
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Get one token
|
||||
|
||||
Get details about a single token. If the request is successful, the response
|
||||
body will be a registration token object.
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/registration_tokens/<token>
|
||||
```
|
||||
|
||||
Path parameters:
|
||||
- `token`: The registration token to return details of.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/registration_tokens/abcd
|
||||
```
|
||||
```
|
||||
200 OK
|
||||
|
||||
{
|
||||
"token": "abcd",
|
||||
"uses_allowed": 3,
|
||||
"pending": 0,
|
||||
"completed": 1,
|
||||
"expiry_time": null
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Create token
|
||||
|
||||
Create a new registration token. If the request is successful, the newly created
|
||||
token will be returned as a registration token object in the response body.
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/registration_tokens/new
|
||||
```
|
||||
|
||||
The request body must be a JSON object and can contain the following fields:
|
||||
- `token`: The registration token. A string of no more than 64 characters that
|
||||
consists only of characters matched by the regex `[A-Za-z0-9-_]`.
|
||||
Default: randomly generated.
|
||||
- `uses_allowed`: The integer number of times the token can be used to complete
|
||||
a registration before it becomes invalid.
|
||||
Default: `null` (unlimited uses).
|
||||
- `expiry_time`: The latest time the token is valid. Given as the number of
|
||||
milliseconds since 1970-01-01 00:00:00 UTC (the start of the Unix epoch).
|
||||
You could use, for example, `date '+%s000' -d 'tomorrow'`.
|
||||
Default: `null` (token does not expire).
|
||||
- `length`: The length of the token randomly generated if `token` is not
|
||||
specified. Must be between 1 and 64 inclusive. Default: `16`.
|
||||
|
||||
If a field is omitted the default is used.
|
||||
|
||||
Example using defaults:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/registration_tokens/new
|
||||
|
||||
{}
|
||||
```
|
||||
```
|
||||
200 OK
|
||||
|
||||
{
|
||||
"token": "0M-9jbkf2t_Tgiw1",
|
||||
"uses_allowed": null,
|
||||
"pending": 0,
|
||||
"completed": 0,
|
||||
"expiry_time": null
|
||||
}
|
||||
```
|
||||
|
||||
Example specifying some fields:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/registration_tokens/new
|
||||
|
||||
{
|
||||
"token": "defg",
|
||||
"uses_allowed": 1
|
||||
}
|
||||
```
|
||||
```
|
||||
200 OK
|
||||
|
||||
{
|
||||
"token": "defg",
|
||||
"uses_allowed": 1,
|
||||
"pending": 0,
|
||||
"completed": 0,
|
||||
"expiry_time": null
|
||||
}
|
||||
```
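
Putting the fields together, a script might create a single-use token that expires in 24 hours. Again a sketch, with placeholder connection details:

```python
import time

import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder admin access token

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/registration_tokens/new",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={
        "uses_allowed": 1,
        # `expiry_time` is milliseconds since the epoch: now + 24 hours.
        "expiry_time": (int(time.time()) + 24 * 60 * 60) * 1000,
    },
)
resp.raise_for_status()
print("Created token:", resp.json()["token"])
```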
|
||||
|
||||
|
||||
## Update token
|
||||
|
||||
Update the number of allowed uses or expiry time of a token. If the request is
|
||||
successful, the updated token will be returned as a registration token object
|
||||
in the response body.
|
||||
|
||||
```
|
||||
PUT /_synapse/admin/v1/registration_tokens/<token>
|
||||
```
|
||||
|
||||
Path parameters:
|
||||
- `token`: The registration token to update.
|
||||
|
||||
The request body must be a JSON object and can contain the following fields:
|
||||
- `uses_allowed`: The integer number of times the token can be used to complete
|
||||
a registration before it becomes invalid. By setting `uses_allowed` to `0`
|
||||
the token can be easily made invalid without deleting it.
|
||||
If `null` the token will have an unlimited number of uses.
|
||||
- `expiry_time`: The latest time the token is valid. Given as the number of
|
||||
milliseconds since 1970-01-01 00:00:00 UTC (the start of the Unix epoch).
|
||||
If `null` the token will not expire.
|
||||
|
||||
If a field is omitted its value is not modified.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
PUT /_synapse/admin/v1/registration_tokens/defg
|
||||
|
||||
{
|
||||
"expiry_time": 4781243146000 // 2121-07-06 11:05:46 UTC
|
||||
}
|
||||
```
|
||||
```
|
||||
200 OK
|
||||
|
||||
{
|
||||
"token": "defg",
|
||||
"uses_allowed": 1,
|
||||
"pending": 0,
|
||||
"completed": 0,
|
||||
"expiry_time": 4781243146000
|
||||
}
|
||||
```
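
As noted above, setting `uses_allowed` to `0` disables a token without deleting it, which keeps its usage statistics around. A sketch of that call (placeholder connection details as before):

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder admin access token

resp = requests.put(
    f"{HOMESERVER}/_synapse/admin/v1/registration_tokens/defg",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={"uses_allowed": 0},  # token remains on record but cannot be used
)
resp.raise_for_status()
print(resp.json())
```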
|
||||
|
||||
|
||||
## Delete token
|
||||
|
||||
Delete a registration token. If the request is successful, the response body
|
||||
will be an empty JSON object.
|
||||
|
||||
```
|
||||
DELETE /_synapse/admin/v1/registration_tokens/<token>
|
||||
```
|
||||
|
||||
Path parameters:
|
||||
- `token`: The registration token to delete.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
DELETE /_synapse/admin/v1/registration_tokens/wxyz
|
||||
```
|
||||
```
|
||||
200 OK
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
|
||||
## Errors
|
||||
|
||||
If a request fails a "standard error response" will be returned as defined in
|
||||
the [Matrix Client-Server API specification](https://matrix.org/docs/spec/client_server/r0.6.1#api-standards).
|
||||
|
||||
For example, if the token specified in a path parameter does not exist a
|
||||
`404 Not Found` error will be returned.
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/registration_tokens/1234
|
||||
```
|
||||
```
|
||||
404 Not Found
|
||||
|
||||
{
|
||||
"errcode": "M_NOT_FOUND",
|
||||
"error": "No such registration token: 1234"
|
||||
}
|
||||
```
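
A script can therefore branch on the status code and `errcode` instead of treating every non-200 response as fatal, for example (placeholder connection details again):

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."  # placeholder admin access token

resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/registration_tokens/1234",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
if resp.status_code == 404 and resp.json().get("errcode") == "M_NOT_FOUND":
    print("No such registration token")
else:
    resp.raise_for_status()
    print(resp.json())
```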
|
@ -1,3 +1,7 @@
|
||||
:root {
|
||||
--pagetoc-width: 250px;
|
||||
}
|
||||
|
||||
@media only screen and (max-width:1439px) {
|
||||
.sidetoc {
|
||||
display: none;
|
||||
@ -8,6 +12,7 @@
|
||||
main {
|
||||
position: relative;
|
||||
margin-left: 100px !important;
|
||||
margin-right: var(--pagetoc-width) !important;
|
||||
}
|
||||
.sidetoc {
|
||||
margin-left: auto;
|
||||
@ -18,7 +23,7 @@
|
||||
}
|
||||
.pagetoc {
|
||||
position: fixed;
|
||||
width: 250px;
|
||||
width: var(--pagetoc-width);
|
||||
overflow: auto;
|
||||
right: 20px;
|
||||
height: calc(100% - var(--menu-bar-height));
|
||||
|
@ -236,6 +236,7 @@ expressions:
|
||||
# Registration/login requests
|
||||
^/_matrix/client/(api/v1|r0|unstable)/login$
|
||||
^/_matrix/client/(r0|unstable)/register$
|
||||
^/_matrix/client/unstable/org.matrix.msc3231/register/org.matrix.msc3231.login.registration_token/validity$
|
||||
|
||||
# Event sending requests
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact
|
||||
|
9
mypy.ini
9
mypy.ini
@ -28,10 +28,13 @@ files =
|
||||
synapse/federation,
|
||||
synapse/groups,
|
||||
synapse/handlers,
|
||||
synapse/http/additional_resource.py,
|
||||
synapse/http/client.py,
|
||||
synapse/http/federation/matrix_federation_agent.py,
|
||||
synapse/http/federation/srv_resolver.py,
|
||||
synapse/http/federation/well_known_resolver.py,
|
||||
synapse/http/matrixfederationclient.py,
|
||||
synapse/http/proxyagent.py,
|
||||
synapse/http/servlet.py,
|
||||
synapse/http/server.py,
|
||||
synapse/http/site.py,
|
||||
@ -54,6 +57,7 @@ files =
|
||||
synapse/storage/databases/main/keys.py,
|
||||
synapse/storage/databases/main/pusher.py,
|
||||
synapse/storage/databases/main/registration.py,
|
||||
synapse/storage/databases/main/session.py,
|
||||
synapse/storage/databases/main/stream.py,
|
||||
synapse/storage/databases/main/ui_auth.py,
|
||||
synapse/storage/database.py,
|
||||
@ -88,8 +92,9 @@ files =
|
||||
tests/handlers/test_password_providers.py,
|
||||
tests/handlers/test_room_summary.py,
|
||||
tests/handlers/test_send_email.py,
|
||||
tests/rest/client/v1/test_login.py,
|
||||
tests/rest/client/v2_alpha/test_auth.py,
|
||||
tests/handlers/test_sync.py,
|
||||
tests/rest/client/test_login.py,
|
||||
tests/rest/client/test_auth.py,
|
||||
tests/util/test_itertools.py,
|
||||
tests/util/test_stream_change_cache.py
|
||||
|
||||
|
@ -35,25 +35,25 @@ if [[ -z "$COMPLEMENT_DIR" ]]; then
|
||||
echo "Checkout available at 'complement-master'"
|
||||
fi
|
||||
|
||||
# Build the base Synapse image from the local checkout
|
||||
docker build -t matrixdotorg/synapse -f "docker/Dockerfile" .
|
||||
|
||||
# If we're using workers, modify the docker files slightly.
|
||||
if [[ -n "$WORKERS" ]]; then
|
||||
BASE_IMAGE=matrixdotorg/synapse-workers
|
||||
BASE_DOCKERFILE=docker/Dockerfile-workers
|
||||
# Build the workers docker image (from the base Synapse image).
|
||||
docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
|
||||
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse-workers
|
||||
COMPLEMENT_DOCKERFILE=SynapseWorkers.Dockerfile
|
||||
# And provide some more configuration to complement.
|
||||
export COMPLEMENT_CA=true
|
||||
export COMPLEMENT_VERSION_CHECK_ITERATIONS=500
|
||||
else
|
||||
BASE_IMAGE=matrixdotorg/synapse
|
||||
BASE_DOCKERFILE=docker/Dockerfile
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse
|
||||
COMPLEMENT_DOCKERFILE=Synapse.Dockerfile
|
||||
fi
|
||||
|
||||
# Build the base Synapse image from the local checkout
|
||||
docker build -t $BASE_IMAGE -f "$BASE_DOCKERFILE" .
|
||||
# Build the Synapse monolith image from Complement, based on the above image we just built
|
||||
# Build the Complement image from the Synapse image we just built.
|
||||
docker build -t $COMPLEMENT_BASE_IMAGE -f "$COMPLEMENT_DIR/dockerfiles/$COMPLEMENT_DOCKERFILE" "$COMPLEMENT_DIR/dockerfiles"
|
||||
|
||||
cd "$COMPLEMENT_DIR"
|
||||
|
@ -47,7 +47,7 @@ try:
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
__version__ = "1.41.1"
|
||||
__version__ = "1.42.0rc1"
|
||||
|
||||
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
|
||||
# We import here so that we don't have to install a bunch of deps when
|
||||
|
@ -79,6 +79,7 @@ class LoginType:
|
||||
TERMS = "m.login.terms"
|
||||
SSO = "m.login.sso"
|
||||
DUMMY = "m.login.dummy"
|
||||
REGISTRATION_TOKEN = "org.matrix.msc3231.login.registration_token"
|
||||
|
||||
|
||||
# This is used in the `type` parameter for /register when called by
|
||||
|
@ -147,6 +147,14 @@ class SynapseError(CodeMessageException):
|
||||
return cs_error(self.msg, self.errcode)
|
||||
|
||||
|
||||
class InvalidAPICallError(SynapseError):
|
||||
"""You called an existing API endpoint, but fed that endpoint
|
||||
invalid or incomplete data."""
|
||||
|
||||
def __init__(self, msg: str):
|
||||
super().__init__(HTTPStatus.BAD_REQUEST, msg, Codes.BAD_JSON)
|
||||
|
||||
|
||||
class ProxiedRequestError(SynapseError):
|
||||
"""An error from a general matrix endpoint, eg. from a proxied Matrix API call.
|
||||
|
||||
|
@ -37,6 +37,7 @@ from synapse.app import check_bind_error
|
||||
from synapse.app.phone_stats_home import start_phone_stats_home
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.crypto import context_factory
|
||||
from synapse.events.presence_router import load_legacy_presence_router
|
||||
from synapse.events.spamcheck import load_legacy_spam_checkers
|
||||
from synapse.events.third_party_rules import load_legacy_third_party_event_rules
|
||||
from synapse.logging.context import PreserveLoggingContext
|
||||
@ -370,6 +371,7 @@ async def start(hs: "HomeServer"):
|
||||
|
||||
load_legacy_spam_checkers(hs)
|
||||
load_legacy_third_party_event_rules(hs)
|
||||
load_legacy_presence_router(hs)
|
||||
|
||||
# If we've configured an expiry time for caches, start the background job now.
|
||||
setup_expire_lru_cache_entries(hs)
|
||||
|
@ -95,7 +95,10 @@ from synapse.rest.client.profile import (
|
||||
ProfileRestServlet,
|
||||
)
|
||||
from synapse.rest.client.push_rule import PushRuleRestServlet
|
||||
from synapse.rest.client.register import RegisterRestServlet
|
||||
from synapse.rest.client.register import (
|
||||
RegisterRestServlet,
|
||||
RegistrationTokenValidityRestServlet,
|
||||
)
|
||||
from synapse.rest.client.sendtodevice import SendToDeviceRestServlet
|
||||
from synapse.rest.client.versions import VersionsRestServlet
|
||||
from synapse.rest.client.voip import VoipRestServlet
|
||||
@ -115,6 +118,7 @@ from synapse.storage.databases.main.monthly_active_users import (
|
||||
from synapse.storage.databases.main.presence import PresenceStore
|
||||
from synapse.storage.databases.main.room import RoomWorkerStore
|
||||
from synapse.storage.databases.main.search import SearchStore
|
||||
from synapse.storage.databases.main.session import SessionStore
|
||||
from synapse.storage.databases.main.stats import StatsStore
|
||||
from synapse.storage.databases.main.transactions import TransactionWorkerStore
|
||||
from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore
|
||||
@ -250,6 +254,7 @@ class GenericWorkerSlavedStore(
|
||||
SearchStore,
|
||||
TransactionWorkerStore,
|
||||
LockStore,
|
||||
SessionStore,
|
||||
BaseSlavedStore,
|
||||
):
|
||||
pass
|
||||
@ -279,6 +284,7 @@ class GenericWorkerServer(HomeServer):
|
||||
resource = JsonResource(self, canonical_json=False)
|
||||
|
||||
RegisterRestServlet(self).register(resource)
|
||||
RegistrationTokenValidityRestServlet(self).register(resource)
|
||||
login.register_servlets(self, resource)
|
||||
ThreepidRestServlet(self).register(resource)
|
||||
DevicesRestServlet(self).register(resource)
|
||||
|
@ -39,5 +39,8 @@ class ExperimentalConfig(Config):
|
||||
# MSC3244 (room version capabilities)
|
||||
self.msc3244_enabled: bool = experimental.get("msc3244_enabled", True)
|
||||
|
||||
# MSC3283 (set displayname, avatar_url and change 3pid capabilities)
|
||||
self.msc3283_enabled: bool = experimental.get("msc3283_enabled", False)
|
||||
|
||||
# MSC3266 (room summary api)
|
||||
self.msc3266_enabled: bool = experimental.get("msc3266_enabled", False)
|
||||
|
@ -79,6 +79,11 @@ class RatelimitConfig(Config):
|
||||
|
||||
self.rc_registration = RateLimitConfig(config.get("rc_registration", {}))
|
||||
|
||||
self.rc_registration_token_validity = RateLimitConfig(
|
||||
config.get("rc_registration_token_validity", {}),
|
||||
defaults={"per_second": 0.1, "burst_count": 5},
|
||||
)
|
||||
|
||||
rc_login_config = config.get("rc_login", {})
|
||||
self.rc_login_address = RateLimitConfig(rc_login_config.get("address", {}))
|
||||
self.rc_login_account = RateLimitConfig(rc_login_config.get("account", {}))
|
||||
@ -143,6 +148,8 @@ class RatelimitConfig(Config):
|
||||
# is using
|
||||
# - one for registration that ratelimits registration requests based on the
|
||||
# client's IP address.
|
||||
# - one for checking the validity of registration tokens that ratelimits
|
||||
# requests based on the client's IP address.
|
||||
# - one for login that ratelimits login requests based on the client's IP
|
||||
# address.
|
||||
# - one for login that ratelimits login requests based on the account the
|
||||
@ -171,6 +178,10 @@ class RatelimitConfig(Config):
|
||||
# per_second: 0.17
|
||||
# burst_count: 3
|
||||
#
|
||||
#rc_registration_token_validity:
|
||||
# per_second: 0.1
|
||||
# burst_count: 5
|
||||
#
|
||||
#rc_login:
|
||||
# address:
|
||||
# per_second: 0.17
|
||||
|
@ -33,6 +33,9 @@ class RegistrationConfig(Config):
|
||||
self.registrations_require_3pid = config.get("registrations_require_3pid", [])
|
||||
self.allowed_local_3pids = config.get("allowed_local_3pids", [])
|
||||
self.enable_3pid_lookup = config.get("enable_3pid_lookup", True)
|
||||
self.registration_requires_token = config.get(
|
||||
"registration_requires_token", False
|
||||
)
|
||||
self.registration_shared_secret = config.get("registration_shared_secret")
|
||||
|
||||
self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
|
||||
@ -140,6 +143,9 @@ class RegistrationConfig(Config):
|
||||
"mechanism by removing the `access_token_lifetime` option."
|
||||
)
|
||||
|
||||
# The fallback template used for authenticating using a registration token
|
||||
self.registration_token_template = self.read_template("registration_token.html")
|
||||
|
||||
# The success template used during fallback auth.
|
||||
self.fallback_success_template = self.read_template("auth_success.html")
|
||||
|
||||
@ -199,6 +205,15 @@ class RegistrationConfig(Config):
|
||||
#
|
||||
#enable_3pid_lookup: true
|
||||
|
||||
# Require users to submit a token during registration.
|
||||
# Tokens can be managed using the admin API:
|
||||
# https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/registration_tokens.html
|
||||
# Note that `enable_registration` must be set to `true`.
|
||||
# Disabling this option will not delete any tokens previously generated.
|
||||
# Defaults to false. Uncomment the following to require tokens:
|
||||
#
|
||||
#registration_requires_token: true
|
||||
|
||||
# If set, allows registration of standard or admin accounts by anyone who
|
||||
# has the shared secret, even if registration is otherwise disabled.
|
||||
#
|
||||
|
@ -248,6 +248,7 @@ class ServerConfig(Config):
|
||||
self.use_presence = config.get("use_presence", True)
|
||||
|
||||
# Custom presence router module
|
||||
# This is the legacy way of configuring it (the config should now be put in the modules section)
|
||||
self.presence_router_module_class = None
|
||||
self.presence_router_config = None
|
||||
presence_router_config = presence_config.get("presence_router")
|
||||
@ -870,20 +871,6 @@ class ServerConfig(Config):
|
||||
#
|
||||
#enabled: false
|
||||
|
||||
# Presence routers are third-party modules that can specify additional logic
|
||||
# to where presence updates from users are routed.
|
||||
#
|
||||
presence_router:
|
||||
# The custom module's class. Uncomment to use a custom presence router module.
|
||||
#
|
||||
#module: "my_custom_router.PresenceRouter"
|
||||
|
||||
# Configuration options of the custom module. Refer to your module's
|
||||
# documentation for available options.
|
||||
#
|
||||
#config:
|
||||
# example_option: 'something'
|
||||
|
||||
# Whether to require authentication to retrieve profile data (avatars,
|
||||
# display names) of other users through the client API. Defaults to
|
||||
# 'false'. Note that profile data is also available via the federation
|
||||
|
@ -11,45 +11,115 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from typing import TYPE_CHECKING, Dict, Iterable, Set, Union
|
||||
import logging
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Awaitable,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterable,
|
||||
List,
|
||||
Optional,
|
||||
Set,
|
||||
Union,
|
||||
)
|
||||
|
||||
from synapse.api.presence import UserPresenceState
|
||||
from synapse.util.async_helpers import maybe_awaitable
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
GET_USERS_FOR_STATES_CALLBACK = Callable[
|
||||
[Iterable[UserPresenceState]], Awaitable[Dict[str, Set[UserPresenceState]]]
|
||||
]
|
||||
GET_INTERESTED_USERS_CALLBACK = Callable[
|
||||
[str], Awaitable[Union[Set[str], "PresenceRouter.ALL_USERS"]]
|
||||
]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def load_legacy_presence_router(hs: "HomeServer"):
|
||||
"""Wrapper that loads a presence router module configured using the old
|
||||
configuration, and registers the hooks they implement.
|
||||
"""
|
||||
|
||||
if hs.config.presence_router_module_class is None:
|
||||
return
|
||||
|
||||
module = hs.config.presence_router_module_class
|
||||
config = hs.config.presence_router_config
|
||||
api = hs.get_module_api()
|
||||
|
||||
presence_router = module(config=config, module_api=api)
|
||||
|
||||
# The known hooks. If a module implements a method which name appears in this set,
|
||||
# we'll want to register it.
|
||||
presence_router_methods = {
|
||||
"get_users_for_states",
|
||||
"get_interested_users",
|
||||
}
|
||||
|
||||
# All methods that the module provides should be async, but this wasn't enforced
|
||||
# in the old module system, so we wrap them if needed
|
||||
def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]:
|
||||
# f might be None if the callback isn't implemented by the module. In this
|
||||
# case we don't want to register a callback at all so we return None.
|
||||
if f is None:
|
||||
return None
|
||||
|
||||
def run(*args, **kwargs):
|
||||
# mypy doesn't do well across function boundaries so we need to tell it
|
||||
# f is definitely not None.
|
||||
assert f is not None
|
||||
|
||||
return maybe_awaitable(f(*args, **kwargs))
|
||||
|
||||
return run
|
||||
|
||||
# Register the hooks through the module API.
|
||||
hooks = {
|
||||
hook: async_wrapper(getattr(presence_router, hook, None))
|
||||
for hook in presence_router_methods
|
||||
}
|
||||
|
||||
api.register_presence_router_callbacks(**hooks)
|
||||
|
||||
|
||||
class PresenceRouter:
|
||||
"""
|
||||
A module that the homeserver will call upon to help route user presence updates to
|
||||
additional destinations. If a custom presence router is configured, calls will be
|
||||
passed to that instead.
|
||||
additional destinations.
|
||||
"""
|
||||
|
||||
ALL_USERS = "ALL"
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
self.custom_presence_router = None
|
||||
# Initially there are no callbacks
|
||||
self._get_users_for_states_callbacks: List[GET_USERS_FOR_STATES_CALLBACK] = []
|
||||
self._get_interested_users_callbacks: List[GET_INTERESTED_USERS_CALLBACK] = []
|
||||
|
||||
# Check whether a custom presence router module has been configured
|
||||
if hs.config.presence_router_module_class:
|
||||
# Initialise the module
|
||||
self.custom_presence_router = hs.config.presence_router_module_class(
|
||||
config=hs.config.presence_router_config, module_api=hs.get_module_api()
|
||||
def register_presence_router_callbacks(
|
||||
self,
|
||||
get_users_for_states: Optional[GET_USERS_FOR_STATES_CALLBACK] = None,
|
||||
get_interested_users: Optional[GET_INTERESTED_USERS_CALLBACK] = None,
|
||||
):
|
||||
# PresenceRouter modules are required to implement both of these methods
|
||||
# or neither of them as they are assumed to act in a complementary manner
|
||||
paired_methods = [get_users_for_states, get_interested_users]
|
||||
if paired_methods.count(None) == 1:
|
||||
raise RuntimeError(
|
||||
"PresenceRouter modules must register neither or both of the paired callbacks: "
|
||||
"[get_users_for_states, get_interested_users]"
|
||||
)
|
||||
|
||||
# Ensure the module has implemented the required methods
|
||||
required_methods = ["get_users_for_states", "get_interested_users"]
|
||||
for method_name in required_methods:
|
||||
if not hasattr(self.custom_presence_router, method_name):
|
||||
raise Exception(
|
||||
"PresenceRouter module '%s' must implement all required methods: %s"
|
||||
% (
|
||||
hs.config.presence_router_module_class.__name__,
|
||||
", ".join(required_methods),
|
||||
)
|
||||
)
|
||||
# Append the methods provided to the lists of callbacks
|
||||
if get_users_for_states is not None:
|
||||
self._get_users_for_states_callbacks.append(get_users_for_states)
|
||||
|
||||
if get_interested_users is not None:
|
||||
self._get_interested_users_callbacks.append(get_interested_users)
|
||||
|
||||
async def get_users_for_states(
|
||||
self,
|
||||
@ -66,14 +136,40 @@ class PresenceRouter:
|
||||
A dictionary of user_id -> set of UserPresenceState, indicating which
|
||||
presence updates each user should receive.
|
||||
"""
|
||||
if self.custom_presence_router is not None:
|
||||
# Ask the custom module
|
||||
return await self.custom_presence_router.get_users_for_states(
|
||||
state_updates=state_updates
|
||||
)
|
||||
|
||||
# Don't include any extra destinations for presence updates
|
||||
return {}
|
||||
# Bail out early if we don't have any callbacks to run.
|
||||
if len(self._get_users_for_states_callbacks) == 0:
|
||||
# Don't include any extra destinations for presence updates
|
||||
return {}
|
||||
|
||||
users_for_states = {}
|
||||
# run all the callbacks for get_users_for_states and combine the results
|
||||
for callback in self._get_users_for_states_callbacks:
|
||||
try:
|
||||
result = await callback(state_updates)
|
||||
except Exception as e:
|
||||
logger.warning("Failed to run module API callback %s: %s", callback, e)
|
||||
continue
|
||||
|
||||
if not isinstance(result, Dict):
|
||||
logger.warning(
|
||||
"Wrong type returned by module API callback %s: %s, expected Dict",
|
||||
callback,
|
||||
result,
|
||||
)
|
||||
continue
|
||||
|
||||
for key, new_entries in result.items():
|
||||
if not isinstance(new_entries, Set):
|
||||
logger.warning(
|
||||
"Wrong type returned by module API callback %s: %s, expected Set",
|
||||
callback,
|
||||
new_entries,
|
||||
)
|
||||
break
|
||||
users_for_states.setdefault(key, set()).update(new_entries)
|
||||
|
||||
return users_for_states
|
||||
|
||||
async def get_interested_users(self, user_id: str) -> Union[Set[str], ALL_USERS]:
|
||||
"""
|
||||
@ -92,12 +188,36 @@ class PresenceRouter:
|
||||
A set of user IDs to return presence updates for, or ALL_USERS to return all
|
||||
known updates.
|
||||
"""
|
||||
if self.custom_presence_router is not None:
|
||||
# Ask the custom module for interested users
|
||||
return await self.custom_presence_router.get_interested_users(
|
||||
user_id=user_id
|
||||
)
|
||||
|
||||
# A custom presence router is not defined.
|
||||
# Don't report any additional interested users
|
||||
return set()
|
||||
# Bail out early if we don't have any callbacks to run.
|
||||
if len(self._get_interested_users_callbacks) == 0:
|
||||
# Don't report any additional interested users
|
||||
return set()
|
||||
|
||||
interested_users = set()
|
||||
# run all the callbacks for get_interested_users and combine the results
|
||||
for callback in self._get_interested_users_callbacks:
|
||||
try:
|
||||
result = await callback(user_id)
|
||||
except Exception as e:
|
||||
logger.warning("Failed to run module API callback %s: %s", callback, e)
|
||||
continue
|
||||
|
||||
# If one of the callbacks returns ALL_USERS then we can stop calling all
|
||||
# of the other callbacks, since the set of interested_users is already as
|
||||
# large as it can possibly be
|
||||
if result == PresenceRouter.ALL_USERS:
|
||||
return PresenceRouter.ALL_USERS
|
||||
|
||||
if not isinstance(result, Set):
|
||||
logger.warning(
|
||||
"Wrong type returned by module API callback %s: %s, expected set",
|
||||
callback,
|
||||
result,
|
||||
)
|
||||
continue
|
||||
|
||||
# Add the new interested users to the set
|
||||
interested_users.update(result)
|
||||
|
||||
return interested_users
|
||||
|
@ -32,6 +32,9 @@ from . import EventBase
|
||||
# the literal fields "foo\" and "bar" but will instead be treated as "foo\\.bar"
|
||||
SPLIT_FIELD_REGEX = re.compile(r"(?<!\\)\.")
|
||||
|
||||
CANONICALJSON_MAX_INT = (2 ** 53) - 1
|
||||
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
|
||||
|
||||
|
||||
def prune_event(event: EventBase) -> EventBase:
|
||||
"""Returns a pruned version of the given event, which removes all keys we
|
||||
@ -505,7 +508,7 @@ def validate_canonicaljson(value: Any):
|
||||
* NaN, Infinity, -Infinity
|
||||
"""
|
||||
if isinstance(value, int):
|
||||
if value <= -(2 ** 53) or 2 ** 53 <= value:
|
||||
if value < CANONICALJSON_MIN_INT or CANONICALJSON_MAX_INT < value:
|
||||
raise SynapseError(400, "JSON integer out of range", Codes.BAD_JSON)
|
||||
|
||||
elif isinstance(value, float):
|
||||
|
@ -11,16 +11,22 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import collections.abc
|
||||
from typing import Union
|
||||
|
||||
import jsonschema
|
||||
|
||||
from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes, Membership
|
||||
from synapse.api.errors import Codes, SynapseError
|
||||
from synapse.api.room_versions import EventFormatVersions
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.events import EventBase
|
||||
from synapse.events.builder import EventBuilder
|
||||
from synapse.events.utils import validate_canonicaljson
|
||||
from synapse.events.utils import (
|
||||
CANONICALJSON_MAX_INT,
|
||||
CANONICALJSON_MIN_INT,
|
||||
validate_canonicaljson,
|
||||
)
|
||||
from synapse.federation.federation_server import server_matches_acl_event
|
||||
from synapse.types import EventID, RoomID, UserID
|
||||
|
||||
@ -93,6 +99,29 @@ class EventValidator:
|
||||
400, "Can't create an ACL event that denies the local server"
|
||||
)
|
||||
|
||||
if event.type == EventTypes.PowerLevels:
|
||||
try:
|
||||
jsonschema.validate(
|
||||
instance=event.content,
|
||||
schema=POWER_LEVELS_SCHEMA,
|
||||
cls=plValidator,
|
||||
)
|
||||
except jsonschema.ValidationError as e:
|
||||
if e.path:
|
||||
# example: "users_default": '0' is not of type 'integer'
|
||||
message = '"' + e.path[-1] + '": ' + e.message # noqa: B306
|
||||
# jsonschema.ValidationError.message is a valid attribute
|
||||
else:
|
||||
# example: '0' is not of type 'integer'
|
||||
message = e.message # noqa: B306
|
||||
# jsonschema.ValidationError.message is a valid attribute
|
||||
|
||||
raise SynapseError(
|
||||
code=400,
|
||||
msg=message,
|
||||
errcode=Codes.BAD_JSON,
|
||||
)
|
||||
|
||||
def _validate_retention(self, event: EventBase):
|
||||
"""Checks that an event that defines the retention policy for a room respects the
|
||||
format enforced by the spec.
|
||||
@ -195,3 +224,47 @@ class EventValidator:
|
||||
def _ensure_state_event(self, event):
|
||||
if not event.is_state():
|
||||
raise SynapseError(400, "'%s' must be state events" % (event.type,))
|
||||
|
||||
|
||||
POWER_LEVELS_SCHEMA = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"ban": {"$ref": "#/definitions/int"},
|
||||
"events": {"$ref": "#/definitions/objectOfInts"},
|
||||
"events_default": {"$ref": "#/definitions/int"},
|
||||
"invite": {"$ref": "#/definitions/int"},
|
||||
"kick": {"$ref": "#/definitions/int"},
|
||||
"notifications": {"$ref": "#/definitions/objectOfInts"},
|
||||
"redact": {"$ref": "#/definitions/int"},
|
||||
"state_default": {"$ref": "#/definitions/int"},
|
||||
"users": {"$ref": "#/definitions/objectOfInts"},
|
||||
"users_default": {"$ref": "#/definitions/int"},
|
||||
},
|
||||
"definitions": {
|
||||
"int": {
|
||||
"type": "integer",
|
||||
"minimum": CANONICALJSON_MIN_INT,
|
||||
"maximum": CANONICALJSON_MAX_INT,
|
||||
},
|
||||
"objectOfInts": {
|
||||
"type": "object",
|
||||
"additionalProperties": {"$ref": "#/definitions/int"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _create_power_level_validator():
|
||||
validator = jsonschema.validators.validator_for(POWER_LEVELS_SCHEMA)
|
||||
|
||||
# by default jsonschema does not consider a frozendict to be an object so
|
||||
# we need to use a custom type checker
|
||||
# https://python-jsonschema.readthedocs.io/en/stable/validate/?highlight=object#validating-with-additional-types
|
||||
type_checker = validator.TYPE_CHECKER.redefine(
|
||||
"object", lambda checker, thing: isinstance(thing, collections.abc.Mapping)
|
||||
)
|
||||
|
||||
return jsonschema.validators.extend(validator, type_checker=type_checker)
|
||||
|
||||
|
||||
plValidator = _create_power_level_validator()
|
||||
|
@@ -43,6 +43,7 @@ from synapse.api.errors import (
    Codes,
    FederationDeniedError,
    HttpResponseException,
    RequestSendFailed,
    SynapseError,
    UnsupportedRoomVersionError,
)
@@ -110,6 +111,23 @@ class FederationClient(FederationBase):
            reset_expiry_on_get=False,
        )

        # A cache for fetching the room hierarchy over federation.
        #
        # Some stale data over federation is OK, but must be refreshed
        # periodically since the local server is in the room.
        #
        # It is a map of (room ID, suggested-only) -> the response of
        # get_room_hierarchy.
        self._get_room_hierarchy_cache: ExpiringCache[
            Tuple[str, bool], Tuple[JsonDict, Sequence[JsonDict], Sequence[str]]
        ] = ExpiringCache(
            cache_name="get_room_hierarchy_cache",
            clock=self._clock,
            max_len=1000,
            expiry_ms=5 * 60 * 1000,
            reset_expiry_on_get=False,
        )

    def _clear_tried_cache(self):
        """Clear pdu_destination_tried cache"""
        now = self._clock.time_msec()
@@ -558,7 +576,11 @@ class FederationClient(FederationBase):

            try:
                return await callback(destination)
            except InvalidResponseError as e:
            except (
                RequestSendFailed,
                InvalidResponseError,
                NotRetryingDestination,
            ) as e:
                logger.warning("Failed to %s via %s: %s", description, destination, e)
            except UnsupportedRoomVersionError:
                raise
@@ -1319,6 +1341,10 @@ class FederationClient(FederationBase):
                remote servers
        """

        cached_result = self._get_room_hierarchy_cache.get((room_id, suggested_only))
        if cached_result:
            return cached_result

        async def send_request(
            destination: str,
        ) -> Tuple[JsonDict, Sequence[JsonDict], Sequence[str]]:
@@ -1365,58 +1391,63 @@ class FederationClient(FederationBase):
            return room, children, inaccessible_children

        try:
            return await self._try_destination_list(
            result = await self._try_destination_list(
                "fetch room hierarchy",
                destinations,
                send_request,
                failover_on_unknown_endpoint=True,
            )
        except SynapseError as e:
            # If an unexpected error occurred, re-raise it.
            if e.code != 502:
                raise

            # Fallback to the old federation API and translate the results if
            # no servers implement the new API.
            #
            # The algorithm below is a bit inefficient as it only attempts to
            # get information for the requested room, but the legacy API may
            # parse information for the requested room, but the legacy API may
            # return additional layers.
            if e.code == 502:
                legacy_result = await self.get_space_summary(
                    destinations,
                    room_id,
                    suggested_only,
                    max_rooms_per_space=None,
                    exclude_rooms=[],
                )
            legacy_result = await self.get_space_summary(
                destinations,
                room_id,
                suggested_only,
                max_rooms_per_space=None,
                exclude_rooms=[],
            )

                # Find the requested room in the response (and remove it).
                for _i, room in enumerate(legacy_result.rooms):
                    if room.get("room_id") == room_id:
                        break
                else:
                    # The requested room was not returned, nothing we can do.
                    raise
                requested_room = legacy_result.rooms.pop(_i)
            # Find the requested room in the response (and remove it).
            for _i, room in enumerate(legacy_result.rooms):
                if room.get("room_id") == room_id:
                    break
            else:
                # The requested room was not returned, nothing we can do.
                raise
            requested_room = legacy_result.rooms.pop(_i)

                # Find any children events of the requested room.
                children_events = []
                children_room_ids = set()
                for event in legacy_result.events:
                    if event.room_id == room_id:
                        children_events.append(event.data)
                        children_room_ids.add(event.state_key)
                # And add them under the requested room.
                requested_room["children_state"] = children_events
            # Find any children events of the requested room.
            children_events = []
            children_room_ids = set()
            for event in legacy_result.events:
                if event.room_id == room_id:
                    children_events.append(event.data)
                    children_room_ids.add(event.state_key)
            # And add them under the requested room.
            requested_room["children_state"] = children_events

                # Find the children rooms.
                children = []
                for room in legacy_result.rooms:
                    if room.get("room_id") in children_room_ids:
                        children.append(room)
            # Find the children rooms.
            children = []
            for room in legacy_result.rooms:
                if room.get("room_id") in children_room_ids:
                    children.append(room)

                # It isn't clear from the response whether some of the rooms are
                # not accessible.
                return requested_room, children, ()
            # It isn't clear from the response whether some of the rooms are
            # not accessible.
            result = (requested_room, children, ())

            raise
        # Cache the result to avoid fetching data over federation every time.
        self._get_room_hierarchy_cache[(room_id, suggested_only)] = result
        return result


@attr.s(frozen=True, slots=True, auto_attribs=True)
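The new _get_room_hierarchy_cache trades a little staleness for far fewer federation round-trips: responses are keyed by (room ID, suggested-only) and expire after five minutes. A self-contained sketch of that caching pattern (illustrative only; Synapse's real ExpiringCache also enforces max_len and hooks into its clock and metrics, and fetch_hierarchy_over_federation below is a made-up stand-in for the federation call):

import asyncio
import time
from typing import Dict, Generic, Hashable, Optional, Tuple, TypeVar

K = TypeVar("K", bound=Hashable)
V = TypeVar("V")


class TTLCache(Generic[K, V]):
    """A tiny time-based cache: entries silently expire after `ttl` seconds."""

    def __init__(self, ttl: float) -> None:
        self._ttl = ttl
        self._entries: Dict[K, Tuple[float, V]] = {}

    def get(self, key: K) -> Optional[V]:
        entry = self._entries.get(key)
        if entry is None:
            return None
        inserted_at, value = entry
        if time.monotonic() - inserted_at > self._ttl:
            del self._entries[key]
            return None
        return value

    def __setitem__(self, key: K, value: V) -> None:
        self._entries[key] = (time.monotonic(), value)


# Keyed the same way as the cache added above: (room_id, suggested_only).
hierarchy_cache: TTLCache[Tuple[str, bool], dict] = TTLCache(ttl=5 * 60)


async def fetch_hierarchy_over_federation(room_id: str, suggested_only: bool) -> dict:
    # Made-up stand-in for the real federation request.
    return {"room": {"room_id": room_id}, "children": [], "inaccessible": []}


async def get_room_hierarchy(room_id: str, suggested_only: bool) -> dict:
    cached = hierarchy_cache.get((room_id, suggested_only))
    if cached is not None:
        return cached
    result = await fetch_hierarchy_over_federation(room_id, suggested_only)
    hierarchy_cache[(room_id, suggested_only)] = result
    return result


if __name__ == "__main__":
    print(asyncio.run(get_room_hierarchy("!space:example.org", True)))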
@@ -110,6 +110,7 @@ class FederationServer(FederationBase):
        super().__init__(hs)

        self.handler = hs.get_federation_handler()
        self._federation_event_handler = hs.get_federation_event_handler()
        self.state = hs.get_state_handler()
        self._event_auth_handler = hs.get_event_auth_handler()

@@ -787,7 +788,9 @@ class FederationServer(FederationBase):

        event = await self._check_sigs_and_hash(room_version, event)

        return await self.handler.on_send_membership_event(origin, event)
        return await self._federation_event_handler.on_send_membership_event(
            origin, event
        )

    async def on_event_auth(
        self, origin: str, room_id: str, event_id: str
@@ -1005,9 +1008,7 @@ class FederationServer(FederationBase):
        async with lock:
            logger.info("handling received PDU: %s", event)
            try:
                await self.handler.on_receive_pdu(
                    origin, event, sent_to_us_directly=True
                )
                await self._federation_event_handler.on_receive_pdu(origin, event)
            except FederationError as e:
                # XXX: Ideally we'd inform the remote we failed to process
                # the event, but we can't return an error in the transaction
@@ -627,23 +627,28 @@ class AuthHandler(BaseHandler):

    async def add_oob_auth(
        self, stagetype: str, authdict: Dict[str, Any], clientip: str
    ) -> bool:
    ) -> None:
        """
        Adds the result of out-of-band authentication into an existing auth
        session. Currently used for adding the result of fallback auth.

        Raises:
            LoginError if the stagetype is unknown or the session is missing.
            LoginError is raised by check_auth if authentication fails.
        """
        if stagetype not in self.checkers:
            raise LoginError(400, "", Codes.MISSING_PARAM)
        if "session" not in authdict:
            raise LoginError(400, "", Codes.MISSING_PARAM)

        result = await self.checkers[stagetype].check_auth(authdict, clientip)
        if result:
            await self.store.mark_ui_auth_stage_complete(
                authdict["session"], stagetype, result
            raise LoginError(
                400, f"Unknown UIA stage type: {stagetype}", Codes.INVALID_PARAM
            )
            return True
        return False
        if "session" not in authdict:
            raise LoginError(400, "Missing session ID", Codes.MISSING_PARAM)

        # If authentication fails a LoginError is raised. Otherwise, store
        # the successful result.
        result = await self.checkers[stagetype].check_auth(authdict, clientip)
        await self.store.mark_ui_auth_stage_complete(
            authdict["session"], stagetype, result
        )

    def get_session_id(self, clientdict: Dict[str, Any]) -> Optional[str]:
        """
@@ -1459,6 +1464,10 @@ class AuthHandler(BaseHandler):
            )

        await self.store.user_delete_threepid(user_id, medium, address)
        if medium == "email":
            await self.store.delete_pusher_by_app_id_pushkey_user_id(
                app_id="m.email", pushkey=address, user_id=user_id
            )
        return result

    async def hash(self, password: str) -> str:
@@ -1727,7 +1736,6 @@ class AuthHandler(BaseHandler):

@attr.s(slots=True)
class MacaroonGenerator:

    hs = attr.ib()

    def generate_guest_access_token(self, user_id: str) -> str:
1825 synapse/handlers/federation_event.py Normal file
File diff suppressed because it is too large
@@ -151,7 +151,7 @@ class InitialSyncHandler(BaseHandler):
            limit = 10

        async def handle_room(event: RoomsForUser):
            d = {
            d: JsonDict = {
                "room_id": event.room_id,
                "membership": event.membership,
                "visibility": (
@@ -353,6 +353,11 @@ class BasePresenceHandler(abc.ABC):
        # otherwise would not do).
        await self.set_state(UserID.from_string(user_id), state, force_notify=True)

    async def is_visible(self, observed_user: UserID, observer_user: UserID) -> bool:
        raise NotImplementedError(
            "Attempting to check presence on a non-presence worker."
        )


class _NullContextManager(ContextManager[None]):
    """A context manager which does nothing."""
@@ -56,6 +56,22 @@ login_counter = Counter(
)


def init_counters_for_auth_provider(auth_provider_id: str) -> None:
    """Ensure the prometheus counters for the given auth provider are initialised

    This fixes a problem where the counters are not reported for a given auth provider
    until the user first logs in/registers.
    """
    for is_guest in (True, False):
        login_counter.labels(guest=is_guest, auth_provider=auth_provider_id)
        for shadow_banned in (True, False):
            registration_counter.labels(
                guest=is_guest,
                shadow_banned=shadow_banned,
                auth_provider=auth_provider_id,
            )


class LoginDict(TypedDict):
    device_id: str
    access_token: str
@@ -96,6 +112,8 @@ class RegistrationHandler(BaseHandler):
        self.session_lifetime = hs.config.session_lifetime
        self.access_token_lifetime = hs.config.access_token_lifetime

        init_counters_for_auth_provider("")

    async def check_username(
        self,
        localpart: str,
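init_counters_for_auth_provider works because calling Counter.labels(...) is enough to make a label combination appear in the /metrics output with a value of 0, so dashboards see the series before the first login or registration for that provider. A minimal sketch of the trick with a hypothetical metric name (not Synapse's own counters):

from prometheus_client import Counter, generate_latest

# Hypothetical metric for this sketch; Synapse's real counters carry more labels.
login_counter = Counter(
    "demo_login_attempts",
    "Number of login attempts",
    ["guest", "auth_provider"],
)


def init_counters_for_auth_provider(auth_provider_id: str) -> None:
    # Touching every label combination registers the series at 0 immediately,
    # instead of only after the first real login for that provider.
    for is_guest in (True, False):
        login_counter.labels(guest=is_guest, auth_provider=auth_provider_id)


init_counters_for_auth_provider("oidc-github")  # made-up provider id
print(generate_latest().decode())  # the primed series appear with value 0.0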
@@ -36,6 +36,7 @@ from synapse.api.ratelimiting import Ratelimiter
from synapse.event_auth import get_named_level, get_power_level_event
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN
from synapse.types import (
    JsonDict,
    Requester,
@@ -79,7 +80,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
        self.account_data_handler = hs.get_account_data_handler()
        self.event_auth_handler = hs.get_event_auth_handler()

        self.member_linearizer = Linearizer(name="member")
        self.member_linearizer: Linearizer = Linearizer(name="member")

        self.clock = hs.get_clock()
        self.spam_checker = hs.get_spam_checker()
@@ -556,6 +557,20 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
            content.pop("displayname", None)
            content.pop("avatar_url", None)

        if len(content.get("displayname") or "") > MAX_DISPLAYNAME_LEN:
            raise SynapseError(
                400,
                f"Displayname is too long (max {MAX_DISPLAYNAME_LEN})",
                errcode=Codes.BAD_JSON,
            )

        if len(content.get("avatar_url") or "") > MAX_AVATAR_URL_LEN:
            raise SynapseError(
                400,
                f"Avatar URL is too long (max {MAX_AVATAR_URL_LEN})",
                errcode=Codes.BAD_JSON,
            )

        effective_membership_state = action
        if action in ["kick", "unban"]:
            effective_membership_state = "leave"
@@ -28,12 +28,11 @@ from synapse.api.constants import (
    Membership,
    RoomTypes,
)
from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError
from synapse.events import EventBase
from synapse.events.utils import format_event_for_client_v2
from synapse.types import JsonDict
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.stringutils import random_string

if TYPE_CHECKING:
    from synapse.server import HomeServer
@@ -76,6 +75,9 @@ class _PaginationSession:


class RoomSummaryHandler:
    # A unique key used for pagination sessions for the room hierarchy endpoint.
    _PAGINATION_SESSION_TYPE = "room_hierarchy_pagination"

    # The time a pagination session remains valid for.
    _PAGINATION_SESSION_VALIDITY_PERIOD_MS = 5 * 60 * 1000

@@ -87,12 +89,6 @@ class RoomSummaryHandler:
        self._server_name = hs.hostname
        self._federation_client = hs.get_federation_client()

        # A map of query information to the current pagination state.
        #
        # TODO Allow for multiple workers to share this data.
        # TODO Expire pagination tokens.
        self._pagination_sessions: Dict[_PaginationKey, _PaginationSession] = {}

        # If a user tries to fetch the same page multiple times in quick succession,
        # only process the first attempt and return its result to subsequent requests.
        self._pagination_response_cache: ResponseCache[
@@ -102,21 +98,6 @@ class RoomSummaryHandler:
            "get_room_hierarchy",
        )

    def _expire_pagination_sessions(self):
        """Expire pagination session which are old."""
        expire_before = (
            self._clock.time_msec() - self._PAGINATION_SESSION_VALIDITY_PERIOD_MS
        )
        to_expire = []

        for key, value in self._pagination_sessions.items():
            if value.creation_time_ms < expire_before:
                to_expire.append(key)

        for key in to_expire:
            logger.debug("Expiring pagination session id %s", key)
            del self._pagination_sessions[key]

    async def get_space_summary(
        self,
        requester: str,
@@ -327,18 +308,29 @@ class RoomSummaryHandler:

        # If this is continuing a previous session, pull the persisted data.
        if from_token:
            self._expire_pagination_sessions()
            try:
                pagination_session = await self._store.get_session(
                    session_type=self._PAGINATION_SESSION_TYPE,
                    session_id=from_token,
                )
            except StoreError:
                raise SynapseError(400, "Unknown pagination token", Codes.INVALID_PARAM)

            pagination_key = _PaginationKey(
                requested_room_id, suggested_only, max_depth, from_token
            )
            if pagination_key not in self._pagination_sessions:
            # If the requester, room ID, suggested-only, or max depth were modified
            # the session is invalid.
            if (
                requester != pagination_session["requester"]
                or requested_room_id != pagination_session["room_id"]
                or suggested_only != pagination_session["suggested_only"]
                or max_depth != pagination_session["max_depth"]
            ):
                raise SynapseError(400, "Unknown pagination token", Codes.INVALID_PARAM)

            # Load the previous state.
            pagination_session = self._pagination_sessions[pagination_key]
            room_queue = pagination_session.room_queue
            processed_rooms = pagination_session.processed_rooms
            room_queue = [
                _RoomQueueEntry(*fields) for fields in pagination_session["room_queue"]
            ]
            processed_rooms = set(pagination_session["processed_rooms"])
        else:
            # The queue of rooms to process, the next room is last on the stack.
            room_queue = [_RoomQueueEntry(requested_room_id, ())]
@@ -456,13 +448,21 @@ class RoomSummaryHandler:

        # If there's additional data, generate a pagination token (and persist state).
        if room_queue:
            next_batch = random_string(24)
            result["next_batch"] = next_batch
            pagination_key = _PaginationKey(
                requested_room_id, suggested_only, max_depth, next_batch
            )
            self._pagination_sessions[pagination_key] = _PaginationSession(
                self._clock.time_msec(), room_queue, processed_rooms
            result["next_batch"] = await self._store.create_session(
                session_type=self._PAGINATION_SESSION_TYPE,
                value={
                    # Information which must be identical across pagination.
                    "requester": requester,
                    "room_id": requested_room_id,
                    "suggested_only": suggested_only,
                    "max_depth": max_depth,
                    # The stored state.
                    "room_queue": [
                        attr.astuple(room_entry) for room_entry in room_queue
                    ],
                    "processed_rooms": list(processed_rooms),
                },
                expiry_ms=self._PAGINATION_SESSION_VALIDITY_PERIOD_MS,
            )

        return result
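The hunks above replace the worker-local _pagination_sessions dict with sessions persisted via the datastore, so any worker can resume a /hierarchy pagination and stale sessions expire in the database rather than in memory. A rough standalone sketch of that pattern using sqlite (illustrative only; Synapse's create_session/get_session use its own sessions table, not this schema):

import json
import secrets
import sqlite3
import time

SESSION_VALIDITY_SECONDS = 5 * 60

db = sqlite3.connect(":memory:")
db.execute(
    "CREATE TABLE sessions (session_id TEXT PRIMARY KEY, value TEXT, expiry_ts REAL)"
)


def create_session(value: dict) -> str:
    # The returned opaque string is what the handler exposes as `next_batch`.
    session_id = secrets.token_urlsafe(18)
    db.execute(
        "INSERT INTO sessions VALUES (?, ?, ?)",
        (session_id, json.dumps(value), time.time() + SESSION_VALIDITY_SECONDS),
    )
    return session_id


def get_session(session_id: str) -> dict:
    row = db.execute(
        "SELECT value, expiry_ts FROM sessions WHERE session_id = ?", (session_id,)
    ).fetchone()
    if row is None or row[1] < time.time():
        raise KeyError("Unknown pagination token")
    return json.loads(row[0])


# First page: stash the remaining work plus the query that produced it.
token = create_session(
    {
        "requester": "@alice:example.org",
        "room_id": "!space:example.org",
        "suggested_only": False,
        "max_depth": None,
        "room_queue": [["!child:example.org", []]],
        "processed_rooms": ["!space:example.org"],
    }
)

# Follow-up page: reject the token if the query no longer matches, then resume.
state = get_session(token)
assert state["requester"] == "@alice:example.org"
print("resuming with", len(state["room_queue"]), "queued room(s)")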
@@ -37,6 +37,7 @@ from twisted.web.server import Request
from synapse.api.constants import LoginType
from synapse.api.errors import Codes, NotFoundError, RedirectException, SynapseError
from synapse.config.sso import SsoAttributeRequirement
from synapse.handlers.register import init_counters_for_auth_provider
from synapse.handlers.ui_auth import UIAuthSessionDataConstants
from synapse.http import get_request_user_agent
from synapse.http.server import respond_with_html, respond_with_redirect
@@ -213,6 +214,7 @@ class SsoHandler:
        p_id = p.idp_id
        assert p_id not in self._identity_providers
        self._identity_providers[p_id] = p
        init_counters_for_auth_provider(p_id)

    def get_identity_providers(self) -> Mapping[str, SsoIdentityProvider]:
        """Get the configured identity providers"""
@ -1,5 +1,4 @@
|
||||
# Copyright 2015, 2016 OpenMarket Ltd
|
||||
# Copyright 2018, 2019 New Vector Ltd
|
||||
# Copyright 2015-2021 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -31,6 +30,8 @@ from prometheus_client import Counter
|
||||
|
||||
from synapse.api.constants import AccountDataTypes, EventTypes, Membership
|
||||
from synapse.api.filtering import FilterCollection
|
||||
from synapse.api.presence import UserPresenceState
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
||||
from synapse.events import EventBase
|
||||
from synapse.logging.context import current_context
|
||||
from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span
|
||||
@ -86,20 +87,20 @@ LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE = 100
|
||||
SyncRequestKey = Tuple[Any, ...]
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class SyncConfig:
|
||||
user = attr.ib(type=UserID)
|
||||
filter_collection = attr.ib(type=FilterCollection)
|
||||
is_guest = attr.ib(type=bool)
|
||||
request_key = attr.ib(type=SyncRequestKey)
|
||||
device_id = attr.ib(type=Optional[str])
|
||||
user: UserID
|
||||
filter_collection: FilterCollection
|
||||
is_guest: bool
|
||||
request_key: SyncRequestKey
|
||||
device_id: Optional[str]
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class TimelineBatch:
|
||||
prev_batch = attr.ib(type=StreamToken)
|
||||
events = attr.ib(type=List[EventBase])
|
||||
limited = attr.ib(type=bool)
|
||||
prev_batch: StreamToken
|
||||
events: List[EventBase]
|
||||
limited: bool
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
"""Make the result appear empty if there are no updates. This is used
|
||||
@ -113,16 +114,16 @@ class TimelineBatch:
|
||||
# if there are updates for it, which we check after the instance has been created.
|
||||
# This should not be a big deal because we update the notification counts afterwards as
|
||||
# well anyway.
|
||||
@attr.s(slots=True)
|
||||
@attr.s(slots=True, auto_attribs=True)
|
||||
class JoinedSyncResult:
|
||||
room_id = attr.ib(type=str)
|
||||
timeline = attr.ib(type=TimelineBatch)
|
||||
state = attr.ib(type=StateMap[EventBase])
|
||||
ephemeral = attr.ib(type=List[JsonDict])
|
||||
account_data = attr.ib(type=List[JsonDict])
|
||||
unread_notifications = attr.ib(type=JsonDict)
|
||||
summary = attr.ib(type=Optional[JsonDict])
|
||||
unread_count = attr.ib(type=int)
|
||||
room_id: str
|
||||
timeline: TimelineBatch
|
||||
state: StateMap[EventBase]
|
||||
ephemeral: List[JsonDict]
|
||||
account_data: List[JsonDict]
|
||||
unread_notifications: JsonDict
|
||||
summary: Optional[JsonDict]
|
||||
unread_count: int
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
"""Make the result appear empty if there are no updates. This is used
|
||||
@ -138,12 +139,12 @@ class JoinedSyncResult:
|
||||
)
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class ArchivedSyncResult:
|
||||
room_id = attr.ib(type=str)
|
||||
timeline = attr.ib(type=TimelineBatch)
|
||||
state = attr.ib(type=StateMap[EventBase])
|
||||
account_data = attr.ib(type=List[JsonDict])
|
||||
room_id: str
|
||||
timeline: TimelineBatch
|
||||
state: StateMap[EventBase]
|
||||
account_data: List[JsonDict]
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
"""Make the result appear empty if there are no updates. This is used
|
||||
@ -152,37 +153,37 @@ class ArchivedSyncResult:
|
||||
return bool(self.timeline or self.state or self.account_data)
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class InvitedSyncResult:
|
||||
room_id = attr.ib(type=str)
|
||||
invite = attr.ib(type=EventBase)
|
||||
room_id: str
|
||||
invite: EventBase
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
"""Invited rooms should always be reported to the client"""
|
||||
return True
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class KnockedSyncResult:
|
||||
room_id = attr.ib(type=str)
|
||||
knock = attr.ib(type=EventBase)
|
||||
room_id: str
|
||||
knock: EventBase
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
"""Knocked rooms should always be reported to the client"""
|
||||
return True
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class GroupsSyncResult:
|
||||
join = attr.ib(type=JsonDict)
|
||||
invite = attr.ib(type=JsonDict)
|
||||
leave = attr.ib(type=JsonDict)
|
||||
join: JsonDict
|
||||
invite: JsonDict
|
||||
leave: JsonDict
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return bool(self.join or self.invite or self.leave)
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class DeviceLists:
|
||||
"""
|
||||
Attributes:
|
||||
@ -190,27 +191,27 @@ class DeviceLists:
|
||||
left: List of user_ids whose devices we no longer track
|
||||
"""
|
||||
|
||||
changed = attr.ib(type=Collection[str])
|
||||
left = attr.ib(type=Collection[str])
|
||||
changed: Collection[str]
|
||||
left: Collection[str]
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return bool(self.changed or self.left)
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
@attr.s(slots=True, auto_attribs=True)
|
||||
class _RoomChanges:
|
||||
"""The set of room entries to include in the sync, plus the set of joined
|
||||
and left room IDs since last sync.
|
||||
"""
|
||||
|
||||
room_entries = attr.ib(type=List["RoomSyncResultBuilder"])
|
||||
invited = attr.ib(type=List[InvitedSyncResult])
|
||||
knocked = attr.ib(type=List[KnockedSyncResult])
|
||||
newly_joined_rooms = attr.ib(type=List[str])
|
||||
newly_left_rooms = attr.ib(type=List[str])
|
||||
room_entries: List["RoomSyncResultBuilder"]
|
||||
invited: List[InvitedSyncResult]
|
||||
knocked: List[KnockedSyncResult]
|
||||
newly_joined_rooms: List[str]
|
||||
newly_left_rooms: List[str]
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||
class SyncResult:
|
||||
"""
|
||||
Attributes:
|
||||
@ -230,18 +231,18 @@ class SyncResult:
|
||||
groups: Group updates, if any
|
||||
"""
|
||||
|
||||
next_batch = attr.ib(type=StreamToken)
|
||||
presence = attr.ib(type=List[JsonDict])
|
||||
account_data = attr.ib(type=List[JsonDict])
|
||||
joined = attr.ib(type=List[JoinedSyncResult])
|
||||
invited = attr.ib(type=List[InvitedSyncResult])
|
||||
knocked = attr.ib(type=List[KnockedSyncResult])
|
||||
archived = attr.ib(type=List[ArchivedSyncResult])
|
||||
to_device = attr.ib(type=List[JsonDict])
|
||||
device_lists = attr.ib(type=DeviceLists)
|
||||
device_one_time_keys_count = attr.ib(type=JsonDict)
|
||||
device_unused_fallback_key_types = attr.ib(type=List[str])
|
||||
groups = attr.ib(type=Optional[GroupsSyncResult])
|
||||
next_batch: StreamToken
|
||||
presence: List[UserPresenceState]
|
||||
account_data: List[JsonDict]
|
||||
joined: List[JoinedSyncResult]
|
||||
invited: List[InvitedSyncResult]
|
||||
knocked: List[KnockedSyncResult]
|
||||
archived: List[ArchivedSyncResult]
|
||||
to_device: List[JsonDict]
|
||||
device_lists: DeviceLists
|
||||
device_one_time_keys_count: JsonDict
|
||||
device_unused_fallback_key_types: List[str]
|
||||
groups: Optional[GroupsSyncResult]
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
"""Make the result appear empty if there are no updates. This is used
|
||||
@ -701,7 +702,7 @@ class SyncHandler:
|
||||
name_id = state_ids.get((EventTypes.Name, ""))
|
||||
canonical_alias_id = state_ids.get((EventTypes.CanonicalAlias, ""))
|
||||
|
||||
summary = {}
|
||||
summary: JsonDict = {}
|
||||
empty_ms = MemberSummary([], 0)
|
||||
|
||||
# TODO: only send these when they change.
|
||||
@ -1842,6 +1843,9 @@ class SyncHandler:
|
||||
knocked = []
|
||||
|
||||
for event in room_list:
|
||||
if event.room_version_id not in KNOWN_ROOM_VERSIONS:
|
||||
continue
|
||||
|
||||
if event.membership == Membership.JOIN:
|
||||
room_entries.append(
|
||||
RoomSyncResultBuilder(
|
||||
@ -2075,21 +2079,23 @@ class SyncHandler:
|
||||
# If the membership's stream ordering is after the given stream
|
||||
# ordering, we need to go and work out if the user was in the room
|
||||
# before.
|
||||
for room_id, event_pos in joined_rooms:
|
||||
if not event_pos.persisted_after(room_key):
|
||||
joined_room_ids.add(room_id)
|
||||
for joined_room in joined_rooms:
|
||||
if not joined_room.event_pos.persisted_after(room_key):
|
||||
joined_room_ids.add(joined_room.room_id)
|
||||
continue
|
||||
|
||||
logger.info("User joined room after current token: %s", room_id)
|
||||
logger.info("User joined room after current token: %s", joined_room.room_id)
|
||||
|
||||
extrems = (
|
||||
await self.store.get_forward_extremities_for_room_at_stream_ordering(
|
||||
room_id, event_pos.stream
|
||||
joined_room.room_id, joined_room.event_pos.stream
|
||||
)
|
||||
)
|
||||
users_in_room = await self.state.get_current_users_in_room(room_id, extrems)
|
||||
users_in_room = await self.state.get_current_users_in_room(
|
||||
joined_room.room_id, extrems
|
||||
)
|
||||
if user_id in users_in_room:
|
||||
joined_room_ids.add(room_id)
|
||||
joined_room_ids.add(joined_room.room_id)
|
||||
|
||||
return frozenset(joined_room_ids)
|
||||
|
||||
@ -2159,7 +2165,7 @@ def _calculate_state(
|
||||
return {event_id_to_key[e]: e for e in state_ids}
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
@attr.s(slots=True, auto_attribs=True)
|
||||
class SyncResultBuilder:
|
||||
"""Used to help build up a new SyncResult for a user
|
||||
|
||||
@ -2171,33 +2177,33 @@ class SyncResultBuilder:
|
||||
joined_room_ids: List of rooms the user is joined to
|
||||
|
||||
# The following mirror the fields in a sync response
|
||||
presence (list)
|
||||
account_data (list)
|
||||
joined (list[JoinedSyncResult])
|
||||
invited (list[InvitedSyncResult])
|
||||
knocked (list[KnockedSyncResult])
|
||||
archived (list[ArchivedSyncResult])
|
||||
groups (GroupsSyncResult|None)
|
||||
to_device (list)
|
||||
presence
|
||||
account_data
|
||||
joined
|
||||
invited
|
||||
knocked
|
||||
archived
|
||||
groups
|
||||
to_device
|
||||
"""
|
||||
|
||||
sync_config = attr.ib(type=SyncConfig)
|
||||
full_state = attr.ib(type=bool)
|
||||
since_token = attr.ib(type=Optional[StreamToken])
|
||||
now_token = attr.ib(type=StreamToken)
|
||||
joined_room_ids = attr.ib(type=FrozenSet[str])
|
||||
sync_config: SyncConfig
|
||||
full_state: bool
|
||||
since_token: Optional[StreamToken]
|
||||
now_token: StreamToken
|
||||
joined_room_ids: FrozenSet[str]
|
||||
|
||||
presence = attr.ib(type=List[JsonDict], default=attr.Factory(list))
|
||||
account_data = attr.ib(type=List[JsonDict], default=attr.Factory(list))
|
||||
joined = attr.ib(type=List[JoinedSyncResult], default=attr.Factory(list))
|
||||
invited = attr.ib(type=List[InvitedSyncResult], default=attr.Factory(list))
|
||||
knocked = attr.ib(type=List[KnockedSyncResult], default=attr.Factory(list))
|
||||
archived = attr.ib(type=List[ArchivedSyncResult], default=attr.Factory(list))
|
||||
groups = attr.ib(type=Optional[GroupsSyncResult], default=None)
|
||||
to_device = attr.ib(type=List[JsonDict], default=attr.Factory(list))
|
||||
presence: List[UserPresenceState] = attr.Factory(list)
|
||||
account_data: List[JsonDict] = attr.Factory(list)
|
||||
joined: List[JoinedSyncResult] = attr.Factory(list)
|
||||
invited: List[InvitedSyncResult] = attr.Factory(list)
|
||||
knocked: List[KnockedSyncResult] = attr.Factory(list)
|
||||
archived: List[ArchivedSyncResult] = attr.Factory(list)
|
||||
groups: Optional[GroupsSyncResult] = None
|
||||
to_device: List[JsonDict] = attr.Factory(list)
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
@attr.s(slots=True, auto_attribs=True)
|
||||
class RoomSyncResultBuilder:
|
||||
"""Stores information needed to create either a `JoinedSyncResult` or
|
||||
`ArchivedSyncResult`.
|
||||
@ -2213,10 +2219,10 @@ class RoomSyncResultBuilder:
|
||||
upto_token: Latest point to return events from.
|
||||
"""
|
||||
|
||||
room_id = attr.ib(type=str)
|
||||
rtype = attr.ib(type=str)
|
||||
events = attr.ib(type=Optional[List[EventBase]])
|
||||
newly_joined = attr.ib(type=bool)
|
||||
full_state = attr.ib(type=bool)
|
||||
since_token = attr.ib(type=Optional[StreamToken])
|
||||
upto_token = attr.ib(type=StreamToken)
|
||||
room_id: str
|
||||
rtype: str
|
||||
events: Optional[List[EventBase]]
|
||||
newly_joined: bool
|
||||
full_state: bool
|
||||
since_token: Optional[StreamToken]
|
||||
upto_token: StreamToken
|
||||
|
@@ -34,3 +34,8 @@ class UIAuthSessionDataConstants:
    # used by validate_user_via_ui_auth to store the mxid of the user we are validating
    # for.
    REQUEST_USER_ID = "request_user_id"

    # used during registration to store the registration token used (if required) so that:
    # - we can prevent a token being used twice by one session
    # - we can 'use up' the token after registration has successfully completed
    REGISTRATION_TOKEN = "org.matrix.msc3231.login.registration_token"
@@ -49,7 +49,7 @@ class UserInteractiveAuthChecker:
            clientip: The IP address of the client.

        Raises:
            SynapseError if authentication failed
            LoginError if authentication failed.

        Returns:
            The result of authentication (to pass back to the client?)
@@ -131,7 +131,9 @@ class RecaptchaAuthChecker(UserInteractiveAuthChecker):
        )
        if resp_body["success"]:
            return True
        raise LoginError(401, "", errcode=Codes.UNAUTHORIZED)
        raise LoginError(
            401, "Captcha authentication failed", errcode=Codes.UNAUTHORIZED
        )


class _BaseThreepidAuthChecker:
@@ -191,7 +193,9 @@ class _BaseThreepidAuthChecker:
            raise AssertionError("Unrecognized threepid medium: %s" % (medium,))

        if not threepid:
            raise LoginError(401, "", errcode=Codes.UNAUTHORIZED)
            raise LoginError(
                401, "Unable to get validated threepid", errcode=Codes.UNAUTHORIZED
            )

        if threepid["medium"] != medium:
            raise LoginError(
@@ -237,11 +241,76 @@ class MsisdnAuthChecker(UserInteractiveAuthChecker, _BaseThreepidAuthChecker):
        return await self._check_threepid("msisdn", authdict)


class RegistrationTokenAuthChecker(UserInteractiveAuthChecker):
    AUTH_TYPE = LoginType.REGISTRATION_TOKEN

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.hs = hs
        self._enabled = bool(hs.config.registration_requires_token)
        self.store = hs.get_datastore()

    def is_enabled(self) -> bool:
        return self._enabled

    async def check_auth(self, authdict: dict, clientip: str) -> Any:
        if "token" not in authdict:
            raise LoginError(400, "Missing registration token", Codes.MISSING_PARAM)
        if not isinstance(authdict["token"], str):
            raise LoginError(
                400, "Registration token must be a string", Codes.INVALID_PARAM
            )
        if "session" not in authdict:
            raise LoginError(400, "Missing UIA session", Codes.MISSING_PARAM)

        # Get these here to avoid cyclic dependencies
        from synapse.handlers.ui_auth import UIAuthSessionDataConstants

        auth_handler = self.hs.get_auth_handler()

        session = authdict["session"]
        token = authdict["token"]

        # If the LoginType.REGISTRATION_TOKEN stage has already been completed,
        # return early to avoid incrementing `pending` again.
        stored_token = await auth_handler.get_session_data(
            session, UIAuthSessionDataConstants.REGISTRATION_TOKEN
        )
        if stored_token:
            if token != stored_token:
                raise LoginError(
                    400, "Registration token has changed", Codes.INVALID_PARAM
                )
            else:
                return token

        if await self.store.registration_token_is_valid(token):
            # Increment pending counter, so that if token has limited uses it
            # can't be used up by someone else in the meantime.
            await self.store.set_registration_token_pending(token)
            # Store the token in the UIA session, so that once registration
            # is complete `completed` can be incremented.
            await auth_handler.set_session_data(
                session,
                UIAuthSessionDataConstants.REGISTRATION_TOKEN,
                token,
            )
            # The token will be stored as the result of the authentication stage
            # in ui_auth_sessions_credentials. This allows the pending counter
            # for tokens to be decremented when expired sessions are deleted.
            return token
        else:
            raise LoginError(
                401, "Invalid registration token", errcode=Codes.UNAUTHORIZED
            )


INTERACTIVE_AUTH_CHECKERS = [
    DummyAuthChecker,
    TermsAuthChecker,
    RecaptchaAuthChecker,
    EmailIdentityAuthChecker,
    MsisdnAuthChecker,
    RegistrationTokenAuthChecker,
]
"""A list of UserInteractiveAuthChecker classes"""
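For clients that drive user-interactive auth directly rather than through the fallback page, the new stage is completed by resubmitting the original request with an auth dict naming the MSC3231 login type, the admin-issued token and the UIA session. A rough client-side sketch (not part of this diff; the homeserver URL, username and token are placeholders):

import requests

HS = "https://homeserver.example.org"  # placeholder homeserver

# 1. Kick off registration; the server answers 401 with the UIA flows and a session ID.
r = requests.post(f"{HS}/_matrix/client/r0/register", json={})
session_id = r.json()["session"]

# 2. Complete the registration-token stage with a token handed out by an admin.
r = requests.post(
    f"{HS}/_matrix/client/r0/register",
    json={
        "username": "alice",
        "password": "placeholder-password",
        "auth": {
            "type": "org.matrix.msc3231.login.registration_token",
            "token": "abcd",
            "session": session_id,
        },
    },
)
print(r.status_code, r.json())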
@ -12,8 +12,15 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from twisted.web.server import Request
|
||||
|
||||
from synapse.http.server import DirectServeJsonResource
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
|
||||
class AdditionalResource(DirectServeJsonResource):
|
||||
"""Resource wrapper for additional_resources
|
||||
@ -25,7 +32,7 @@ class AdditionalResource(DirectServeJsonResource):
|
||||
and exception handling.
|
||||
"""
|
||||
|
||||
def __init__(self, hs, handler):
|
||||
def __init__(self, hs: "HomeServer", handler):
|
||||
"""Initialise AdditionalResource
|
||||
|
||||
The ``handler`` should return a deferred which completes when it has
|
||||
@ -33,14 +40,14 @@ class AdditionalResource(DirectServeJsonResource):
|
||||
``request.write()``, and call ``request.finish()``.
|
||||
|
||||
Args:
|
||||
hs (synapse.server.HomeServer): homeserver
|
||||
hs: homeserver
|
||||
handler ((twisted.web.server.Request) -> twisted.internet.defer.Deferred):
|
||||
function to be called to handle the request.
|
||||
"""
|
||||
super().__init__()
|
||||
self._handler = handler
|
||||
|
||||
def _async_render(self, request):
|
||||
def _async_render(self, request: Request):
|
||||
# Cheekily pass the result straight through, so we don't need to worry
|
||||
# if its an awaitable or not.
|
||||
return self._handler(request)
|
||||
|
@ -16,7 +16,7 @@
|
||||
import logging
|
||||
import random
|
||||
import time
|
||||
from typing import List
|
||||
from typing import Callable, Dict, List
|
||||
|
||||
import attr
|
||||
|
||||
@ -28,35 +28,35 @@ from synapse.logging.context import make_deferred_yieldable
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
SERVER_CACHE = {}
|
||||
SERVER_CACHE: Dict[bytes, List["Server"]] = {}
|
||||
|
||||
|
||||
@attr.s(slots=True, frozen=True)
|
||||
@attr.s(auto_attribs=True, slots=True, frozen=True)
|
||||
class Server:
|
||||
"""
|
||||
Our record of an individual server which can be tried to reach a destination.
|
||||
|
||||
Attributes:
|
||||
host (bytes): target hostname
|
||||
port (int):
|
||||
priority (int):
|
||||
weight (int):
|
||||
expires (int): when the cache should expire this record - in *seconds* since
|
||||
host: target hostname
|
||||
port:
|
||||
priority:
|
||||
weight:
|
||||
expires: when the cache should expire this record - in *seconds* since
|
||||
the epoch
|
||||
"""
|
||||
|
||||
host = attr.ib()
|
||||
port = attr.ib()
|
||||
priority = attr.ib(default=0)
|
||||
weight = attr.ib(default=0)
|
||||
expires = attr.ib(default=0)
|
||||
host: bytes
|
||||
port: int
|
||||
priority: int = 0
|
||||
weight: int = 0
|
||||
expires: int = 0
|
||||
|
||||
|
||||
def _sort_server_list(server_list):
|
||||
def _sort_server_list(server_list: List[Server]) -> List[Server]:
|
||||
"""Given a list of SRV records sort them into priority order and shuffle
|
||||
each priority with the given weight.
|
||||
"""
|
||||
priority_map = {}
|
||||
priority_map: Dict[int, List[Server]] = {}
|
||||
|
||||
for server in server_list:
|
||||
priority_map.setdefault(server.priority, []).append(server)
|
||||
@ -103,11 +103,16 @@ class SrvResolver:
|
||||
|
||||
Args:
|
||||
dns_client (twisted.internet.interfaces.IResolver): twisted resolver impl
|
||||
cache (dict): cache object
|
||||
get_time (callable): clock implementation. Should return seconds since the epoch
|
||||
cache: cache object
|
||||
get_time: clock implementation. Should return seconds since the epoch
|
||||
"""
|
||||
|
||||
def __init__(self, dns_client=client, cache=SERVER_CACHE, get_time=time.time):
|
||||
def __init__(
|
||||
self,
|
||||
dns_client=client,
|
||||
cache: Dict[bytes, List[Server]] = SERVER_CACHE,
|
||||
get_time: Callable[[], float] = time.time,
|
||||
):
|
||||
self._dns_client = dns_client
|
||||
self._cache = cache
|
||||
self._get_time = get_time
|
||||
@ -116,7 +121,7 @@ class SrvResolver:
|
||||
"""Look up a SRV record
|
||||
|
||||
Args:
|
||||
service_name (bytes): record to look up
|
||||
service_name: record to look up
|
||||
|
||||
Returns:
|
||||
a list of the SRV records, or an empty list if none found
|
||||
@ -158,7 +163,7 @@ class SrvResolver:
|
||||
and answers[0].payload
|
||||
and answers[0].payload.target == dns.Name(b".")
|
||||
):
|
||||
raise ConnectError("Service %s unavailable" % service_name)
|
||||
raise ConnectError(f"Service {service_name!r} unavailable")
|
||||
|
||||
servers = []
|
||||
|
||||
|
@ -173,7 +173,7 @@ class ProxyAgent(_AgentBase):
|
||||
raise ValueError(f"Invalid URI {uri!r}")
|
||||
|
||||
parsed_uri = URI.fromBytes(uri)
|
||||
pool_key = (parsed_uri.scheme, parsed_uri.host, parsed_uri.port)
|
||||
pool_key = f"{parsed_uri.scheme!r}{parsed_uri.host!r}{parsed_uri.port}"
|
||||
request_path = parsed_uri.originForm
|
||||
|
||||
should_skip_proxy = False
|
||||
@ -199,7 +199,7 @@ class ProxyAgent(_AgentBase):
|
||||
)
|
||||
# Cache *all* connections under the same key, since we are only
|
||||
# connecting to a single destination, the proxy:
|
||||
pool_key = ("http-proxy", self.http_proxy_endpoint)
|
||||
pool_key = "http-proxy"
|
||||
endpoint = self.http_proxy_endpoint
|
||||
request_path = uri
|
||||
elif (
|
||||
|
@ -32,6 +32,7 @@ from twisted.internet import defer
|
||||
from twisted.web.resource import IResource
|
||||
|
||||
from synapse.events import EventBase
|
||||
from synapse.events.presence_router import PresenceRouter
|
||||
from synapse.http.client import SimpleHttpClient
|
||||
from synapse.http.server import (
|
||||
DirectServeHtmlResource,
|
||||
@ -57,6 +58,8 @@ This package defines the 'stable' API which can be used by extension modules whi
|
||||
are loaded into Synapse.
|
||||
"""
|
||||
|
||||
PRESENCE_ALL_USERS = PresenceRouter.ALL_USERS
|
||||
|
||||
__all__ = [
|
||||
"errors",
|
||||
"make_deferred_yieldable",
|
||||
@ -70,6 +73,7 @@ __all__ = [
|
||||
"DirectServeHtmlResource",
|
||||
"DirectServeJsonResource",
|
||||
"ModuleApi",
|
||||
"PRESENCE_ALL_USERS",
|
||||
]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -112,6 +116,7 @@ class ModuleApi:
|
||||
self._spam_checker = hs.get_spam_checker()
|
||||
self._account_validity_handler = hs.get_account_validity_handler()
|
||||
self._third_party_event_rules = hs.get_third_party_event_rules()
|
||||
self._presence_router = hs.get_presence_router()
|
||||
|
||||
#################################################################################
|
||||
# The following methods should only be called during the module's initialisation.
|
||||
@ -131,6 +136,11 @@ class ModuleApi:
|
||||
"""Registers callbacks for third party event rules capabilities."""
|
||||
return self._third_party_event_rules.register_third_party_rules_callbacks
|
||||
|
||||
@property
|
||||
def register_presence_router_callbacks(self):
|
||||
"""Registers callbacks for presence router capabilities."""
|
||||
return self._presence_router.register_presence_router_callbacks
|
||||
|
||||
def register_web_resource(self, path: str, resource: IResource):
|
||||
"""Registers a web resource to be served at the given path.
|
||||
|
||||
|
@@ -48,7 +48,8 @@ logger = logging.getLogger(__name__)
# [1] https://pip.pypa.io/en/stable/reference/pip_install/#requirement-specifiers.

REQUIREMENTS = [
    "jsonschema>=2.5.1",
    # we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
    "jsonschema>=3.0.0",
    "frozendict>=1",
    "unpaddedbase64>=1.1.0",
    "canonicaljson>=1.4.0",
@ -62,7 +62,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
|
||||
self.store = hs.get_datastore()
|
||||
self.storage = hs.get_storage()
|
||||
self.clock = hs.get_clock()
|
||||
self.federation_handler = hs.get_federation_handler()
|
||||
self.federation_event_handler = hs.get_federation_event_handler()
|
||||
|
||||
@staticmethod
|
||||
async def _serialize_payload(store, room_id, event_and_contexts, backfilled):
|
||||
@ -127,7 +127,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
|
||||
|
||||
logger.info("Got %d events from federation", len(event_and_contexts))
|
||||
|
||||
max_stream_id = await self.federation_handler.persist_events_and_notify(
|
||||
max_stream_id = await self.federation_event_handler.persist_events_and_notify(
|
||||
room_id, event_and_contexts, backfilled
|
||||
)
|
||||
|
||||
|
@ -16,6 +16,9 @@ function captchaDone() {
|
||||
<body>
|
||||
<form id="registrationForm" method="post" action="{{ myurl }}">
|
||||
<div>
|
||||
{% if error is defined %}
|
||||
<p class="error"><strong>Error: {{ error }}</strong></p>
|
||||
{% endif %}
|
||||
<p>
|
||||
Hello! We need to prevent computer programs and other automated
|
||||
things from creating accounts on this server.
|
||||
|
23 synapse/res/templates/registration_token.html Normal file
@@ -0,0 +1,23 @@
<html>
<head>
<title>Authentication</title>
<meta name='viewport' content='width=device-width, initial-scale=1,
    user-scalable=no, minimum-scale=1.0, maximum-scale=1.0'>
<link rel="stylesheet" href="/_matrix/static/client/register/style.css">
</head>
<body>
<form id="registrationForm" method="post" action="{{ myurl }}">
    <div>
        {% if error is defined %}
        <p class="error"><strong>Error: {{ error }}</strong></p>
        {% endif %}
        <p>
            Please enter a registration token.
        </p>
        <input type="hidden" name="session" value="{{ session }}" />
        <input type="text" name="token" />
        <input type="submit" value="Authenticate" />
    </div>
</form>
</body>
</html>
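This template is what the user-interactive auth fallback page renders for the registration-token stage: the hidden session field and the typed token are POSTed back to {{ myurl }}, which feeds them to the RegistrationTokenAuthChecker shown earlier. A hedged sketch of how a client hands off to that page and then retries its original request (the endpoint shapes follow the usual UIA fallback convention; all values are placeholders):

import requests

HS = "https://homeserver.example.org"  # placeholder homeserver
session_id = "sOmEsEsSiOn"  # taken from the initial 401 UIA response

# The fallback page rendered from this template lives at a URL of this shape;
# the user completes the form in a browser rather than via the API.
fallback_url = (
    f"{HS}/_matrix/client/r0/auth/"
    "org.matrix.msc3231.login.registration_token/fallback/web"
    f"?session={session_id}"
)
print("Open in a browser:", fallback_url)

# Once the page reports success, the client retries its original request,
# quoting only the session; the server sees the stage is already complete.
r = requests.post(
    f"{HS}/_matrix/client/r0/register",
    json={
        "username": "alice",
        "password": "placeholder-password",
        "auth": {"session": session_id},
    },
)
print(r.status_code)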
@ -8,6 +8,9 @@
|
||||
<body>
|
||||
<form id="registrationForm" method="post" action="{{ myurl }}">
|
||||
<div>
|
||||
{% if error is defined %}
|
||||
<p class="error"><strong>Error: {{ error }}</strong></p>
|
||||
{% endif %}
|
||||
<p>
|
||||
Please click the button below if you agree to the
|
||||
<a href="{{ terms_url }}">privacy policy of this homeserver.</a>
|
||||
|
@ -36,7 +36,11 @@ from synapse.rest.admin.event_reports import (
|
||||
)
|
||||
from synapse.rest.admin.groups import DeleteGroupAdminRestServlet
|
||||
from synapse.rest.admin.media import ListMediaInRoom, register_servlets_for_media_repo
|
||||
from synapse.rest.admin.purge_room_servlet import PurgeRoomServlet
|
||||
from synapse.rest.admin.registration_tokens import (
|
||||
ListRegistrationTokensRestServlet,
|
||||
NewRegistrationTokenRestServlet,
|
||||
RegistrationTokenRestServlet,
|
||||
)
|
||||
from synapse.rest.admin.rooms import (
|
||||
DeleteRoomRestServlet,
|
||||
ForwardExtremitiesRestServlet,
|
||||
@ -47,7 +51,6 @@ from synapse.rest.admin.rooms import (
|
||||
RoomMembersRestServlet,
|
||||
RoomRestServlet,
|
||||
RoomStateRestServlet,
|
||||
ShutdownRoomRestServlet,
|
||||
)
|
||||
from synapse.rest.admin.server_notice_servlet import SendServerNoticeServlet
|
||||
from synapse.rest.admin.statistics import UserMediaStatisticsRestServlet
|
||||
@ -220,8 +223,6 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
RoomMembersRestServlet(hs).register(http_server)
|
||||
DeleteRoomRestServlet(hs).register(http_server)
|
||||
JoinRoomAliasServlet(hs).register(http_server)
|
||||
PurgeRoomServlet(hs).register(http_server)
|
||||
SendServerNoticeServlet(hs).register(http_server)
|
||||
VersionServlet(hs).register(http_server)
|
||||
UserAdminServlet(hs).register(http_server)
|
||||
UserMembershipRestServlet(hs).register(http_server)
|
||||
@ -241,6 +242,13 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
RoomEventContextServlet(hs).register(http_server)
|
||||
RateLimitRestServlet(hs).register(http_server)
|
||||
UsernameAvailableRestServlet(hs).register(http_server)
|
||||
ListRegistrationTokensRestServlet(hs).register(http_server)
|
||||
NewRegistrationTokenRestServlet(hs).register(http_server)
|
||||
RegistrationTokenRestServlet(hs).register(http_server)
|
||||
|
||||
# Some servlets only get registered for the main process.
|
||||
if hs.config.worker_app is None:
|
||||
SendServerNoticeServlet(hs).register(http_server)
|
||||
|
||||
|
||||
def register_servlets_for_client_rest_resource(
|
||||
@ -253,7 +261,6 @@ def register_servlets_for_client_rest_resource(
|
||||
PurgeHistoryRestServlet(hs).register(http_server)
|
||||
ResetPasswordRestServlet(hs).register(http_server)
|
||||
SearchUsersRestServlet(hs).register(http_server)
|
||||
ShutdownRoomRestServlet(hs).register(http_server)
|
||||
UserRegisterServlet(hs).register(http_server)
|
||||
DeleteGroupAdminRestServlet(hs).register(http_server)
|
||||
AccountValidityRenewServlet(hs).register(http_server)
|
||||
|
@ -1,58 +0,0 @@
|
||||
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from typing import TYPE_CHECKING, Tuple
|
||||
|
||||
from synapse.http.servlet import (
|
||||
RestServlet,
|
||||
assert_params_in_dict,
|
||||
parse_json_object_from_request,
|
||||
)
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.rest.admin import assert_requester_is_admin
|
||||
from synapse.rest.admin._base import admin_patterns
|
||||
from synapse.types import JsonDict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
|
||||
class PurgeRoomServlet(RestServlet):
|
||||
"""Servlet which will remove all trace of a room from the database
|
||||
|
||||
POST /_synapse/admin/v1/purge_room
|
||||
{
|
||||
"room_id": "!room:id"
|
||||
}
|
||||
|
||||
returns:
|
||||
|
||||
{}
|
||||
"""
|
||||
|
||||
PATTERNS = admin_patterns("/purge_room$")
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.pagination_handler = hs.get_pagination_handler()
|
||||
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
await assert_requester_is_admin(self.auth, request)
|
||||
|
||||
body = parse_json_object_from_request(request)
|
||||
assert_params_in_dict(body, ("room_id",))
|
||||
|
||||
await self.pagination_handler.purge_room(body["room_id"])
|
||||
|
||||
return 200, {}
|
321
synapse/rest/admin/registration_tokens.py
Normal file
321
synapse/rest/admin/registration_tokens.py
Normal file
@ -0,0 +1,321 @@
|
||||
# Copyright 2021 Callum Brown
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import string
|
||||
from typing import TYPE_CHECKING, Tuple
|
||||
|
||||
from synapse.api.errors import Codes, NotFoundError, SynapseError
|
||||
from synapse.http.servlet import (
|
||||
RestServlet,
|
||||
parse_boolean,
|
||||
parse_json_object_from_request,
|
||||
)
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.rest.admin._base import admin_patterns, assert_requester_is_admin
|
||||
from synapse.types import JsonDict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ListRegistrationTokensRestServlet(RestServlet):
|
||||
"""List registration tokens.
|
||||
|
||||
To list all tokens:
|
||||
|
||||
GET /_synapse/admin/v1/registration_tokens
|
||||
|
||||
200 OK
|
||||
|
||||
{
|
||||
"registration_tokens": [
|
||||
{
|
||||
"token": "abcd",
|
||||
"uses_allowed": 3,
|
||||
"pending": 0,
|
||||
"completed": 1,
|
||||
"expiry_time": null
|
||||
},
|
||||
{
|
||||
"token": "wxyz",
|
||||
"uses_allowed": null,
|
||||
"pending": 0,
|
||||
"completed": 9,
|
||||
"expiry_time": 1625394937000
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
The optional query parameter `valid` can be used to filter the response.
|
||||
If it is `true`, only valid tokens are returned. If it is `false`, only
|
||||
tokens that have expired or have had all uses exhausted are returned.
|
||||
If it is omitted, all tokens are returned regardless of validity.
|
||||
"""
|
||||
|
||||
PATTERNS = admin_patterns("/registration_tokens$")
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.store = hs.get_datastore()
|
||||
|
||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
await assert_requester_is_admin(self.auth, request)
|
||||
valid = parse_boolean(request, "valid")
|
||||
token_list = await self.store.get_registration_tokens(valid)
|
||||
return 200, {"registration_tokens": token_list}
|
||||
|
||||
|
||||
class NewRegistrationTokenRestServlet(RestServlet):
|
||||
"""Create a new registration token.
|
||||
|
||||
For example, to create a token specifying some fields:
|
||||
|
||||
POST /_synapse/admin/v1/registration_tokens/new
|
||||
|
||||
{
|
||||
"token": "defg",
|
||||
"uses_allowed": 1
|
||||
}
|
||||
|
||||
200 OK
|
||||
|
||||
{
|
||||
"token": "defg",
|
||||
"uses_allowed": 1,
|
||||
"pending": 0,
|
||||
"completed": 0,
|
||||
"expiry_time": null
|
||||
}
|
||||
|
||||
Defaults are used for any fields not specified.
|
||||
"""
|
||||
|
||||
PATTERNS = admin_patterns("/registration_tokens/new$")
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.store = hs.get_datastore()
|
||||
self.clock = hs.get_clock()
|
||||
# A string of all the characters allowed to be in a registration_token
|
||||
self.allowed_chars = string.ascii_letters + string.digits + "-_"
|
||||
self.allowed_chars_set = set(self.allowed_chars)
|
||||
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
await assert_requester_is_admin(self.auth, request)
|
||||
body = parse_json_object_from_request(request)
|
||||
|
||||
if "token" in body:
|
||||
            token = body["token"]
            if not isinstance(token, str):
                raise SynapseError(400, "token must be a string", Codes.INVALID_PARAM)
            if not (0 < len(token) <= 64):
                raise SynapseError(
                    400,
                    "token must not be empty and must not be longer than 64 characters",
                    Codes.INVALID_PARAM,
                )
            if not set(token).issubset(self.allowed_chars_set):
                raise SynapseError(
                    400,
                    "token must consist only of characters matched by the regex [A-Za-z0-9-_]",
                    Codes.INVALID_PARAM,
                )

        else:
            # Get length of token to generate (default is 16)
            length = body.get("length", 16)
            if not isinstance(length, int):
                raise SynapseError(
                    400, "length must be an integer", Codes.INVALID_PARAM
                )
            if not (0 < length <= 64):
                raise SynapseError(
                    400,
                    "length must be greater than zero and not greater than 64",
                    Codes.INVALID_PARAM,
                )

            # Generate token
            token = await self.store.generate_registration_token(
                length, self.allowed_chars
            )

        uses_allowed = body.get("uses_allowed", None)
        if not (
            uses_allowed is None
            or (isinstance(uses_allowed, int) and uses_allowed >= 0)
        ):
            raise SynapseError(
                400,
                "uses_allowed must be a non-negative integer or null",
                Codes.INVALID_PARAM,
            )

        expiry_time = body.get("expiry_time", None)
        if not isinstance(expiry_time, (int, type(None))):
            raise SynapseError(
                400, "expiry_time must be an integer or null", Codes.INVALID_PARAM
            )
        if isinstance(expiry_time, int) and expiry_time < self.clock.time_msec():
            raise SynapseError(
                400, "expiry_time must not be in the past", Codes.INVALID_PARAM
            )

        created = await self.store.create_registration_token(
            token, uses_allowed, expiry_time
        )
        if not created:
            raise SynapseError(
                400, f"Token already exists: {token}", Codes.INVALID_PARAM
            )

        resp = {
            "token": token,
            "uses_allowed": uses_allowed,
            "pending": 0,
            "completed": 0,
            "expiry_time": expiry_time,
        }
        return 200, resp


class RegistrationTokenRestServlet(RestServlet):
    """Retrieve, update, or delete the given token.

    For example,

    to retrieve a token:

        GET /_synapse/admin/v1/registration_tokens/abcd

        200 OK

        {
            "token": "abcd",
            "uses_allowed": 3,
            "pending": 0,
            "completed": 1,
            "expiry_time": null
        }


    to update a token:

        PUT /_synapse/admin/v1/registration_tokens/defg

        {
            "uses_allowed": 5,
            "expiry_time": 4781243146000
        }

        200 OK

        {
            "token": "defg",
            "uses_allowed": 5,
            "pending": 0,
            "completed": 0,
            "expiry_time": 4781243146000
        }


    to delete a token:

        DELETE /_synapse/admin/v1/registration_tokens/wxyz

        200 OK

        {}
    """

    PATTERNS = admin_patterns("/registration_tokens/(?P<token>[^/]*)$")

    def __init__(self, hs: "HomeServer"):
        self.hs = hs
        self.clock = hs.get_clock()
        self.auth = hs.get_auth()
        self.store = hs.get_datastore()

    async def on_GET(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDict]:
        """Retrieve a registration token."""
        await assert_requester_is_admin(self.auth, request)
        token_info = await self.store.get_one_registration_token(token)

        # If no result return a 404
        if token_info is None:
            raise NotFoundError(f"No such registration token: {token}")

        return 200, token_info

    async def on_PUT(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDict]:
        """Update a registration token."""
        await assert_requester_is_admin(self.auth, request)
        body = parse_json_object_from_request(request)
        new_attributes = {}

        # Only add uses_allowed to new_attributes if it is present and valid
        if "uses_allowed" in body:
            uses_allowed = body["uses_allowed"]
            if not (
                uses_allowed is None
                or (isinstance(uses_allowed, int) and uses_allowed >= 0)
            ):
                raise SynapseError(
                    400,
                    "uses_allowed must be a non-negative integer or null",
                    Codes.INVALID_PARAM,
                )
            new_attributes["uses_allowed"] = uses_allowed

        if "expiry_time" in body:
            expiry_time = body["expiry_time"]
            if not isinstance(expiry_time, (int, type(None))):
                raise SynapseError(
                    400, "expiry_time must be an integer or null", Codes.INVALID_PARAM
                )
            if isinstance(expiry_time, int) and expiry_time < self.clock.time_msec():
                raise SynapseError(
                    400, "expiry_time must not be in the past", Codes.INVALID_PARAM
                )
            new_attributes["expiry_time"] = expiry_time

        if len(new_attributes) == 0:
            # Nothing to update, get token info to return
            token_info = await self.store.get_one_registration_token(token)
        else:
            token_info = await self.store.update_registration_token(
                token, new_attributes
            )

        # If no result return a 404
        if token_info is None:
            raise NotFoundError(f"No such registration token: {token}")

        return 200, token_info

    async def on_DELETE(
        self, request: SynapseRequest, token: str
    ) -> Tuple[int, JsonDict]:
        """Delete a registration token."""
        await assert_requester_is_admin(self.auth, request)

        if await self.store.delete_registration_token(token):
            return 200, {}

        raise NotFoundError(f"No such registration token: {token}")
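Aside: a minimal sketch of driving these registration-token admin endpoints over HTTP. It is not taken from this commit; it assumes a homeserver at https://example.org, a placeholder admin access token, and the `requests` library, and the URL paths mirror the servlet docstring above.

# Illustrative only -- not part of this commit. Assumes a homeserver at
# https://example.org and an admin access token with server-admin rights.
import requests

BASE = "https://example.org/_synapse/admin/v1/registration_tokens"
HEADERS = {"Authorization": "Bearer <admin_access_token>"}

# Create a token with 5 uses and no expiry (the server generates the token string).
resp = requests.post(f"{BASE}/new", json={"uses_allowed": 5}, headers=HEADERS)
print(resp.json())  # e.g. {"token": "...", "uses_allowed": 5, "pending": 0, ...}

# Fetch, update, and delete a specific token.
requests.get(f"{BASE}/abcd", headers=HEADERS)
requests.put(f"{BASE}/abcd", json={"expiry_time": 4781243146000}, headers=HEADERS)
requests.delete(f"{BASE}/abcd", headers=HEADERS)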
@ -46,41 +46,6 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)


class ShutdownRoomRestServlet(RestServlet):
    """Shuts down a room by removing all local users from the room and blocking
    all future invites and joins to the room. Any local aliases will be repointed
    to a new room created by `new_room_user_id` and kicked users will be auto
    joined to the new room.
    """

    PATTERNS = admin_patterns("/shutdown_room/(?P<room_id>[^/]+)")

    def __init__(self, hs: "HomeServer"):
        self.hs = hs
        self.auth = hs.get_auth()
        self.room_shutdown_handler = hs.get_room_shutdown_handler()

    async def on_POST(
        self, request: SynapseRequest, room_id: str
    ) -> Tuple[int, JsonDict]:
        requester = await self.auth.get_user_by_req(request)
        await assert_user_is_admin(self.auth, requester.user)

        content = parse_json_object_from_request(request)
        assert_params_in_dict(content, ["new_room_user_id"])

        ret = await self.room_shutdown_handler.shutdown_room(
            room_id=room_id,
            new_room_user_id=content["new_room_user_id"],
            new_room_name=content.get("room_name"),
            message=content.get("message"),
            requester_user_id=requester.user.to_string(),
            block=True,
        )

        return (200, ret)


class DeleteRoomRestServlet(RestServlet):
    """Delete a room from server.

@ -14,7 +14,7 @@
from typing import TYPE_CHECKING, Optional, Tuple

from synapse.api.constants import EventTypes
from synapse.api.errors import SynapseError
from synapse.api.errors import NotFoundError, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import (
    RestServlet,
@ -53,6 +53,8 @@ class SendServerNoticeServlet(RestServlet):
    def __init__(self, hs: "HomeServer"):
        self.hs = hs
        self.auth = hs.get_auth()
        self.server_notices_manager = hs.get_server_notices_manager()
        self.admin_handler = hs.get_admin_handler()
        self.txns = HttpTransactionCache(hs)

    def register(self, json_resource: HttpServer):
@ -79,19 +81,22 @@ class SendServerNoticeServlet(RestServlet):
        # We grab the server notices manager here as its initialisation has a check for worker processes,
        # but worker processes still need to initialise SendServerNoticeServlet (as it is part of the
        # admin api).
        if not self.hs.get_server_notices_manager().is_enabled():
        if not self.server_notices_manager.is_enabled():
            raise SynapseError(400, "Server notices are not enabled on this server")

        user_id = body["user_id"]
        UserID.from_string(user_id)
        if not self.hs.is_mine_id(user_id):
        target_user = UserID.from_string(body["user_id"])
        if not self.hs.is_mine(target_user):
            raise SynapseError(400, "Server notices can only be sent to local users")

        event = await self.hs.get_server_notices_manager().send_notice(
            user_id=body["user_id"],
        if not await self.admin_handler.get_user(target_user):
            raise NotFoundError("User not found")

        event = await self.server_notices_manager.send_notice(
            user_id=target_user.to_string(),
            type=event_type,
            state_key=state_key,
            event_content=body["content"],
            txn_id=txn_id,
        )

        return 200, {"event_id": event.event_id}
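Aside: a minimal sketch of exercising the server-notices admin endpoint that this servlet backs. It is not part of the commit; it assumes server notices are enabled on the homeserver, uses the documented /_synapse/admin/v1/send_server_notice path, and the hostname, user ID, and token are placeholders.

# Illustrative only -- assumes server notices are enabled and that
# <admin_access_token> belongs to a server admin.
import requests

resp = requests.post(
    "https://example.org/_synapse/admin/v1/send_server_notice",
    headers={"Authorization": "Bearer <admin_access_token>"},
    json={
        "user_id": "@alice:example.org",  # must be a local user, per the check above
        "content": {"msgtype": "m.text", "body": "Scheduled maintenance tonight"},
    },
)
print(resp.json())  # {"event_id": "$..."} on success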
@ -228,13 +228,18 @@ class UserRestServletV2(RestServlet):
        if not isinstance(deactivate, bool):
            raise SynapseError(400, "'deactivated' parameter is not of type boolean")

        # convert into List[Tuple[str, str]]
        # convert List[Dict[str, str]] into Set[Tuple[str, str]]
        if external_ids is not None:
            new_external_ids = []
            for external_id in external_ids:
                new_external_ids.append(
                    (external_id["auth_provider"], external_id["external_id"])
                )
            new_external_ids = {
                (external_id["auth_provider"], external_id["external_id"])
                for external_id in external_ids
            }

        # convert List[Dict[str, str]] into Set[Tuple[str, str]]
        if threepids is not None:
            new_threepids = {
                (threepid["medium"], threepid["address"]) for threepid in threepids
            }

        if user:  # modify user
            if "displayname" in body:
@ -243,29 +248,39 @@ class UserRestServletV2(RestServlet):
                )

            if threepids is not None:
                # remove old threepids from user
                old_threepids = await self.store.user_get_threepids(user_id)
                for threepid in old_threepids:
                # get changed threepids (added and removed)
                # convert List[Dict[str, Any]] into Set[Tuple[str, str]]
                cur_threepids = {
                    (threepid["medium"], threepid["address"])
                    for threepid in await self.store.user_get_threepids(user_id)
                }
                add_threepids = new_threepids - cur_threepids
                del_threepids = cur_threepids - new_threepids

                # remove old threepids
                for medium, address in del_threepids:
                    try:
                        await self.auth_handler.delete_threepid(
                            user_id, threepid["medium"], threepid["address"], None
                            user_id, medium, address, None
                        )
                    except Exception:
                        logger.exception("Failed to remove threepids")
                        raise SynapseError(500, "Failed to remove threepids")

                # add new threepids to user
                # add new threepids
                current_time = self.hs.get_clock().time_msec()
                for threepid in threepids:
                for medium, address in add_threepids:
                    await self.auth_handler.add_threepid(
                        user_id, threepid["medium"], threepid["address"], current_time
                        user_id, medium, address, current_time
                    )

            if external_ids is not None:
                # get changed external_ids (added and removed)
                cur_external_ids = await self.store.get_external_ids_by_user(user_id)
                add_external_ids = set(new_external_ids) - set(cur_external_ids)
                del_external_ids = set(cur_external_ids) - set(new_external_ids)
                cur_external_ids = set(
                    await self.store.get_external_ids_by_user(user_id)
                )
                add_external_ids = new_external_ids - cur_external_ids
                del_external_ids = cur_external_ids - new_external_ids

                # remove old external_ids
                for auth_provider, external_id in del_external_ids:
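Side note, not from the commit: the rewritten threepid and external_ids handling above boils down to computing additions and removals with set difference instead of deleting and re-adding everything. A standalone sketch of that idiom, with made-up data:

# Standalone illustration of the set-difference idiom used above (hypothetical data).
current = {("email", "old@example.org"), ("msisdn", "+441234567890")}
desired = {("email", "new@example.org"), ("msisdn", "+441234567890")}

to_add = desired - current     # {("email", "new@example.org")}
to_remove = current - desired  # {("email", "old@example.org")}

for medium, address in to_remove:
    print("delete", medium, address)
for medium, address in to_add:
    print("add", medium, address)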
@ -348,9 +363,9 @@ class UserRestServletV2(RestServlet):

            if threepids is not None:
                current_time = self.hs.get_clock().time_msec()
                for threepid in threepids:
                for medium, address in new_threepids:
                    await self.auth_handler.add_threepid(
                        user_id, threepid["medium"], threepid["address"], current_time
                        user_id, medium, address, current_time
                    )
                    if (
                        self.hs.config.email_enable_notifs
@ -362,8 +377,8 @@ class UserRestServletV2(RestServlet):
                            kind="email",
                            app_id="m.email",
                            app_display_name="Email Notifications",
                            device_display_name=threepid["address"],
                            pushkey=threepid["address"],
                            device_display_name=address,
                            pushkey=address,
                            lang=None,  # We don't know a user's language here
                            data={},
                        )
@ -13,24 +13,27 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import SynapseError
from synapse.http.server import respond_with_html
from synapse.http.servlet import RestServlet
from twisted.web.server import Request

from synapse.http.server import HttpServer, respond_with_html
from synapse.http.servlet import RestServlet, parse_string
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict

from ._base import client_patterns

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class AccountValidityRenewServlet(RestServlet):
    PATTERNS = client_patterns("/account_validity/renew$")

    def __init__(self, hs):
        """
        Args:
            hs (synapse.server.HomeServer): server
        """
    def __init__(self, hs: "HomeServer"):
        super().__init__()

        self.hs = hs
@ -46,18 +49,14 @@ class AccountValidityRenewServlet(RestServlet):
            hs.config.account_validity.account_validity_invalid_token_template
        )

    async def on_GET(self, request):
        if b"token" not in request.args:
            raise SynapseError(400, "Missing renewal token")
        renewal_token = request.args[b"token"][0]
    async def on_GET(self, request: Request) -> None:
        renewal_token = parse_string(request, "token", required=True)

        (
            token_valid,
            token_stale,
            expiration_ts,
        ) = await self.account_activity_handler.renew_account(
            renewal_token.decode("utf8")
        )
        ) = await self.account_activity_handler.renew_account(renewal_token)

        if token_valid:
            status_code = 200
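Aside, not part of the commit: the change above swaps hand-rolled access to request.args for the parse_string helper, which handles the missing-parameter error and byte decoding in one place. A rough standalone stand-in for what that helper does, using a bare args dict rather than a real twisted Request:

from typing import Dict, List, Optional

# Rough stand-in for parse_string(request, "token", required=True) as used above;
# "args" here is a plain dict of query parameters, not a real twisted Request.
def parse_string_sketch(
    args: Dict[bytes, List[bytes]], name: str, required: bool = False
) -> Optional[str]:
    values = args.get(name.encode("ascii"))
    if not values:
        if required:
            raise ValueError(f"Missing string query parameter {name}")
        return None
    return values[0].decode("utf8")

print(parse_string_sketch({b"token": [b"abc123"]}, "token", required=True))  # abc123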
@ -77,11 +76,7 @@ class AccountValidityRenewServlet(RestServlet):
class AccountValiditySendMailServlet(RestServlet):
    PATTERNS = client_patterns("/account_validity/send_mail$")

    def __init__(self, hs):
        """
        Args:
            hs (synapse.server.HomeServer): server
        """
    def __init__(self, hs: "HomeServer"):
        super().__init__()

        self.hs = hs
@ -91,7 +86,7 @@ class AccountValiditySendMailServlet(RestServlet):
            hs.config.account_validity.account_validity_renew_by_email_enabled
        )

    async def on_POST(self, request):
    async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
        requester = await self.auth.get_user_by_req(request, allow_expired=True)
        user_id = requester.user.to_string()
        await self.account_activity_handler.send_renewal_email_to_user(user_id)
@ -99,6 +94,6 @@ class AccountValiditySendMailServlet(RestServlet):
        return 200, {}


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
    AccountValidityRenewServlet(hs).register(http_server)
    AccountValiditySendMailServlet(hs).register(http_server)
@ -15,11 +15,14 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from twisted.web.server import Request
|
||||
|
||||
from synapse.api.constants import LoginType
|
||||
from synapse.api.errors import SynapseError
|
||||
from synapse.api.errors import LoginError, SynapseError
|
||||
from synapse.api.urls import CLIENT_API_PREFIX
|
||||
from synapse.http.server import respond_with_html
|
||||
from synapse.http.server import HttpServer, respond_with_html
|
||||
from synapse.http.servlet import RestServlet, parse_string
|
||||
from synapse.http.site import SynapseRequest
|
||||
|
||||
from ._base import client_patterns
|
||||
|
||||
@ -46,9 +49,10 @@ class AuthRestServlet(RestServlet):
|
||||
self.registration_handler = hs.get_registration_handler()
|
||||
self.recaptcha_template = hs.config.recaptcha_template
|
||||
self.terms_template = hs.config.terms_template
|
||||
self.registration_token_template = hs.config.registration_token_template
|
||||
self.success_template = hs.config.fallback_success_template
|
||||
|
||||
async def on_GET(self, request, stagetype):
|
||||
async def on_GET(self, request: SynapseRequest, stagetype: str) -> None:
|
||||
session = parse_string(request, "session")
|
||||
if not session:
|
||||
raise SynapseError(400, "No session supplied")
|
||||
@ -74,6 +78,12 @@ class AuthRestServlet(RestServlet):
|
||||
# re-authenticate with their SSO provider.
|
||||
html = await self.auth_handler.start_sso_ui_auth(request, session)
|
||||
|
||||
elif stagetype == LoginType.REGISTRATION_TOKEN:
|
||||
html = self.registration_token_template.render(
|
||||
session=session,
|
||||
myurl=f"{CLIENT_API_PREFIX}/r0/auth/{LoginType.REGISTRATION_TOKEN}/fallback/web",
|
||||
)
|
||||
|
||||
else:
|
||||
raise SynapseError(404, "Unknown auth stage type")
|
||||
|
||||
@ -81,7 +91,7 @@ class AuthRestServlet(RestServlet):
|
||||
respond_with_html(request, 200, html)
|
||||
return None
|
||||
|
||||
async def on_POST(self, request, stagetype):
|
||||
async def on_POST(self, request: Request, stagetype: str) -> None:
|
||||
|
||||
session = parse_string(request, "session")
|
||||
if not session:
|
||||
@ -95,29 +105,32 @@ class AuthRestServlet(RestServlet):
|
||||
|
||||
authdict = {"response": response, "session": session}
|
||||
|
||||
success = await self.auth_handler.add_oob_auth(
|
||||
LoginType.RECAPTCHA, authdict, request.getClientIP()
|
||||
)
|
||||
|
||||
if success:
|
||||
html = self.success_template.render()
|
||||
else:
|
||||
try:
|
||||
await self.auth_handler.add_oob_auth(
|
||||
LoginType.RECAPTCHA, authdict, request.getClientIP()
|
||||
)
|
||||
except LoginError as e:
|
||||
# Authentication failed, let user try again
|
||||
html = self.recaptcha_template.render(
|
||||
session=session,
|
||||
myurl="%s/r0/auth/%s/fallback/web"
|
||||
% (CLIENT_API_PREFIX, LoginType.RECAPTCHA),
|
||||
sitekey=self.hs.config.recaptcha_public_key,
|
||||
error=e.msg,
|
||||
)
|
||||
else:
|
||||
# No LoginError was raised, so authentication was successful
|
||||
html = self.success_template.render()
|
||||
|
||||
elif stagetype == LoginType.TERMS:
|
||||
authdict = {"session": session}
|
||||
|
||||
success = await self.auth_handler.add_oob_auth(
|
||||
LoginType.TERMS, authdict, request.getClientIP()
|
||||
)
|
||||
|
||||
if success:
|
||||
html = self.success_template.render()
|
||||
else:
|
||||
try:
|
||||
await self.auth_handler.add_oob_auth(
|
||||
LoginType.TERMS, authdict, request.getClientIP()
|
||||
)
|
||||
except LoginError as e:
|
||||
# Authentication failed, let user try again
|
||||
html = self.terms_template.render(
|
||||
session=session,
|
||||
terms_url="%s_matrix/consent?v=%s"
|
||||
@ -127,10 +140,33 @@ class AuthRestServlet(RestServlet):
|
||||
),
|
||||
myurl="%s/r0/auth/%s/fallback/web"
|
||||
% (CLIENT_API_PREFIX, LoginType.TERMS),
|
||||
error=e.msg,
|
||||
)
|
||||
else:
|
||||
# No LoginError was raised, so authentication was successful
|
||||
html = self.success_template.render()
|
||||
|
||||
elif stagetype == LoginType.SSO:
|
||||
# The SSO fallback workflow should not post here,
|
||||
raise SynapseError(404, "Fallback SSO auth does not support POST requests.")
|
||||
|
||||
elif stagetype == LoginType.REGISTRATION_TOKEN:
|
||||
token = parse_string(request, "token", required=True)
|
||||
authdict = {"session": session, "token": token}
|
||||
|
||||
try:
|
||||
await self.auth_handler.add_oob_auth(
|
||||
LoginType.REGISTRATION_TOKEN, authdict, request.getClientIP()
|
||||
)
|
||||
except LoginError as e:
|
||||
html = self.registration_token_template.render(
|
||||
session=session,
|
||||
myurl=f"{CLIENT_API_PREFIX}/r0/auth/{LoginType.REGISTRATION_TOKEN}/fallback/web",
|
||||
error=e.msg,
|
||||
)
|
||||
else:
|
||||
html = self.success_template.render()
|
||||
|
||||
else:
|
||||
raise SynapseError(404, "Unknown auth stage type")
|
||||
|
||||
@ -139,5 +175,5 @@ class AuthRestServlet(RestServlet):
|
||||
return None
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
AuthRestServlet(hs).register(http_server)
|
||||
|
@ -15,6 +15,7 @@ import logging
|
||||
from typing import TYPE_CHECKING, Tuple
|
||||
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, MSC3244_CAPABILITIES
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import RestServlet
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.types import JsonDict
|
||||
@ -61,8 +62,19 @@ class CapabilitiesRestServlet(RestServlet):
|
||||
"org.matrix.msc3244.room_capabilities"
|
||||
] = MSC3244_CAPABILITIES
|
||||
|
||||
if self.config.experimental.msc3283_enabled:
|
||||
response["capabilities"]["org.matrix.msc3283.set_displayname"] = {
|
||||
"enabled": self.config.enable_set_displayname
|
||||
}
|
||||
response["capabilities"]["org.matrix.msc3283.set_avatar_url"] = {
|
||||
"enabled": self.config.enable_set_avatar_url
|
||||
}
|
||||
response["capabilities"]["org.matrix.msc3283.3pid_changes"] = {
|
||||
"enabled": self.config.enable_3pid_changes
|
||||
}
|
||||
|
||||
return 200, response
|
||||
|
||||
|
||||
def register_servlets(hs: "HomeServer", http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
CapabilitiesRestServlet(hs).register(http_server)
|
||||
|
@ -14,34 +14,36 @@
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Tuple
|
||||
|
||||
from synapse.api import errors
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import (
|
||||
RestServlet,
|
||||
assert_params_in_dict,
|
||||
parse_json_object_from_request,
|
||||
)
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.types import JsonDict
|
||||
|
||||
from ._base import client_patterns, interactive_auth_handler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DevicesRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns("/devices$")
|
||||
|
||||
def __init__(self, hs):
|
||||
"""
|
||||
Args:
|
||||
hs (synapse.server.HomeServer): server
|
||||
"""
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.device_handler = hs.get_device_handler()
|
||||
|
||||
async def on_GET(self, request):
|
||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
devices = await self.device_handler.get_devices_by_user(
|
||||
requester.user.to_string()
|
||||
@ -57,7 +59,7 @@ class DeleteDevicesRestServlet(RestServlet):
|
||||
|
||||
PATTERNS = client_patterns("/delete_devices")
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
@ -65,7 +67,7 @@ class DeleteDevicesRestServlet(RestServlet):
|
||||
self.auth_handler = hs.get_auth_handler()
|
||||
|
||||
@interactive_auth_handler
|
||||
async def on_POST(self, request):
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
try:
|
||||
@ -100,18 +102,16 @@ class DeleteDevicesRestServlet(RestServlet):
|
||||
class DeviceRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns("/devices/(?P<device_id>[^/]*)$")
|
||||
|
||||
def __init__(self, hs):
|
||||
"""
|
||||
Args:
|
||||
hs (synapse.server.HomeServer): server
|
||||
"""
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.device_handler = hs.get_device_handler()
|
||||
self.auth_handler = hs.get_auth_handler()
|
||||
|
||||
async def on_GET(self, request, device_id):
|
||||
async def on_GET(
|
||||
self, request: SynapseRequest, device_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
device = await self.device_handler.get_device(
|
||||
requester.user.to_string(), device_id
|
||||
@ -119,7 +119,9 @@ class DeviceRestServlet(RestServlet):
|
||||
return 200, device
|
||||
|
||||
@interactive_auth_handler
|
||||
async def on_DELETE(self, request, device_id):
|
||||
async def on_DELETE(
|
||||
self, request: SynapseRequest, device_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
try:
|
||||
@ -146,7 +148,9 @@ class DeviceRestServlet(RestServlet):
|
||||
await self.device_handler.delete_device(requester.user.to_string(), device_id)
|
||||
return 200, {}
|
||||
|
||||
async def on_PUT(self, request, device_id):
|
||||
async def on_PUT(
|
||||
self, request: SynapseRequest, device_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
|
||||
body = parse_json_object_from_request(request)
|
||||
@ -193,13 +197,13 @@ class DehydratedDeviceServlet(RestServlet):
|
||||
|
||||
PATTERNS = client_patterns("/org.matrix.msc2697.v2/dehydrated_device", releases=())
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.device_handler = hs.get_device_handler()
|
||||
|
||||
async def on_GET(self, request: SynapseRequest):
|
||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
dehydrated_device = await self.device_handler.get_dehydrated_device(
|
||||
requester.user.to_string()
|
||||
@ -211,7 +215,7 @@ class DehydratedDeviceServlet(RestServlet):
|
||||
else:
|
||||
raise errors.NotFoundError("No dehydrated device available")
|
||||
|
||||
async def on_PUT(self, request: SynapseRequest):
|
||||
async def on_PUT(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
submission = parse_json_object_from_request(request)
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
@ -259,13 +263,13 @@ class ClaimDehydratedDeviceServlet(RestServlet):
|
||||
"/org.matrix.msc2697.v2/dehydrated_device/claim", releases=()
|
||||
)
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.device_handler = hs.get_device_handler()
|
||||
|
||||
async def on_POST(self, request: SynapseRequest):
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
submission = parse_json_object_from_request(request)
|
||||
@ -292,7 +296,7 @@ class ClaimDehydratedDeviceServlet(RestServlet):
|
||||
return (200, result)
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
DeleteDevicesRestServlet(hs).register(http_server)
|
||||
DevicesRestServlet(hs).register(http_server)
|
||||
DeviceRestServlet(hs).register(http_server)
|
||||
|
@ -12,8 +12,10 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Tuple
|
||||
|
||||
from twisted.web.server import Request
|
||||
|
||||
from synapse.api.errors import (
|
||||
AuthError,
|
||||
@ -22,14 +24,19 @@ from synapse.api.errors import (
|
||||
NotFoundError,
|
||||
SynapseError,
|
||||
)
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import RestServlet, parse_json_object_from_request
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.rest.client._base import client_patterns
|
||||
from synapse.types import RoomAlias
|
||||
from synapse.types import JsonDict, RoomAlias
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
ClientDirectoryServer(hs).register(http_server)
|
||||
ClientDirectoryListServer(hs).register(http_server)
|
||||
ClientAppserviceDirectoryListServer(hs).register(http_server)
|
||||
@ -38,21 +45,23 @@ def register_servlets(hs, http_server):
|
||||
class ClientDirectoryServer(RestServlet):
|
||||
PATTERNS = client_patterns("/directory/room/(?P<room_alias>[^/]*)$", v1=True)
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.store = hs.get_datastore()
|
||||
self.directory_handler = hs.get_directory_handler()
|
||||
self.auth = hs.get_auth()
|
||||
|
||||
async def on_GET(self, request, room_alias):
|
||||
room_alias = RoomAlias.from_string(room_alias)
|
||||
async def on_GET(self, request: Request, room_alias: str) -> Tuple[int, JsonDict]:
|
||||
room_alias_obj = RoomAlias.from_string(room_alias)
|
||||
|
||||
res = await self.directory_handler.get_association(room_alias)
|
||||
res = await self.directory_handler.get_association(room_alias_obj)
|
||||
|
||||
return 200, res
|
||||
|
||||
async def on_PUT(self, request, room_alias):
|
||||
room_alias = RoomAlias.from_string(room_alias)
|
||||
async def on_PUT(
|
||||
self, request: SynapseRequest, room_alias: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
room_alias_obj = RoomAlias.from_string(room_alias)
|
||||
|
||||
content = parse_json_object_from_request(request)
|
||||
if "room_id" not in content:
|
||||
@ -61,7 +70,7 @@ class ClientDirectoryServer(RestServlet):
|
||||
)
|
||||
|
||||
logger.debug("Got content: %s", content)
|
||||
logger.debug("Got room name: %s", room_alias.to_string())
|
||||
logger.debug("Got room name: %s", room_alias_obj.to_string())
|
||||
|
||||
room_id = content["room_id"]
|
||||
servers = content["servers"] if "servers" in content else None
|
||||
@ -78,22 +87,25 @@ class ClientDirectoryServer(RestServlet):
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
await self.directory_handler.create_association(
|
||||
requester, room_alias, room_id, servers
|
||||
requester, room_alias_obj, room_id, servers
|
||||
)
|
||||
|
||||
return 200, {}
|
||||
|
||||
async def on_DELETE(self, request, room_alias):
|
||||
async def on_DELETE(
|
||||
self, request: SynapseRequest, room_alias: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
room_alias_obj = RoomAlias.from_string(room_alias)
|
||||
|
||||
try:
|
||||
service = self.auth.get_appservice_by_req(request)
|
||||
room_alias = RoomAlias.from_string(room_alias)
|
||||
await self.directory_handler.delete_appservice_association(
|
||||
service, room_alias
|
||||
service, room_alias_obj
|
||||
)
|
||||
logger.info(
|
||||
"Application service at %s deleted alias %s",
|
||||
service.url,
|
||||
room_alias.to_string(),
|
||||
room_alias_obj.to_string(),
|
||||
)
|
||||
return 200, {}
|
||||
except InvalidClientCredentialsError:
|
||||
@ -103,12 +115,10 @@ class ClientDirectoryServer(RestServlet):
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
user = requester.user
|
||||
|
||||
room_alias = RoomAlias.from_string(room_alias)
|
||||
|
||||
await self.directory_handler.delete_association(requester, room_alias)
|
||||
await self.directory_handler.delete_association(requester, room_alias_obj)
|
||||
|
||||
logger.info(
|
||||
"User %s deleted alias %s", user.to_string(), room_alias.to_string()
|
||||
"User %s deleted alias %s", user.to_string(), room_alias_obj.to_string()
|
||||
)
|
||||
|
||||
return 200, {}
|
||||
@ -117,20 +127,22 @@ class ClientDirectoryServer(RestServlet):
|
||||
class ClientDirectoryListServer(RestServlet):
|
||||
PATTERNS = client_patterns("/directory/list/room/(?P<room_id>[^/]*)$", v1=True)
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.store = hs.get_datastore()
|
||||
self.directory_handler = hs.get_directory_handler()
|
||||
self.auth = hs.get_auth()
|
||||
|
||||
async def on_GET(self, request, room_id):
|
||||
async def on_GET(self, request: Request, room_id: str) -> Tuple[int, JsonDict]:
|
||||
room = await self.store.get_room(room_id)
|
||||
if room is None:
|
||||
raise NotFoundError("Unknown room")
|
||||
|
||||
return 200, {"visibility": "public" if room["is_public"] else "private"}
|
||||
|
||||
async def on_PUT(self, request, room_id):
|
||||
async def on_PUT(
|
||||
self, request: SynapseRequest, room_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
content = parse_json_object_from_request(request)
|
||||
@ -142,7 +154,9 @@ class ClientDirectoryListServer(RestServlet):
|
||||
|
||||
return 200, {}
|
||||
|
||||
async def on_DELETE(self, request, room_id):
|
||||
async def on_DELETE(
|
||||
self, request: SynapseRequest, room_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
await self.directory_handler.edit_published_room_list(
|
||||
@ -157,21 +171,27 @@ class ClientAppserviceDirectoryListServer(RestServlet):
|
||||
"/directory/list/appservice/(?P<network_id>[^/]*)/(?P<room_id>[^/]*)$", v1=True
|
||||
)
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.store = hs.get_datastore()
|
||||
self.directory_handler = hs.get_directory_handler()
|
||||
self.auth = hs.get_auth()
|
||||
|
||||
def on_PUT(self, request, network_id, room_id):
|
||||
async def on_PUT(
|
||||
self, request: SynapseRequest, network_id: str, room_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
content = parse_json_object_from_request(request)
|
||||
visibility = content.get("visibility", "public")
|
||||
return self._edit(request, network_id, room_id, visibility)
|
||||
return await self._edit(request, network_id, room_id, visibility)
|
||||
|
||||
def on_DELETE(self, request, network_id, room_id):
|
||||
return self._edit(request, network_id, room_id, "private")
|
||||
async def on_DELETE(
|
||||
self, request: SynapseRequest, network_id: str, room_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
return await self._edit(request, network_id, room_id, "private")
|
||||
|
||||
async def _edit(self, request, network_id, room_id, visibility):
|
||||
async def _edit(
|
||||
self, request: SynapseRequest, network_id: str, room_id: str, visibility: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
if not requester.app_service:
|
||||
raise AuthError(
|
||||
|
@ -14,11 +14,18 @@
|
||||
|
||||
"""This module contains REST servlets to do with event streaming, /events."""
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Dict, List, Tuple, Union
|
||||
|
||||
from synapse.api.errors import SynapseError
|
||||
from synapse.http.servlet import RestServlet
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import RestServlet, parse_string
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.rest.client._base import client_patterns
|
||||
from synapse.streams.config import PaginationConfig
|
||||
from synapse.types import JsonDict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -28,31 +35,30 @@ class EventStreamRestServlet(RestServlet):
|
||||
|
||||
DEFAULT_LONGPOLL_TIME_MS = 30000
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.event_stream_handler = hs.get_event_stream_handler()
|
||||
self.auth = hs.get_auth()
|
||||
self.store = hs.get_datastore()
|
||||
|
||||
async def on_GET(self, request):
|
||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
is_guest = requester.is_guest
|
||||
room_id = None
|
||||
args: Dict[bytes, List[bytes]] = request.args # type: ignore
|
||||
if is_guest:
|
||||
if b"room_id" not in request.args:
|
||||
if b"room_id" not in args:
|
||||
raise SynapseError(400, "Guest users must specify room_id param")
|
||||
if b"room_id" in request.args:
|
||||
room_id = request.args[b"room_id"][0].decode("ascii")
|
||||
room_id = parse_string(request, "room_id")
|
||||
|
||||
pagin_config = await PaginationConfig.from_request(self.store, request)
|
||||
timeout = EventStreamRestServlet.DEFAULT_LONGPOLL_TIME_MS
|
||||
if b"timeout" in request.args:
|
||||
if b"timeout" in args:
|
||||
try:
|
||||
timeout = int(request.args[b"timeout"][0])
|
||||
timeout = int(args[b"timeout"][0])
|
||||
except ValueError:
|
||||
raise SynapseError(400, "timeout must be in milliseconds.")
|
||||
|
||||
as_client_event = b"raw" not in request.args
|
||||
as_client_event = b"raw" not in args
|
||||
|
||||
chunk = await self.event_stream_handler.get_stream(
|
||||
requester.user.to_string(),
|
||||
@ -70,25 +76,27 @@ class EventStreamRestServlet(RestServlet):
|
||||
class EventRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns("/events/(?P<event_id>[^/]*)$", v1=True)
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.clock = hs.get_clock()
|
||||
self.event_handler = hs.get_event_handler()
|
||||
self.auth = hs.get_auth()
|
||||
self._event_serializer = hs.get_event_client_serializer()
|
||||
|
||||
async def on_GET(self, request, event_id):
|
||||
async def on_GET(
|
||||
self, request: SynapseRequest, event_id: str
|
||||
) -> Tuple[int, Union[str, JsonDict]]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
event = await self.event_handler.get_event(requester.user, None, event_id)
|
||||
|
||||
time_now = self.clock.time_msec()
|
||||
if event:
|
||||
event = await self._event_serializer.serialize_event(event, time_now)
|
||||
return 200, event
|
||||
result = await self._event_serializer.serialize_event(event, time_now)
|
||||
return 200, result
|
||||
else:
|
||||
return 404, "Event not found."
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
EventStreamRestServlet(hs).register(http_server)
|
||||
EventRestServlet(hs).register(http_server)
|
||||
|
@ -13,26 +13,34 @@
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Tuple
|
||||
|
||||
from synapse.api.errors import AuthError, NotFoundError, StoreError, SynapseError
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import RestServlet, parse_json_object_from_request
|
||||
from synapse.types import UserID
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.types import JsonDict, UserID
|
||||
|
||||
from ._base import client_patterns, set_timeline_upper_limit
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GetFilterRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns("/user/(?P<user_id>[^/]*)/filter/(?P<filter_id>[^/]*)")
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.filtering = hs.get_filtering()
|
||||
|
||||
async def on_GET(self, request, user_id, filter_id):
|
||||
async def on_GET(
|
||||
self, request: SynapseRequest, user_id: str, filter_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
target_user = UserID.from_string(user_id)
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
@ -43,13 +51,13 @@ class GetFilterRestServlet(RestServlet):
|
||||
raise AuthError(403, "Can only get filters for local users")
|
||||
|
||||
try:
|
||||
filter_id = int(filter_id)
|
||||
filter_id_int = int(filter_id)
|
||||
except Exception:
|
||||
raise SynapseError(400, "Invalid filter_id")
|
||||
|
||||
try:
|
||||
filter_collection = await self.filtering.get_user_filter(
|
||||
user_localpart=target_user.localpart, filter_id=filter_id
|
||||
user_localpart=target_user.localpart, filter_id=filter_id_int
|
||||
)
|
||||
except StoreError as e:
|
||||
if e.code != 404:
|
||||
@ -62,13 +70,15 @@ class GetFilterRestServlet(RestServlet):
|
||||
class CreateFilterRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns("/user/(?P<user_id>[^/]*)/filter")
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
self.filtering = hs.get_filtering()
|
||||
|
||||
async def on_POST(self, request, user_id):
|
||||
async def on_POST(
|
||||
self, request: SynapseRequest, user_id: str
|
||||
) -> Tuple[int, JsonDict]:
|
||||
|
||||
target_user = UserID.from_string(user_id)
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
@ -89,6 +99,6 @@ class CreateFilterRestServlet(RestServlet):
|
||||
return 200, {"filter_id": str(filter_id)}
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
GetFilterRestServlet(hs).register(http_server)
|
||||
CreateFilterRestServlet(hs).register(http_server)
|
||||
|
@ -26,6 +26,7 @@ from synapse.api.constants import (
|
||||
)
|
||||
from synapse.api.errors import Codes, SynapseError
|
||||
from synapse.handlers.groups_local import GroupsLocalHandler
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import (
|
||||
RestServlet,
|
||||
assert_params_in_dict,
|
||||
@ -930,7 +931,7 @@ class GroupsForUserServlet(RestServlet):
|
||||
return 200, result
|
||||
|
||||
|
||||
def register_servlets(hs: "HomeServer", http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
GroupServlet(hs).register(http_server)
|
||||
GroupSummaryServlet(hs).register(http_server)
|
||||
GroupInvitedUsersServlet(hs).register(http_server)
|
||||
|
@ -12,25 +12,33 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from typing import TYPE_CHECKING, Dict, List, Tuple
|
||||
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import RestServlet, parse_boolean
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.rest.client._base import client_patterns
|
||||
from synapse.streams.config import PaginationConfig
|
||||
from synapse.types import JsonDict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
|
||||
# TODO: Needs unit testing
|
||||
class InitialSyncRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns("/initialSync$", v1=True)
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.initial_sync_handler = hs.get_initial_sync_handler()
|
||||
self.auth = hs.get_auth()
|
||||
self.store = hs.get_datastore()
|
||||
|
||||
async def on_GET(self, request):
|
||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
as_client_event = b"raw" not in request.args
|
||||
args: Dict[bytes, List[bytes]] = request.args # type: ignore
|
||||
as_client_event = b"raw" not in args
|
||||
pagination_config = await PaginationConfig.from_request(self.store, request)
|
||||
include_archived = parse_boolean(request, "archived", default=False)
|
||||
content = await self.initial_sync_handler.snapshot_all_rooms(
|
||||
@ -43,5 +51,5 @@ class InitialSyncRestServlet(RestServlet):
|
||||
return 200, content
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
InitialSyncRestServlet(hs).register(http_server)
|
||||
|
@ -15,19 +15,25 @@
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, Optional, Tuple
|
||||
|
||||
from synapse.api.errors import SynapseError
|
||||
from synapse.api.errors import InvalidAPICallError, SynapseError
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import (
|
||||
RestServlet,
|
||||
parse_integer,
|
||||
parse_json_object_from_request,
|
||||
parse_string,
|
||||
)
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.logging.opentracing import log_kv, set_tag, trace
|
||||
from synapse.types import StreamToken
|
||||
from synapse.types import JsonDict, StreamToken
|
||||
|
||||
from ._base import client_patterns, interactive_auth_handler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -59,18 +65,16 @@ class KeyUploadServlet(RestServlet):
|
||||
|
||||
PATTERNS = client_patterns("/keys/upload(/(?P<device_id>[^/]+))?$")
|
||||
|
||||
def __init__(self, hs):
|
||||
"""
|
||||
Args:
|
||||
hs (synapse.server.HomeServer): server
|
||||
"""
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.auth = hs.get_auth()
|
||||
self.e2e_keys_handler = hs.get_e2e_keys_handler()
|
||||
self.device_handler = hs.get_device_handler()
|
||||
|
||||
@trace(opname="upload_keys")
|
||||
async def on_POST(self, request, device_id):
|
||||
async def on_POST(
|
||||
self, request: SynapseRequest, device_id: Optional[str]
|
||||
) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
user_id = requester.user.to_string()
|
||||
body = parse_json_object_from_request(request)
|
||||
@ -148,21 +152,30 @@ class KeyQueryServlet(RestServlet):
|
||||
|
||||
PATTERNS = client_patterns("/keys/query$")
|
||||
|
||||
def __init__(self, hs):
|
||||
"""
|
||||
Args:
|
||||
hs (synapse.server.HomeServer):
|
||||
"""
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.auth = hs.get_auth()
|
||||
self.e2e_keys_handler = hs.get_e2e_keys_handler()
|
||||
|
||||
async def on_POST(self, request):
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
user_id = requester.user.to_string()
|
||||
device_id = requester.device_id
|
||||
timeout = parse_integer(request, "timeout", 10 * 1000)
|
||||
body = parse_json_object_from_request(request)
|
||||
|
||||
device_keys = body.get("device_keys")
|
||||
if not isinstance(device_keys, dict):
|
||||
raise InvalidAPICallError("'device_keys' must be a JSON object")
|
||||
|
||||
def is_list_of_strings(values: Any) -> bool:
|
||||
return isinstance(values, list) and all(isinstance(v, str) for v in values)
|
||||
|
||||
if any(not is_list_of_strings(keys) for keys in device_keys.values()):
|
||||
raise InvalidAPICallError(
|
||||
"'device_keys' values must be a list of strings",
|
||||
)
|
||||
|
||||
result = await self.e2e_keys_handler.query_devices(
|
||||
body, timeout, user_id, device_id
|
||||
)
|
||||
@ -181,17 +194,13 @@ class KeyChangesServlet(RestServlet):
|
||||
|
||||
PATTERNS = client_patterns("/keys/changes$")
|
||||
|
||||
def __init__(self, hs):
|
||||
"""
|
||||
Args:
|
||||
hs (synapse.server.HomeServer):
|
||||
"""
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.auth = hs.get_auth()
|
||||
self.device_handler = hs.get_device_handler()
|
||||
self.store = hs.get_datastore()
|
||||
|
||||
async def on_GET(self, request):
|
||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
|
||||
from_token_string = parse_string(request, "from", required=True)
|
||||
@ -231,12 +240,12 @@ class OneTimeKeyServlet(RestServlet):
|
||||
|
||||
PATTERNS = client_patterns("/keys/claim$")
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.auth = hs.get_auth()
|
||||
self.e2e_keys_handler = hs.get_e2e_keys_handler()
|
||||
|
||||
async def on_POST(self, request):
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
timeout = parse_integer(request, "timeout", 10 * 1000)
|
||||
body = parse_json_object_from_request(request)
|
||||
@ -255,11 +264,7 @@ class SigningKeyUploadServlet(RestServlet):
|
||||
|
||||
PATTERNS = client_patterns("/keys/device_signing/upload$", releases=())
|
||||
|
||||
def __init__(self, hs):
|
||||
"""
|
||||
Args:
|
||||
hs (synapse.server.HomeServer): server
|
||||
"""
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.hs = hs
|
||||
self.auth = hs.get_auth()
|
||||
@ -267,7 +272,7 @@ class SigningKeyUploadServlet(RestServlet):
|
||||
self.auth_handler = hs.get_auth_handler()
|
||||
|
||||
@interactive_auth_handler
|
||||
async def on_POST(self, request):
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
user_id = requester.user.to_string()
|
||||
body = parse_json_object_from_request(request)
|
||||
@ -315,16 +320,12 @@ class SignaturesUploadServlet(RestServlet):
|
||||
|
||||
PATTERNS = client_patterns("/keys/signatures/upload$")
|
||||
|
||||
def __init__(self, hs):
|
||||
"""
|
||||
Args:
|
||||
hs (synapse.server.HomeServer): server
|
||||
"""
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self.auth = hs.get_auth()
|
||||
self.e2e_keys_handler = hs.get_e2e_keys_handler()
|
||||
|
||||
async def on_POST(self, request):
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||
user_id = requester.user.to_string()
|
||||
body = parse_json_object_from_request(request)
|
||||
@ -335,7 +336,7 @@ class SignaturesUploadServlet(RestServlet):
|
||||
return 200, result
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
KeyUploadServlet(hs).register(http_server)
|
||||
KeyQueryServlet(hs).register(http_server)
|
||||
KeyChangesServlet(hs).register(http_server)
|
||||
|
@ -19,6 +19,7 @@ from twisted.web.server import Request
|
||||
|
||||
from synapse.api.constants import Membership
|
||||
from synapse.api.errors import SynapseError
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import (
|
||||
RestServlet,
|
||||
parse_json_object_from_request,
|
||||
@ -103,5 +104,5 @@ class KnockRoomAliasServlet(RestServlet):
|
||||
)
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
KnockRoomAliasServlet(hs).register(http_server)
|
||||
|
@ -1,4 +1,4 @@
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -14,7 +14,7 @@
|
||||
|
||||
import logging
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, List, Optional
|
||||
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, List, Optional, Tuple
|
||||
|
||||
from typing_extensions import TypedDict
|
||||
|
||||
@ -104,7 +104,13 @@ class LoginRestServlet(RestServlet):
|
||||
burst_count=self.hs.config.rc_login_account.burst_count,
|
||||
)
|
||||
|
||||
def on_GET(self, request: SynapseRequest):
|
||||
# ensure the CAS/SAML/OIDC handlers are loaded on this worker instance.
|
||||
# The reason for this is to ensure that the auth_provider_ids are registered
|
||||
# with SsoHandler, which in turn ensures that the login/registration prometheus
|
||||
# counters are initialised for the auth_provider_ids.
|
||||
_load_sso_handlers(hs)
|
||||
|
||||
def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
flows = []
|
||||
if self.jwt_enabled:
|
||||
flows.append({"type": LoginRestServlet.JWT_TYPE})
|
||||
@ -151,7 +157,7 @@ class LoginRestServlet(RestServlet):
|
||||
|
||||
return 200, {"flows": flows}
|
||||
|
||||
async def on_POST(self, request: SynapseRequest):
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, LoginResponse]:
|
||||
login_submission = parse_json_object_from_request(request)
|
||||
|
||||
if self._msc2918_enabled:
|
||||
@ -211,7 +217,7 @@ class LoginRestServlet(RestServlet):
|
||||
login_submission: JsonDict,
|
||||
appservice: ApplicationService,
|
||||
should_issue_refresh_token: bool = False,
|
||||
):
|
||||
) -> LoginResponse:
|
||||
identifier = login_submission.get("identifier")
|
||||
logger.info("Got appservice login request with identifier: %r", identifier)
|
||||
|
||||
@ -461,10 +467,7 @@ class RefreshTokenServlet(RestServlet):
|
||||
self._clock = hs.get_clock()
|
||||
self.access_token_lifetime = hs.config.access_token_lifetime
|
||||
|
||||
async def on_POST(
|
||||
self,
|
||||
request: SynapseRequest,
|
||||
):
|
||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
refresh_submission = parse_json_object_from_request(request)
|
||||
|
||||
assert_params_in_dict(refresh_submission, ["refresh_token"])
|
||||
@ -499,12 +502,7 @@ class SsoRedirectServlet(RestServlet):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
# make sure that the relevant handlers are instantiated, so that they
|
||||
# register themselves with the main SSOHandler.
|
||||
if hs.config.cas_enabled:
|
||||
hs.get_cas_handler()
|
||||
if hs.config.saml2_enabled:
|
||||
hs.get_saml_handler()
|
||||
if hs.config.oidc_enabled:
|
||||
hs.get_oidc_handler()
|
||||
_load_sso_handlers(hs)
|
||||
self._sso_handler = hs.get_sso_handler()
|
||||
self._msc2858_enabled = hs.config.experimental.msc2858_enabled
|
||||
self._public_baseurl = hs.config.public_baseurl
|
||||
@ -569,7 +567,7 @@ class SsoRedirectServlet(RestServlet):
|
||||
class CasTicketServlet(RestServlet):
|
||||
PATTERNS = client_patterns("/login/cas/ticket", v1=True)
|
||||
|
||||
def __init__(self, hs):
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
super().__init__()
|
||||
self._cas_handler = hs.get_cas_handler()
|
||||
|
||||
@ -591,10 +589,26 @@ class CasTicketServlet(RestServlet):
|
||||
)
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
LoginRestServlet(hs).register(http_server)
|
||||
if hs.config.access_token_lifetime is not None:
|
||||
RefreshTokenServlet(hs).register(http_server)
SsoRedirectServlet(hs).register(http_server)
if hs.config.cas_enabled:
CasTicketServlet(hs).register(http_server)


def _load_sso_handlers(hs: "HomeServer") -> None:
"""Ensure that the SSO handlers are loaded, if they are enabled by configuration.

This is mostly useful to ensure that the CAS/SAML/OIDC handlers register themselves
with the main SsoHandler.

It's safe to call this multiple times.
"""
if hs.config.cas.cas_enabled:
hs.get_cas_handler()
if hs.config.saml2.saml2_enabled:
hs.get_saml_handler()
if hs.config.oidc.oidc_enabled:
hs.get_oidc_handler()

@ -13,9 +13,16 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet
from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns
from synapse.types import JsonDict

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)

@ -23,13 +30,13 @@ logger = logging.getLogger(__name__)
class LogoutRestServlet(RestServlet):
PATTERNS = client_patterns("/logout$", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self._auth_handler = hs.get_auth_handler()
self._device_handler = hs.get_device_handler()

async def on_POST(self, request):
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request, allow_expired=True)

if requester.device_id is None:
@ -48,13 +55,13 @@ class LogoutRestServlet(RestServlet):
class LogoutAllRestServlet(RestServlet):
PATTERNS = client_patterns("/logout/all$", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self._auth_handler = hs.get_auth_handler()
self._device_handler = hs.get_device_handler()

async def on_POST(self, request):
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request, allow_expired=True)
user_id = requester.user.to_string()

@ -67,6 +74,6 @@ class LogoutAllRestServlet(RestServlet):
return 200, {}


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
LogoutRestServlet(hs).register(http_server)
LogoutAllRestServlet(hs).register(http_server)

@ -13,26 +13,33 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from synapse.events.utils import format_event_for_client_v2_without_room_id
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_integer, parse_string
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class NotificationsServlet(RestServlet):
PATTERNS = client_patterns("/notifications$")

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.store = hs.get_datastore()
self.auth = hs.get_auth()
self.clock = hs.get_clock()
self._event_serializer = hs.get_event_client_serializer()

async def on_GET(self, request):
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
user_id = requester.user.to_string()

@ -87,5 +94,5 @@ class NotificationsServlet(RestServlet):
return 200, {"notifications": returned_push_actions, "next_token": next_token}


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
NotificationsServlet(hs).register(http_server)

@ -12,15 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.


import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import AuthError
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict
from synapse.util.stringutils import random_string

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


@ -58,14 +64,16 @@ class IdTokenServlet(RestServlet):

EXPIRES_MS = 3600 * 1000

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.store = hs.get_datastore()
self.clock = hs.get_clock()
self.server_name = hs.config.server_name

async def on_POST(self, request, user_id):
async def on_POST(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
if user_id != requester.user.to_string():
raise AuthError(403, "Cannot request tokens for other users.")
@ -90,5 +98,5 @@ class IdTokenServlet(RestServlet):
)


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
IdTokenServlet(hs).register(http_server)

@ -13,28 +13,32 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from twisted.web.server import Request

from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet
from synapse.types import JsonDict

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class PasswordPolicyServlet(RestServlet):
PATTERNS = client_patterns("/password_policy$")

def __init__(self, hs):
"""
Args:
hs (synapse.server.HomeServer): server
"""
def __init__(self, hs: "HomeServer"):
super().__init__()

self.policy = hs.config.password_policy
self.enabled = hs.config.password_policy_enabled

def on_GET(self, request):
def on_GET(self, request: Request) -> Tuple[int, JsonDict]:
if not self.enabled or not self.policy:
return (200, {})

@ -53,5 +57,5 @@ class PasswordPolicyServlet(RestServlet):
return (200, policy)


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
PasswordPolicyServlet(hs).register(http_server)

@ -15,12 +15,18 @@
""" This module contains REST servlets to do with presence: /presence/<paths>
"""
import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import AuthError, SynapseError
from synapse.handlers.presence import format_user_presence_state
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns
from synapse.types import UserID
from synapse.types import JsonDict, UserID

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)

@ -28,7 +34,7 @@ logger = logging.getLogger(__name__)
class PresenceStatusRestServlet(RestServlet):
PATTERNS = client_patterns("/presence/(?P<user_id>[^/]*)/status", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.presence_handler = hs.get_presence_handler()
@ -37,7 +43,9 @@ class PresenceStatusRestServlet(RestServlet):

self._use_presence = hs.config.server.use_presence

async def on_GET(self, request, user_id):
async def on_GET(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)

@ -53,13 +61,15 @@ class PresenceStatusRestServlet(RestServlet):
raise AuthError(403, "You are not allowed to see their presence.")

state = await self.presence_handler.get_state(target_user=user)
state = format_user_presence_state(
result = format_user_presence_state(
state, self.clock.time_msec(), include_user_id=False
)

return 200, state
return 200, result

async def on_PUT(self, request, user_id):
async def on_PUT(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)

@ -91,5 +101,5 @@ class PresenceStatusRestServlet(RestServlet):
return 200, {}


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
PresenceStatusRestServlet(hs).register(http_server)

@ -14,22 +14,31 @@

""" This module contains REST servlets to do with profile: /profile/<paths> """

from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import Codes, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns
from synapse.types import UserID
from synapse.types import JsonDict, UserID

if TYPE_CHECKING:
from synapse.server import HomeServer


class ProfileDisplaynameRestServlet(RestServlet):
PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)/displayname", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.profile_handler = hs.get_profile_handler()
self.auth = hs.get_auth()

async def on_GET(self, request, user_id):
async def on_GET(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
requester_user = None

if self.hs.config.require_auth_for_profile_requests:
@ -48,7 +57,9 @@ class ProfileDisplaynameRestServlet(RestServlet):

return 200, ret

async def on_PUT(self, request, user_id):
async def on_PUT(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request, allow_guest=True)
user = UserID.from_string(user_id)
is_admin = await self.auth.is_server_admin(requester.user)
@ -72,13 +83,15 @@ class ProfileDisplaynameRestServlet(RestServlet):
class ProfileAvatarURLRestServlet(RestServlet):
PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)/avatar_url", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.profile_handler = hs.get_profile_handler()
self.auth = hs.get_auth()

async def on_GET(self, request, user_id):
async def on_GET(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
requester_user = None

if self.hs.config.require_auth_for_profile_requests:
@ -97,7 +110,9 @@ class ProfileAvatarURLRestServlet(RestServlet):

return 200, ret

async def on_PUT(self, request, user_id):
async def on_PUT(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)
is_admin = await self.auth.is_server_admin(requester.user)
@ -120,13 +135,15 @@ class ProfileAvatarURLRestServlet(RestServlet):
class ProfileRestServlet(RestServlet):
PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.profile_handler = hs.get_profile_handler()
self.auth = hs.get_auth()

async def on_GET(self, request, user_id):
async def on_GET(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
requester_user = None

if self.hs.config.require_auth_for_profile_requests:
@ -149,7 +166,7 @@ class ProfileRestServlet(RestServlet):
return 200, ret


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
ProfileDisplaynameRestServlet(hs).register(http_server)
ProfileAvatarURLRestServlet(hs).register(http_server)
ProfileRestServlet(hs).register(http_server)

@ -13,17 +13,23 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import Codes, StoreError, SynapseError
from synapse.http.server import respond_with_html_bytes
from synapse.http.server import HttpServer, respond_with_html_bytes
from synapse.http.servlet import (
RestServlet,
assert_params_in_dict,
parse_json_object_from_request,
parse_string,
)
from synapse.http.site import SynapseRequest
from synapse.push import PusherConfigException
from synapse.rest.client._base import client_patterns
from synapse.types import JsonDict

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)

@ -31,12 +37,12 @@ logger = logging.getLogger(__name__)
class PushersRestServlet(RestServlet):
PATTERNS = client_patterns("/pushers$", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.auth = hs.get_auth()

async def on_GET(self, request):
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
user = requester.user

@ -50,14 +56,14 @@ class PushersRestServlet(RestServlet):
class PushersSetRestServlet(RestServlet):
PATTERNS = client_patterns("/pushers/set$", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.auth = hs.get_auth()
self.notifier = hs.get_notifier()
self.pusher_pool = self.hs.get_pusherpool()

async def on_POST(self, request):
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
user = requester.user

@ -132,14 +138,14 @@ class PushersRemoveRestServlet(RestServlet):
PATTERNS = client_patterns("/pushers/remove$", v1=True)
SUCCESS_HTML = b"<html><body>You have been unsubscribed</body><html>"

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.notifier = hs.get_notifier()
self.auth = hs.get_auth()
self.pusher_pool = self.hs.get_pusherpool()

async def on_GET(self, request):
async def on_GET(self, request: SynapseRequest) -> None:
requester = await self.auth.get_user_by_req(request, rights="delete_pusher")
user = requester.user

@ -165,7 +171,7 @@ class PushersRemoveRestServlet(RestServlet):
return None


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
PushersRestServlet(hs).register(http_server)
PushersSetRestServlet(hs).register(http_server)
PushersRemoveRestServlet(hs).register(http_server)

@ -13,27 +13,36 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.constants import ReadReceiptEventFields
from synapse.api.errors import Codes, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class ReadMarkerRestServlet(RestServlet):
PATTERNS = client_patterns("/rooms/(?P<room_id>[^/]*)/read_markers$")

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.receipts_handler = hs.get_receipts_handler()
self.read_marker_handler = hs.get_read_marker_handler()
self.presence_handler = hs.get_presence_handler()

async def on_POST(self, request, room_id):
async def on_POST(
self, request: SynapseRequest, room_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)

await self.presence_handler.bump_presence_active_time(requester.user)
@ -70,5 +79,5 @@ class ReadMarkerRestServlet(RestServlet):
return 200, {}


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
ReadMarkerRestServlet(hs).register(http_server)

@ -12,7 +12,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hmac
import logging
import random
from typing import List, Union
@ -28,6 +27,7 @@ from synapse.api.errors import (
ThreepidValidationError,
UnrecognizedRequestError,
)
from synapse.api.ratelimiting import Ratelimiter
from synapse.config import ConfigError
from synapse.config.captcha import CaptchaConfig
from synapse.config.consent import ConsentConfig
@ -59,18 +59,6 @@ from synapse.util.threepids import (

from ._base import client_patterns, interactive_auth_handler

# We ought to be using hmac.compare_digest() but on older pythons it doesn't
# exist. It's a _really minor_ security flaw to use plain string comparison
# because the timing attack is so obscured by all the other code here it's
# unlikely to make much difference
if hasattr(hmac, "compare_digest"):
compare_digest = hmac.compare_digest
else:

def compare_digest(a, b):
return a == b


logger = logging.getLogger(__name__)


@ -379,6 +367,55 @@ class UsernameAvailabilityRestServlet(RestServlet):
return 200, {"available": True}


class RegistrationTokenValidityRestServlet(RestServlet):
"""Check the validity of a registration token.

Example:

GET /_matrix/client/unstable/org.matrix.msc3231/register/org.matrix.msc3231.login.registration_token/validity?token=abcd

200 OK

{
"valid": true
}
"""

PATTERNS = client_patterns(
f"/org.matrix.msc3231/register/{LoginType.REGISTRATION_TOKEN}/validity",
releases=(),
unstable=True,
)

def __init__(self, hs):
"""
Args:
hs (synapse.server.HomeServer): server
"""
super().__init__()
self.hs = hs
self.store = hs.get_datastore()
self.ratelimiter = Ratelimiter(
store=self.store,
clock=hs.get_clock(),
rate_hz=hs.config.ratelimiting.rc_registration_token_validity.per_second,
burst_count=hs.config.ratelimiting.rc_registration_token_validity.burst_count,
)

async def on_GET(self, request):
await self.ratelimiter.ratelimit(None, (request.getClientIP(),))

if not self.hs.config.enable_registration:
raise SynapseError(
403, "Registration has been disabled", errcode=Codes.FORBIDDEN
)

token = parse_string(request, "token", required=True)
valid = await self.store.registration_token_is_valid(token)

return 200, {"valid": valid}


class RegisterRestServlet(RestServlet):
PATTERNS = client_patterns("/register$")

@ -686,6 +723,22 @@ class RegisterRestServlet(RestServlet):
)

if registered:
# Check if a token was used to authenticate registration
registration_token = await self.auth_handler.get_session_data(
session_id,
UIAuthSessionDataConstants.REGISTRATION_TOKEN,
)
if registration_token:
# Increment the `completed` counter for the token
await self.store.use_registration_token(registration_token)
# Indicate that the token has been successfully used so that
# pending is not decremented again when expiring old UIA sessions.
await self.store.mark_ui_auth_stage_complete(
session_id,
LoginType.REGISTRATION_TOKEN,
True,
)

await self.registration_handler.post_registration_actions(
user_id=registered_user_id,
auth_result=auth_result,
@ -868,6 +921,11 @@ def _calculate_registration_flows(
for flow in flows:
flow.insert(0, LoginType.RECAPTCHA)

# Prepend registration token to all flows if we're requiring a token
if config.registration_requires_token:
for flow in flows:
flow.insert(0, LoginType.REGISTRATION_TOKEN)

return flows


@ -876,4 +934,5 @@ def register_servlets(hs, http_server):
MsisdnRegisterRequestTokenRestServlet(hs).register(http_server)
UsernameAvailabilityRestServlet(hs).register(http_server)
RegistrationSubmitTokenServlet(hs).register(http_server)
RegistrationTokenValidityRestServlet(hs).register(http_server)
RegisterRestServlet(hs).register(http_server)

@ -13,18 +13,25 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import Codes, ShadowBanError, SynapseError
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.http.server import HttpServer
from synapse.http.servlet import (
RestServlet,
assert_params_in_dict,
parse_json_object_from_request,
)
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict
from synapse.util import stringutils

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


@ -41,9 +48,6 @@ class RoomUpgradeRestServlet(RestServlet):
}

Creates a new room and shuts down the old one. Returns the ID of the new room.

Args:
hs (synapse.server.HomeServer):
"""

PATTERNS = client_patterns(
@ -51,13 +55,15 @@ class RoomUpgradeRestServlet(RestServlet):
"/rooms/(?P<room_id>[^/]*)/upgrade$"
)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self._hs = hs
self._room_creation_handler = hs.get_room_creation_handler()
self._auth = hs.get_auth()

async def on_POST(self, request, room_id):
async def on_POST(
self, request: SynapseRequest, room_id: str
) -> Tuple[int, JsonDict]:
requester = await self._auth.get_user_by_req(request)

content = parse_json_object_from_request(request)
@ -84,5 +90,5 @@ class RoomUpgradeRestServlet(RestServlet):
return 200, ret


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
RoomUpgradeRestServlet(hs).register(http_server)

@ -12,13 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import Codes, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet
from synapse.types import UserID
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict, UserID

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


@ -32,13 +38,15 @@ class UserSharedRoomsServlet(RestServlet):
releases=(),  # This is an unstable feature
)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.store = hs.get_datastore()
self.user_directory_active = hs.config.update_user_directory

async def on_GET(self, request, user_id):
async def on_GET(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:

if not self.user_directory_active:
raise SynapseError(
@ -63,5 +71,5 @@ class UserSharedRoomsServlet(RestServlet):
return 200, {"joined": list(rooms)}


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
UserSharedRoomsServlet(hs).register(http_server)

@ -14,17 +14,26 @@
import itertools
import logging
from collections import defaultdict
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Tuple
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union

from synapse.api.constants import Membership, PresenceState
from synapse.api.errors import Codes, StoreError, SynapseError
from synapse.api.filtering import DEFAULT_FILTER_COLLECTION, FilterCollection
from synapse.api.presence import UserPresenceState
from synapse.events.utils import (
format_event_for_client_v2_without_room_id,
format_event_raw,
)
from synapse.handlers.presence import format_user_presence_state
from synapse.handlers.sync import KnockedSyncResult, SyncConfig
from synapse.handlers.sync import (
ArchivedSyncResult,
InvitedSyncResult,
JoinedSyncResult,
KnockedSyncResult,
SyncConfig,
SyncResult,
)
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict, StreamToken
@ -192,6 +201,8 @@ class SyncRestServlet(RestServlet):
return 200, {}

time_now = self.clock.time_msec()
# We know that the the requester has an access token since appservices
# cannot use sync.
response_content = await self.encode_response(
time_now, sync_result, requester.access_token_id, filter_collection
)
@ -199,7 +210,13 @@ class SyncRestServlet(RestServlet):
logger.debug("Event formatting complete")
return 200, response_content

async def encode_response(self, time_now, sync_result, access_token_id, filter):
async def encode_response(
self,
time_now: int,
sync_result: SyncResult,
access_token_id: Optional[int],
filter: FilterCollection,
) -> JsonDict:
logger.debug("Formatting events in sync response")
if filter.event_format == "client":
event_formatter = format_event_for_client_v2_without_room_id
@ -234,7 +251,7 @@ class SyncRestServlet(RestServlet):

logger.debug("building sync response dict")

response: dict = defaultdict(dict)
response: JsonDict = defaultdict(dict)
response["next_batch"] = await sync_result.next_batch.to_string(self.store)

if sync_result.account_data:
@ -274,6 +291,8 @@ class SyncRestServlet(RestServlet):
if archived:
response["rooms"][Membership.LEAVE] = archived

# By the time we get here groups is no longer optional.
assert sync_result.groups is not None
if sync_result.groups.join:
response["groups"][Membership.JOIN] = sync_result.groups.join
if sync_result.groups.invite:
@ -284,7 +303,7 @@ class SyncRestServlet(RestServlet):
return response

@staticmethod
def encode_presence(events, time_now):
def encode_presence(events: List[UserPresenceState], time_now: int) -> JsonDict:
return {
"events": [
{
@ -299,25 +318,27 @@ class SyncRestServlet(RestServlet):
}

async def encode_joined(
self, rooms, time_now, token_id, event_fields, event_formatter
):
self,
rooms: List[JoinedSyncResult],
time_now: int,
token_id: Optional[int],
event_fields: List[str],
event_formatter: Callable[[JsonDict], JsonDict],
) -> JsonDict:
"""
Encode the joined rooms in a sync result

Args:
rooms(list[synapse.handlers.sync.JoinedSyncResult]): list of sync
results for rooms this user is joined to
time_now(int): current time - used as a baseline for age
calculations
token_id(int): ID of the user's auth token - used for namespacing
rooms: list of sync results for rooms this user is joined to
time_now: current time - used as a baseline for age calculations
token_id: ID of the user's auth token - used for namespacing
of transaction IDs
event_fields(list<str>): List of event fields to include. If empty,
event_fields: List of event fields to include. If empty,
all fields will be returned.
event_formatter (func[dict]): function to convert from federation format
event_formatter: function to convert from federation format
to client format
Returns:
dict[str, dict[str, object]]: the joined rooms list, in our
response format
The joined rooms list, in our response format
"""
joined = {}
for room in rooms:
@ -332,23 +353,26 @@ class SyncRestServlet(RestServlet):

return joined

async def encode_invited(self, rooms, time_now, token_id, event_formatter):
async def encode_invited(
self,
rooms: List[InvitedSyncResult],
time_now: int,
token_id: Optional[int],
event_formatter: Callable[[JsonDict], JsonDict],
) -> JsonDict:
"""
Encode the invited rooms in a sync result

Args:
rooms(list[synapse.handlers.sync.InvitedSyncResult]): list of
sync results for rooms this user is invited to
time_now(int): current time - used as a baseline for age
calculations
token_id(int): ID of the user's auth token - used for namespacing
rooms: list of sync results for rooms this user is invited to
time_now: current time - used as a baseline for age calculations
token_id: ID of the user's auth token - used for namespacing
of transaction IDs
event_formatter (func[dict]): function to convert from federation format
event_formatter: function to convert from federation format
to client format

Returns:
dict[str, dict[str, object]]: the invited rooms list, in our
response format
The invited rooms list, in our response format
"""
invited = {}
for room in rooms:
@ -371,7 +395,7 @@ class SyncRestServlet(RestServlet):
self,
rooms: List[KnockedSyncResult],
time_now: int,
token_id: int,
token_id: Optional[int],
event_formatter: Callable[[Dict], Dict],
) -> Dict[str, Dict[str, Any]]:
"""
@ -422,25 +446,26 @@ class SyncRestServlet(RestServlet):
return knocked

async def encode_archived(
self, rooms, time_now, token_id, event_fields, event_formatter
):
self,
rooms: List[ArchivedSyncResult],
time_now: int,
token_id: Optional[int],
event_fields: List[str],
event_formatter: Callable[[JsonDict], JsonDict],
) -> JsonDict:
"""
Encode the archived rooms in a sync result

Args:
rooms (list[synapse.handlers.sync.ArchivedSyncResult]): list of
sync results for rooms this user is joined to
time_now(int): current time - used as a baseline for age
calculations
token_id(int): ID of the user's auth token - used for namespacing
rooms: list of sync results for rooms this user is joined to
time_now: current time - used as a baseline for age calculations
token_id: ID of the user's auth token - used for namespacing
of transaction IDs
event_fields(list<str>): List of event fields to include. If empty,
event_fields: List of event fields to include. If empty,
all fields will be returned.
event_formatter (func[dict]): function to convert from federation format
to client format
event_formatter: function to convert from federation format to client format
Returns:
dict[str, dict[str, object]]: The invited rooms list, in our
response format
The archived rooms list, in our response format
"""
joined = {}
for room in rooms:
@ -456,23 +481,27 @@ class SyncRestServlet(RestServlet):

return joined

async def encode_room(
self, room, time_now, token_id, joined, only_fields, event_formatter
):
self,
room: Union[JoinedSyncResult, ArchivedSyncResult],
time_now: int,
token_id: Optional[int],
joined: bool,
only_fields: Optional[List[str]],
event_formatter: Callable[[JsonDict], JsonDict],
) -> JsonDict:
"""
Args:
room (JoinedSyncResult|ArchivedSyncResult): sync result for a
single room
time_now (int): current time - used as a baseline for age
calculations
token_id (int): ID of the user's auth token - used for namespacing
room: sync result for a single room
time_now: current time - used as a baseline for age calculations
token_id: ID of the user's auth token - used for namespacing
of transaction IDs
joined (bool): True if the user is joined to this room - will mean
joined: True if the user is joined to this room - will mean
we handle ephemeral events
only_fields(list<str>): Optional. The list of event fields to include.
event_formatter (func[dict]): function to convert from federation format
only_fields: Optional. The list of event fields to include.
event_formatter: function to convert from federation format
to client format
Returns:
dict[str, object]: the room, encoded in our response format
The room, encoded in our response format
"""

def serialize(events):
@ -508,7 +537,7 @@ class SyncRestServlet(RestServlet):

account_data = room.account_data

result = {
result: JsonDict = {
"timeline": {
"events": serialized_timeline,
"prev_batch": await room.timeline.prev_batch.to_string(self.store),
@ -519,6 +548,7 @@ class SyncRestServlet(RestServlet):
}

if joined:
assert isinstance(room, JoinedSyncResult)
ephemeral_events = room.ephemeral
result["ephemeral"] = {"events": ephemeral_events}
result["unread_notifications"] = room.unread_notifications
@ -528,5 +558,5 @@ class SyncRestServlet(RestServlet):
return result


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
SyncRestServlet(hs).register(http_server)

@ -13,12 +13,19 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import AuthError
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


@ -29,12 +36,14 @@ class TagListServlet(RestServlet):

PATTERNS = client_patterns("/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags")

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.store = hs.get_datastore()

async def on_GET(self, request, user_id, room_id):
async def on_GET(
self, request: SynapseRequest, user_id: str, room_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
if user_id != requester.user.to_string():
raise AuthError(403, "Cannot get tags for other users.")
@ -54,12 +63,14 @@ class TagServlet(RestServlet):
"/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags/(?P<tag>[^/]*)"
)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.handler = hs.get_account_data_handler()

async def on_PUT(self, request, user_id, room_id, tag):
async def on_PUT(
self, request: SynapseRequest, user_id: str, room_id: str, tag: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
if user_id != requester.user.to_string():
raise AuthError(403, "Cannot add tags for other users.")
@ -70,7 +81,9 @@ class TagServlet(RestServlet):

return 200, {}

async def on_DELETE(self, request, user_id, room_id, tag):
async def on_DELETE(
self, request: SynapseRequest, user_id: str, room_id: str, tag: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
if user_id != requester.user.to_string():
raise AuthError(403, "Cannot add tags for other users.")
@ -80,6 +93,6 @@ class TagServlet(RestServlet):
return 200, {}


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
TagListServlet(hs).register(http_server)
TagServlet(hs).register(http_server)

@ -12,27 +12,33 @@
# See the License for the specific language governing permissions and
# limitations under the License.


import logging
from typing import TYPE_CHECKING, Dict, List, Tuple

from synapse.api.constants import ThirdPartyEntityKind
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class ThirdPartyProtocolsServlet(RestServlet):
PATTERNS = client_patterns("/thirdparty/protocols")

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()

self.auth = hs.get_auth()
self.appservice_handler = hs.get_application_service_handler()

async def on_GET(self, request):
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
await self.auth.get_user_by_req(request, allow_guest=True)

protocols = await self.appservice_handler.get_3pe_protocols()
@ -42,13 +48,15 @@ class ThirdPartyProtocolsServlet(RestServlet):
class ThirdPartyProtocolServlet(RestServlet):
PATTERNS = client_patterns("/thirdparty/protocol/(?P<protocol>[^/]+)$")

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()

self.auth = hs.get_auth()
self.appservice_handler = hs.get_application_service_handler()

async def on_GET(self, request, protocol):
async def on_GET(
self, request: SynapseRequest, protocol: str
) -> Tuple[int, JsonDict]:
await self.auth.get_user_by_req(request, allow_guest=True)

protocols = await self.appservice_handler.get_3pe_protocols(
@ -63,16 +71,18 @@ class ThirdPartyProtocolServlet(RestServlet):
class ThirdPartyUserServlet(RestServlet):
PATTERNS = client_patterns("/thirdparty/user(/(?P<protocol>[^/]+))?$")

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()

self.auth = hs.get_auth()
self.appservice_handler = hs.get_application_service_handler()

async def on_GET(self, request, protocol):
async def on_GET(
self, request: SynapseRequest, protocol: str
) -> Tuple[int, List[JsonDict]]:
await self.auth.get_user_by_req(request, allow_guest=True)

fields = request.args
fields: Dict[bytes, List[bytes]] = request.args  # type: ignore[assignment]
fields.pop(b"access_token", None)

results = await self.appservice_handler.query_3pe(
@ -85,16 +95,18 @@ class ThirdPartyUserServlet(RestServlet):
class ThirdPartyLocationServlet(RestServlet):
PATTERNS = client_patterns("/thirdparty/location(/(?P<protocol>[^/]+))?$")

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()

self.auth = hs.get_auth()
self.appservice_handler = hs.get_application_service_handler()

async def on_GET(self, request, protocol):
async def on_GET(
self, request: SynapseRequest, protocol: str
) -> Tuple[int, List[JsonDict]]:
await self.auth.get_user_by_req(request, allow_guest=True)

fields = request.args
fields: Dict[bytes, List[bytes]] = request.args  # type: ignore[assignment]
fields.pop(b"access_token", None)

results = await self.appservice_handler.query_3pe(
@ -104,7 +116,7 @@ class ThirdPartyLocationServlet(RestServlet):
return 200, results


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
ThirdPartyProtocolsServlet(hs).register(http_server)
ThirdPartyProtocolServlet(hs).register(http_server)
ThirdPartyUserServlet(hs).register(http_server)

@ -12,11 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import TYPE_CHECKING

from twisted.web.server import Request

from synapse.api.errors import AuthError
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer


class TokenRefreshRestServlet(RestServlet):
"""
@ -26,12 +34,12 @@ class TokenRefreshRestServlet(RestServlet):

PATTERNS = client_patterns("/tokenrefresh")

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()

async def on_POST(self, request):
async def on_POST(self, request: Request) -> None:
raise AuthError(403, "tokenrefresh is no longer supported.")


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
TokenRefreshRestServlet(hs).register(http_server)

@ -13,29 +13,32 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict

from ._base import client_patterns

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class UserDirectorySearchRestServlet(RestServlet):
PATTERNS = client_patterns("/user_directory/search$")

def __init__(self, hs):
"""
Args:
hs (synapse.server.HomeServer): server
"""
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.auth = hs.get_auth()
self.user_directory_handler = hs.get_user_directory_handler()

async def on_POST(self, request):
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
"""Searches for users in directory

Returns:
@ -75,5 +78,5 @@ class UserDirectorySearchRestServlet(RestServlet):
return 200, results


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
UserDirectorySearchRestServlet(hs).register(http_server)

@ -17,9 +17,17 @@

import logging
import re
from typing import TYPE_CHECKING, Tuple

from twisted.web.server import Request

from synapse.api.constants import RoomCreationPreset
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet
from synapse.types import JsonDict

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)

@ -27,7 +35,7 @@ logger = logging.getLogger(__name__)
class VersionsRestServlet(RestServlet):
PATTERNS = [re.compile("^/_matrix/client/versions$")]

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.config = hs.config

@ -45,7 +53,7 @@ class VersionsRestServlet(RestServlet):
in self.config.encryption_enabled_by_default_for_room_presets
)

def on_GET(self, request):
def on_GET(self, request: Request) -> Tuple[int, JsonDict]:
return (
200,
{
@ -89,5 +97,5 @@ class VersionsRestServlet(RestServlet):
)


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
VersionsRestServlet(hs).register(http_server)

@ -15,20 +15,27 @@
import base64
import hashlib
import hmac
from typing import TYPE_CHECKING, Tuple

from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet
from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns
from synapse.types import JsonDict

if TYPE_CHECKING:
from synapse.server import HomeServer


class VoipRestServlet(RestServlet):
PATTERNS = client_patterns("/voip/turnServer$", v1=True)

def __init__(self, hs):
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.auth = hs.get_auth()

async def on_GET(self, request):
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(
request, self.hs.config.turn_allow_guests
)
@ -69,5 +76,5 @@ class VoipRestServlet(RestServlet):
)


def register_servlets(hs, http_server):
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
VoipRestServlet(hs).register(http_server)

@ -15,7 +15,7 @@


import logging
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple

from twisted.web.server import Request

@ -414,9 +414,9 @@ class ThumbnailResource(DirectServeJsonResource):

if desired_method == "crop":
# Thumbnails that match equal or larger sizes of desired width/height.
crop_info_list = []
crop_info_list: List[Tuple[int, int, int, bool, int, Dict[str, Any]]] = []
# Other thumbnails.
crop_info_list2 = []
crop_info_list2: List[Tuple[int, int, int, bool, int, Dict[str, Any]]] = []
for info in thumbnail_infos:
# Skip thumbnails generated with different methods.
if info["thumbnail_method"] != "crop":
@ -451,15 +451,19 @@ class ThumbnailResource(DirectServeJsonResource):
info,
)
)
# Pick the most appropriate thumbnail. Some values of `desired_width` and
# `desired_height` may result in a tie, in which case we avoid comparing on
# the thumbnail info dictionary and pick the thumbnail that appears earlier
# in the list of candidates.
if crop_info_list:
thumbnail_info = min(crop_info_list)[-1]
thumbnail_info = min(crop_info_list, key=lambda t: t[:-1])[-1]
elif crop_info_list2:
thumbnail_info = min(crop_info_list2)[-1]
thumbnail_info = min(crop_info_list2, key=lambda t: t[:-1])[-1]
elif desired_method == "scale":
# Thumbnails that match equal or larger sizes of desired width/height.
info_list = []
info_list: List[Tuple[int, bool, int, Dict[str, Any]]] = []
# Other thumbnails.
info_list2 = []
info_list2: List[Tuple[int, bool, int, Dict[str, Any]]] = []

for info in thumbnail_infos:
# Skip thumbnails generated with different methods.
@ -477,10 +481,14 @@ class ThumbnailResource(DirectServeJsonResource):
info_list2.append(
(size_quality, type_quality, length_quality, info)
)
# Pick the most appropriate thumbnail. Some values of `desired_width` and
# `desired_height` may result in a tie, in which case we avoid comparing on
# the thumbnail info dictionary and pick the thumbnail that appears earlier
# in the list of candidates.
if info_list:
thumbnail_info = min(info_list)[-1]
thumbnail_info = min(info_list, key=lambda t: t[:-1])[-1]
elif info_list2:
thumbnail_info = min(info_list2)[-1]
thumbnail_info = min(info_list2, key=lambda t: t[:-1])[-1]

if thumbnail_info:
return FileInfo(

@ -76,6 +76,7 @@ from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.event_auth import EventAuthHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.federation_event import FederationEventHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
@ -546,6 +547,10 @@ class HomeServer(metaclass=abc.ABCMeta):
def get_federation_handler(self) -> FederationHandler:
return FederationHandler(self)

@cache_in_self
def get_federation_event_handler(self) -> FederationEventHandler:
return FederationEventHandler(self)

@cache_in_self
def get_identity_handler(self) -> IdentityHandler:
return IdentityHandler(self)

@ -12,26 +12,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Optional
from typing import TYPE_CHECKING, Optional

from synapse.api.constants import EventTypes, Membership, RoomCreationPreset
from synapse.events import EventBase
from synapse.types import UserID, create_requester
from synapse.util.caches.descriptors import cached

if TYPE_CHECKING:
from synapse.server import HomeServer

logger = logging.getLogger(__name__)

SERVER_NOTICE_ROOM_TAG = "m.server_notice"


class ServerNoticesManager:
def __init__(self, hs):
"""

Args:
hs (synapse.server.HomeServer):
"""

def __init__(self, hs: "HomeServer"):
self._store = hs.get_datastore()
self._config = hs.config
self._account_data_handler = hs.get_account_data_handler()
@ -58,6 +55,7 @@ class ServerNoticesManager:
event_content: dict,
type: str = EventTypes.Message,
state_key: Optional[str] = None,
txn_id: Optional[str] = None,
) -> EventBase:
"""Send a notice to the given user

@ -68,6 +66,7 @@ class ServerNoticesManager:
event_content: content of event to send
type: type of event
is_state_event: Is the event a state event
txn_id: The transaction ID.
"""
room_id = await self.get_or_create_notice_room_for_user(user_id)
await self.maybe_invite_user_to_room(user_id, room_id)
@ -90,7 +89,7 @@ class ServerNoticesManager:
event_dict["state_key"] = state_key

event, _ = await self._event_creation_handler.create_and_send_nonmember_event(
requester, event_dict, ratelimit=False
requester, event_dict, ratelimit=False, txn_id=txn_id
)
return event

Some files were not shown because too many files have changed in this diff.