Mirror of https://git.anonymousland.org/anonymousland/synapse-product.git (synced 2024-10-01 08:25:44 -04:00)

Commit 45f28a9d2f: Merge branch 'develop' of github.com:matrix-org/synapse into erikj/histogram_extremities
Deleted file (a docker-compose config for the Python 3.5 / Postgres 9.4 test environment; evidently the .buildkite/docker-compose.py35.pg94.yaml dropped from the pipeline below):

@@ -1,21 +0,0 @@
-version: '3.1'
-
-services:
-
-  postgres:
-    image: postgres:9.4
-    environment:
-      POSTGRES_PASSWORD: postgres
-
-  testenv:
-    image: python:3.5
-    depends_on:
-      - postgres
-    env_file: .env
-    environment:
-      SYNAPSE_POSTGRES_HOST: postgres
-      SYNAPSE_POSTGRES_USER: postgres
-      SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
-    volumes:
-      - ..:/app
@@ -5,8 +5,8 @@ steps:
   - command:
       - "python -m pip install tox"
-      - "tox -e pep8"
-    label: "\U0001F9F9 PEP-8"
+      - "tox -e check_codestyle"
+    label: "\U0001F9F9 Check Style"
     plugins:
      - docker#v3.0.1:
          image: "python:3.6"
@@ -116,23 +116,6 @@ steps:
         - exit_status: 2
           limit: 2

-  - label: ":python: 3.5 / :postgres: 9.4"
-    env:
-      TRIAL_FLAGS: "-j 4"
-    command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
-    plugins:
-      - docker-compose#v2.1.0:
-          run: testenv
-          config:
-            - .buildkite/docker-compose.py35.pg94.yaml
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
   - label: ":python: 3.5 / :postgres: 9.5"
     env:
       TRIAL_FLAGS: "-j 4"
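For context, each of these jobs runs tox inside the docker-compose test environment via the docker-compose Buildkite plugin. For the removed job that is roughly equivalent to the following manual invocation (the exact plugin mechanics are an assumption; the config path and command are taken verbatim from the removed block):

    docker-compose -f .buildkite/docker-compose.py35.pg94.yaml run testenv \
        bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'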
@@ -72,3 +72,6 @@ Jason Robinson <jasonr at matrix.org>

 Joseph Weston <joseph at weston.cloud>
  + Add admin API for querying HS version
+
+Benjamin Saunders <ben.e.saunders at gmail dot com>
+ * Documentation improvements
@@ -30,21 +30,20 @@ use github's pull request workflow to review the contribution, and either ask
 you to make any refinements needed or merge it and make them ourselves. The
 changes will then land on master when we next do a release.

-We use `CircleCI <https://circleci.com/gh/matrix-org>`_ and `Travis CI
-<https://travis-ci.org/matrix-org/synapse>`_ for continuous integration. All
-pull requests to synapse get automatically tested by Travis and CircleCI.
-If your change breaks the build, this will be shown in GitHub, so please
-keep an eye on the pull request for feedback.
+We use `CircleCI <https://circleci.com/gh/matrix-org>`_ and `Buildkite
+<https://buildkite.com/matrix-dot-org/synapse>`_ for continuous integration.
+Buildkite builds need to be authorised by a maintainer. If your change breaks
+the build, this will be shown in GitHub, so please keep an eye on the pull
+request for feedback.

 To run unit tests in a local development environment, you can use:

-- ``tox -e py27`` (requires tox to be installed by ``pip install tox``) for
-  SQLite-backed Synapse on Python 2.7.
-- ``tox -e py35`` for SQLite-backed Synapse on Python 3.5.
+- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
+  for SQLite-backed Synapse on Python 3.5.
 - ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
-- ``tox -e py27-postgres`` for PostgreSQL-backed Synapse on Python 2.7
+- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
   (requires a running local PostgreSQL with access to create databases).
-- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 2.7
+- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
   (requires Docker). Entirely self-contained, recommended if you don't want to
   set up PostgreSQL yourself.
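For reference, the post-change local test workflow described above reduces to a few commands; a minimal sketch, run from a checkout of the repository:

    pip install tox           # one-time setup

    tox -e py35               # SQLite-backed Synapse, Python 3.5
    tox -e py36               # SQLite-backed Synapse, Python 3.6
    tox -e py36-postgres      # needs a local PostgreSQL that can create databases

    ./test_postgresql.sh      # self-contained PostgreSQL run under Docker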
README.rst (32 lines changed):

@@ -340,8 +340,11 @@ log lines and looking for any 'Processed request' lines which take more than
 a few seconds to execute. Please let us know at #synapse:matrix.org if
 you see this failure mode so we can help debug it, however.

-Help!! Synapse eats all my RAM!
--------------------------------
+Help!! Synapse is slow and eats all my RAM/CPU!
+-----------------------------------------------
+
+First, ensure you are running the latest version of Synapse, using Python 3
+with a PostgreSQL database.

 Synapse's architecture is quite RAM hungry currently - we deliberately
 cache a lot of recent room data and metadata in RAM in order to speed up

@@ -352,14 +355,29 @@ variable. The default is 0.5, which can be decreased to reduce RAM usage
 in memory constrained environments, or increased if performance starts to
 degrade.

+However, degraded performance due to a low cache factor, common on
+machines with slow disks, often leads to explosions in memory use due to
+backlogged requests. In this case, reducing the cache factor will make
+things worse. Instead, try increasing it drastically. 2.0 is a good
+starting value.
+
-Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
-improvement in overall amount, and especially in terms of giving back RAM
-to the OS. To use it, the library must simply be put in the LD_PRELOAD
-environment variable when launching Synapse. On Debian, this can be done
-by installing the ``libjemalloc1`` package and adding this line to
-``/etc/default/matrix-synapse``::
+Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
+improvement in overall memory use, and especially in terms of giving back
+RAM to the OS. To use it, the library must simply be put in the
+LD_PRELOAD environment variable when launching Synapse. On Debian, this
+can be done by installing the ``libjemalloc1`` package and adding this
+line to ``/etc/default/matrix-synapse``::

     LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1

 This can make a significant difference on Python 2.7 - it's unclear how
 much of an improvement it provides on Python 3.x.
+
+If you're encountering high CPU use by the Synapse process itself, you
+may be affected by a bug with presence tracking that leads to a
+massive excess of outgoing federation requests (see `discussion
+<https://github.com/matrix-org/synapse/issues/3971>`_). If metrics
+indicate that your server is also issuing far more outgoing federation
+requests than can be accounted for by your users' activity, this is a
+likely cause. The misbehavior can be worked around by setting
+``use_presence: false`` in the Synapse config file.
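Concretely, the tuning advice above reduces to a few one-line settings. A sketch for the Debian layout the text describes (other installs differ; homeserver.yaml as the config filename is an assumption, and setting the cache factor through this file presumes the service reads it as an environment file):

    # /etc/default/matrix-synapse
    LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
    SYNAPSE_CACHE_FACTOR=2.0

    # homeserver.yaml -- workaround for the presence-tracking CPU bug
    use_presence: false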
UPGRADE.rst (27 lines changed):

@@ -49,6 +49,33 @@ returned by the Client-Server API:
     # configured on port 443.
     curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"

+Upgrading to v1.1
+=================
+
+Synapse 1.1 removes support for older Python and PostgreSQL versions, as
+outlined in `our deprecation notice <https://matrix.org/blog/2019/04/08/synapse-deprecating-postgres-9-4-and-python-2-x>`_.
+
+Minimum Python Version
+----------------------
+
+Synapse v1.1 has a minimum Python requirement of Python 3.5. Python 3.6 or
+Python 3.7 are recommended as they have improved internal string handling,
+significantly reducing memory usage.
+
+If you use current versions of the Matrix.org-distributed Debian packages or
+Docker images, action is not required.
+
+If you install Synapse in a Python virtual environment, please see "Upgrading to
+v0.34.0" for notes on setting up a new virtualenv under Python 3.
+
+Minimum PostgreSQL Version
+--------------------------
+
+If using PostgreSQL under Synapse, you will need to use PostgreSQL 9.5 or above.
+Please see the
+`PostgreSQL documentation <https://www.postgresql.org/docs/11/upgrading.html>`_
+for more details on upgrading your database.
+
 Upgrading to v1.0
 =================

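For virtualenv installs, the "Upgrading to v0.34.0" procedure referenced above amounts to creating a fresh Python 3 virtualenv and reinstalling Synapse into it; a minimal sketch (the env path is illustrative, not prescribed by this diff):

    virtualenv -p python3 ~/synapse/env
    source ~/synapse/env/bin/activate
    pip install --upgrade pip setuptools
    pip install matrix-synapse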
New changelog files (one line each):

    changelog.d/4276.misc     Improve README section on performance troubleshooting.
    changelog.d/5015.misc     Add logging to 3pid invite signature verification.
    changelog.d/5042.bugfix   Fix bug processing incoming events over federation if call to `/get_missing_events` fails.
    changelog.d/5446.misc     Update Python syntax in contrib/ to Python 3.
    changelog.d/5448.removal  PostgreSQL 9.4 is no longer supported. Synapse requires Postgres 9.5 or above for Postgres support.
    changelog.d/5474.feature  Allow server admins to define implementations of extra rules for allowing or denying incoming events.
    changelog.d/5477.feature  Allow server admins to define implementations of extra rules for allowing or denying incoming events.
    changelog.d/5478.misc     The demo servers talk to each other again.
    changelog.d/5480.misc     Add an EXPERIMENTAL config option to try and periodically clean up extremities by sending dummy events.
    changelog.d/5482.misc     Synapse's codebase is now formatted by `black`.
    changelog.d/5490.bugfix   Fix failure to start under docker with SAML support enabled.
    changelog.d/5493.misc     Track deactivated accounts in the database.
@@ -15,6 +15,7 @@
 # limitations under the License.

 """ Starts a synapse client console. """
+from __future__ import print_function

 from twisted.internet import reactor, defer, threads
 from http import TwistedHttpClient
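This hunk, like the rest of the diff below, updates the contrib scripts to Python 3 syntax (see changelog.d/5446.misc above). The one-line __future__ import is what makes the wholesale print conversion in the following hunks behave identically on both interpreters; a quick illustration, not part of the diff:

    from __future__ import print_function

    # Without the import, Python 2 parses print("a", "b") as a print
    # statement applied to a tuple and outputs: ('a', 'b')
    # With it (and on any Python 3), print is a function and outputs: a b
    print("a", "b")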
@@ -36,9 +37,8 @@ from signedjson.sign import verify_signed_json, SignatureVerifyException

 CONFIG_JSON = "cmdclient_config.json"

-TRUSTED_ID_SERVERS = [
-    'localhost:8001'
-]
+TRUSTED_ID_SERVERS = ["localhost:8001"]


 class SynapseCmd(cmd.Cmd):

@@ -58,7 +58,7 @@ class SynapseCmd(cmd.Cmd):
             "token": token,
             "verbose": "on",
             "complete_usernames": "on",
-            "send_delivery_receipts": "on"
+            "send_delivery_receipts": "on",
         }
         self.path_prefix = "/_matrix/client/api/v1"
         self.event_stream_token = "END"

@@ -109,7 +109,7 @@ class SynapseCmd(cmd.Cmd):
         by using $. E.g. 'config roomid room1' then 'raw get /rooms/$roomid'.
         """
         if len(line) == 0:
-            print json.dumps(self.config, indent=4)
+            print(json.dumps(self.config, indent=4))
             return

         try:

@@ -119,12 +119,11 @@ class SynapseCmd(cmd.Cmd):
             config_rules = [  # key, valid_values
                 ("verbose", ["on", "off"]),
                 ("complete_usernames", ["on", "off"]),
-                ("send_delivery_receipts", ["on", "off"])
+                ("send_delivery_receipts", ["on", "off"]),
             ]
             for key, valid_vals in config_rules:
                 if key == args["key"] and args["val"] not in valid_vals:
-                    print "%s value must be one of %s" % (args["key"],
-                                                          valid_vals)
+                    print("%s value must be one of %s" % (args["key"], valid_vals))
                     return

             # toggle the http client verbosity

@@ -133,11 +132,11 @@ class SynapseCmd(cmd.Cmd):

             # assign the new config
             self.config[args["key"]] = args["val"]
-            print json.dumps(self.config, indent=4)
+            print(json.dumps(self.config, indent=4))

             save_config(self.config)
         except Exception as e:
-            print e
+            print(e)

     def do_register(self, line):
         """Registers for a new account: "register <userid> <noupdate>"

@@ -153,33 +152,32 @@ class SynapseCmd(cmd.Cmd):
             pwd = getpass.getpass("Type a password for this user: ")
             pwd2 = getpass.getpass("Retype the password: ")
             if pwd != pwd2 or len(pwd) == 0:
-                print "Password mismatch."
+                print("Password mismatch.")
                 pwd = None
             else:
                 password = pwd

-        body = {
-            "type": "m.login.password"
-        }
+        body = {"type": "m.login.password"}
         if "userid" in args:
             body["user"] = args["userid"]
         if password:
             body["password"] = password

-        reactor.callFromThread(self._do_register, body,
-                               "noupdate" not in args)
+        reactor.callFromThread(self._do_register, body, "noupdate" not in args)

     @defer.inlineCallbacks
     def _do_register(self, data, update_config):
         # check the registration flows
         url = self._url() + "/register"
         json_res = yield self.http_client.do_request("GET", url)
-        print json.dumps(json_res, indent=4)
+        print(json.dumps(json_res, indent=4))

         passwordFlow = None
         for flow in json_res["flows"]:
-            if flow["type"] == "m.login.recaptcha" or ("stages" in flow and "m.login.recaptcha" in flow["stages"]):
-                print "Unable to register: Home server requires captcha."
+            if flow["type"] == "m.login.recaptcha" or (
+                "stages" in flow and "m.login.recaptcha" in flow["stages"]
+            ):
+                print("Unable to register: Home server requires captcha.")
                 return
             if flow["type"] == "m.login.password" and "stages" not in flow:
                 passwordFlow = flow
@@ -189,7 +187,7 @@ class SynapseCmd(cmd.Cmd):
             return

         json_res = yield self.http_client.do_request("POST", url, data=data)
-        print json.dumps(json_res, indent=4)
+        print(json.dumps(json_res, indent=4))
         if update_config and "user_id" in json_res:
             self.config["user"] = json_res["user_id"]
             self.config["token"] = json_res["access_token"]

@@ -201,9 +199,7 @@ class SynapseCmd(cmd.Cmd):
         """
         try:
             args = self._parse(line, ["user_id"], force_keys=True)
-            can_login = threads.blockingCallFromThread(
-                reactor,
-                self._check_can_login)
+            can_login = threads.blockingCallFromThread(reactor, self._check_can_login)
             if can_login:
                 p = getpass.getpass("Enter your password: ")
                 user = args["user_id"]

@@ -213,27 +209,23 @@ class SynapseCmd(cmd.Cmd):
                     user = "@" + user + ":" + domain

                 reactor.callFromThread(self._do_login, user, p)
-                #print " got %s " % p
+                # print " got %s " % p
         except Exception as e:
-            print e
+            print(e)

     @defer.inlineCallbacks
     def _do_login(self, user, password):
         path = "/login"
-        data = {
-            "user": user,
-            "password": password,
-            "type": "m.login.password"
-        }
+        data = {"user": user, "password": password, "type": "m.login.password"}
         url = self._url() + path
         json_res = yield self.http_client.do_request("POST", url, data=data)
-        print json_res
+        print(json_res)

         if "access_token" in json_res:
             self.config["user"] = user
             self.config["token"] = json_res["access_token"]
             save_config(self.config)
-            print "Login successful."
+            print("Login successful.")

     @defer.inlineCallbacks
     def _check_can_login(self):

@@ -242,18 +234,19 @@ class SynapseCmd(cmd.Cmd):
         # submitting!
         url = self._url() + path
         json_res = yield self.http_client.do_request("GET", url)
-        print json_res
+        print(json_res)

         if "flows" not in json_res:
-            print "Failed to find any login flows."
+            print("Failed to find any login flows.")
             defer.returnValue(False)

         flow = json_res["flows"][0]  # assume first is the one we want.
-        if ("type" not in flow or "m.login.password" != flow["type"] or
-                "stages" in flow):
+        if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
             fallback_url = self._url() + "/login/fallback"
-            print ("Unable to login via the command line client. Please visit "
-                   "%s to login." % fallback_url)
+            print(
+                "Unable to login via the command line client. Please visit "
+                "%s to login." % fallback_url
+            )
             defer.returnValue(False)
         defer.returnValue(True)

@@ -263,21 +256,33 @@ class SynapseCmd(cmd.Cmd):
         <clientSecret> A string of characters generated when requesting an email that you'll supply in subsequent calls to identify yourself
         <sendAttempt> The number of times the user has requested an email. Leave this the same between requests to retry the request at the transport level. Increment it to request that the email be sent again.
         """
-        args = self._parse(line, ['address', 'clientSecret', 'sendAttempt'])
+        args = self._parse(line, ["address", "clientSecret", "sendAttempt"])

-        postArgs = {'email': args['address'], 'clientSecret': args['clientSecret'], 'sendAttempt': args['sendAttempt']}
+        postArgs = {
+            "email": args["address"],
+            "clientSecret": args["clientSecret"],
+            "sendAttempt": args["sendAttempt"],
+        }

         reactor.callFromThread(self._do_emailrequest, postArgs)

     @defer.inlineCallbacks
     def _do_emailrequest(self, args):
-        url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/requestToken"
+        url = (
+            self._identityServerUrl()
+            + "/_matrix/identity/api/v1/validate/email/requestToken"
+        )

-        json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
-                                                     headers={'Content-Type': ['application/x-www-form-urlencoded']})
-        print json_res
-        if 'sid' in json_res:
-            print "Token sent. Your session ID is %s" % (json_res['sid'])
+        json_res = yield self.http_client.do_request(
+            "POST",
+            url,
+            data=urllib.urlencode(args),
+            jsonreq=False,
+            headers={"Content-Type": ["application/x-www-form-urlencoded"]},
+        )
+        print(json_res)
+        if "sid" in json_res:
+            print("Token sent. Your session ID is %s" % (json_res["sid"]))

     def do_emailvalidate(self, line):
         """Validate and associate a third party ID
@@ -285,39 +290,56 @@ class SynapseCmd(cmd.Cmd):
         <token> The token sent to your third party identifier address
         <clientSecret> The same clientSecret you supplied in requestToken
         """
-        args = self._parse(line, ['sid', 'token', 'clientSecret'])
+        args = self._parse(line, ["sid", "token", "clientSecret"])

-        postArgs = { 'sid' : args['sid'], 'token' : args['token'], 'clientSecret': args['clientSecret'] }
+        postArgs = {
+            "sid": args["sid"],
+            "token": args["token"],
+            "clientSecret": args["clientSecret"],
+        }

         reactor.callFromThread(self._do_emailvalidate, postArgs)

     @defer.inlineCallbacks
     def _do_emailvalidate(self, args):
-        url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/submitToken"
+        url = (
+            self._identityServerUrl()
+            + "/_matrix/identity/api/v1/validate/email/submitToken"
+        )

-        json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
-                                                     headers={'Content-Type': ['application/x-www-form-urlencoded']})
-        print json_res
+        json_res = yield self.http_client.do_request(
+            "POST",
+            url,
+            data=urllib.urlencode(args),
+            jsonreq=False,
+            headers={"Content-Type": ["application/x-www-form-urlencoded"]},
+        )
+        print(json_res)

     def do_3pidbind(self, line):
         """Validate and associate a third party ID
         <sid> The session ID (sid) given to you in the response to requestToken
         <clientSecret> The same clientSecret you supplied in requestToken
         """
-        args = self._parse(line, ['sid', 'clientSecret'])
+        args = self._parse(line, ["sid", "clientSecret"])

-        postArgs = { 'sid' : args['sid'], 'clientSecret': args['clientSecret'] }
-        postArgs['mxid'] = self.config["user"]
+        postArgs = {"sid": args["sid"], "clientSecret": args["clientSecret"]}
+        postArgs["mxid"] = self.config["user"]

         reactor.callFromThread(self._do_3pidbind, postArgs)

     @defer.inlineCallbacks
     def _do_3pidbind(self, args):
-        url = self._identityServerUrl()+"/_matrix/identity/api/v1/3pid/bind"
+        url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"

-        json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
-                                                     headers={'Content-Type': ['application/x-www-form-urlencoded']})
-        print json_res
+        json_res = yield self.http_client.do_request(
+            "POST",
+            url,
+            data=urllib.urlencode(args),
+            jsonreq=False,
+            headers={"Content-Type": ["application/x-www-form-urlencoded"]},
+        )
+        print(json_res)

     def do_join(self, line):
         """Joins a room: "join <roomid>" """

@@ -325,7 +347,7 @@ class SynapseCmd(cmd.Cmd):
             args = self._parse(line, ["roomid"], force_keys=True)
             self._do_membership_change(args["roomid"], "join", self._usr())
         except Exception as e:
-            print e
+            print(e)

     def do_joinalias(self, line):
         try:

@@ -333,7 +355,7 @@ class SynapseCmd(cmd.Cmd):
             path = "/join/%s" % urllib.quote(args["roomname"])
             reactor.callFromThread(self._run_and_pprint, "POST", path, {})
         except Exception as e:
-            print e
+            print(e)

     def do_topic(self, line):
         """"topic [set|get] <roomid> [<newtopic>]"

@@ -343,26 +365,24 @@ class SynapseCmd(cmd.Cmd):
         try:
             args = self._parse(line, ["action", "roomid", "topic"])
             if "action" not in args or "roomid" not in args:
-                print "Must specify set|get and a room ID."
+                print("Must specify set|get and a room ID.")
                 return
             if args["action"].lower() not in ["set", "get"]:
-                print "Must specify set|get, not %s" % args["action"]
+                print("Must specify set|get, not %s" % args["action"])
                 return

             path = "/rooms/%s/topic" % urllib.quote(args["roomid"])

             if args["action"].lower() == "set":
                 if "topic" not in args:
-                    print "Must specify a new topic."
+                    print("Must specify a new topic.")
                     return
-                body = {
-                    "topic": args["topic"]
-                }
+                body = {"topic": args["topic"]}
                 reactor.callFromThread(self._run_and_pprint, "PUT", path, body)
             elif args["action"].lower() == "get":
                 reactor.callFromThread(self._run_and_pprint, "GET", path)
         except Exception as e:
-            print e
+            print(e)

     def do_invite(self, line):
         """Invite a user to a room: "invite <userid> <roomid>" """

@@ -373,49 +393,64 @@ class SynapseCmd(cmd.Cmd):

             reactor.callFromThread(self._do_invite, args["roomid"], user_id)
         except Exception as e:
-            print e
+            print(e)

     @defer.inlineCallbacks
     def _do_invite(self, roomid, userstring):
-        if (not userstring.startswith('@') and
-                self._is_on("complete_usernames")):
-            url = self._identityServerUrl()+"/_matrix/identity/api/v1/lookup"
+        if not userstring.startswith("@") and self._is_on("complete_usernames"):
+            url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"

-            json_res = yield self.http_client.do_request("GET", url, qparams={'medium':'email','address':userstring})
+            json_res = yield self.http_client.do_request(
+                "GET", url, qparams={"medium": "email", "address": userstring}
+            )

             mxid = None

-            if 'mxid' in json_res and 'signatures' in json_res:
-                url = self._identityServerUrl()+"/_matrix/identity/api/v1/pubkey/ed25519"
+            if "mxid" in json_res and "signatures" in json_res:
+                url = (
+                    self._identityServerUrl()
+                    + "/_matrix/identity/api/v1/pubkey/ed25519"
+                )

                 pubKey = None
                 pubKeyObj = yield self.http_client.do_request("GET", url)
-                if 'public_key' in pubKeyObj:
-                    pubKey = nacl.signing.VerifyKey(pubKeyObj['public_key'], encoder=nacl.encoding.HexEncoder)
+                if "public_key" in pubKeyObj:
+                    pubKey = nacl.signing.VerifyKey(
+                        pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
+                    )
                 else:
-                    print "No public key found in pubkey response!"
+                    print("No public key found in pubkey response!")

                 sigValid = False

                 if pubKey:
-                    for signame in json_res['signatures']:
+                    for signame in json_res["signatures"]:
                         if signame not in TRUSTED_ID_SERVERS:
-                            print "Ignoring signature from untrusted server %s" % (signame)
+                            print(
+                                "Ignoring signature from untrusted server %s"
+                                % (signame)
+                            )
                         else:
                             try:
                                 verify_signed_json(json_res, signame, pubKey)
                                 sigValid = True
-                                print "Mapping %s -> %s correctly signed by %s" % (userstring, json_res['mxid'], signame)
+                                print(
+                                    "Mapping %s -> %s correctly signed by %s"
+                                    % (userstring, json_res["mxid"], signame)
+                                )
                                 break
                             except SignatureVerifyException as e:
-                                print "Invalid signature from %s" % (signame)
-                                print e
+                                print("Invalid signature from %s" % (signame))
+                                print(e)

                 if sigValid:
-                    print "Resolved 3pid %s to %s" % (userstring, json_res['mxid'])
-                    mxid = json_res['mxid']
+                    print("Resolved 3pid %s to %s" % (userstring, json_res["mxid"]))
+                    mxid = json_res["mxid"]
                 else:
-                    print "Got association for %s but couldn't verify signature" % (userstring)
+                    print(
+                        "Got association for %s but couldn't verify signature"
+                        % (userstring)
+                    )

         if not mxid:
             mxid = "@" + userstring + ":" + self._domain()
@@ -428,18 +463,17 @@ class SynapseCmd(cmd.Cmd):
             args = self._parse(line, ["roomid"], force_keys=True)
             self._do_membership_change(args["roomid"], "leave", self._usr())
         except Exception as e:
-            print e
+            print(e)

     def do_send(self, line):
         """Sends a message. "send <roomid> <body>" """
         args = self._parse(line, ["roomid", "body"])
         txn_id = "txn%s" % int(time.time())
-        path = "/rooms/%s/send/m.room.message/%s" % (urllib.quote(args["roomid"]),
-                                                     txn_id)
-        body_json = {
-            "msgtype": "m.text",
-            "body": args["body"]
-        }
+        path = "/rooms/%s/send/m.room.message/%s" % (
+            urllib.quote(args["roomid"]),
+            txn_id,
+        )
+        body_json = {"msgtype": "m.text", "body": args["body"]}
         reactor.callFromThread(self._run_and_pprint, "PUT", path, body_json)

     def do_list(self, line):

@@ -453,10 +487,10 @@ class SynapseCmd(cmd.Cmd):
         """
         args = self._parse(line, ["type", "roomid", "qp"])
         if not "type" in args or not "roomid" in args:
-            print "Must specify type and room ID."
+            print("Must specify type and room ID.")
             return
         if args["type"] not in ["members", "messages"]:
-            print "Unrecognised type: %s" % args["type"]
+            print("Unrecognised type: %s" % args["type"])
             return
         room_id = args["roomid"]
         path = "/rooms/%s/%s" % (urllib.quote(room_id), args["type"])

@@ -468,11 +502,10 @@ class SynapseCmd(cmd.Cmd):
                     key_value = key_value_str.split("=")
                     qp[key_value[0]] = key_value[1]
                 except:
-                    print "Bad query param: %s" % key_value
+                    print("Bad query param: %s" % key_value)
                     return

-        reactor.callFromThread(self._run_and_pprint, "GET", path,
-                               query_params=qp)
+        reactor.callFromThread(self._run_and_pprint, "GET", path, query_params=qp)

     def do_create(self, line):
         """Creates a room.

@@ -508,14 +541,22 @@ class SynapseCmd(cmd.Cmd):
         args = self._parse(line, ["method", "path", "data"])
         # sanity check
         if "method" not in args or "path" not in args:
-            print "Must specify path and method."
+            print("Must specify path and method.")
             return

         args["method"] = args["method"].upper()
-        valid_methods = ["PUT", "GET", "POST", "DELETE",
-                         "XPUT", "XGET", "XPOST", "XDELETE"]
+        valid_methods = [
+            "PUT",
+            "GET",
+            "POST",
+            "DELETE",
+            "XPUT",
+            "XGET",
+            "XPOST",
+            "XDELETE",
+        ]
         if args["method"] not in valid_methods:
-            print "Unsupported method: %s" % args["method"]
+            print("Unsupported method: %s" % args["method"])
             return

         if "data" not in args:

@@ -524,7 +565,7 @@ class SynapseCmd(cmd.Cmd):
             try:
                 args["data"] = json.loads(args["data"])
             except Exception as e:
-                print "Data is not valid JSON. %s" % e
+                print("Data is not valid JSON. %s" % e)
                 return

         qp = {"access_token": self._tok()}

@@ -540,10 +581,13 @@ class SynapseCmd(cmd.Cmd):
             except:
                 pass

-        reactor.callFromThread(self._run_and_pprint, args["method"],
-                               args["path"],
-                               args["data"],
-                               query_params=qp)
+        reactor.callFromThread(
+            self._run_and_pprint,
+            args["method"],
+            args["path"],
+            args["data"],
+            query_params=qp,
+        )

     def do_stream(self, line):
         """Stream data from the server: "stream <longpoll timeout ms>" """
@@ -553,7 +597,7 @@ class SynapseCmd(cmd.Cmd):
         try:
             timeout = int(args["timeout"])
         except ValueError:
-            print "Timeout must be in milliseconds."
+            print("Timeout must be in milliseconds.")
             return
         reactor.callFromThread(self._do_event_stream, timeout)

@@ -564,15 +608,18 @@ class SynapseCmd(cmd.Cmd):
             {
                 "access_token": self._tok(),
                 "timeout": str(timeout),
-                "from": self.event_stream_token
-            })
-        print json.dumps(res, indent=4)
+                "from": self.event_stream_token,
+            },
+        )
+        print(json.dumps(res, indent=4))

         if "chunk" in res:
             for event in res["chunk"]:
-                if (event["type"] == "m.room.message" and
-                        self._is_on("send_delivery_receipts") and
-                        event["user_id"] != self._usr()):  # not sent by us
+                if (
+                    event["type"] == "m.room.message"
+                    and self._is_on("send_delivery_receipts")
+                    and event["user_id"] != self._usr()
+                ):  # not sent by us
                     self._send_receipt(event, "d")

         # update the position in the stram

@@ -580,18 +627,28 @@ class SynapseCmd(cmd.Cmd):
             self.event_stream_token = res["end"]

     def _send_receipt(self, event, feedback_type):
-        path = ("/rooms/%s/messages/%s/%s/feedback/%s/%s" %
-                (urllib.quote(event["room_id"]), event["user_id"], event["msg_id"],
-                 self._usr(), feedback_type))
+        path = "/rooms/%s/messages/%s/%s/feedback/%s/%s" % (
+            urllib.quote(event["room_id"]),
+            event["user_id"],
+            event["msg_id"],
+            self._usr(),
+            feedback_type,
+        )
         data = {}
-        reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data,
-                               alt_text="Sent receipt for %s" % event["msg_id"])
+        reactor.callFromThread(
+            self._run_and_pprint,
+            "PUT",
+            path,
+            data=data,
+            alt_text="Sent receipt for %s" % event["msg_id"],
+        )

     def _do_membership_change(self, roomid, membership, userid):
-        path = "/rooms/%s/state/m.room.member/%s" % (urllib.quote(roomid), urllib.quote(userid))
-        data = {
-            "membership": membership
-        }
+        path = "/rooms/%s/state/m.room.member/%s" % (
+            urllib.quote(roomid),
+            urllib.quote(userid),
+        )
+        data = {"membership": membership}
         reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data)

     def do_displayname(self, line):

@@ -644,15 +701,20 @@ class SynapseCmd(cmd.Cmd):
         for i, arg in enumerate(line_args):
             for config_key in self.config:
                 if ("$" + config_key) in arg:
-                    arg = arg.replace("$" + config_key,
-                                      self.config[config_key])
+                    arg = arg.replace("$" + config_key, self.config[config_key])
             line_args[i] = arg

         return dict(zip(keys, line_args))

     @defer.inlineCallbacks
-    def _run_and_pprint(self, method, path, data=None,
-                        query_params={"access_token": None}, alt_text=None):
+    def _run_and_pprint(
+        self,
+        method,
+        path,
+        data=None,
+        query_params={"access_token": None},
+        alt_text=None,
+    ):
         """ Runs an HTTP request and pretty prints the output.

         Args:

@@ -665,31 +727,31 @@ class SynapseCmd(cmd.Cmd):
         if "access_token" in query_params:
             query_params["access_token"] = self._tok()

-        json_res = yield self.http_client.do_request(method, url,
-                                                     data=data,
-                                                     qparams=query_params)
+        json_res = yield self.http_client.do_request(
+            method, url, data=data, qparams=query_params
+        )
         if alt_text:
-            print alt_text
+            print(alt_text)
         else:
-            print json.dumps(json_res, indent=4)
+            print(json.dumps(json_res, indent=4))


 def save_config(config):
-    with open(CONFIG_JSON, 'w') as out:
+    with open(CONFIG_JSON, "w") as out:
         json.dump(config, out)


 def main(server_url, identity_server_url, username, token, config_path):
-    print "Synapse command line client"
-    print "==========================="
-    print "Server: %s" % server_url
-    print "Type 'help' to get started."
-    print "Close this console with CTRL+C then CTRL+D."
+    print("Synapse command line client")
+    print("===========================")
+    print("Server: %s" % server_url)
+    print("Type 'help' to get started.")
+    print("Close this console with CTRL+C then CTRL+D.")
     if not username or not token:
-        print "- 'register <username>' - Register an account"
-        print "- 'stream' - Connect to the event stream"
-        print "- 'create <roomid>' - Create a room"
-        print "- 'send <roomid> <message>' - Send a message"
+        print("- 'register <username>' - Register an account")
+        print("- 'stream' - Connect to the event stream")
+        print("- 'create <roomid>' - Create a room")
+        print("- 'send <roomid> <message>' - Send a message")
     http_client = TwistedHttpClient()

     # the command line client

@@ -699,13 +761,13 @@ def main(server_url, identity_server_url, username, token, config_path):
     global CONFIG_JSON
     CONFIG_JSON = config_path  # bit cheeky, but just overwrite the global
     try:
-        with open(config_path, 'r') as config:
+        with open(config_path, "r") as config:
             syn_cmd.config = json.load(config)
             try:
                 http_client.verbose = "on" == syn_cmd.config["verbose"]
             except:
                 pass
-            print "Loaded config from %s" % config_path
+            print("Loaded config from %s" % config_path)
     except:
         pass

@@ -716,27 +778,37 @@ def main(server_url, identity_server_url, username, token, config_path):
     reactor.run()


-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser("Starts a synapse client.")
     parser.add_argument(
-        "-s", "--server", dest="server", default="http://localhost:8008",
-        help="The URL of the home server to talk to.")
+        "-s",
+        "--server",
+        dest="server",
+        default="http://localhost:8008",
+        help="The URL of the home server to talk to.",
+    )
     parser.add_argument(
-        "-i", "--identity-server", dest="identityserver", default="http://localhost:8090",
-        help="The URL of the identity server to talk to.")
+        "-i",
+        "--identity-server",
+        dest="identityserver",
+        default="http://localhost:8090",
+        help="The URL of the identity server to talk to.",
+    )
     parser.add_argument(
-        "-u", "--username", dest="username",
-        help="Your username on the server.")
-    parser.add_argument(
-        "-t", "--token", dest="token",
-        help="Your access token.")
+        "-u", "--username", dest="username", help="Your username on the server."
+    )
+    parser.add_argument("-t", "--token", dest="token", help="Your access token.")
     parser.add_argument(
-        "-c", "--config", dest="config", default=CONFIG_JSON,
-        help="The location of the config.json file to read from.")
+        "-c",
+        "--config",
+        dest="config",
+        default=CONFIG_JSON,
+        help="The location of the config.json file to read from.",
+    )
     args = parser.parse_args()

     if not args.server:
-        print "You must supply a server URL to communicate with."
+        print("You must supply a server URL to communicate with.")
         parser.print_help()
         sys.exit(1)
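None of the argparse changes above alter behaviour, so invocation of the client is unchanged; a usage sketch (the script filename is not shown in this diff and is assumed here):

    python console.py -s http://localhost:8008 -i http://localhost:8090 -u myuser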
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from __future__ import print_function
 from twisted.web.client import Agent, readBody
 from twisted.web.http_headers import Headers
 from twisted.internet import defer, reactor
@@ -72,9 +73,7 @@ class TwistedHttpClient(HttpClient):
     @defer.inlineCallbacks
     def put_json(self, url, data):
         response = yield self._create_put_request(
-            url,
-            data,
-            headers_dict={"Content-Type": ["application/json"]}
+            url, data, headers_dict={"Content-Type": ["application/json"]}
         )
         body = yield readBody(response)
         defer.returnValue((response.code, body))

@@ -94,40 +93,34 @@ class TwistedHttpClient(HttpClient):
         """

         if "Content-Type" not in headers_dict:
-            raise defer.error(
-                RuntimeError("Must include Content-Type header for PUTs"))
+            raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))

         return self._create_request(
-            "PUT",
-            url,
-            producer=_JsonProducer(json_data),
-            headers_dict=headers_dict
+            "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )

     def _create_get_request(self, url, headers_dict={}):
         """ Wrapper of _create_request to issue a GET request
         """
-        return self._create_request(
-            "GET",
-            url,
-            headers_dict=headers_dict
-        )
+        return self._create_request("GET", url, headers_dict=headers_dict)

     @defer.inlineCallbacks
-    def do_request(self, method, url, data=None, qparams=None, jsonreq=True, headers={}):
+    def do_request(
+        self, method, url, data=None, qparams=None, jsonreq=True, headers={}
+    ):
         if qparams:
             url = "%s?%s" % (url, urllib.urlencode(qparams, True))

         if jsonreq:
             prod = _JsonProducer(data)
-            headers['Content-Type'] = ["application/json"];
+            headers["Content-Type"] = ["application/json"]
         else:
             prod = _RawProducer(data)

         if method in ["POST", "PUT"]:
-            response = yield self._create_request(method, url,
-                                                  producer=prod,
-                                                  headers_dict=headers)
+            response = yield self._create_request(
+                method, url, producer=prod, headers_dict=headers
+            )
         else:
             response = yield self._create_request(method, url)

@@ -141,27 +134,24 @@ class TwistedHttpClient(HttpClient):
         headers_dict["User-Agent"] = ["Synapse Cmd Client"]

         retries_left = 5
-        print "%s to %s with headers %s" % (method, url, headers_dict)
+        print("%s to %s with headers %s" % (method, url, headers_dict))
         if self.verbose and producer:
             if "password" in producer.data:
                 temp = producer.data["password"]
                 producer.data["password"] = "[REDACTED]"
-                print json.dumps(producer.data, indent=4)
+                print(json.dumps(producer.data, indent=4))
                 producer.data["password"] = temp
             else:
-                print json.dumps(producer.data, indent=4)
+                print(json.dumps(producer.data, indent=4))

         while True:
             try:
                 response = yield self.agent.request(
-                    method,
-                    url.encode("UTF8"),
-                    Headers(headers_dict),
-                    producer
+                    method, url.encode("UTF8"), Headers(headers_dict), producer
                 )
                 break
             except Exception as e:
-                print "uh oh: %s" % e
+                print("uh oh: %s" % e)
                 if retries_left:
                     yield self.sleep(2 ** (5 - retries_left))
                     retries_left -= 1

@@ -169,8 +159,8 @@ class TwistedHttpClient(HttpClient):
                     raise e

         if self.verbose:
-            print "Status %s %s" % (response.code, response.phrase)
-            print pformat(list(response.headers.getAllRawHeaders()))
+            print("Status %s %s" % (response.code, response.phrase))
+            print(pformat(list(response.headers.getAllRawHeaders())))
         defer.returnValue(response)

     def sleep(self, seconds):

@@ -178,6 +168,7 @@ class TwistedHttpClient(HttpClient):
         reactor.callLater(seconds, d.callback, seconds)
         return d

+
 class _RawProducer(object):
     def __init__(self, data):
         self.data = data

@@ -194,9 +185,11 @@ class _RawProducer(object):
     def stopProducing(self):
         pass

+
 class _JsonProducer(object):
     """ Used by the twisted http client to create the HTTP body from json
     """
+
     def __init__(self, jsn):
         self.data = jsn
         self.body = json.dumps(jsn).encode("utf8")
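The retry loop left intact above implements a small exponential backoff: retries_left starts at 5 and each failed attempt sleeps 2 ** (5 - retries_left) seconds before decrementing, giving delays of 1, 2, 4, 8 and 16 seconds. A standalone sketch of the same schedule:

    # mirrors the backoff in the request code above: five retries, doubling delay
    retries_left = 5
    delays = []
    while retries_left:
        delays.append(2 ** (5 - retries_left))
        retries_left -= 1
    print(delays)  # [1, 2, 4, 8, 16]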
@@ -19,13 +19,13 @@ from curses.ascii import isprint
 from twisted.internet import reactor


-class CursesStdIO():
+class CursesStdIO:
     def __init__(self, stdscr, callback=None):
         self.statusText = "Synapse test app -"
-        self.searchText = ''
+        self.searchText = ""
         self.stdscr = stdscr

-        self.logLine = ''
+        self.logLine = ""

         self.callback = callback

@@ -71,8 +71,7 @@ class CursesStdIO():
         i = 0
         index = len(self.lines) - 1
         while i < (self.rows - 3) and index >= 0:
-            self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index],
-                               curses.A_NORMAL)
+            self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index], curses.A_NORMAL)
             i = i + 1
             index = index - 1

@@ -85,15 +84,13 @@ class CursesStdIO():
             raise RuntimeError("TextTooLongError")

         self.stdscr.addstr(
-            self.rows - 2, 0,
-            text + ' ' * (self.cols - len(text)),
-            curses.A_STANDOUT)
+            self.rows - 2, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
+        )

     def printLogLine(self, text):
         self.stdscr.addstr(
-            0, 0,
-            text + ' ' * (self.cols - len(text)),
-            curses.A_STANDOUT)
+            0, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
+        )

     def doRead(self):
         """ Input is ready! """

@@ -105,7 +102,7 @@ class CursesStdIO():

         elif c == curses.KEY_ENTER or c == 10:
             text = self.searchText
-            self.searchText = ''
+            self.searchText = ""

             self.print_line(">> %s" % text)

@@ -122,11 +119,13 @@ class CursesStdIO():
             return
         self.searchText = self.searchText + chr(c)

-        self.stdscr.addstr(self.rows - 1, 0,
-                           self.searchText + (' ' * (
-                               self.cols - len(self.searchText) - 2)))
+        self.stdscr.addstr(
+            self.rows - 1,
+            0,
+            self.searchText + (" " * (self.cols - len(self.searchText) - 2)),
+        )

-        self.paintStatus(self.statusText + ' %d' % len(self.searchText))
+        self.paintStatus(self.statusText + " %d" % len(self.searchText))
         self.stdscr.move(self.rows - 1, len(self.searchText))
         self.stdscr.refresh()

@@ -143,7 +142,6 @@ class CursesStdIO():


 class Callback(object):
-
     def __init__(self, stdio):
         self.stdio = stdio

@@ -164,5 +162,5 @@ def main(stdscr):
     screen.close()


-if __name__ == '__main__':
+if __name__ == "__main__":
     curses.wrapper(main)
@@ -28,9 +28,7 @@ Currently assumes the local address is localhost:<port>
 """
 
 
-from synapse.federation import (
-    ReplicationHandler
-)
+from synapse.federation import ReplicationHandler
 
 from synapse.federation.units import Pdu
 
@@ -38,7 +36,7 @@ from synapse.util import origin_from_ucid
 
 from synapse.app.homeserver import SynapseHomeServer
 
-#from synapse.util.logutils import log_function
+# from synapse.util.logutils import log_function
 
 from twisted.internet import reactor, defer
 from twisted.python import log
@@ -83,7 +81,7 @@ class InputOutput(object):
             room_name, = m.groups()
             self.print_line("%s joining %s" % (self.user, room_name))
             self.server.join_room(room_name, self.user, self.user)
-            #self.print_line("OK.")
+            # self.print_line("OK.")
             return
 
         m = re.match("^invite (\S+) (\S+)$", line)
@@ -92,7 +90,7 @@ class InputOutput(object):
             room_name, invitee = m.groups()
             self.print_line("%s invited to %s" % (invitee, room_name))
             self.server.invite_to_room(room_name, self.user, invitee)
-            #self.print_line("OK.")
+            # self.print_line("OK.")
             return
 
         m = re.match("^send (\S+) (.*)$", line)
@@ -101,7 +99,7 @@ class InputOutput(object):
             room_name, body = m.groups()
             self.print_line("%s send to %s" % (self.user, room_name))
             self.server.send_message(room_name, self.user, body)
-            #self.print_line("OK.")
+            # self.print_line("OK.")
             return
 
         m = re.match("^backfill (\S+)$", line)
@@ -125,7 +123,6 @@ class InputOutput(object):
 
 
 class IOLoggerHandler(logging.Handler):
-
     def __init__(self, io):
         logging.Handler.__init__(self)
         self.io = io
@@ -142,6 +139,7 @@ class Room(object):
    """ Used to store (in memory) the current membership state of a room, and
    which home servers we should send PDUs associated with the room to.
    """
+
    def __init__(self, room_name):
        self.room_name = room_name
        self.invited = set()
@@ -175,6 +173,7 @@ class HomeServer(ReplicationHandler):
    """ A very basic home server implentation that allows people to join a
    room and then invite other people.
    """
+
    def __init__(self, server_name, replication_layer, output):
        self.server_name = server_name
        self.replication_layer = replication_layer
@@ -197,25 +196,26 @@ class HomeServer(ReplicationHandler):
             elif pdu.content["membership"] == "invite":
                 self._on_invite(pdu.origin, pdu.context, pdu.state_key)
             else:
-                self.output.print_line("#%s (unrec) %s = %s" %
-                    (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
+                self.output.print_line(
+                    "#%s (unrec) %s = %s"
+                    % (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
                 )
 
-    #def on_state_change(self, pdu):
+    # def on_state_change(self, pdu):
     ##self.output.print_line("#%s (state) %s *** %s" %
     ##(pdu.context, pdu.state_key, pdu.pdu_type)
     ##)
 
-    #if "joinee" in pdu.content:
-    #self._on_join(pdu.context, pdu.content["joinee"])
-    #elif "invitee" in pdu.content:
-    #self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
+    # if "joinee" in pdu.content:
+    # self._on_join(pdu.context, pdu.content["joinee"])
+    # elif "invitee" in pdu.content:
+    # self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
 
     def _on_message(self, pdu):
         """ We received a message
         """
-        self.output.print_line("#%s %s %s" %
-            (pdu.context, pdu.content["sender"], pdu.content["body"])
+        self.output.print_line(
+            "#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
         )
 
     def _on_join(self, context, joinee):
@@ -224,9 +224,7 @@ class HomeServer(ReplicationHandler):
         room = self._get_or_create_room(context)
         room.add_participant(joinee)
 
-        self.output.print_line("#%s %s %s" %
-            (context, joinee, "*** JOINED")
-        )
+        self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
 
     def _on_invite(self, origin, context, invitee):
         """ Someone has been invited
@@ -234,9 +232,7 @@ class HomeServer(ReplicationHandler):
         room = self._get_or_create_room(context)
         room.add_invited(invitee)
 
-        self.output.print_line("#%s %s %s" %
-            (context, invitee, "*** INVITED")
-        )
+        self.output.print_line("#%s %s %s" % (context, invitee, "*** INVITED"))
 
         if not room.have_got_metadata and origin is not self.server_name:
             logger.debug("Get room state")
@@ -318,7 +314,7 @@ class HomeServer(ReplicationHandler):
         return self.replication_layer.backfill(dest, room_name, limit)
 
     def _get_room_remote_servers(self, room_name):
-        return [i for i in self.joined_rooms.setdefault(room_name,).servers]
+        return [i for i in self.joined_rooms.setdefault(room_name).servers]
 
     def _get_or_create_room(self, room_name):
         return self.joined_rooms.setdefault(room_name, Room(room_name))
@@ -331,8 +327,8 @@ class HomeServer(ReplicationHandler):
 
 def main(stdscr):
     parser = argparse.ArgumentParser()
-    parser.add_argument('user', type=str)
-    parser.add_argument('-v', '--verbose', action='count')
+    parser.add_argument("user", type=str)
+    parser.add_argument("-v", "--verbose", action="count")
     args = parser.parse_args()
 
     user = args.user
@@ -342,8 +338,9 @@ def main(stdscr):
 
     root_logger = logging.getLogger()
 
-    formatter = logging.Formatter('%(asctime)s - %(name)s - %(lineno)d - '
-                                  '%(levelname)s - %(message)s')
+    formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(lineno)d - " "%(levelname)s - %(message)s"
+    )
     if not os.path.exists("logs"):
         os.makedirs("logs")
     fh = logging.FileHandler("logs/%s" % user)
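Note: _get_or_create_room above relies on dict.setdefault, which inserts the supplied default only when the key is missing; the trailing comma dropped from _get_room_remote_servers was purely cosmetic, since setdefault(room_name,) and setdefault(room_name) are the same call. A small self-contained sketch, with an illustrative Room stub that only mimics the class above:

    class Room(object):
        # Illustrative stand-in for the Room class above.
        def __init__(self, room_name):
            self.room_name = room_name
            self.servers = set()

    joined_rooms = {}

    # First call inserts a fresh Room; later calls return the existing one.
    # (Note the default Room(...) is constructed even when the key exists.)
    room = joined_rooms.setdefault("#test", Room("#test"))
    room.servers.add("hs1.example.com")
    assert joined_rooms.setdefault("#test") is room  # default None, key present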
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -48,7 +50,7 @@ def make_graph(pdus, room, filename_prefix):
             c = colors.pop()
             color_map[o] = c
         except:
-            print "Run out of colours!"
+            print("Run out of colours!")
             color_map[o] = "black"
 
     graph = pydot.Dot(graph_name="Test")
@@ -57,9 +59,9 @@ def make_graph(pdus, room, filename_prefix):
         name = make_name(pdu.get("pdu_id"), pdu.get("origin"))
         pdu_map[name] = pdu
 
-        t = datetime.datetime.fromtimestamp(
-            float(pdu["ts"]) / 1000
-        ).strftime('%Y-%m-%d %H:%M:%S,%f')
+        t = datetime.datetime.fromtimestamp(float(pdu["ts"]) / 1000).strftime(
+            "%Y-%m-%d %H:%M:%S,%f"
+        )
 
         label = (
             "<"
@@ -79,11 +81,7 @@ def make_graph(pdus, room, filename_prefix):
             "depth": pdu.get("depth"),
         }
 
-        node = pydot.Node(
-            name=name,
-            label=label,
-            color=color_map[pdu.get("origin")]
-        )
+        node = pydot.Node(name=name, label=label, color=color_map[pdu.get("origin")])
         node_map[name] = node
         graph.add_node(node)
 
@@ -93,7 +91,7 @@ def make_graph(pdus, room, filename_prefix):
             end_name = make_name(i, o)
 
             if end_name not in node_map:
-                print "%s not in nodes" % end_name
+                print("%s not in nodes" % end_name)
                 continue
 
             edge = pydot.Edge(node_map[start_name], node_map[end_name])
@@ -107,14 +105,13 @@ def make_graph(pdus, room, filename_prefix):
 
             if prev_state_name in node_map:
                 state_edge = pydot.Edge(
-                    node_map[start_name], node_map[prev_state_name],
-                    style='dotted'
+                    node_map[start_name], node_map[prev_state_name], style="dotted"
                 )
                 graph.add_edge(state_edge)
 
-    graph.write('%s.dot' % filename_prefix, format='raw', prog='dot')
+    graph.write("%s.dot" % filename_prefix, format="raw", prog="dot")
     # graph.write_png("%s.png" % filename_prefix, prog='dot')
-    graph.write_svg("%s.svg" % filename_prefix, prog='dot')
+    graph.write_svg("%s.svg" % filename_prefix, prog="dot")
 
 
 def get_pdus(host, room):
@@ -134,11 +131,10 @@ if __name__ == "__main__":
         "Requires pydot."
     )
     parser.add_argument(
-        "-p", "--prefix", dest="prefix",
-        help="String to prefix output files with"
+        "-p", "--prefix", dest="prefix", help="String to prefix output files with"
     )
-    parser.add_argument('host')
-    parser.add_argument('room')
+    parser.add_argument("host")
+    parser.add_argument("room")
 
     args = parser.parse_args()
 
@@ -36,10 +36,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
     args = [room_id]
 
     if limit:
-        sql += (
-            " ORDER BY topological_ordering DESC, stream_ordering DESC "
-            "LIMIT ?"
-        )
+        sql += " ORDER BY topological_ordering DESC, stream_ordering DESC " "LIMIT ?"
 
         args.append(limit)
 
@@ -56,9 +53,8 @@ def make_graph(db_name, room_id, file_prefix, limit):
 
     for event in events:
         c = conn.execute(
-            "SELECT state_group FROM event_to_state_groups "
-            "WHERE event_id = ?",
-            (event.event_id,)
+            "SELECT state_group FROM event_to_state_groups " "WHERE event_id = ?",
+            (event.event_id,),
         )
 
         res = c.fetchone()
@@ -69,7 +65,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
 
         t = datetime.datetime.fromtimestamp(
             float(event.origin_server_ts) / 1000
-        ).strftime('%Y-%m-%d %H:%M:%S,%f')
+        ).strftime("%Y-%m-%d %H:%M:%S,%f")
 
         content = json.dumps(unfreeze(event.get_dict()["content"]))
 
@@ -93,10 +89,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
             "state_group": state_group,
         }
 
-        node = pydot.Node(
-            name=event.event_id,
-            label=label,
-        )
+        node = pydot.Node(name=event.event_id, label=label)
 
         node_map[event.event_id] = node
         graph.add_node(node)
@@ -106,10 +99,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
             try:
                 end_node = node_map[prev_id]
             except:
-                end_node = pydot.Node(
-                    name=prev_id,
-                    label="<<b>%s</b>>" % (prev_id,),
-                )
+                end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
 
                 node_map[prev_id] = end_node
                 graph.add_node(end_node)
@@ -121,18 +111,16 @@ def make_graph(db_name, room_id, file_prefix, limit):
         if len(event_ids) <= 1:
             continue
 
-        cluster = pydot.Cluster(
-            str(group),
-            label="<State Group: %s>" % (str(group),)
-        )
+        cluster = pydot.Cluster(str(group), label="<State Group: %s>" % (str(group),))
 
         for event_id in event_ids:
             cluster.add_node(node_map[event_id])
 
         graph.add_subgraph(cluster)
 
-    graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
-    graph.write_svg("%s.svg" % file_prefix, prog='dot')
+    graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
+    graph.write_svg("%s.svg" % file_prefix, prog="dot")
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
@@ -141,16 +129,15 @@ if __name__ == "__main__":
         "Requires pydot."
     )
     parser.add_argument(
-        "-p", "--prefix", dest="prefix",
+        "-p",
+        "--prefix",
+        dest="prefix",
         help="String to prefix output files with",
-        default="graph_output"
+        default="graph_output",
     )
-    parser.add_argument(
-        "-l", "--limit",
-        help="Only retrieve the last N events.",
-    )
-    parser.add_argument('db')
-    parser.add_argument('room')
+    parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
+    parser.add_argument("db")
+    parser.add_argument("room")
 
     args = parser.parse_args()
 
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 # Copyright 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -26,22 +28,22 @@ from six import string_types
 
 
 def make_graph(file_name, room_id, file_prefix, limit):
-    print "Reading lines"
+    print("Reading lines")
     with open(file_name) as f:
         lines = f.readlines()
 
-    print "Read lines"
+    print("Read lines")
 
     events = [FrozenEvent(json.loads(line)) for line in lines]
 
-    print "Loaded events."
+    print("Loaded events.")
 
     events.sort(key=lambda e: e.depth)
 
-    print "Sorted events"
+    print("Sorted events")
 
     if limit:
-        events = events[-int(limit):]
+        events = events[-int(limit) :]
 
     node_map = {}
 
@@ -50,12 +52,12 @@ def make_graph(file_name, room_id, file_prefix, limit):
     for event in events:
         t = datetime.datetime.fromtimestamp(
             float(event.origin_server_ts) / 1000
-        ).strftime('%Y-%m-%d %H:%M:%S,%f')
+        ).strftime("%Y-%m-%d %H:%M:%S,%f")
 
         content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
         content = content.replace("\n", "<br/>\n")
 
-        print content
+        print(content)
         content = []
         for key, value in unfreeze(event.get_dict()["content"]).items():
             if value is None:
@@ -66,15 +68,16 @@ def make_graph(file_name, room_id, file_prefix, limit):
                 value = json.dumps(value)
 
             content.append(
-                "<b>%s</b>: %s," % (
-                    cgi.escape(key, quote=True).encode("ascii", 'xmlcharrefreplace'),
-                    cgi.escape(value, quote=True).encode("ascii", 'xmlcharrefreplace'),
+                "<b>%s</b>: %s,"
+                % (
+                    cgi.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
+                    cgi.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
                 )
             )
 
         content = "<br/>\n".join(content)
 
-        print content
+        print(content)
 
         label = (
             "<"
@@ -94,25 +97,19 @@ def make_graph(file_name, room_id, file_prefix, limit):
             "depth": event.depth,
         }
 
-        node = pydot.Node(
-            name=event.event_id,
-            label=label,
-        )
+        node = pydot.Node(name=event.event_id, label=label)
 
         node_map[event.event_id] = node
         graph.add_node(node)
 
-    print "Created Nodes"
+    print("Created Nodes")
 
     for event in events:
         for prev_id, _ in event.prev_events:
             try:
                 end_node = node_map[prev_id]
             except:
-                end_node = pydot.Node(
-                    name=prev_id,
-                    label="<<b>%s</b>>" % (prev_id,),
-                )
+                end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
 
                 node_map[prev_id] = end_node
                 graph.add_node(end_node)
@@ -120,15 +117,16 @@ def make_graph(file_name, room_id, file_prefix, limit):
             edge = pydot.Edge(node_map[event.event_id], end_node)
             graph.add_edge(edge)
 
-    print "Created edges"
+    print("Created edges")
 
-    graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
+    graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
 
-    print "Created Dot"
+    print("Created Dot")
 
-    graph.write_svg("%s.svg" % file_prefix, prog='dot')
+    graph.write_svg("%s.svg" % file_prefix, prog="dot")
 
+    print("Created svg")
 
-    print "Created svg"
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
@@ -137,16 +135,15 @@ if __name__ == "__main__":
         "Requires pydot."
     )
     parser.add_argument(
-        "-p", "--prefix", dest="prefix",
+        "-p",
+        "--prefix",
+        dest="prefix",
        help="String to prefix output files with",
-        default="graph_output"
+        default="graph_output",
    )
-    parser.add_argument(
-        "-l", "--limit",
-        help="Only retrieve the last N events.",
-    )
-    parser.add_argument('event_file')
-    parser.add_argument('room')
+    parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
+    parser.add_argument("event_file")
+    parser.add_argument("room")
 
     args = parser.parse_args()
 
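Note: each of these scripts gains "from __future__ import print_function", which makes print(...) behave as the Python 3 function even under Python 2, so the converted calls run the same on both interpreters. A minimal illustration, not taken from the diff:

    from __future__ import print_function

    # Without the future import, Python 2 would treat the parentheses as a
    # tuple and print ('Created', 'svg'); with it, both interpreters print
    # the two arguments separated by a space.
    print("Created", "svg")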
@@ -10,6 +10,7 @@ the bridge.
 Requires:
     npm install jquery jsdom
 """
+from __future__ import print_function
 
 import gevent
 import grequests
@@ -19,24 +20,25 @@ import urllib
 import subprocess
 import time
 
-#ACCESS_TOKEN="" #
+# ACCESS_TOKEN="" #
 
-MATRIXBASE = 'https://matrix.org/_matrix/client/api/v1/'
-MYUSERNAME = '@davetest:matrix.org'
+MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
+MYUSERNAME = "@davetest:matrix.org"
 
-HTTPBIND = 'https://meet.jit.si/http-bind'
-#HTTPBIND = 'https://jitsi.vuc.me/http-bind'
-#ROOMNAME = "matrix"
+HTTPBIND = "https://meet.jit.si/http-bind"
+# HTTPBIND = 'https://jitsi.vuc.me/http-bind'
+# ROOMNAME = "matrix"
 ROOMNAME = "pibble"
 
-HOST="guest.jit.si"
-#HOST="jitsi.vuc.me"
+HOST = "guest.jit.si"
+# HOST="jitsi.vuc.me"
 
-TURNSERVER="turn.guest.jit.si"
-#TURNSERVER="turn.jitsi.vuc.me"
+TURNSERVER = "turn.guest.jit.si"
+# TURNSERVER="turn.jitsi.vuc.me"
 
+ROOMDOMAIN = "meet.jit.si"
+# ROOMDOMAIN="conference.jitsi.vuc.me"
 
-ROOMDOMAIN="meet.jit.si"
-#ROOMDOMAIN="conference.jitsi.vuc.me"
 
 class TrivialMatrixClient:
     def __init__(self, access_token):
@@ -45,38 +47,50 @@ class TrivialMatrixClient:
 
     def getEvent(self):
         while True:
-            url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
+            url = (
+                MATRIXBASE
+                + "events?access_token="
+                + self.access_token
+                + "&timeout=60000"
+            )
             if self.token:
-                url += "&from="+self.token
+                url += "&from=" + self.token
             req = grequests.get(url)
             resps = grequests.map([req])
             obj = json.loads(resps[0].content)
-            print "incoming from matrix",obj
-            if 'end' not in obj:
+            print("incoming from matrix", obj)
+            if "end" not in obj:
                 continue
-            self.token = obj['end']
-            if len(obj['chunk']):
-                return obj['chunk'][0]
+            self.token = obj["end"]
+            if len(obj["chunk"]):
+                return obj["chunk"][0]
 
     def joinRoom(self, roomId):
-        url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
-        print url
-        headers={ 'Content-Type': 'application/json' }
-        req = grequests.post(url, headers=headers, data='{}')
+        url = MATRIXBASE + "rooms/" + roomId + "/join?access_token=" + self.access_token
+        print(url)
+        headers = {"Content-Type": "application/json"}
+        req = grequests.post(url, headers=headers, data="{}")
         resps = grequests.map([req])
         obj = json.loads(resps[0].content)
-        print "response: ",obj
+        print("response: ", obj)
 
     def sendEvent(self, roomId, evType, event):
-        url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
-        print url
-        print json.dumps(event)
-        headers={ 'Content-Type': 'application/json' }
+        url = (
+            MATRIXBASE
+            + "rooms/"
+            + roomId
+            + "/send/"
+            + evType
+            + "?access_token="
+            + self.access_token
+        )
+        print(url)
+        print(json.dumps(event))
+        headers = {"Content-Type": "application/json"}
         req = grequests.post(url, headers=headers, data=json.dumps(event))
         resps = grequests.map([req])
         obj = json.loads(resps[0].content)
-        print "response: ",obj
+        print("response: ", obj)
 
 
 xmppClients = {}
@@ -85,39 +99,40 @@ xmppClients = {}
 def matrixLoop():
     while True:
         ev = matrixCli.getEvent()
-        print ev
-        if ev['type'] == 'm.room.member':
-            print 'membership event'
-            if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
-                roomId = ev['room_id']
-                print "joining room %s" % (roomId)
+        print(ev)
+        if ev["type"] == "m.room.member":
+            print("membership event")
+            if ev["membership"] == "invite" and ev["state_key"] == MYUSERNAME:
+                roomId = ev["room_id"]
+                print("joining room %s" % (roomId))
                 matrixCli.joinRoom(roomId)
-        elif ev['type'] == 'm.room.message':
-            if ev['room_id'] in xmppClients:
-                print "already have a bridge for that user, ignoring"
+        elif ev["type"] == "m.room.message":
+            if ev["room_id"] in xmppClients:
+                print("already have a bridge for that user, ignoring")
                 continue
-            print "got message, connecting"
-            xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
-            gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
-        elif ev['type'] == 'm.call.invite':
-            print "Incoming call"
-            #sdp = ev['content']['offer']['sdp']
-            #print "sdp: %s" % (sdp)
-            #xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
-            #gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
-        elif ev['type'] == 'm.call.answer':
-            print "Call answered"
-            sdp = ev['content']['answer']['sdp']
-            if ev['room_id'] not in xmppClients:
-                print "We didn't have a call for that room"
+            print("got message, connecting")
+            xmppClients[ev["room_id"]] = TrivialXmppClient(ev["room_id"], ev["user_id"])
+            gevent.spawn(xmppClients[ev["room_id"]].xmppLoop)
+        elif ev["type"] == "m.call.invite":
+            print("Incoming call")
+            # sdp = ev['content']['offer']['sdp']
+            # print "sdp: %s" % (sdp)
+            # xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
+            # gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
+        elif ev["type"] == "m.call.answer":
+            print("Call answered")
+            sdp = ev["content"]["answer"]["sdp"]
+            if ev["room_id"] not in xmppClients:
+                print("We didn't have a call for that room")
                 continue
             # should probably check call ID too
-            xmppCli = xmppClients[ev['room_id']]
+            xmppCli = xmppClients[ev["room_id"]]
             xmppCli.sendAnswer(sdp)
-        elif ev['type'] == 'm.call.hangup':
-            if ev['room_id'] in xmppClients:
-                xmppClients[ev['room_id']].stop()
-                del xmppClients[ev['room_id']]
+        elif ev["type"] == "m.call.hangup":
+            if ev["room_id"] in xmppClients:
+                xmppClients[ev["room_id"]].stop()
+                del xmppClients[ev["room_id"]]
 
 
 class TrivialXmppClient:
     def __init__(self, matrixRoom, userId):
@@ -131,130 +146,155 @@ class TrivialXmppClient:
 
     def nextRid(self):
         self.rid += 1
-        return '%d' % (self.rid)
+        return "%d" % (self.rid)
 
     def sendIq(self, xml):
-        fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
-        #print "\t>>>%s" % (fullXml)
+        fullXml = (
+            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>"
+            % (self.nextRid(), self.sid, xml)
+        )
+        # print "\t>>>%s" % (fullXml)
        return self.xmppPoke(fullXml)
 
     def xmppPoke(self, xml):
-        headers = {'Content-Type': 'application/xml'}
+        headers = {"Content-Type": "application/xml"}
         req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
         resps = grequests.map([req])
         obj = BeautifulSoup(resps[0].content)
         return obj
 
     def sendAnswer(self, answer):
-        print "sdp from matrix client",answer
-        p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+        print("sdp from matrix client", answer)
+        p = subprocess.Popen(
+            ["node", "unjingle/unjingle.js", "--sdp"],
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+        )
         jingle, out_err = p.communicate(answer)
         jingle = jingle % {
-            'tojid': self.callfrom,
-            'action': 'session-accept',
-            'initiator': self.callfrom,
-            'responder': self.jid,
-            'sid': self.callsid
+            "tojid": self.callfrom,
+            "action": "session-accept",
+            "initiator": self.callfrom,
+            "responder": self.jid,
+            "sid": self.callsid,
         }
-        print "answer jingle from sdp",jingle
+        print("answer jingle from sdp", jingle)
         res = self.sendIq(jingle)
-        print "reply from answer: ",res
+        print("reply from answer: ", res)
 
         self.ssrcs = {}
         jingleSoup = BeautifulSoup(jingle)
-        for cont in jingleSoup.iq.jingle.findAll('content'):
+        for cont in jingleSoup.iq.jingle.findAll("content"):
             if cont.description:
-                self.ssrcs[cont['name']] = cont.description['ssrc']
-        print "my ssrcs:",self.ssrcs
+                self.ssrcs[cont["name"]] = cont.description["ssrc"]
+        print("my ssrcs:", self.ssrcs)
 
-        gevent.joinall([
-            gevent.spawn(self.advertiseSsrcs)
-        ])
+        gevent.joinall([gevent.spawn(self.advertiseSsrcs)])
 
     def advertiseSsrcs(self):
         time.sleep(7)
-        print "SSRC spammer started"
+        print("SSRC spammer started")
         while self.running:
-            ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
+            ssrcMsg = (
+                "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
+                % {
+                    "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
+                    "nick": self.userId,
+                    "assrc": self.ssrcs["audio"],
+                    "vssrc": self.ssrcs["video"],
+                }
+            )
             res = self.sendIq(ssrcMsg)
-            print "reply from ssrc announce: ",res
+            print("reply from ssrc announce: ", res)
             time.sleep(10)
 
 
 
     def xmppLoop(self):
         self.matrixCallId = time.time()
-        res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))
+        res = self.xmppPoke(
+            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>"
+            % (self.nextRid(), HOST)
+        )
 
-        print res
-        self.sid = res.body['sid']
-        print "sid %s" % (self.sid)
+        print(res)
+        self.sid = res.body["sid"]
+        print("sid %s" % (self.sid))
 
-        res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")
+        res = self.sendIq(
+            "<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>"
+        )
 
-        res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))
+        res = self.xmppPoke(
+            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>"
+            % (self.nextRid(), self.sid, HOST)
+        )
 
-        res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
-        print res
+        res = self.sendIq(
+            "<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>"
+        )
+        print(res)
 
         self.jid = res.body.iq.bind.jid.string
-        print "jid: %s" % (self.jid)
-        self.shortJid = self.jid.split('-')[0]
+        print("jid: %s" % (self.jid))
+        self.shortJid = self.jid.split("-")[0]
 
-        res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")
+        res = self.sendIq(
+            "<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>"
+        )
 
-        #randomthing = res.body.iq['to']
-        #whatsitpart = randomthing.split('-')[0]
+        # randomthing = res.body.iq['to']
+        # whatsitpart = randomthing.split('-')[0]
 
-        #print "other random bind thing: %s" % (randomthing)
+        # print "other random bind thing: %s" % (randomthing)
 
         # advertise preence to the jitsi room, with our nick
-        res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
-        self.muc = {'users': []}
-        for p in res.body.findAll('presence'):
+        res = self.sendIq(
+            "<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>"
+            % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId)
+        )
+        self.muc = {"users": []}
+        for p in res.body.findAll("presence"):
             u = {}
-            u['shortJid'] = p['from'].split('/')[1]
+            u["shortJid"] = p["from"].split("/")[1]
             if p.c and p.c.nick:
-                u['nick'] = p.c.nick.string
-            self.muc['users'].append(u)
-        print "muc: ",self.muc
+                u["nick"] = p.c.nick.string
+            self.muc["users"].append(u)
+        print("muc: ", self.muc)
 
         # wait for stuff
         while True:
-            print "waiting..."
+            print("waiting...")
             res = self.sendIq("")
-            print "got from stream: ",res
+            print("got from stream: ", res)
             if res.body.iq:
-                jingles = res.body.iq.findAll('jingle')
+                jingles = res.body.iq.findAll("jingle")
                 if len(jingles):
-                    self.callfrom = res.body.iq['from']
+                    self.callfrom = res.body.iq["from"]
                     self.handleInvite(jingles[0])
-            elif 'type' in res.body and res.body['type'] == 'terminate':
+            elif "type" in res.body and res.body["type"] == "terminate":
                 self.running = False
                 del xmppClients[self.matrixRoom]
                 return
 
     def handleInvite(self, jingle):
-        self.initiator = jingle['initiator']
-        self.callsid = jingle['sid']
-        p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-        print "raw jingle invite",str(jingle)
+        self.initiator = jingle["initiator"]
+        self.callsid = jingle["sid"]
+        p = subprocess.Popen(
+            ["node", "unjingle/unjingle.js", "--jingle"],
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+        )
+        print("raw jingle invite", str(jingle))
         sdp, out_err = p.communicate(str(jingle))
-        print "transformed remote offer sdp",sdp
+        print("transformed remote offer sdp", sdp)
         inviteEvent = {
-            'offer': {
-                'type': 'offer',
-                'sdp': sdp
-            },
-            'call_id': self.matrixCallId,
-            'version': 0,
-            'lifetime': 30000
+            "offer": {"type": "offer", "sdp": sdp},
+            "call_id": self.matrixCallId,
+            "version": 0,
+            "lifetime": 30000,
         }
-        matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)
+        matrixCli.sendEvent(self.matrixRoom, "m.call.invite", inviteEvent)
 
 
-matrixCli = TrivialMatrixClient(ACCESS_TOKEN)
 
-gevent.joinall([
-    gevent.spawn(matrixLoop)
-])
+matrixCli = TrivialMatrixClient(ACCESS_TOKEN)  # Undefined name
 
+gevent.joinall([gevent.spawn(matrixLoop)])
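Note: the jingle and ssrcMsg templates above use %-formatting with a mapping, where each %(name)s placeholder is filled from the dict's keys. A tiny sketch of that mechanism with made-up values:

    template = "<presence to='%(tojid)s'><nick>%(nick)s</nick></presence>"
    # Placeholder names come from the mapping's keys; the values here are
    # illustrative only, not taken from the bridge.
    print(template % {"tojid": "pibble@meet.jit.si/abc123", "nick": "@davetest:matrix.org"})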
@@ -1,34 +1,40 @@
 #!/usr/bin/env python
+from __future__ import print_function
 from argparse import ArgumentParser
 import json
 import requests
 import sys
 import urllib
 
+try:
+    raw_input
+except NameError:  # Python 3
+    raw_input = input
+
+
 def _mkurl(template, kws):
     for key in kws:
         template = template.replace(key, kws[key])
     return template
 
 
 def main(hs, room_id, access_token, user_id_prefix, why):
     if not why:
         why = "Automated kick."
-    print "Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
+    print(
+        "Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
+    )
     room_state_url = _mkurl(
         "$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
-        {
-            "$HS": hs,
-            "$ROOM": room_id,
-            "$TOKEN": access_token
-        }
+        {"$HS": hs, "$ROOM": room_id, "$TOKEN": access_token},
     )
-    print "Getting room state => %s" % room_state_url
+    print("Getting room state => %s" % room_state_url)
     res = requests.get(room_state_url)
-    print "HTTP %s" % res.status_code
+    print("HTTP %s" % res.status_code)
     state_events = res.json()
     if "error" in state_events:
-        print "FATAL"
-        print state_events
+        print("FATAL")
+        print(state_events)
         return
 
     kick_list = []
@@ -44,47 +50,40 @@ def main(hs, room_id, access_token, user_id_prefix, why):
             kick_list.append(event["state_key"])
 
     if len(kick_list) == 0:
-        print "No user IDs match the prefix '%s'" % user_id_prefix
+        print("No user IDs match the prefix '%s'" % user_id_prefix)
         return
 
-    print "The following user IDs will be kicked from %s" % room_name
+    print("The following user IDs will be kicked from %s" % room_name)
     for uid in kick_list:
-        print uid
+        print(uid)
     doit = raw_input("Continue? [Y]es\n")
-    if len(doit) > 0 and doit.lower() == 'y':
-        print "Kicking members..."
+    if len(doit) > 0 and doit.lower() == "y":
+        print("Kicking members...")
         # encode them all
         kick_list = [urllib.quote(uid) for uid in kick_list]
         for uid in kick_list:
             kick_url = _mkurl(
                 "$HS/_matrix/client/api/v1/rooms/$ROOM/state/m.room.member/$UID?access_token=$TOKEN",
-                {
-                    "$HS": hs,
-                    "$UID": uid,
-                    "$ROOM": room_id,
-                    "$TOKEN": access_token
-                }
+                {"$HS": hs, "$UID": uid, "$ROOM": room_id, "$TOKEN": access_token},
             )
-            kick_body = {
-                "membership": "leave",
-                "reason": why
-            }
-            print "Kicking %s" % uid
+            kick_body = {"membership": "leave", "reason": why}
+            print("Kicking %s" % uid)
             res = requests.put(kick_url, data=json.dumps(kick_body))
             if res.status_code != 200:
-                print "ERROR: HTTP %s" % res.status_code
+                print("ERROR: HTTP %s" % res.status_code)
                 if res.json().get("error"):
-                    print "ERROR: JSON %s" % res.json()
+                    print("ERROR: JSON %s" % res.json())
 
 
 
 if __name__ == "__main__":
     parser = ArgumentParser("Kick members in a room matching a certain user ID prefix.")
-    parser.add_argument("-u","--user-id",help="The user ID prefix e.g. '@irc_'")
-    parser.add_argument("-t","--token",help="Your access_token")
-    parser.add_argument("-r","--room",help="The room ID to kick members in")
-    parser.add_argument("-s","--homeserver",help="The base HS url e.g. http://matrix.org")
-    parser.add_argument("-w","--why",help="Reason for the kick. Optional.")
+    parser.add_argument("-u", "--user-id", help="The user ID prefix e.g. '@irc_'")
+    parser.add_argument("-t", "--token", help="Your access_token")
+    parser.add_argument("-r", "--room", help="The room ID to kick members in")
+    parser.add_argument(
+        "-s", "--homeserver", help="The base HS url e.g. http://matrix.org"
+    )
    parser.add_argument("-w", "--why", help="Reason for the kick. Optional.")
     args = parser.parse_args()
     if not args.room or not args.token or not args.user_id or not args.homeserver:
         parser.print_help()
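Note: _mkurl above is plain string substitution, replacing each $PLACEHOLDER key in the template with its value. A quick sketch of how it is used, with hypothetical values for illustration only:

    def _mkurl(template, kws):
        for key in kws:
            template = template.replace(key, kws[key])
        return template

    # Hypothetical homeserver, room ID and token.
    url = _mkurl(
        "$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
        {"$HS": "http://matrix.org", "$ROOM": "!abc123:matrix.org", "$TOKEN": "secret"},
    )
    # -> http://matrix.org/_matrix/client/api/v1/rooms/!abc123:matrix.org/state?access_token=secret
    print(url)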
@@ -1,9 +1,13 @@
+DO NOT USE THESE DEMO SERVERS IN PRODUCTION
+
 Requires you to have done:
     python setup.py develop
 
 
 The demo start.sh will start three synapse servers on ports 8080, 8081 and 8082, with host names localhost:$port. This can be easily changed to `hostname`:$port in start.sh if required.
-It will also start a web server on port 8000 pointed at the webclient.
+
+To enable the servers to communicate untrusted ssl certs are used. In order to do this the servers do not check the certs
+and are configured in a highly insecure way. Do not use these configuration files in production.
 
 stop.sh will stop the synapse servers and the webclient.
 
@@ -27,9 +27,71 @@ for port in 8080 8081 8082; do
         --config-path "$DIR/etc/$port.config" \
         --report-stats no
 
+    if ! grep -F "Customisation made by demo/start.sh" -q $DIR/etc/$port.config; then
     printf '\n\n# Customisation made by demo/start.sh\n' >> $DIR/etc/$port.config
 
     echo 'enable_registration: true' >> $DIR/etc/$port.config
 
+    # Warning, this heredoc depends on the interaction of tabs and spaces. Please don't
+    # accidentaly bork me with your fancy settings.
+    listeners=$(cat <<-PORTLISTENERS
+    # Configure server to listen on both $https_port and $port
+    # This overides some of the default settings above
+    listeners:
+      - port: $https_port
+        type: http
+        tls: true
+        resources:
+          - names: [client, federation]
+
+      - port: $port
+        tls: false
+        bind_addresses: ['::1', '127.0.0.1']
+        type: http
+        x_forwarded: true
+        resources:
+          - names: [client, federation]
+            compress: false
+    PORTLISTENERS
+    )
+    echo "${listeners}" >> $DIR/etc/$port.config
+
+    # Disable tls for the servers
+    printf '\n\n# Disable tls on the servers.' >> $DIR/etc/$port.config
+    echo '# DO NOT USE IN PRODUCTION' >> $DIR/etc/$port.config
+    echo 'use_insecure_ssl_client_just_for_testing_do_not_use: true' >> $DIR/etc/$port.config
+    echo 'federation_verify_certificates: false' >> $DIR/etc/$port.config
+
+    # Set tls paths
+    echo "tls_certificate_path: \"$DIR/etc/localhost:$https_port.tls.crt\"" >> $DIR/etc/$port.config
+    echo "tls_private_key_path: \"$DIR/etc/localhost:$https_port.tls.key\"" >> $DIR/etc/$port.config
+
+    # Generate tls keys
+    openssl req -x509 -newkey rsa:4096 -keyout $DIR/etc/localhost\:$https_port.tls.key -out $DIR/etc/localhost\:$https_port.tls.crt -days 365 -nodes -subj "/O=matrix"
+
+    # Ignore keys from the trusted keys server
+    echo '# Ignore keys from the trusted keys server' >> $DIR/etc/$port.config
+    echo 'trusted_key_servers:' >> $DIR/etc/$port.config
+    echo ' - server_name: "matrix.org"' >> $DIR/etc/$port.config
+    echo ' accept_keys_insecurely: true' >> $DIR/etc/$port.config
+
+    # Reduce the blacklist
+    blacklist=$(cat <<-BLACK
+    # Set the blacklist so that it doesn't include 127.0.0.1
+    federation_ip_range_blacklist:
+      - '10.0.0.0/8'
+      - '172.16.0.0/12'
+      - '192.168.0.0/16'
+      - '100.64.0.0/10'
+      - '169.254.0.0/16'
+      - '::1/128'
+      - 'fe80::/64'
+      - 'fc00::/7'
+    BLACK
+    )
+    echo "${blacklist}" >> $DIR/etc/$port.config
+    fi
 
     # Check script parameters
     if [ $# -eq 1 ]; then
         if [ $1 = "--no-rate-limit" ]; then
@@ -6,23 +6,25 @@ import cgi, logging

 from daemonize import Daemonize


 class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
     UPLOAD_PATH = "upload"

     """
     Accept all post request as file upload
     """

     def do_POST(self):

         path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
-        length = self.headers['content-length']
+        length = self.headers["content-length"]
         data = self.rfile.read(int(length))

-        with open(path, 'wb') as fh:
+        with open(path, "wb") as fh:
             fh.write(data)

         self.send_response(200)
-        self.send_header('Content-Type', 'application/json')
+        self.send_header("Content-Type", "application/json")
         self.end_headers()

         # Return the absolute path of the uploaded file
@@ -33,30 +35,25 @@ def setup():
     parser = argparse.ArgumentParser()
     parser.add_argument("directory")
     parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
-    parser.add_argument('-P', "--pid-file", dest="pid", default="web.pid")
+    parser.add_argument("-P", "--pid-file", dest="pid", default="web.pid")
     args = parser.parse_args()

     # Get absolute path to directory to serve, as daemonize changes to '/'
     os.chdir(args.directory)
     dr = os.getcwd()

-    httpd = BaseHTTPServer.HTTPServer(
-        ('', args.port),
-        SimpleHTTPRequestHandlerWithPOST
-    )
+    httpd = BaseHTTPServer.HTTPServer(("", args.port), SimpleHTTPRequestHandlerWithPOST)

     def run():
         os.chdir(dr)
         httpd.serve_forever()

     daemon = Daemonize(
-        app="synapse-webclient",
-        pid=args.pid,
-        action=run,
-        auto_close_fds=False,
+        app="synapse-webclient", pid=args.pid, action=run, auto_close_fds=False
     )

     daemon.start()

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     setup()
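The hunk above is the demo webserver's upload handler. As a rough, hypothetical way to exercise it (assuming the demo server is running locally on its default port 8080 — the URL, port and payload below are assumptions, not part of the change), a POST with a file body should come back with a 200 and a JSON content type:

    import urllib.request

    # Hypothetical client for the do_POST handler above.
    req = urllib.request.Request(
        "http://localhost:8080/example.txt", data=b"hello", method="POST"
    )
    with urllib.request.urlopen(req) as resp:
        print(resp.status)  # expect 200; the body is the uploaded file's path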
@@ -57,6 +57,7 @@ RUN pip install --prefix="/install" --no-warn-script-location \

 FROM docker.io/python:${PYTHON_VERSION}-alpine3.8

+# xmlsec is required for saml support
 RUN apk add --no-cache --virtual .runtime_deps \
     libffi \
     libjpeg-turbo \
@@ -64,7 +65,8 @@ RUN apk add --no-cache --virtual .runtime_deps \
     libxslt \
     libpq \
     zlib \
-    su-exec
+    su-exec \
+    xmlsec

 COPY --from=builder /install /usr/local
 COPY ./docker/start.py /start.py
@@ -3,10 +3,10 @@
 FROM matrixdotorg/sytest:latest

 # The Sytest image doesn't come with python, so install that
-RUN apt-get -qq install -y python python-dev python-pip
+RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip

 # We need tox to run the tests in run_pg_tests.sh
-RUN pip install tox
+RUN python3 -m pip install tox

 ADD run_pg_tests.sh /pg_tests.sh
 ENTRYPOINT /pg_tests.sh

@@ -17,4 +17,4 @@ su -c '/usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start'
 # Run the tests
 cd /src
 export TRIAL_FLAGS="-j 4"
-tox --workdir=/tmp -e py27-postgres
+tox --workdir=/tmp -e py35-postgres
@@ -8,7 +8,10 @@ import glob
 import codecs

 # Utility functions
-convert = lambda src, dst, environ: open(dst, "w").write(jinja2.Template(open(src).read()).render(**environ))
+convert = lambda src, dst, environ: open(dst, "w").write(
+    jinja2.Template(open(src).read()).render(**environ)
+)


 def check_arguments(environ, args):
     for argument in args:
@@ -16,18 +19,22 @@ def check_arguments(environ, args):
             print("Environment variable %s is mandatory, exiting." % argument)
             sys.exit(2)


 def generate_secrets(environ, secrets):
     for name, secret in secrets.items():
         if secret not in environ:
             filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)
             if os.path.exists(filename):
-                with open(filename) as handle: value = handle.read()
+                with open(filename) as handle:
+                    value = handle.read()
             else:
                 print("Generating a random secret for {}".format(name))
                 value = codecs.encode(os.urandom(32), "hex").decode()
-                with open(filename, "w") as handle: handle.write(value)
+                with open(filename, "w") as handle:
+                    handle.write(value)
             environ[secret] = value


 # Prepare the configuration
 mode = sys.argv[1] if len(sys.argv) > 1 else None
 environ = os.environ.copy()
@@ -36,12 +43,17 @@ args = ["python", "-m", "synapse.app.homeserver"]

 # In generate mode, generate a configuration, missing keys, then exit
 if mode == "generate":
-    check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH"))
+    check_arguments(
+        environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH")
+    )
     args += [
-        "--server-name", environ["SYNAPSE_SERVER_NAME"],
-        "--report-stats", environ["SYNAPSE_REPORT_STATS"],
-        "--config-path", environ["SYNAPSE_CONFIG_PATH"],
-        "--generate-config"
+        "--server-name",
+        environ["SYNAPSE_SERVER_NAME"],
+        "--report-stats",
+        environ["SYNAPSE_REPORT_STATS"],
+        "--config-path",
+        environ["SYNAPSE_CONFIG_PATH"],
+        "--generate-config",
     ]
     os.execv("/usr/local/bin/python", args)

@@ -51,12 +63,16 @@ else:
         config_path = environ["SYNAPSE_CONFIG_PATH"]
     else:
         check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"))
-        generate_secrets(environ, {
-            "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
-            "macaroon": "SYNAPSE_MACAROON_SECRET_KEY"
-        })
+        generate_secrets(
+            environ,
+            {
+                "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
+                "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
+            },
+        )
         environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
-        if not os.path.exists("/compiled"): os.mkdir("/compiled")
+        if not os.path.exists("/compiled"):
+            os.mkdir("/compiled")

         config_path = "/compiled/homeserver.yaml"

@@ -69,19 +85,23 @@ else:
         if tlsanswerstring in ("false", "off", "0", "no"):
             environ["SYNAPSE_NO_TLS"] = False
         else:
-            print("Environment variable \"SYNAPSE_NO_TLS\" found but value \"" + tlsanswerstring + "\" unrecognized; exiting.")
+            print(
+                'Environment variable "SYNAPSE_NO_TLS" found but value "'
+                + tlsanswerstring
+                + '" unrecognized; exiting.'
+            )
             sys.exit(2)

     convert("/conf/homeserver.yaml", config_path, environ)
     convert("/conf/log.config", "/compiled/log.config", environ)
     subprocess.check_output(["chown", "-R", ownership, "/data"])


     args += [
-        "--config-path", config_path,
+        "--config-path",
+        config_path,
         # tell synapse to put any generated keys in /data rather than /compiled
-        "--keys-directory", "/data",
+        "--keys-directory",
+        "/data",
     ]

     # Generate missing keys and start synapse
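For reference, the secret generation that generate_secrets performs above boils down to one line: 32 random bytes, hex-encoded to a 64-character string. A minimal sketch of the same recipe:

    import codecs
    import os

    # Same recipe as docker/start.py uses for missing SYNAPSE_* secrets.
    value = codecs.encode(os.urandom(32), "hex").decode()
    print(len(value))  # 64 hex characters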
@@ -1,7 +1,7 @@
 Using Postgres
 --------------

-Postgres version 9.4 or later is known to work.
+Postgres version 9.5 or later is known to work.

 Install postgres client libraries
 =================================
@@ -18,226 +18,220 @@ import os
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath(".."))

 # -- General configuration ------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'

 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.coverage',
-    'sphinx.ext.ifconfig',
-    'sphinxcontrib.napoleon',
+    "sphinx.ext.autodoc",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.ifconfig",
+    "sphinxcontrib.napoleon",
 ]

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 # The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"

 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"

 # General information about the project.
-project = u'Synapse'
-copyright = u'Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd'
+project = "Synapse"
+copyright = (
+    "Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd"
+)

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = '1.0'
+version = "1.0"
 # The full version, including alpha/beta/rc tags.
-release = '1.0'
+release = "1.0"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-#language = None
+# language = None

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['_build']
+exclude_patterns = ["_build"]

 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-#default_role = None
+# default_role = None

 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True

 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True

 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"

 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []

 # If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
+# keep_warnings = False


 # -- Options for HTML output ----------------------------------------------

 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'default'
+html_theme = "default"

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}

 # Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []

 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+# html_title = None

 # A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None

 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-#html_logo = None
+# html_logo = None

 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-#html_favicon = None
+# html_favicon = None

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-#html_extra_path = []
+# html_extra_path = []

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'

 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True

 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}

 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}

 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True

 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True

 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False

 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True

 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True

 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True

 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it. The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''

 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None

 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Synapsedoc'
+htmlhelp_basename = "Synapsedoc"


 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     #'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     #'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    ('index', 'Synapse.tex', u'Synapse Documentation',
-     u'TNG', 'manual'),
-]
+latex_documents = [("index", "Synapse.tex", "Synapse Documentation", "TNG", "manual")]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
-#latex_logo = None
+# latex_logo = None

 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-#latex_use_parts = False
+# latex_use_parts = False

 # If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False

 # If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False

 # Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []

 # If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True


 # -- Options for manual page output ---------------------------------------

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'synapse', u'Synapse Documentation',
-     [u'TNG'], 1)
-]
+man_pages = [("index", "synapse", "Synapse Documentation", ["TNG"], 1)]

 # If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False


 # -- Options for Texinfo output -------------------------------------------
@@ -246,26 +240,32 @@ man_pages = [
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    ('index', 'Synapse', u'Synapse Documentation',
-     u'TNG', 'Synapse', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        "index",
+        "Synapse",
+        "Synapse Documentation",
+        "TNG",
+        "Synapse",
+        "One line description of project.",
+        "Miscellaneous",
+    )
 ]

 # Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []

 # If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True

 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'

 # If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
+# texinfo_no_detailmenu = False


 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'http://docs.python.org/': None}
+intersphinx_mapping = {"http://docs.python.org/": None}

 napoleon_include_special_with_doc = True
 napoleon_use_ivar = True
@@ -28,3 +28,22 @@
 directory = "misc"
 name = "Internal Changes"
 showcontent = true
+
+[tool.black]
+target-version = ['py34']
+exclude = '''
+
+(
+  /(
+      \.eggs  # exclude a few common directories in the
+    | \.git   # root of the project
+    | \.tox
+    | \.venv
+    | _build
+    | _trial_temp.*
+    | build
+    | dist
+    | debian
+  )/
+)
+'''
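This new [tool.black] section configures the black formatter that produced most of the reformatting in this commit; exclude is a regex of directories to skip. Assuming black is installed, the tree would typically be reformatted from the repository root with:

    python -m black .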
@@ -39,11 +39,11 @@ def check_auth(auth, auth_chain, events):
         print("Success:", e.event_id, e.type, e.state_key)


-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser()

     parser.add_argument(
-        'json', nargs='?', type=argparse.FileType('r'), default=sys.stdin
+        "json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
     )

     args = parser.parse_args()
@@ -30,7 +30,7 @@ class dictobj(dict):
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
+        "input_json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
     )
     args = parser.parse_args()
     logging.basicConfig()
@@ -1,4 +1,3 @@
-
 import argparse
 import json
 import logging
@@ -40,7 +39,7 @@ def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("signature_name")
     parser.add_argument(
-        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
+        "input_json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
     )

     args = parser.parse_args()
@@ -69,5 +68,5 @@ def main():
         print("FAIL %s" % (key_id,))


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
@@ -116,5 +116,5 @@ def main():
     connection.commit()


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
@@ -19,10 +19,10 @@ class DefinitionVisitor(ast.NodeVisitor):
         self.names = {}
         self.attrs = set()
         self.definitions = {
-            'def': self.functions,
-            'class': self.classes,
-            'names': self.names,
-            'attrs': self.attrs,
+            "def": self.functions,
+            "class": self.classes,
+            "names": self.names,
+            "attrs": self.attrs,
         }

     def visit_Name(self, node):
@@ -47,23 +47,23 @@ class DefinitionVisitor(ast.NodeVisitor):


 def non_empty(defs):
-    functions = {name: non_empty(f) for name, f in defs['def'].items()}
-    classes = {name: non_empty(f) for name, f in defs['class'].items()}
+    functions = {name: non_empty(f) for name, f in defs["def"].items()}
+    classes = {name: non_empty(f) for name, f in defs["class"].items()}
     result = {}
     if functions:
-        result['def'] = functions
+        result["def"] = functions
     if classes:
-        result['class'] = classes
-    names = defs['names']
+        result["class"] = classes
+    names = defs["names"]
     uses = []
-    for name in names.get('Load', ()):
-        if name not in names.get('Param', ()) and name not in names.get('Store', ()):
+    for name in names.get("Load", ()):
+        if name not in names.get("Param", ()) and name not in names.get("Store", ()):
             uses.append(name)
-    uses.extend(defs['attrs'])
+    uses.extend(defs["attrs"])
     if uses:
-        result['uses'] = uses
-    result['names'] = names
-    result['attrs'] = defs['attrs']
+        result["uses"] = uses
+    result["names"] = names
+    result["attrs"] = defs["attrs"]
     return result


@@ -81,33 +81,33 @@ def definitions_in_file(filepath):


 def defined_names(prefix, defs, names):
-    for name, funcs in defs.get('def', {}).items():
-        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
+    for name, funcs in defs.get("def", {}).items():
+        names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
         defined_names(prefix + name + ".", funcs, names)

-    for name, funcs in defs.get('class', {}).items():
-        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
+    for name, funcs in defs.get("class", {}).items():
+        names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
         defined_names(prefix + name + ".", funcs, names)


 def used_names(prefix, item, defs, names):
-    for name, funcs in defs.get('def', {}).items():
+    for name, funcs in defs.get("def", {}).items():
         used_names(prefix + name + ".", name, funcs, names)

-    for name, funcs in defs.get('class', {}).items():
+    for name, funcs in defs.get("class", {}).items():
         used_names(prefix + name + ".", name, funcs, names)

-    path = prefix.rstrip('.')
-    for used in defs.get('uses', ()):
+    path = prefix.rstrip(".")
+    for used in defs.get("uses", ()):
         if used in names:
             if item:
-                names[item].setdefault('uses', []).append(used)
-            names[used].setdefault('used', {}).setdefault(item, []).append(path)
+                names[item].setdefault("uses", []).append(used)
+            names[used].setdefault("used", {}).setdefault(item, []).append(path)


-if __name__ == '__main__':
+if __name__ == "__main__":

-    parser = argparse.ArgumentParser(description='Find definitions.')
+    parser = argparse.ArgumentParser(description="Find definitions.")
     parser.add_argument(
         "--unused", action="store_true", help="Only list unused definitions"
     )
@@ -119,7 +119,7 @@ if __name__ == '__main__':
     )
     parser.add_argument(
         "directories",
-        nargs='+',
+        nargs="+",
         metavar="DIR",
         help="Directories to search for definitions",
     )
@@ -164,7 +164,7 @@ if __name__ == '__main__':
                 continue
             if ignore and any(pattern.match(name) for pattern in ignore):
                 continue
-            if args.unused and definition.get('used'):
+            if args.unused and definition.get("used"):
                 continue
             result[name] = definition
@@ -196,9 +196,9 @@ if __name__ == '__main__':
                 continue
             result[name] = definition

-    if args.format == 'yaml':
+    if args.format == "yaml":
         yaml.dump(result, sys.stdout, default_flow_style=False)
-    elif args.format == 'dot':
+    elif args.format == "dot":
         print("digraph {")
         for name, entry in result.items():
             print(name)
@@ -63,7 +63,7 @@ def encode_canonical_json(value):
         # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
         ensure_ascii=False,
         # Remove unecessary white space.
-        separators=(',', ':'),
+        separators=(",", ":"),
         # Sort the keys of dictionaries.
         sort_keys=True,
         # Encode the resulting unicode as UTF-8 bytes.
@@ -145,7 +145,7 @@ def request_json(method, origin_name, origin_key, destination, path, content):
     authorization_headers = []

     for key, sig in signed_json["signatures"][origin_name].items():
-        header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (origin_name, key, sig)
+        header = 'X-Matrix origin=%s,key="%s",sig="%s"' % (origin_name, key, sig)
         authorization_headers.append(header.encode("ascii"))
         print("Authorization: %s" % header, file=sys.stderr)

@@ -161,11 +161,7 @@ def request_json(method, origin_name, origin_key, destination, path, content):
     headers["Content-Type"] = "application/json"

     result = s.request(
-        method=method,
-        url=dest,
-        headers=headers,
-        verify=False,
-        data=content,
+        method=method, url=dest, headers=headers, verify=False, data=content
     )
     sys.stderr.write("Status Code: %d\n" % (result.status_code,))
     return result.json()
@@ -241,18 +237,18 @@ def main():


 def read_args_from_config(args):
-    with open(args.config, 'r') as fh:
+    with open(args.config, "r") as fh:
         config = yaml.safe_load(fh)
         if not args.server_name:
-            args.server_name = config['server_name']
+            args.server_name = config["server_name"]
         if not args.signing_key_path:
-            args.signing_key_path = config['signing_key_path']
+            args.signing_key_path = config["signing_key_path"]


 class MatrixConnectionAdapter(HTTPAdapter):
     @staticmethod
     def lookup(s, skip_well_known=False):
-        if s[-1] == ']':
+        if s[-1] == "]":
             # ipv6 literal (with no port)
             return s, 8448

@@ -268,9 +264,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
         if not skip_well_known:
             well_known = MatrixConnectionAdapter.get_well_known(s)
             if well_known:
-                return MatrixConnectionAdapter.lookup(
-                    well_known, skip_well_known=True
-                )
+                return MatrixConnectionAdapter.lookup(well_known, skip_well_known=True)

         try:
             srv = srvlookup.lookup("matrix", "tcp", s)[0]
@@ -280,8 +274,8 @@ class MatrixConnectionAdapter(HTTPAdapter):

     @staticmethod
     def get_well_known(server_name):
-        uri = "https://%s/.well-known/matrix/server" % (server_name, )
-        print("fetching %s" % (uri, ), file=sys.stderr)
+        uri = "https://%s/.well-known/matrix/server" % (server_name,)
+        print("fetching %s" % (uri,), file=sys.stderr)

         try:
             resp = requests.get(uri)
@@ -294,12 +288,12 @@ class MatrixConnectionAdapter(HTTPAdapter):
                 raise Exception("not a dict")
             if "m.server" not in parsed_well_known:
                 raise Exception("Missing key 'm.server'")
-            new_name = parsed_well_known['m.server']
-            print("well-known lookup gave %s" % (new_name, ), file=sys.stderr)
+            new_name = parsed_well_known["m.server"]
+            print("well-known lookup gave %s" % (new_name,), file=sys.stderr)
             return new_name

         except Exception as e:
-            print("Invalid response from %s: %s" % (uri, e, ), file=sys.stderr)
+            print("Invalid response from %s: %s" % (uri, e), file=sys.stderr)
             return None

     def get_connection(self, url, proxies=None):
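The header change above only swaps the quoting style; the X-Matrix scheme used for server-to-server requests is unchanged. A sketch of the string being built, with made-up placeholder values (a real signature comes from signing the request with the origin's key):

    # Placeholder values for illustration only.
    origin_name = "example.org"
    key = "ed25519:a_AbCd"
    sig = "<base64 signature>"
    header = 'X-Matrix origin=%s,key="%s",sig="%s"' % (origin_name, key, sig)
    print(header)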
@@ -79,5 +79,5 @@ def main():
     conn.commit()


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()

@@ -35,11 +35,11 @@ def find_patterns_in_file(filepath):
         find_patterns_in_code(f.read())


-parser = argparse.ArgumentParser(description='Find url patterns.')
+parser = argparse.ArgumentParser(description="Find url patterns.")

 parser.add_argument(
     "directories",
-    nargs='+',
+    nargs="+",
     metavar="DIR",
     help="Directories to search for definitions",
 )

@@ -63,5 +63,5 @@ def main():
         streams[update.name] = update.position


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
@@ -24,14 +24,14 @@ if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument(
-        "-o", "--output_file",
-        type=argparse.FileType('w'),
+        "-o",
+        "--output_file",
+        type=argparse.FileType("w"),
         default=sys.stdout,
         help="Where to write the output to",
     )
     args = parser.parse_args()

     key_id = "a_" + random_string(4)
-    key = generate_signing_key(key_id),
+    key = (generate_signing_key(key_id),)
     write_signing_keys(args.output_file, key)

@@ -50,7 +50,7 @@ def main(src_repo, dest_repo):
     dest_paths = MediaFilePaths(dest_repo)
     for line in sys.stdin:
         line = line.strip()
-        parts = line.split('|')
+        parts = line.split("|")
         if len(parts) != 2:
             print("Unable to parse input line %s" % line, file=sys.stderr)
             exit(1)

@@ -107,7 +107,7 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
     )
-    parser.add_argument("-v", action='store_true', help='enable debug logging')
+    parser.add_argument("-v", action="store_true", help="enable debug logging")
     parser.add_argument("src_repo", help="Path to source content repo")
     parser.add_argument("dest_repo", help="Path to source content repo")
     args = parser.parse_args()
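The 1-tuple in the first hunk above matters because write_signing_keys expects an iterable of keys while generate_signing_key returns a single key; without the parentheses the trailing comma was easy to misread. A minimal sketch of the corrected usage, with a made-up key id, assuming the signedjson package:

    import sys

    from signedjson.key import generate_signing_key, write_signing_keys

    key = (generate_signing_key("a_AbCd"),)  # wrap the single key in a tuple
    write_signing_keys(sys.stdout, key)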
@@ -15,18 +15,17 @@ ignore =
     tox.ini

 [flake8]
-max-line-length = 90
-
 # see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
 # for error codes. The ones we ignore are:
 # W503: line break before binary operator
 # W504: line break after binary operator
 # E203: whitespace before ':' (which is contrary to pep8?)
 # E731: do not assign a lambda expression, use a def
-ignore=W503,W504,E203,E731
+# E501: Line too long (black enforces this for us)
+ignore=W503,W504,E203,E731,E501

 [isort]
-line_length = 89
+line_length = 88
 not_skip = __init__.py
 sections=FUTURE,STDLIB,COMPAT,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
 default_section=THIRDPARTY
setup.py (31 lines changed)

@@ -60,9 +60,12 @@ class TestCommand(Command):
         pass

     def run(self):
-        print ("""Synapse's tests cannot be run via setup.py. To run them, try:
+        print(
+            """Synapse's tests cannot be run via setup.py. To run them, try:
     PYTHONPATH="." trial tests
-""")
+"""
+        )


 def read_file(path_segments):
     """Read a file from the package. Takes a list of strings to join to
@@ -84,9 +87,9 @@ version = exec_file(("synapse", "__init__.py"))["__version__"]
 dependencies = exec_file(("synapse", "python_dependencies.py"))
 long_description = read_file(("README.rst",))

-REQUIREMENTS = dependencies['REQUIREMENTS']
-CONDITIONAL_REQUIREMENTS = dependencies['CONDITIONAL_REQUIREMENTS']
-ALL_OPTIONAL_REQUIREMENTS = dependencies['ALL_OPTIONAL_REQUIREMENTS']
+REQUIREMENTS = dependencies["REQUIREMENTS"]
+CONDITIONAL_REQUIREMENTS = dependencies["CONDITIONAL_REQUIREMENTS"]
+ALL_OPTIONAL_REQUIREMENTS = dependencies["ALL_OPTIONAL_REQUIREMENTS"]

 # Make `pip install matrix-synapse[all]` install all the optional dependencies.
 CONDITIONAL_REQUIREMENTS["all"] = list(ALL_OPTIONAL_REQUIREMENTS)
@@ -102,16 +105,16 @@ setup(
     include_package_data=True,
     zip_safe=False,
     long_description=long_description,
-    python_requires='~=3.5',
+    python_requires="~=3.5",
     classifiers=[
-        'Development Status :: 5 - Production/Stable',
-        'Topic :: Communications :: Chat',
-        'License :: OSI Approved :: Apache Software License',
-        'Programming Language :: Python :: 3 :: Only',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
+        "Development Status :: 5 - Production/Stable",
+        "Topic :: Communications :: Chat",
+        "License :: OSI Approved :: Apache Software License",
+        "Programming Language :: Python :: 3 :: Only",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
     ],
     scripts=["synctl"] + glob.glob("scripts/*"),
-    cmdclass={'test': TestCommand},
+    cmdclass={"test": TestCommand},
 )
@@ -28,6 +28,7 @@ try:
     from twisted.internet import protocol
     from twisted.internet.protocol import Factory
     from twisted.names.dns import DNSDatagramProtocol
+
     protocol.Factory.noisy = False
     Factory.noisy = False
     DNSDatagramProtocol.noisy = False
@@ -57,18 +57,18 @@ def request_registration(

     nonce = r.json()["nonce"]

-    mac = hmac.new(key=shared_secret.encode('utf8'), digestmod=hashlib.sha1)
+    mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)

-    mac.update(nonce.encode('utf8'))
+    mac.update(nonce.encode("utf8"))
     mac.update(b"\x00")
-    mac.update(user.encode('utf8'))
+    mac.update(user.encode("utf8"))
     mac.update(b"\x00")
-    mac.update(password.encode('utf8'))
+    mac.update(password.encode("utf8"))
     mac.update(b"\x00")
     mac.update(b"admin" if admin else b"notadmin")
     if user_type:
         mac.update(b"\x00")
-        mac.update(user_type.encode('utf8'))
+        mac.update(user_type.encode("utf8"))

     mac = mac.hexdigest()

@@ -134,8 +134,9 @@ def register_new_user(user, password, server_location, shared_secret, admin, use
     else:
         admin = False

-    request_registration(user, password, server_location, shared_secret,
-                         bool(admin), user_type)
+    request_registration(
+        user, password, server_location, shared_secret, bool(admin), user_type
+    )


 def main():
@@ -189,7 +190,7 @@ def main():
     group.add_argument(
         "-c",
         "--config",
-        type=argparse.FileType('r'),
+        type=argparse.FileType("r"),
         help="Path to server config file. Used to read in shared secret.",
     )

@@ -200,7 +201,7 @@ def main():
     parser.add_argument(
         "server_url",
         default="https://localhost:8448",
-        nargs='?',
+        nargs="?",
         help="URL to use to talk to the home server. Defaults to "
         " 'https://localhost:8448'.",
     )
@@ -220,8 +221,9 @@ def main():
     if args.admin or args.no_admin:
         admin = args.admin

-    register_new_user(args.user, args.password, args.server_url, secret,
-                      admin, args.user_type)
+    register_new_user(
+        args.user, args.password, args.server_url, secret, admin, args.user_type
+    )


 if __name__ == "__main__":
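The sequence of mac.update calls above defines the registration MAC: nonce, user, password, then the admin flag, NUL-separated and keyed with the shared secret. A standalone sketch with dummy inputs (a real run first fetches the nonce from the server):

    import hashlib
    import hmac

    # Dummy values; the real script reads these from arguments and the server.
    shared_secret, nonce, user, password = "secret", "nonce", "alice", "pw"

    mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)
    mac.update(nonce.encode("utf8"))
    mac.update(b"\x00")
    mac.update(user.encode("utf8"))
    mac.update(b"\x00")
    mac.update(password.encode("utf8"))
    mac.update(b"\x00")
    mac.update(b"notadmin")  # or b"admin" for an admin account
    print(mac.hexdigest())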
@@ -36,8 +36,11 @@ logger = logging.getLogger(__name__)


 AuthEventTypes = (
-    EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
-    EventTypes.JoinRules, EventTypes.RoomHistoryVisibility,
+    EventTypes.Create,
+    EventTypes.Member,
+    EventTypes.PowerLevels,
+    EventTypes.JoinRules,
+    EventTypes.RoomHistoryVisibility,
     EventTypes.ThirdPartyInvite,
 )

@@ -54,6 +57,7 @@ class Auth(object):
     FIXME: This class contains a mix of functions for authenticating users
     of our client-server API and authenticating events added to room graphs.
     """
+
     def __init__(self, hs):
         self.hs = hs
         self.clock = hs.get_clock()
@@ -70,15 +74,12 @@ class Auth(object):
     def check_from_context(self, room_version, event, context, do_sig_check=True):
         prev_state_ids = yield context.get_prev_state_ids(self.store)
         auth_events_ids = yield self.compute_auth_events(
-            event, prev_state_ids, for_verification=True,
+            event, prev_state_ids, for_verification=True
         )
         auth_events = yield self.store.get_events(auth_events_ids)
-        auth_events = {
-            (e.type, e.state_key): e for e in itervalues(auth_events)
-        }
+        auth_events = {(e.type, e.state_key): e for e in itervalues(auth_events)}
         self.check(
-            room_version, event,
-            auth_events=auth_events, do_sig_check=do_sig_check,
+            room_version, event, auth_events=auth_events, do_sig_check=do_sig_check
         )

     def check(self, room_version, event, auth_events, do_sig_check=True):
@@ -115,15 +116,10 @@ class Auth(object):
         the room.
         """
         if current_state:
-            member = current_state.get(
-                (EventTypes.Member, user_id),
-                None
-            )
+            member = current_state.get((EventTypes.Member, user_id), None)
         else:
             member = yield self.state.get_current_state(
-                room_id=room_id,
-                event_type=EventTypes.Member,
-                state_key=user_id
+                room_id=room_id, event_type=EventTypes.Member, state_key=user_id
             )

         self._check_joined_room(member, user_id, room_id)
@@ -143,23 +139,17 @@ class Auth(object):
         the room. This will be the leave event if they have left the room.
         """
         member = yield self.state.get_current_state(
-            room_id=room_id,
-            event_type=EventTypes.Member,
-            state_key=user_id
+            room_id=room_id, event_type=EventTypes.Member, state_key=user_id
         )
         membership = member.membership if member else None

         if membership not in (Membership.JOIN, Membership.LEAVE):
-            raise AuthError(403, "User %s not in room %s" % (
-                user_id, room_id
-            ))
+            raise AuthError(403, "User %s not in room %s" % (user_id, room_id))

         if membership == Membership.LEAVE:
             forgot = yield self.store.did_forget(user_id, room_id)
             if forgot:
-                raise AuthError(403, "User %s not in room %s" % (
-                    user_id, room_id
-                ))
+                raise AuthError(403, "User %s not in room %s" % (user_id, room_id))

         defer.returnValue(member)

@@ -171,9 +161,9 @@ class Auth(object):

     def _check_joined_room(self, member, user_id, room_id):
         if not member or member.membership != Membership.JOIN:
-            raise AuthError(403, "User %s not in room %s (%s)" % (
-                user_id, room_id, repr(member)
-            ))
+            raise AuthError(
+                403, "User %s not in room %s (%s)" % (user_id, room_id, repr(member))
+            )

     def can_federate(self, event, auth_events):
         creation_event = auth_events.get((EventTypes.Create, ""))
@@ -185,11 +175,7 @@ class Auth(object):

     @defer.inlineCallbacks
     def get_user_by_req(
-        self,
-        request,
-        allow_guest=False,
-        rights="access",
-        allow_expired=False,
+        self, request, allow_guest=False, rights="access", allow_expired=False
     ):
         """ Get a registered user's ID.

@@ -209,9 +195,8 @@ class Auth(object):
         try:
             ip_addr = self.hs.get_ip_from_request(request)
             user_agent = request.requestHeaders.getRawHeaders(
-                b"User-Agent",
-                default=[b""]
-            )[0].decode('ascii', 'surrogateescape')
+                b"User-Agent", default=[b""]
+            )[0].decode("ascii", "surrogateescape")

             access_token = self.get_access_token_from_request(
                 request, self.TOKEN_NOT_FOUND_HTTP_STATUS
@@ -243,11 +228,12 @@ class Auth(object):
         if self._account_validity.enabled and not allow_expired:
             user_id = user.to_string()
             expiration_ts = yield self.store.get_expiration_ts_for_user(user_id)
-            if expiration_ts is not None and self.clock.time_msec() >= expiration_ts:
+            if (
+                expiration_ts is not None
+                and self.clock.time_msec() >= expiration_ts
+            ):
                 raise AuthError(
-                    403,
-                    "User account has expired",
-                    errcode=Codes.EXPIRED_ACCOUNT,
+                    403, "User account has expired", errcode=Codes.EXPIRED_ACCOUNT
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# device_id may not be present if get_user_by_access_token has been
|
# device_id may not be present if get_user_by_access_token has been
|
||||||
@ -265,18 +251,23 @@ class Auth(object):
|
|||||||
|
|
||||||
if is_guest and not allow_guest:
|
if is_guest and not allow_guest:
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
|
403,
|
||||||
|
"Guest access not allowed",
|
||||||
|
errcode=Codes.GUEST_ACCESS_FORBIDDEN,
|
||||||
)
|
)
|
||||||
|
|
||||||
request.authenticated_entity = user.to_string()
|
request.authenticated_entity = user.to_string()
|
||||||
|
|
||||||
defer.returnValue(synapse.types.create_requester(
|
defer.returnValue(
|
||||||
user, token_id, is_guest, device_id, app_service=app_service)
|
synapse.types.create_requester(
|
||||||
|
user, token_id, is_guest, device_id, app_service=app_service
|
||||||
|
)
|
||||||
)
|
)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
errcode=Codes.MISSING_TOKEN
|
"Missing access token.",
|
||||||
|
errcode=Codes.MISSING_TOKEN,
|
||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@ -297,20 +288,14 @@ class Auth(object):
|
|||||||
if b"user_id" not in request.args:
|
if b"user_id" not in request.args:
|
||||||
defer.returnValue((app_service.sender, app_service))
|
defer.returnValue((app_service.sender, app_service))
|
||||||
|
|
||||||
user_id = request.args[b"user_id"][0].decode('utf8')
|
user_id = request.args[b"user_id"][0].decode("utf8")
|
||||||
if app_service.sender == user_id:
|
if app_service.sender == user_id:
|
||||||
defer.returnValue((app_service.sender, app_service))
|
defer.returnValue((app_service.sender, app_service))
|
||||||
|
|
||||||
if not app_service.is_interested_in_user(user_id):
|
if not app_service.is_interested_in_user(user_id):
|
||||||
raise AuthError(
|
raise AuthError(403, "Application service cannot masquerade as this user.")
|
||||||
403,
|
|
||||||
"Application service cannot masquerade as this user."
|
|
||||||
)
|
|
||||||
if not (yield self.store.get_user_by_id(user_id)):
|
if not (yield self.store.get_user_by_id(user_id)):
|
||||||
raise AuthError(
|
raise AuthError(403, "Application service has not registered this user")
|
||||||
403,
|
|
||||||
"Application service has not registered this user"
|
|
||||||
)
|
|
||||||
defer.returnValue((user_id, app_service))
|
defer.returnValue((user_id, app_service))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@ -368,13 +353,13 @@ class Auth(object):
|
|||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
"Unknown user_id %s" % user_id,
|
"Unknown user_id %s" % user_id,
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
errcode=Codes.UNKNOWN_TOKEN,
|
||||||
)
|
)
|
||||||
if not stored_user["is_guest"]:
|
if not stored_user["is_guest"]:
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
"Guest access token used for regular user",
|
"Guest access token used for regular user",
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
errcode=Codes.UNKNOWN_TOKEN,
|
||||||
)
|
)
|
||||||
ret = {
|
ret = {
|
||||||
"user": user,
|
"user": user,
|
||||||
@ -402,8 +387,9 @@ class Auth(object):
|
|||||||
) as e:
|
) as e:
|
||||||
logger.warning("Invalid macaroon in auth: %s %s", type(e), e)
|
logger.warning("Invalid macaroon in auth: %s %s", type(e), e)
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
"Invalid macaroon passed.",
|
||||||
|
errcode=Codes.UNKNOWN_TOKEN,
|
||||||
)
|
)
|
||||||
|
|
||||||
def _parse_and_validate_macaroon(self, token, rights="access"):
|
def _parse_and_validate_macaroon(self, token, rights="access"):
|
||||||
@ -441,13 +427,13 @@ class Auth(object):
|
|||||||
guest = True
|
guest = True
|
||||||
|
|
||||||
self.validate_macaroon(
|
self.validate_macaroon(
|
||||||
macaroon, rights, self.hs.config.expire_access_token,
|
macaroon, rights, self.hs.config.expire_access_token, user_id=user_id
|
||||||
user_id=user_id,
|
|
||||||
)
|
)
|
||||||
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
"Invalid macaroon passed.",
|
||||||
|
errcode=Codes.UNKNOWN_TOKEN,
|
||||||
)
|
)
|
||||||
|
|
||||||
if not has_expiry and rights == "access":
|
if not has_expiry and rights == "access":
|
||||||
@ -472,10 +458,11 @@ class Auth(object):
|
|||||||
user_prefix = "user_id = "
|
user_prefix = "user_id = "
|
||||||
for caveat in macaroon.caveats:
|
for caveat in macaroon.caveats:
|
||||||
if caveat.caveat_id.startswith(user_prefix):
|
if caveat.caveat_id.startswith(user_prefix):
|
||||||
return caveat.caveat_id[len(user_prefix):]
|
return caveat.caveat_id[len(user_prefix) :]
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
"No user caveat in macaroon",
|
||||||
|
errcode=Codes.UNKNOWN_TOKEN,
|
||||||
)
|
)
|
||||||
|
|
||||||
def validate_macaroon(self, macaroon, type_string, verify_expiry, user_id):
|
def validate_macaroon(self, macaroon, type_string, verify_expiry, user_id):
|
||||||
@ -522,7 +509,7 @@ class Auth(object):
|
|||||||
prefix = "time < "
|
prefix = "time < "
|
||||||
if not caveat.startswith(prefix):
|
if not caveat.startswith(prefix):
|
||||||
return False
|
return False
|
||||||
expiry = int(caveat[len(prefix):])
|
expiry = int(caveat[len(prefix) :])
|
||||||
now = self.hs.get_clock().time_msec()
|
now = self.hs.get_clock().time_msec()
|
||||||
return now < expiry
|
return now < expiry
|
||||||
|
|
||||||
@ -554,14 +541,12 @@ class Auth(object):
|
|||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
"Unrecognised access token.",
|
"Unrecognised access token.",
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
errcode=Codes.UNKNOWN_TOKEN,
|
||||||
)
|
)
|
||||||
request.authenticated_entity = service.sender
|
request.authenticated_entity = service.sender
|
||||||
return defer.succeed(service)
|
return defer.succeed(service)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise AuthError(
|
raise AuthError(self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.")
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token."
|
|
||||||
)
|
|
||||||
|
|
||||||
def is_server_admin(self, user):
|
def is_server_admin(self, user):
|
||||||
""" Check if the given user is a local server admin.
|
""" Check if the given user is a local server admin.
|
||||||
@ -581,19 +566,19 @@ class Auth(object):
|
|||||||
|
|
||||||
auth_ids = []
|
auth_ids = []
|
||||||
|
|
||||||
key = (EventTypes.PowerLevels, "", )
|
key = (EventTypes.PowerLevels, "")
|
||||||
power_level_event_id = current_state_ids.get(key)
|
power_level_event_id = current_state_ids.get(key)
|
||||||
|
|
||||||
if power_level_event_id:
|
if power_level_event_id:
|
||||||
auth_ids.append(power_level_event_id)
|
auth_ids.append(power_level_event_id)
|
||||||
|
|
||||||
key = (EventTypes.JoinRules, "", )
|
key = (EventTypes.JoinRules, "")
|
||||||
join_rule_event_id = current_state_ids.get(key)
|
join_rule_event_id = current_state_ids.get(key)
|
||||||
|
|
||||||
key = (EventTypes.Member, event.sender, )
|
key = (EventTypes.Member, event.sender)
|
||||||
member_event_id = current_state_ids.get(key)
|
member_event_id = current_state_ids.get(key)
|
||||||
|
|
||||||
key = (EventTypes.Create, "", )
|
key = (EventTypes.Create, "")
|
||||||
create_event_id = current_state_ids.get(key)
|
create_event_id = current_state_ids.get(key)
|
||||||
if create_event_id:
|
if create_event_id:
|
||||||
auth_ids.append(create_event_id)
|
auth_ids.append(create_event_id)
|
||||||
@ -619,7 +604,7 @@ class Auth(object):
|
|||||||
auth_ids.append(member_event_id)
|
auth_ids.append(member_event_id)
|
||||||
|
|
||||||
if for_verification:
|
if for_verification:
|
||||||
key = (EventTypes.Member, event.state_key, )
|
key = (EventTypes.Member, event.state_key)
|
||||||
existing_event_id = current_state_ids.get(key)
|
existing_event_id = current_state_ids.get(key)
|
||||||
if existing_event_id:
|
if existing_event_id:
|
||||||
auth_ids.append(existing_event_id)
|
auth_ids.append(existing_event_id)
|
||||||
@ -628,7 +613,7 @@ class Auth(object):
|
|||||||
if "third_party_invite" in event.content:
|
if "third_party_invite" in event.content:
|
||||||
key = (
|
key = (
|
||||||
EventTypes.ThirdPartyInvite,
|
EventTypes.ThirdPartyInvite,
|
||||||
event.content["third_party_invite"]["signed"]["token"]
|
event.content["third_party_invite"]["signed"]["token"],
|
||||||
)
|
)
|
||||||
third_party_invite_id = current_state_ids.get(key)
|
third_party_invite_id = current_state_ids.get(key)
|
||||||
if third_party_invite_id:
|
if third_party_invite_id:
|
||||||
@ -684,7 +669,7 @@ class Auth(object):
|
|||||||
auth_events[(EventTypes.PowerLevels, "")] = power_level_event
|
auth_events[(EventTypes.PowerLevels, "")] = power_level_event
|
||||||
|
|
||||||
send_level = event_auth.get_send_level(
|
send_level = event_auth.get_send_level(
|
||||||
EventTypes.Aliases, "", power_level_event,
|
EventTypes.Aliases, "", power_level_event
|
||||||
)
|
)
|
||||||
user_level = event_auth.get_user_power_level(user_id, auth_events)
|
user_level = event_auth.get_user_power_level(user_id, auth_events)
|
||||||
|
|
||||||
@ -692,7 +677,7 @@ class Auth(object):
|
|||||||
raise AuthError(
|
raise AuthError(
|
||||||
403,
|
403,
|
||||||
"This server requires you to be a moderator in the room to"
|
"This server requires you to be a moderator in the room to"
|
||||||
" edit its room list entry"
|
" edit its room list entry",
|
||||||
)
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -742,7 +727,7 @@ class Auth(object):
|
|||||||
)
|
)
|
||||||
parts = auth_headers[0].split(b" ")
|
parts = auth_headers[0].split(b" ")
|
||||||
if parts[0] == b"Bearer" and len(parts) == 2:
|
if parts[0] == b"Bearer" and len(parts) == 2:
|
||||||
return parts[1].decode('ascii')
|
return parts[1].decode("ascii")
|
||||||
else:
|
else:
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
token_not_found_http_status,
|
token_not_found_http_status,
|
||||||
@ -755,10 +740,10 @@ class Auth(object):
|
|||||||
raise AuthError(
|
raise AuthError(
|
||||||
token_not_found_http_status,
|
token_not_found_http_status,
|
||||||
"Missing access token.",
|
"Missing access token.",
|
||||||
errcode=Codes.MISSING_TOKEN
|
errcode=Codes.MISSING_TOKEN,
|
||||||
)
|
)
|
||||||
|
|
||||||
return query_params[0].decode('ascii')
|
return query_params[0].decode("ascii")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def check_in_room_or_world_readable(self, room_id, user_id):
|
def check_in_room_or_world_readable(self, room_id, user_id):
|
||||||
@ -785,8 +770,8 @@ class Auth(object):
|
|||||||
room_id, EventTypes.RoomHistoryVisibility, ""
|
room_id, EventTypes.RoomHistoryVisibility, ""
|
||||||
)
|
)
|
||||||
if (
|
if (
|
||||||
visibility and
|
visibility
|
||||||
visibility.content["history_visibility"] == "world_readable"
|
and visibility.content["history_visibility"] == "world_readable"
|
||||||
):
|
):
|
||||||
defer.returnValue((Membership.JOIN, None))
|
defer.returnValue((Membership.JOIN, None))
|
||||||
return
|
return
|
||||||
@ -820,10 +805,11 @@ class Auth(object):
|
|||||||
|
|
||||||
if self.hs.config.hs_disabled:
|
if self.hs.config.hs_disabled:
|
||||||
raise ResourceLimitError(
|
raise ResourceLimitError(
|
||||||
403, self.hs.config.hs_disabled_message,
|
403,
|
||||||
|
self.hs.config.hs_disabled_message,
|
||||||
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
|
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
|
||||||
admin_contact=self.hs.config.admin_contact,
|
admin_contact=self.hs.config.admin_contact,
|
||||||
limit_type=self.hs.config.hs_disabled_limit_type
|
limit_type=self.hs.config.hs_disabled_limit_type,
|
||||||
)
|
)
|
||||||
if self.hs.config.limit_usage_by_mau is True:
|
if self.hs.config.limit_usage_by_mau is True:
|
||||||
assert not (user_id and threepid)
|
assert not (user_id and threepid)
|
||||||
@ -848,8 +834,9 @@ class Auth(object):
|
|||||||
current_mau = yield self.store.get_monthly_active_count()
|
current_mau = yield self.store.get_monthly_active_count()
|
||||||
if current_mau >= self.hs.config.max_mau_value:
|
if current_mau >= self.hs.config.max_mau_value:
|
||||||
raise ResourceLimitError(
|
raise ResourceLimitError(
|
||||||
403, "Monthly Active User Limit Exceeded",
|
403,
|
||||||
|
"Monthly Active User Limit Exceeded",
|
||||||
admin_contact=self.hs.config.admin_contact,
|
admin_contact=self.hs.config.admin_contact,
|
||||||
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
|
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
|
||||||
limit_type="monthly_active_user"
|
limit_type="monthly_active_user",
|
||||||
)
|
)
|
||||||
|
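
The macaroon-expiry hunks above reduce to a small predicate: a `time < <ms>` caveat is valid only while the server clock is below the embedded millisecond timestamp. A minimal standalone sketch of that check (the function name and the explicit `now_ms` argument are assumptions for illustration; the real method reads the homeserver clock):

    def caveat_not_expired(caveat, now_ms):
        # Mirrors the diff: only "time < " caveats are accepted here,
        # anything else fails closed.
        prefix = "time < "
        if not caveat.startswith(prefix):
            return False
        expiry = int(caveat[len(prefix) :])  # milliseconds since the epoch
        return now_ms < expiry

    # A caveat expiring at t=1000ms is valid at t=999ms, invalid at t=1000ms.
    assert caveat_not_expired("time < 1000", now_ms=999)
    assert not caveat_not_expired("time < 1000", now_ms=1000)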
@@ -18,7 +18,7 @@
 """Contains constants from the specification."""

 # the "depth" field on events is limited to 2**63 - 1
-MAX_DEPTH = 2**63 - 1
+MAX_DEPTH = 2 ** 63 - 1

 # the maximum length for a room alias is 255 characters
 MAX_ALIAS_LENGTH = 255
@@ -30,39 +30,41 @@ MAX_USERID_LENGTH = 255
 class Membership(object):

     """Represents the membership states of a user in a room."""
-    INVITE = u"invite"
-    JOIN = u"join"
-    KNOCK = u"knock"
-    LEAVE = u"leave"
-    BAN = u"ban"
+
+    INVITE = "invite"
+    JOIN = "join"
+    KNOCK = "knock"
+    LEAVE = "leave"
+    BAN = "ban"
     LIST = (INVITE, JOIN, KNOCK, LEAVE, BAN)


 class PresenceState(object):
     """Represents the presence state of a user."""
-    OFFLINE = u"offline"
-    UNAVAILABLE = u"unavailable"
-    ONLINE = u"online"
+
+    OFFLINE = "offline"
+    UNAVAILABLE = "unavailable"
+    ONLINE = "online"


 class JoinRules(object):
-    PUBLIC = u"public"
-    KNOCK = u"knock"
-    INVITE = u"invite"
-    PRIVATE = u"private"
+    PUBLIC = "public"
+    KNOCK = "knock"
+    INVITE = "invite"
+    PRIVATE = "private"


 class LoginType(object):
-    PASSWORD = u"m.login.password"
-    EMAIL_IDENTITY = u"m.login.email.identity"
-    MSISDN = u"m.login.msisdn"
-    RECAPTCHA = u"m.login.recaptcha"
-    TERMS = u"m.login.terms"
-    DUMMY = u"m.login.dummy"
+    PASSWORD = "m.login.password"
+    EMAIL_IDENTITY = "m.login.email.identity"
+    MSISDN = "m.login.msisdn"
+    RECAPTCHA = "m.login.recaptcha"
+    TERMS = "m.login.terms"
+    DUMMY = "m.login.dummy"

     # Only for C/S API v1
-    APPLICATION_SERVICE = u"m.login.application_service"
-    SHARED_SECRET = u"org.matrix.login.shared_secret"
+    APPLICATION_SERVICE = "m.login.application_service"
+    SHARED_SECRET = "org.matrix.login.shared_secret"


 class EventTypes(object):
@@ -118,6 +120,7 @@ class UserTypes(object):
     """Allows for user type specific behaviour. With the benefit of hindsight
     'admin' and 'guest' users should also be UserTypes. Normal users are type None
     """
+
     SUPPORT = "support"
     ALL_USER_TYPES = (SUPPORT,)

@@ -125,6 +128,7 @@ class UserTypes(object):
 class RelationTypes(object):
     """The types of relations known to this server.
     """
+
     ANNOTATION = "m.annotation"
     REPLACE = "m.replace"
     REFERENCE = "m.reference"
@@ -70,6 +70,7 @@ class CodeMessageException(RuntimeError):
         code (int): HTTP error code
         msg (str): string describing the error
     """
+
     def __init__(self, code, msg):
         super(CodeMessageException, self).__init__("%d: %s" % (code, msg))
         self.code = code
@@ -83,6 +84,7 @@ class SynapseError(CodeMessageException):
     Attributes:
         errcode (str): Matrix error code e.g 'M_FORBIDDEN'
     """
+
     def __init__(self, code, msg, errcode=Codes.UNKNOWN):
         """Constructs a synapse error.

@@ -95,10 +97,7 @@ class SynapseError(CodeMessageException):
         self.errcode = errcode

     def error_dict(self):
-        return cs_error(
-            self.msg,
-            self.errcode,
-        )
+        return cs_error(self.msg, self.errcode)


 class ProxiedRequestError(SynapseError):
@@ -107,27 +106,23 @@ class ProxiedRequestError(SynapseError):
     Attributes:
         errcode (str): Matrix error code e.g 'M_FORBIDDEN'
     """
+
     def __init__(self, code, msg, errcode=Codes.UNKNOWN, additional_fields=None):
-        super(ProxiedRequestError, self).__init__(
-            code, msg, errcode
-        )
+        super(ProxiedRequestError, self).__init__(code, msg, errcode)
         if additional_fields is None:
             self._additional_fields = {}
         else:
             self._additional_fields = dict(additional_fields)

     def error_dict(self):
-        return cs_error(
-            self.msg,
-            self.errcode,
-            **self._additional_fields
-        )
+        return cs_error(self.msg, self.errcode, **self._additional_fields)


 class ConsentNotGivenError(SynapseError):
     """The error returned to the client when the user has not consented to the
     privacy policy.
     """
+
     def __init__(self, msg, consent_uri):
         """Constructs a ConsentNotGivenError

@@ -136,22 +131,17 @@ class ConsentNotGivenError(SynapseError):
             consent_url (str): The URL where the user can give their consent
         """
         super(ConsentNotGivenError, self).__init__(
-            code=http_client.FORBIDDEN,
-            msg=msg,
-            errcode=Codes.CONSENT_NOT_GIVEN
+            code=http_client.FORBIDDEN, msg=msg, errcode=Codes.CONSENT_NOT_GIVEN
         )
         self._consent_uri = consent_uri

     def error_dict(self):
-        return cs_error(
-            self.msg,
-            self.errcode,
-            consent_uri=self._consent_uri
-        )
+        return cs_error(self.msg, self.errcode, consent_uri=self._consent_uri)


 class RegistrationError(SynapseError):
     """An error raised when a registration event fails."""
+
     pass


@@ -190,15 +180,17 @@ class InteractiveAuthIncompleteError(Exception):
         result (dict): the server response to the request, which should be
             passed back to the client
     """
+
     def __init__(self, result):
         super(InteractiveAuthIncompleteError, self).__init__(
-            "Interactive auth not yet complete",
+            "Interactive auth not yet complete"
         )
         self.result = result


 class UnrecognizedRequestError(SynapseError):
     """An error indicating we don't understand the request you're trying to make"""
+
     def __init__(self, *args, **kwargs):
         if "errcode" not in kwargs:
             kwargs["errcode"] = Codes.UNRECOGNIZED
@@ -207,21 +199,14 @@ class UnrecognizedRequestError(SynapseError):
             message = "Unrecognized request"
         else:
             message = args[0]
-        super(UnrecognizedRequestError, self).__init__(
-            400,
-            message,
-            **kwargs
-        )
+        super(UnrecognizedRequestError, self).__init__(400, message, **kwargs)


 class NotFoundError(SynapseError):
     """An error indicating we can't find the thing you asked for"""
+
     def __init__(self, msg="Not found", errcode=Codes.NOT_FOUND):
-        super(NotFoundError, self).__init__(
-            404,
-            msg,
-            errcode=errcode
-        )
+        super(NotFoundError, self).__init__(404, msg, errcode=errcode)


 class AuthError(SynapseError):
@@ -238,8 +223,11 @@ class ResourceLimitError(SynapseError):
     Any error raised when there is a problem with resource usage.
     For instance, the monthly active user limit for the server has been exceeded
     """
+
     def __init__(
-        self, code, msg,
+        self,
+        code,
+        msg,
         errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
         admin_contact=None,
         limit_type=None,
@@ -253,7 +241,7 @@ class ResourceLimitError(SynapseError):
             self.msg,
             self.errcode,
             admin_contact=self.admin_contact,
-            limit_type=self.limit_type
+            limit_type=self.limit_type,
         )


@@ -268,6 +256,7 @@ class EventSizeError(SynapseError):

 class EventStreamError(SynapseError):
     """An error raised when there a problem with the event stream."""
+
     def __init__(self, *args, **kwargs):
         if "errcode" not in kwargs:
             kwargs["errcode"] = Codes.BAD_PAGINATION
@@ -276,47 +265,53 @@ class EventStreamError(SynapseError):

 class LoginError(SynapseError):
     """An error raised when there was a problem logging in."""
+
     pass


 class StoreError(SynapseError):
     """An error raised when there was a problem storing some data."""
+
     pass


 class InvalidCaptchaError(SynapseError):
-    def __init__(self, code=400, msg="Invalid captcha.", error_url=None,
-                 errcode=Codes.CAPTCHA_INVALID):
+    def __init__(
+        self,
+        code=400,
+        msg="Invalid captcha.",
+        error_url=None,
+        errcode=Codes.CAPTCHA_INVALID,
+    ):
         super(InvalidCaptchaError, self).__init__(code, msg, errcode)
         self.error_url = error_url

     def error_dict(self):
-        return cs_error(
-            self.msg,
-            self.errcode,
-            error_url=self.error_url,
-        )
+        return cs_error(self.msg, self.errcode, error_url=self.error_url)


 class LimitExceededError(SynapseError):
     """A client has sent too many requests and is being throttled.
     """
-    def __init__(self, code=429, msg="Too Many Requests", retry_after_ms=None,
-                 errcode=Codes.LIMIT_EXCEEDED):
+
+    def __init__(
+        self,
+        code=429,
+        msg="Too Many Requests",
+        retry_after_ms=None,
+        errcode=Codes.LIMIT_EXCEEDED,
+    ):
         super(LimitExceededError, self).__init__(code, msg, errcode)
         self.retry_after_ms = retry_after_ms

     def error_dict(self):
-        return cs_error(
-            self.msg,
-            self.errcode,
-            retry_after_ms=self.retry_after_ms,
-        )
+        return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)


 class RoomKeysVersionError(SynapseError):
     """A client has tried to upload to a non-current version of the room_keys store
     """
+
     def __init__(self, current_version):
         """
         Args:
@@ -331,6 +326,7 @@ class RoomKeysVersionError(SynapseError):
 class UnsupportedRoomVersionError(SynapseError):
     """The client's request to create a room used a room version that the server does
     not support."""
+
     def __init__(self):
         super(UnsupportedRoomVersionError, self).__init__(
             code=400,
@@ -354,6 +350,7 @@ class IncompatibleRoomVersionError(SynapseError):
     Unlike UnsupportedRoomVersionError, it is specific to the case of the make_join
     failing.
     """
+
     def __init__(self, room_version):
         super(IncompatibleRoomVersionError, self).__init__(
             code=400,
@@ -365,11 +362,7 @@ class IncompatibleRoomVersionError(SynapseError):
         self._room_version = room_version

     def error_dict(self):
-        return cs_error(
-            self.msg,
-            self.errcode,
-            room_version=self._room_version,
-        )
+        return cs_error(self.msg, self.errcode, room_version=self._room_version)


 class RequestSendFailed(RuntimeError):
@@ -380,11 +373,11 @@ class RequestSendFailed(RuntimeError):
     networking (e.g. DNS failures, connection timeouts etc), versus unexpected
     errors (like programming errors).
     """
+
     def __init__(self, inner_exception, can_retry):
         super(RequestSendFailed, self).__init__(
-            "Failed to send request: %s: %s" % (
-                type(inner_exception).__name__, inner_exception,
-            )
+            "Failed to send request: %s: %s"
+            % (type(inner_exception).__name__, inner_exception)
         )
         self.inner_exception = inner_exception
         self.can_retry = can_retry
@@ -428,7 +421,7 @@ class FederationError(RuntimeError):
         self.affected = affected
         self.source = source

-        msg = "%s %s: %s" % (level, code, reason,)
+        msg = "%s %s: %s" % (level, code, reason)
         super(FederationError, self).__init__(msg)

     def get_dict(self):
@@ -448,6 +441,7 @@ class HttpResponseException(CodeMessageException):
     Attributes:
         response (bytes): body of response
     """
+
     def __init__(self, code, msg, response):
         """

@@ -486,7 +480,7 @@ class HttpResponseException(CodeMessageException):
         if not isinstance(j, dict):
             j = {}

-        errcode = j.pop('errcode', Codes.UNKNOWN)
-        errmsg = j.pop('error', self.msg)
+        errcode = j.pop("errcode", Codes.UNKNOWN)
+        errmsg = j.pop("error", self.msg)

         return ProxiedRequestError(self.code, errmsg, errcode, j)
@@ -28,117 +28,55 @@ FILTER_SCHEMA = {
     "additionalProperties": False,
     "type": "object",
     "properties": {
-        "limit": {
-            "type": "number"
-        },
-        "senders": {
-            "$ref": "#/definitions/user_id_array"
-        },
-        "not_senders": {
-            "$ref": "#/definitions/user_id_array"
-        },
+        "limit": {"type": "number"},
+        "senders": {"$ref": "#/definitions/user_id_array"},
+        "not_senders": {"$ref": "#/definitions/user_id_array"},
         # TODO: We don't limit event type values but we probably should...
         # check types are valid event types
-        "types": {
-            "type": "array",
-            "items": {
-                "type": "string"
-            }
-        },
-        "not_types": {
-            "type": "array",
-            "items": {
-                "type": "string"
-            }
-        }
-    }
+        "types": {"type": "array", "items": {"type": "string"}},
+        "not_types": {"type": "array", "items": {"type": "string"}},
+    },
 }

 ROOM_FILTER_SCHEMA = {
     "additionalProperties": False,
     "type": "object",
     "properties": {
-        "not_rooms": {
-            "$ref": "#/definitions/room_id_array"
-        },
-        "rooms": {
-            "$ref": "#/definitions/room_id_array"
-        },
-        "ephemeral": {
-            "$ref": "#/definitions/room_event_filter"
-        },
-        "include_leave": {
-            "type": "boolean"
-        },
-        "state": {
-            "$ref": "#/definitions/room_event_filter"
-        },
-        "timeline": {
-            "$ref": "#/definitions/room_event_filter"
-        },
-        "account_data": {
-            "$ref": "#/definitions/room_event_filter"
-        },
-    }
+        "not_rooms": {"$ref": "#/definitions/room_id_array"},
+        "rooms": {"$ref": "#/definitions/room_id_array"},
+        "ephemeral": {"$ref": "#/definitions/room_event_filter"},
+        "include_leave": {"type": "boolean"},
+        "state": {"$ref": "#/definitions/room_event_filter"},
+        "timeline": {"$ref": "#/definitions/room_event_filter"},
+        "account_data": {"$ref": "#/definitions/room_event_filter"},
+    },
 }

 ROOM_EVENT_FILTER_SCHEMA = {
     "additionalProperties": False,
     "type": "object",
     "properties": {
-        "limit": {
-            "type": "number"
-        },
-        "senders": {
-            "$ref": "#/definitions/user_id_array"
-        },
-        "not_senders": {
-            "$ref": "#/definitions/user_id_array"
-        },
-        "types": {
-            "type": "array",
-            "items": {
-                "type": "string"
-            }
-        },
-        "not_types": {
-            "type": "array",
-            "items": {
-                "type": "string"
-            }
-        },
-        "rooms": {
-            "$ref": "#/definitions/room_id_array"
-        },
-        "not_rooms": {
-            "$ref": "#/definitions/room_id_array"
-        },
-        "contains_url": {
-            "type": "boolean"
-        },
-        "lazy_load_members": {
-            "type": "boolean"
-        },
-        "include_redundant_members": {
-            "type": "boolean"
-        },
-    }
+        "limit": {"type": "number"},
+        "senders": {"$ref": "#/definitions/user_id_array"},
+        "not_senders": {"$ref": "#/definitions/user_id_array"},
+        "types": {"type": "array", "items": {"type": "string"}},
+        "not_types": {"type": "array", "items": {"type": "string"}},
+        "rooms": {"$ref": "#/definitions/room_id_array"},
+        "not_rooms": {"$ref": "#/definitions/room_id_array"},
+        "contains_url": {"type": "boolean"},
+        "lazy_load_members": {"type": "boolean"},
+        "include_redundant_members": {"type": "boolean"},
+    },
 }

 USER_ID_ARRAY_SCHEMA = {
     "type": "array",
-    "items": {
-        "type": "string",
-        "format": "matrix_user_id"
-    }
+    "items": {"type": "string", "format": "matrix_user_id"},
 }

 ROOM_ID_ARRAY_SCHEMA = {
     "type": "array",
-    "items": {
-        "type": "string",
-        "format": "matrix_room_id"
-    }
+    "items": {"type": "string", "format": "matrix_room_id"},
 }

 USER_FILTER_SCHEMA = {
@@ -150,22 +88,13 @@ USER_FILTER_SCHEMA = {
         "user_id_array": USER_ID_ARRAY_SCHEMA,
         "filter": FILTER_SCHEMA,
         "room_filter": ROOM_FILTER_SCHEMA,
-        "room_event_filter": ROOM_EVENT_FILTER_SCHEMA
+        "room_event_filter": ROOM_EVENT_FILTER_SCHEMA,
     },
     "properties": {
-        "presence": {
-            "$ref": "#/definitions/filter"
-        },
-        "account_data": {
-            "$ref": "#/definitions/filter"
-        },
-        "room": {
-            "$ref": "#/definitions/room_filter"
-        },
-        "event_format": {
-            "type": "string",
-            "enum": ["client", "federation"]
-        },
+        "presence": {"$ref": "#/definitions/filter"},
+        "account_data": {"$ref": "#/definitions/filter"},
+        "room": {"$ref": "#/definitions/room_filter"},
+        "event_format": {"type": "string", "enum": ["client", "federation"]},
         "event_fields": {
             "type": "array",
             "items": {
@@ -177,26 +106,25 @@ USER_FILTER_SCHEMA = {
             #
             # Note that because this is a regular expression, we have to escape
             # each backslash in the pattern.
-            "pattern": r"^((?!\\\\).)*$"
-        }
-    }
+            "pattern": r"^((?!\\\\).)*$",
+        },
+    },
     },
-    "additionalProperties": False
+    "additionalProperties": False,
 }


-@FormatChecker.cls_checks('matrix_room_id')
+@FormatChecker.cls_checks("matrix_room_id")
 def matrix_room_id_validator(room_id_str):
     return RoomID.from_string(room_id_str)


-@FormatChecker.cls_checks('matrix_user_id')
+@FormatChecker.cls_checks("matrix_user_id")
 def matrix_user_id_validator(user_id_str):
     return UserID.from_string(user_id_str)


 class Filtering(object):
-
     def __init__(self, hs):
         super(Filtering, self).__init__()
         self.store = hs.get_datastore()
@@ -228,8 +156,9 @@ class Filtering(object):
         # individual top-level key e.g. public_user_data. Filters are made of
         # many definitions.
         try:
-            jsonschema.validate(user_filter_json, USER_FILTER_SCHEMA,
-                                format_checker=FormatChecker())
+            jsonschema.validate(
+                user_filter_json, USER_FILTER_SCHEMA, format_checker=FormatChecker()
+            )
         except jsonschema.ValidationError as e:
             raise SynapseError(400, str(e))

@@ -240,10 +169,9 @@ class FilterCollection(object):

         room_filter_json = self._filter_json.get("room", {})

-        self._room_filter = Filter({
-            k: v for k, v in room_filter_json.items()
-            if k in ("rooms", "not_rooms")
-        })
+        self._room_filter = Filter(
+            {k: v for k, v in room_filter_json.items() if k in ("rooms", "not_rooms")}
+        )

         self._room_timeline_filter = Filter(room_filter_json.get("timeline", {}))
         self._room_state_filter = Filter(room_filter_json.get("state", {}))
@@ -252,9 +180,7 @@ class FilterCollection(object):
         self._presence_filter = Filter(filter_json.get("presence", {}))
         self._account_data = Filter(filter_json.get("account_data", {}))

-        self.include_leave = filter_json.get("room", {}).get(
-            "include_leave", False
-        )
+        self.include_leave = filter_json.get("room", {}).get("include_leave", False)
         self.event_fields = filter_json.get("event_fields", [])
         self.event_format = filter_json.get("event_format", "client")

@@ -299,22 +225,22 @@ class FilterCollection(object):

     def blocks_all_presence(self):
         return (
-            self._presence_filter.filters_all_types() or
-            self._presence_filter.filters_all_senders()
+            self._presence_filter.filters_all_types()
+            or self._presence_filter.filters_all_senders()
         )

     def blocks_all_room_ephemeral(self):
         return (
-            self._room_ephemeral_filter.filters_all_types() or
-            self._room_ephemeral_filter.filters_all_senders() or
-            self._room_ephemeral_filter.filters_all_rooms()
+            self._room_ephemeral_filter.filters_all_types()
+            or self._room_ephemeral_filter.filters_all_senders()
+            or self._room_ephemeral_filter.filters_all_rooms()
         )

     def blocks_all_room_timeline(self):
         return (
-            self._room_timeline_filter.filters_all_types() or
-            self._room_timeline_filter.filters_all_senders() or
-            self._room_timeline_filter.filters_all_rooms()
+            self._room_timeline_filter.filters_all_types()
+            or self._room_timeline_filter.filters_all_senders()
+            or self._room_timeline_filter.filters_all_rooms()
         )

@@ -375,12 +301,7 @@ class Filter(object):
         # check if there is a string url field in the content for filtering purposes
         contains_url = isinstance(content.get("url"), text_type)

-        return self.check_fields(
-            room_id,
-            sender,
-            ev_type,
-            contains_url,
-        )
+        return self.check_fields(room_id, sender, ev_type, contains_url)

     def check_fields(self, room_id, sender, event_type, contains_url):
         """Checks whether the filter matches the given event fields.
@@ -391,7 +312,7 @@ class Filter(object):
         literal_keys = {
             "rooms": lambda v: room_id == v,
             "senders": lambda v: sender == v,
-            "types": lambda v: _matches_wildcard(event_type, v)
+            "types": lambda v: _matches_wildcard(event_type, v),
         }

         for name, match_func in literal_keys.items():
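
The filter schemas in the hunks above are ordinary JSON Schema documents, and validation is a single `jsonschema.validate` call with a `FormatChecker`. A usage sketch, assuming `USER_FILTER_SCHEMA` as defined above is in scope (the example filter body is made up):

    import jsonschema
    from jsonschema import FormatChecker

    # Hypothetical client filter: cap the room timeline at 10 events
    # from a single sender.
    user_filter = {
        "room": {"timeline": {"limit": 10, "senders": ["@alice:example.com"]}}
    }

    try:
        jsonschema.validate(
            user_filter, USER_FILTER_SCHEMA, format_checker=FormatChecker()
        )
    except jsonschema.ValidationError as e:
        print("rejected:", e.message)

A body such as `{"room": {"unknown_key": 1}}` raises `ValidationError`, since the room filter sets `"additionalProperties": False`.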
@@ -44,29 +44,25 @@ class Ratelimiter(object):
         """
         self.prune_message_counts(time_now_s)
         message_count, time_start, _ignored = self.message_counts.get(
-            key, (0., time_now_s, None),
+            key, (0.0, time_now_s, None)
         )
         time_delta = time_now_s - time_start
         sent_count = message_count - time_delta * rate_hz
         if sent_count < 0:
             allowed = True
             time_start = time_now_s
-            message_count = 1.
-        elif sent_count > burst_count - 1.:
+            message_count = 1.0
+        elif sent_count > burst_count - 1.0:
             allowed = False
         else:
             allowed = True
             message_count += 1

         if update:
-            self.message_counts[key] = (
-                message_count, time_start, rate_hz
-            )
+            self.message_counts[key] = (message_count, time_start, rate_hz)

         if rate_hz > 0:
-            time_allowed = (
-                time_start + (message_count - burst_count + 1) / rate_hz
-            )
+            time_allowed = time_start + (message_count - burst_count + 1) / rate_hz
             if time_allowed < time_now_s:
                 time_allowed = time_now_s
         else:
@@ -76,9 +72,7 @@ class Ratelimiter(object):

     def prune_message_counts(self, time_now_s):
         for key in list(self.message_counts.keys()):
-            message_count, time_start, rate_hz = (
-                self.message_counts[key]
-            )
+            message_count, time_start, rate_hz = self.message_counts[key]
             time_delta = time_now_s - time_start
             if message_count - time_delta * rate_hz > 0:
                 break
@@ -92,5 +86,5 @@ class Ratelimiter(object):

         if not allowed:
             raise LimitExceededError(
-                retry_after_ms=int(1000 * (time_allowed - time_now_s)),
+                retry_after_ms=int(1000 * (time_allowed - time_now_s))
             )
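
The ratelimiter hunks implement a leaky-bucket counter: the stored count decays at `rate_hz` per second, and an action is allowed while the decayed count stays below `burst_count`. A condensed sketch of that arithmetic (a free function for illustration; the real class keeps this state per key):

    def can_do_action(message_count, time_start, time_now_s, rate_hz, burst_count):
        # Decay the stored count at rate_hz per second since time_start.
        sent_count = message_count - (time_now_s - time_start) * rate_hz
        if sent_count < 0:
            return True, 1.0, time_now_s  # bucket drained: reset and allow
        if sent_count > burst_count - 1.0:
            return False, message_count, time_start  # bucket full: deny
        return True, message_count + 1, time_start  # room left: count and allow

    # At 0.5 msg/s with a burst of 3, three instant sends pass; the fourth is denied.
    count, start = 0.0, 0.0
    for _ in range(4):
        allowed, count, start = can_do_action(count, start, 0.0, 0.5, 3.0)
        print(allowed)  # True, True, True, False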
@@ -19,6 +19,7 @@ class EventFormatVersions(object):
     """This is an internal enum for tracking the version of the event format,
     independently from the room version.
     """
+
     V1 = 1  # $id:server event id format
     V2 = 2  # MSC1659-style $hash event id format: introduced for room v3
     V3 = 3  # MSC1884-style $hash format: introduced for room v4
@@ -33,6 +34,7 @@ KNOWN_EVENT_FORMAT_VERSIONS = {

 class StateResolutionVersions(object):
     """Enum to identify the state resolution algorithms"""
+
     V1 = 1  # room v1 state res
     V2 = 2  # MSC1442 state res: room v2 and later

@@ -92,7 +94,8 @@ class RoomVersions(object):


 KNOWN_ROOM_VERSIONS = {
-    v.identifier: v for v in (
+    v.identifier: v
+    for v in (
         RoomVersions.V1,
         RoomVersions.V2,
         RoomVersions.V3,
@@ -42,13 +42,9 @@ class ConsentURIBuilder(object):
             hs_config (synapse.config.homeserver.HomeServerConfig):
         """
         if hs_config.form_secret is None:
-            raise ConfigError(
-                "form_secret not set in config",
-            )
+            raise ConfigError("form_secret not set in config")
         if hs_config.public_baseurl is None:
-            raise ConfigError(
-                "public_baseurl not set in config",
-            )
+            raise ConfigError("public_baseurl not set in config")

         self._hmac_secret = hs_config.form_secret.encode("utf-8")
         self._public_baseurl = hs_config.public_baseurl
@@ -64,15 +60,10 @@ class ConsentURIBuilder(object):
             (str) the URI where the user can do consent
         """
         mac = hmac.new(
-            key=self._hmac_secret,
-            msg=user_id.encode('ascii'),
-            digestmod=sha256,
+            key=self._hmac_secret, msg=user_id.encode("ascii"), digestmod=sha256
         ).hexdigest()
         consent_uri = "%s_matrix/consent?%s" % (
             self._public_baseurl,
-            urlencode({
-                "u": user_id,
-                "h": mac
-            }),
+            urlencode({"u": user_id, "h": mac}),
         )
         return consent_uri
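
The consent-URI hunks sign the user ID with an HMAC-SHA256 keyed on `form_secret` and put both values in the query string, so the server can later recompute the MAC to verify the link. A self-contained sketch (the secret and base URL are made-up stand-ins for the homeserver config values):

    import hmac
    from hashlib import sha256
    from urllib.parse import urlencode

    form_secret = b"not-a-real-secret"  # stand-in for hs_config.form_secret
    public_baseurl = "https://hs.example.com/"  # stand-in for hs_config.public_baseurl

    user_id = "@alice:example.com"
    mac = hmac.new(
        key=form_secret, msg=user_id.encode("ascii"), digestmod=sha256
    ).hexdigest()
    consent_uri = "%s_matrix/consent?%s" % (
        public_baseurl,
        urlencode({"u": user_id, "h": mac}),
    )
    print(consent_uri)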
@ -43,7 +43,7 @@ def check_bind_error(e, address, bind_addresses):
|
|||||||
address (str): Address on which binding was attempted.
|
address (str): Address on which binding was attempted.
|
||||||
bind_addresses (list): Addresses on which the service listens.
|
bind_addresses (list): Addresses on which the service listens.
|
||||||
"""
|
"""
|
||||||
if address == '0.0.0.0' and '::' in bind_addresses:
|
if address == "0.0.0.0" and "::" in bind_addresses:
|
||||||
logger.warn('Failed to listen on 0.0.0.0, continuing because listening on [::]')
|
logger.warn("Failed to listen on 0.0.0.0, continuing because listening on [::]")
|
||||||
else:
|
else:
|
||||||
raise e
|
raise e
|
||||||
|
@ -149,10 +149,10 @@ def start_reactor(
|
|||||||
def quit_with_error(error_string):
|
def quit_with_error(error_string):
|
||||||
message_lines = error_string.split("\n")
|
message_lines = error_string.split("\n")
|
||||||
line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
|
line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
|
||||||
sys.stderr.write("*" * line_length + '\n')
|
sys.stderr.write("*" * line_length + "\n")
|
||||||
for line in message_lines:
|
for line in message_lines:
|
||||||
sys.stderr.write(" %s\n" % (line.rstrip(),))
|
sys.stderr.write(" %s\n" % (line.rstrip(),))
|
||||||
sys.stderr.write("*" * line_length + '\n')
|
sys.stderr.write("*" * line_length + "\n")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
@ -178,14 +178,7 @@ def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
|
|||||||
r = []
|
r = []
|
||||||
for address in bind_addresses:
|
for address in bind_addresses:
|
||||||
try:
|
try:
|
||||||
r.append(
|
r.append(reactor.listenTCP(port, factory, backlog, address))
|
||||||
reactor.listenTCP(
|
|
||||||
port,
|
|
||||||
factory,
|
|
||||||
backlog,
|
|
||||||
address
|
|
||||||
)
|
|
||||||
)
|
|
||||||
except error.CannotListenError as e:
|
except error.CannotListenError as e:
|
||||||
check_bind_error(e, address, bind_addresses)
|
check_bind_error(e, address, bind_addresses)
|
||||||
|
|
||||||
@ -205,13 +198,7 @@ def listen_ssl(
|
|||||||
for address in bind_addresses:
|
for address in bind_addresses:
|
||||||
try:
|
try:
|
||||||
r.append(
|
r.append(
|
||||||
reactor.listenSSL(
|
reactor.listenSSL(port, factory, context_factory, backlog, address)
|
||||||
port,
|
|
||||||
factory,
|
|
||||||
context_factory,
|
|
||||||
backlog,
|
|
||||||
address
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
except error.CannotListenError as e:
|
except error.CannotListenError as e:
|
||||||
check_bind_error(e, address, bind_addresses)
|
check_bind_error(e, address, bind_addresses)
|
||||||
@@ -243,15 +230,13 @@ def refresh_certificate(hs):
         if isinstance(i.factory, TLSMemoryBIOFactory):
             addr = i.getHost()
             logger.info(
-                "Replacing TLS context factory on [%s]:%i", addr.host, addr.port,
+                "Replacing TLS context factory on [%s]:%i", addr.host, addr.port
             )
             # We want to replace TLS factories with a new one, with the new
             # TLS configuration. We do this by reaching in and pulling out
             # the wrappedFactory, and then re-wrapping it.
             i.factory = TLSMemoryBIOFactory(
-                hs.tls_server_context_factory,
-                False,
-                i.factory.wrappedFactory
+                hs.tls_server_context_factory, False, i.factory.wrappedFactory
             )
     logger.info("Context factories updated.")
 
@@ -267,6 +252,7 @@ def start(hs, listeners=None):
     try:
         # Set up the SIGHUP machinery.
         if hasattr(signal, "SIGHUP"):
+
             def handle_sighup(*args, **kwargs):
                 for i in _sighup_callbacks:
                     i(hs)
@@ -302,10 +288,8 @@ def setup_sentry(hs):
         return
 
     import sentry_sdk
-    sentry_sdk.init(
-        dsn=hs.config.sentry_dsn,
-        release=get_version_string(synapse),
-    )
+
+    sentry_sdk.init(dsn=hs.config.sentry_dsn, release=get_version_string(synapse))
 
     # We set some default tags that give some context to this instance
     with sentry_sdk.configure_scope() as scope:
@@ -326,7 +310,7 @@ def install_dns_limiter(reactor, max_dns_requests_in_flight=100):
     many DNS queries at once
     """
     new_resolver = _LimitedHostnameResolver(
-        reactor.nameResolver, max_dns_requests_in_flight,
+        reactor.nameResolver, max_dns_requests_in_flight
     )
 
     reactor.installNameResolver(new_resolver)
@@ -339,11 +323,17 @@ class _LimitedHostnameResolver(object):
     def __init__(self, resolver, max_dns_requests_in_flight):
         self._resolver = resolver
         self._limiter = Linearizer(
-            name="dns_client_limiter", max_count=max_dns_requests_in_flight,
+            name="dns_client_limiter", max_count=max_dns_requests_in_flight
         )
 
-    def resolveHostName(self, resolutionReceiver, hostName, portNumber=0,
-                        addressTypes=None, transportSemantics='TCP'):
+    def resolveHostName(
+        self,
+        resolutionReceiver,
+        hostName,
+        portNumber=0,
+        addressTypes=None,
+        transportSemantics="TCP",
+    ):
         # We need this function to return `resolutionReceiver` so we do all the
         # actual logic involving deferreds in a separate function.
 
@@ -363,8 +353,14 @@ class _LimitedHostnameResolver(object):
         return resolutionReceiver
 
     @defer.inlineCallbacks
-    def _resolve(self, resolutionReceiver, hostName, portNumber=0,
-                 addressTypes=None, transportSemantics='TCP'):
+    def _resolve(
+        self,
+        resolutionReceiver,
+        hostName,
+        portNumber=0,
+        addressTypes=None,
+        transportSemantics="TCP",
+    ):
 
         with (yield self._limiter.queue(())):
             # resolveHostName doesn't return a Deferred, so we need to hook into
@@ -374,8 +370,7 @@ class _LimitedHostnameResolver(object):
             receiver = _DeferredResolutionReceiver(resolutionReceiver, deferred)
 
             self._resolver.resolveHostName(
-                receiver, hostName, portNumber,
-                addressTypes, transportSemantics,
+                receiver, hostName, portNumber, addressTypes, transportSemantics
             )
 
             yield deferred
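When a signature no longer fits on one line, the formatter gives each
parameter its own line with a trailing comma after the last one, as in the
``resolveHostName`` and ``_resolve`` rewrites above. A standalone sketch of
the resulting shape (the body here is a stand-in, not the real resolver
logic)::

    def resolve(
        receiver,
        host_name,
        port_number=0,
        address_types=None,
        transport_semantics="TCP",
    ):
        # Echo the arguments back; the real method queues a DNS lookup.
        return (receiver, host_name, port_number, address_types, transport_semantics)

    print(resolve(object(), "example.com"))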
@@ -44,7 +44,9 @@ logger = logging.getLogger("synapse.app.appservice")
 
 
 class AppserviceSlaveStore(
-    DirectoryStore, SlavedEventStore, SlavedApplicationServiceStore,
+    DirectoryStore,
+    SlavedEventStore,
+    SlavedApplicationServiceStore,
     SlavedRegistrationStore,
 ):
     pass
@@ -74,7 +76,7 @@ class AppserviceServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )
 
         logger.info("Synapse appservice now listening on port %d", port)
@@ -88,18 +90,19 @@ class AppserviceServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
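The listener hunk above recurs almost verbatim in every worker entry point
below, since each worker duplicates the same ``start_listening`` boilerplate.
The two-fragment warning survives the rewrite because adjacent string literals
concatenate at compile time; only the layout changes. A quick check of that
behaviour::

    message = (
        "Metrics listener configured, but "
        "enable_metrics is not True!"
    )
    assert message == "Metrics listener configured, but enable_metrics is not True!"
    print(message)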
@@ -132,9 +135,7 @@ class ASReplicationHandler(ReplicationClientHandler):
 
 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse appservice", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse appservice", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -173,6 +174,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-appservice", config)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
@@ -118,9 +118,7 @@ class ClientReaderServer(HomeServer):
                     PushRuleRestServlet(self).register(resource)
                     VersionsRestServlet().register(resource)
 
-                    resources.update({
-                        "/_matrix/client": resource,
-                    })
+                    resources.update({"/_matrix/client": resource})
 
         root_resource = create_resource_tree(resources, NoResource())
@@ -133,7 +131,7 @@ class ClientReaderServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )
 
         logger.info("Synapse client reader now listening on port %d", port)
@@ -147,18 +145,19 @@ class ClientReaderServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
@@ -170,9 +169,7 @@ class ClientReaderServer(HomeServer):
 
 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse client reader", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse client reader", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -199,6 +196,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-client-reader", config)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
@@ -109,12 +109,14 @@ class EventCreatorServer(HomeServer):
                     ProfileAvatarURLRestServlet(self).register(resource)
                     ProfileDisplaynameRestServlet(self).register(resource)
                     ProfileRestServlet(self).register(resource)
-                    resources.update({
-                        "/_matrix/client/r0": resource,
-                        "/_matrix/client/unstable": resource,
-                        "/_matrix/client/v2_alpha": resource,
-                        "/_matrix/client/api/v1": resource,
-                    })
+                    resources.update(
+                        {
+                            "/_matrix/client/r0": resource,
+                            "/_matrix/client/unstable": resource,
+                            "/_matrix/client/v2_alpha": resource,
+                            "/_matrix/client/api/v1": resource,
+                        }
+                    )
 
         root_resource = create_resource_tree(resources, NoResource())
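When a call taking a dict literal overflows the line limit, the brace pair is
pushed onto its own indentation level inside the call parentheses, as in the
``resources.update`` rewrite above. A self-contained sketch with placeholder
values (``resource`` stands in for the real ``JsonResource``)::

    resources = {}
    resource = object()  # stand-in for the real JsonResource

    # The dict literal now sits one level inside the call parentheses.
    resources.update(
        {
            "/_matrix/client/r0": resource,
            "/_matrix/client/unstable": resource,
        }
    )
    print(sorted(resources))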
@@ -127,7 +129,7 @@ class EventCreatorServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )
 
         logger.info("Synapse event creator now listening on port %d", port)
@@ -141,18 +143,19 @@ class EventCreatorServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
            elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
@@ -164,9 +167,7 @@ class EventCreatorServer(HomeServer):
 
 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse event creator", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse event creator", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -198,6 +199,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-event-creator", config)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
@@ -86,19 +86,18 @@ class FederationReaderServer(HomeServer):
                 if name == "metrics":
                     resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
                 elif name == "federation":
-                    resources.update({
-                        FEDERATION_PREFIX: TransportLayerServer(self),
-                    })
+                    resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
                 if name == "openid" and "federation" not in res["names"]:
                     # Only load the openid resource separately if federation resource
                     # is not specified since federation resource includes openid
                     # resource.
-                    resources.update({
-                        FEDERATION_PREFIX: TransportLayerServer(
-                            self,
-                            servlet_groups=["openid"],
-                        ),
-                    })
+                    resources.update(
+                        {
+                            FEDERATION_PREFIX: TransportLayerServer(
+                                self, servlet_groups=["openid"]
+                            )
+                        }
+                    )
 
                 if name in ["keys", "federation"]:
                     resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
@@ -115,7 +114,7 @@ class FederationReaderServer(HomeServer):
                 root_resource,
                 self.version_string,
             ),
-            reactor=self.get_reactor()
+            reactor=self.get_reactor(),
         )
 
         logger.info("Synapse federation reader now listening on port %d", port)
@@ -129,18 +128,19 @@ class FederationReaderServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
@@ -181,6 +181,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-federation-reader", config)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
@@ -52,8 +52,13 @@ logger = logging.getLogger("synapse.app.federation_sender")
 
 
 class FederationSenderSlaveStore(
-    SlavedDeviceInboxStore, SlavedTransactionStore, SlavedReceiptsStore, SlavedEventStore,
-    SlavedRegistrationStore, SlavedDeviceStore, SlavedPresenceStore,
+    SlavedDeviceInboxStore,
+    SlavedTransactionStore,
+    SlavedReceiptsStore,
+    SlavedEventStore,
+    SlavedRegistrationStore,
+    SlavedDeviceStore,
+    SlavedPresenceStore,
 ):
     def __init__(self, db_conn, hs):
         super(FederationSenderSlaveStore, self).__init__(db_conn, hs)
@@ -65,10 +70,7 @@ class FederationSenderSlaveStore(
         self.federation_out_pos_startup = self._get_federation_out_pos(db_conn)
 
     def _get_federation_out_pos(self, db_conn):
-        sql = (
-            "SELECT stream_id FROM federation_stream_position"
-            " WHERE type = ?"
-        )
+        sql = "SELECT stream_id FROM federation_stream_position" " WHERE type = ?"
         sql = self.database_engine.convert_param_style(sql)
 
         txn = db_conn.cursor()
@@ -103,7 +105,7 @@ class FederationSenderServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )
 
         logger.info("Synapse federation_sender now listening on port %d", port)
@@ -117,18 +119,19 @@ class FederationSenderServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
@@ -151,7 +154,9 @@ class FederationSenderReplicationHandler(ReplicationClientHandler):
         self.send_handler.process_replication_rows(stream_name, token, rows)
 
     def get_streams_to_replicate(self):
-        args = super(FederationSenderReplicationHandler, self).get_streams_to_replicate()
+        args = super(
+            FederationSenderReplicationHandler, self
+        ).get_streams_to_replicate()
         args.update(self.send_handler.stream_positions())
         return args
@@ -203,6 +208,7 @@ class FederationSenderHandler(object):
     """Processes the replication stream and forwards the appropriate entries
     to the federation sender.
     """
+
     def __init__(self, hs, replication_client):
         self.store = hs.get_datastore()
         self._is_mine_id = hs.is_mine_id
@@ -241,7 +247,7 @@ class FederationSenderHandler(object):
         # ... and when new receipts happen
         elif stream_name == ReceiptsStream.NAME:
             run_as_background_process(
-                "process_receipts_for_federation", self._on_new_receipts, rows,
+                "process_receipts_for_federation", self._on_new_receipts, rows
             )
 
     @defer.inlineCallbacks
@@ -278,12 +284,14 @@ class FederationSenderHandler(object):
 
                     # We ACK this token over replication so that the master can drop
                     # its in memory queues
-                    self.replication_client.send_federation_ack(self.federation_position)
+                    self.replication_client.send_federation_ack(
+                        self.federation_position
+                    )
                     self._last_ack = self.federation_position
         except Exception:
             logger.exception("Error updating federation stream position")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
@@ -62,14 +62,11 @@ class PresenceStatusStubServlet(RestServlet):
         # Pass through the auth headers, if any, in case the access token
         # is there.
         auth_headers = request.requestHeaders.getRawHeaders("Authorization", [])
-        headers = {
-            "Authorization": auth_headers,
-        }
+        headers = {"Authorization": auth_headers}
 
         try:
             result = yield self.http_client.get_json(
-                self.main_uri + request.uri.decode('ascii'),
-                headers=headers,
+                self.main_uri + request.uri.decode("ascii"), headers=headers
             )
         except HttpResponseException as e:
             raise e.to_synapse_error()
@@ -105,18 +102,19 @@ class KeyUploadServlet(RestServlet):
         if device_id is not None:
             # passing the device_id here is deprecated; however, we allow it
             # for now for compatibility with older clients.
-            if (requester.device_id is not None and
-                    device_id != requester.device_id):
-                logger.warning("Client uploading keys for a different device "
-                               "(logged in as %s, uploading for %s)",
-                               requester.device_id, device_id)
+            if requester.device_id is not None and device_id != requester.device_id:
+                logger.warning(
+                    "Client uploading keys for a different device "
+                    "(logged in as %s, uploading for %s)",
+                    requester.device_id,
+                    device_id,
+                )
         else:
             device_id = requester.device_id
 
         if device_id is None:
             raise SynapseError(
-                400,
-                "To upload keys, you must pass device_id when authenticating"
+                400, "To upload keys, you must pass device_id when authenticating"
             )
 
         if body:
@@ -124,13 +122,9 @@ class KeyUploadServlet(RestServlet):
             # Pass through the auth headers, if any, in case the access token
             # is there.
             auth_headers = request.requestHeaders.getRawHeaders(b"Authorization", [])
-            headers = {
-                "Authorization": auth_headers,
-            }
+            headers = {"Authorization": auth_headers}
             result = yield self.http_client.post_json_get_json(
-                self.main_uri + request.uri.decode('ascii'),
-                body,
-                headers=headers,
+                self.main_uri + request.uri.decode("ascii"), body, headers=headers
             )
 
             defer.returnValue((200, result))
@@ -171,12 +165,14 @@ class FrontendProxyServer(HomeServer):
                     if not self.config.use_presence:
                         PresenceStatusStubServlet(self).register(resource)
 
-                    resources.update({
-                        "/_matrix/client/r0": resource,
-                        "/_matrix/client/unstable": resource,
-                        "/_matrix/client/v2_alpha": resource,
-                        "/_matrix/client/api/v1": resource,
-                    })
+                    resources.update(
+                        {
+                            "/_matrix/client/r0": resource,
+                            "/_matrix/client/unstable": resource,
+                            "/_matrix/client/v2_alpha": resource,
+                            "/_matrix/client/api/v1": resource,
+                        }
+                    )
 
         root_resource = create_resource_tree(resources, NoResource())
@@ -190,7 +186,7 @@ class FrontendProxyServer(HomeServer):
                 root_resource,
                 self.version_string,
             ),
-            reactor=self.get_reactor()
+            reactor=self.get_reactor(),
        )
 
         logger.info("Synapse client reader now listening on port %d", port)
@@ -204,18 +200,19 @@ class FrontendProxyServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
@@ -227,9 +224,7 @@ class FrontendProxyServer(HomeServer):
 
 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse frontend proxy", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse frontend proxy", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -258,6 +253,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-frontend-proxy", config)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
@@ -101,13 +101,12 @@ class SynapseHomeServer(HomeServer):
                     # Skip loading openid resource if federation is defined
                     # since federation resource will include openid
                     continue
-                resources.update(self._configure_named_resource(
-                    name, res.get("compress", False),
-                ))
+                resources.update(
+                    self._configure_named_resource(name, res.get("compress", False))
+                )
 
         additional_resources = listener_config.get("additional_resources", {})
-        logger.debug("Configuring additional resources: %r",
-                     additional_resources)
+        logger.debug("Configuring additional resources: %r", additional_resources)
         module_api = ModuleApi(self, self.get_auth_handler())
         for path, resmodule in additional_resources.items():
             handler_cls, config = load_module(resmodule)
@@ -174,7 +173,8 @@ class SynapseHomeServer(HomeServer):
             if compress:
                 client_resource = gz_wrap(client_resource)
 
-            resources.update({
+            resources.update(
+                {
                     "/_matrix/client/api/v1": client_resource,
                     "/_matrix/client/r0": client_resource,
                     "/_matrix/client/unstable": client_resource,
@@ -182,51 +182,58 @@ class SynapseHomeServer(HomeServer):
                     "/_matrix/client/versions": client_resource,
                     "/.well-known/matrix/client": WellKnownResource(self),
                     "/_synapse/admin": AdminRestResource(self),
-            })
+                }
+            )
 
             if self.get_config().saml2_enabled:
                 from synapse.rest.saml2 import SAML2Resource
+
                 resources["/_matrix/saml2"] = SAML2Resource(self)
 
         if name == "consent":
             from synapse.rest.consent.consent_resource import ConsentResource
+
             consent_resource = ConsentResource(self)
             if compress:
                 consent_resource = gz_wrap(consent_resource)
-            resources.update({
-                "/_matrix/consent": consent_resource,
-            })
+            resources.update({"/_matrix/consent": consent_resource})
 
         if name == "federation":
-            resources.update({
-                FEDERATION_PREFIX: TransportLayerServer(self),
-            })
+            resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
 
         if name == "openid":
-            resources.update({
-                FEDERATION_PREFIX: TransportLayerServer(self, servlet_groups=["openid"]),
-            })
+            resources.update(
+                {
+                    FEDERATION_PREFIX: TransportLayerServer(
+                        self, servlet_groups=["openid"]
+                    )
+                }
+            )
 
         if name in ["static", "client"]:
-            resources.update({
-                STATIC_PREFIX: File(
-                    os.path.join(os.path.dirname(synapse.__file__), "static")
-                ),
-            })
+            resources.update(
+                {
+                    STATIC_PREFIX: File(
+                        os.path.join(os.path.dirname(synapse.__file__), "static")
+                    )
+                }
+            )
 
         if name in ["media", "federation", "client"]:
             if self.get_config().enable_media_repo:
                 media_repo = self.get_media_repository_resource()
-                resources.update({
-                    MEDIA_PREFIX: media_repo,
-                    LEGACY_MEDIA_PREFIX: media_repo,
-                    CONTENT_REPO_PREFIX: ContentRepoResource(
-                        self, self.config.uploads_path
-                    ),
-                })
+                resources.update(
+                    {
+                        MEDIA_PREFIX: media_repo,
+                        LEGACY_MEDIA_PREFIX: media_repo,
+                        CONTENT_REPO_PREFIX: ContentRepoResource(
+                            self, self.config.uploads_path
+                        ),
+                    }
+                )
             elif name == "media":
                 raise ConfigError(
-                    "'media' resource conflicts with enable_media_repo=False",
+                    "'media' resource conflicts with enable_media_repo=False"
                 )
 
         if name in ["keys", "federation"]:
@@ -257,18 +264,14 @@ class SynapseHomeServer(HomeServer):
 
         for listener in listeners:
             if listener["type"] == "http":
-                self._listening_services.extend(
-                    self._listener_http(config, listener)
-                )
+                self._listening_services.extend(self._listener_http(config, listener))
             elif listener["type"] == "manhole":
                 listen_tcp(
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "replication":
                 services = listen_tcp(
@@ -277,16 +280,17 @@ class SynapseHomeServer(HomeServer):
                     ReplicationStreamProtocolFactory(self),
                 )
                 for s in services:
-                    reactor.addSystemEventTrigger(
-                        "before", "shutdown", s.stopListening,
-                    )
+                    reactor.addSystemEventTrigger("before", "shutdown", s.stopListening)
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
@@ -312,7 +316,7 @@ current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")
 max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit")
 registered_reserved_users_mau_gauge = Gauge(
     "synapse_admin_mau:registered_reserved_users",
-    "Registered users with reserved threepids"
+    "Registered users with reserved threepids",
 )
 
 
@@ -327,8 +331,7 @@ def setup(config_options):
     """
     try:
         config = HomeServerConfig.load_or_generate_config(
-            "Synapse Homeserver",
-            config_options,
+            "Synapse Homeserver", config_options
         )
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
@@ -339,10 +342,7 @@ def setup(config_options):
         # generating config files and shouldn't try to continue.
         sys.exit(0)
 
-    synapse.config.logger.setup_logging(
-        config,
-        use_worker_options=False
-    )
+    synapse.config.logger.setup_logging(config, use_worker_options=False)
 
     events.USE_FROZEN_DICTS = config.use_frozen_dicts
 
@@ -357,7 +357,7 @@ def setup(config_options):
         database_engine=database_engine,
     )
 
-    logger.info("Preparing database: %s...", config.database_config['name'])
+    logger.info("Preparing database: %s...", config.database_config["name"])
 
     try:
         with hs.get_db_conn(run_new_connection=False) as db_conn:
@@ -375,7 +375,7 @@ def setup(config_options):
         )
         sys.exit(1)
 
-    logger.info("Database prepared in %s.", config.database_config['name'])
+    logger.info("Database prepared in %s.", config.database_config["name"])
 
     hs.setup()
     hs.setup_master()
@@ -391,9 +391,7 @@ def setup(config_options):
         acme = hs.get_acme_handler()
 
         # Check how long the certificate is active for.
-        cert_days_remaining = hs.config.is_disk_cert_valid(
-            allow_self_signed=False
-        )
+        cert_days_remaining = hs.config.is_disk_cert_valid(allow_self_signed=False)
 
         # We want to reprovision if cert_days_remaining is None (meaning no
         # certificate exists), or the days remaining number it returns
@@ -401,8 +399,8 @@ def setup(config_options):
         provision = False
 
         if (
-            cert_days_remaining is None or
-            cert_days_remaining < hs.config.acme_reprovision_threshold
+            cert_days_remaining is None
+            or cert_days_remaining < hs.config.acme_reprovision_threshold
         ):
             provision = True
 
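The split in a multi-line boolean expression now lands before the operator
rather than after it, so continuation lines start with ``or``/``and``. A small
illustration with made-up values::

    cert_days_remaining = None
    threshold = 30

    # Each continuation line now begins with the boolean operator.
    provision = (
        cert_days_remaining is None
        or cert_days_remaining < threshold
    )
    print(provision)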
@@ -433,10 +431,7 @@ def setup(config_options):
                 yield do_acme()
 
                 # Check if it needs to be reprovisioned every day.
-                hs.get_clock().looping_call(
-                    reprovision_acme,
-                    24 * 60 * 60 * 1000
-                )
+                hs.get_clock().looping_call(reprovision_acme, 24 * 60 * 60 * 1000)
 
             _base.start(hs, config.listeners)
 
@@ -463,6 +458,7 @@ class SynapseService(service.Service):
     A twisted Service class that will start synapse. Used to run synapse
     via twistd and a .tac.
     """
+
     def __init__(self, config):
         self.config = config
 
@@ -479,6 +475,7 @@ class SynapseService(service.Service):
 def run(hs):
     PROFILE_SYNAPSE = False
     if PROFILE_SYNAPSE:
+
         def profile(func):
             from cProfile import Profile
             from threading import current_thread
@@ -489,13 +486,14 @@ def run(hs):
                 func(*args, **kargs)
                 profile.disable()
                 ident = current_thread().ident
-                profile.dump_stats("/tmp/%s.%s.%i.pstat" % (
-                    hs.hostname, func.__name__, ident
-                ))
+                profile.dump_stats(
+                    "/tmp/%s.%s.%i.pstat" % (hs.hostname, func.__name__, ident)
+                )
 
             return profiled
 
         from twisted.python.threadpool import ThreadPool
+
         ThreadPool._worker = profile(ThreadPool._worker)
         reactor.run = profile(reactor.run)
@@ -541,7 +539,9 @@ def run(hs):
 
         stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
         stats["monthly_active_users"] = yield hs.get_datastore().count_monthly_users()
-        stats["daily_active_rooms"] = yield hs.get_datastore().count_daily_active_rooms()
+        stats[
+            "daily_active_rooms"
+        ] = yield hs.get_datastore().count_daily_active_rooms()
         stats["daily_messages"] = yield hs.get_datastore().count_daily_messages()
 
         r30_results = yield hs.get_datastore().count_r30_users()
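When even the assignment target is too long, the formatter splits inside the
subscript brackets, leaving the key on its own line, as in the
``daily_active_rooms`` rewrite above. A sketch with a hypothetical stand-in
for the real datastore call::

    stats = {}

    def count_daily_active_rooms():
        # Hypothetical placeholder; the real call hits the database.
        return 42

    # The split lands inside the subscript brackets.
    stats[
        "daily_active_rooms"
    ] = count_daily_active_rooms()
    print(stats)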
@@ -565,8 +565,7 @@ def run(hs):
         logger.info("Reporting stats to matrix.org: %s" % (stats,))
         try:
             yield hs.get_simple_http_client().put_json(
-                "https://matrix.org/report-usage-stats/push",
-                stats
+                "https://matrix.org/report-usage-stats/push", stats
             )
         except Exception as e:
             logger.warn("Error reporting stats: %s", e)
@@ -581,14 +580,11 @@ def run(hs):
             logger.info("report_stats can use psutil")
             stats_process.append(process)
         except (AttributeError):
-            logger.warning(
-                "Unable to read memory/cpu stats. Disabling reporting."
-            )
+            logger.warning("Unable to read memory/cpu stats. Disabling reporting.")
 
     def generate_user_daily_visit_stats():
         return run_as_background_process(
-            "generate_user_daily_visits",
-            hs.get_datastore().generate_user_daily_visits,
+            "generate_user_daily_visits", hs.get_datastore().generate_user_daily_visits
         )
 
     # Rather than update on per session basis, batch up the requests.
@@ -599,9 +595,9 @@ def run(hs):
     # monthly active user limiting functionality
     def reap_monthly_active_users():
         return run_as_background_process(
-            "reap_monthly_active_users",
-            hs.get_datastore().reap_monthly_active_users,
+            "reap_monthly_active_users", hs.get_datastore().reap_monthly_active_users
         )
 
     clock.looping_call(reap_monthly_active_users, 1000 * 60 * 60)
     reap_monthly_active_users()
@@ -619,8 +615,7 @@ def run(hs):
 
     def start_generate_monthly_active_users():
         return run_as_background_process(
-            "generate_monthly_active_users",
-            generate_monthly_active_users,
+            "generate_monthly_active_users", generate_monthly_active_users
         )
 
     start_generate_monthly_active_users()
@@ -660,5 +655,5 @@ def main():
         run(hs)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
@@ -72,13 +72,15 @@ class MediaRepositoryServer(HomeServer):
                     resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
                 elif name == "media":
                     media_repo = self.get_media_repository_resource()
-                    resources.update({
-                        MEDIA_PREFIX: media_repo,
-                        LEGACY_MEDIA_PREFIX: media_repo,
-                        CONTENT_REPO_PREFIX: ContentRepoResource(
-                            self, self.config.uploads_path
-                        ),
-                    })
+                    resources.update(
+                        {
+                            MEDIA_PREFIX: media_repo,
+                            LEGACY_MEDIA_PREFIX: media_repo,
+                            CONTENT_REPO_PREFIX: ContentRepoResource(
+                                self, self.config.uploads_path
+                            ),
+                        }
+                    )
 
         root_resource = create_resource_tree(resources, NoResource())
@@ -91,7 +93,7 @@ class MediaRepositoryServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )
 
         logger.info("Synapse media repository now listening on port %d", port)
@@ -105,18 +107,19 @@ class MediaRepositoryServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
@@ -164,6 +167,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-media-repository", config)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
@@ -46,36 +46,27 @@ logger = logging.getLogger("synapse.app.pusher")
 
 
 class PusherSlaveStore(
-    SlavedEventStore, SlavedPusherStore, SlavedReceiptsStore,
-    SlavedAccountDataStore
+    SlavedEventStore, SlavedPusherStore, SlavedReceiptsStore, SlavedAccountDataStore
 ):
-    update_pusher_last_stream_ordering_and_success = (
-        __func__(DataStore.update_pusher_last_stream_ordering_and_success)
+    update_pusher_last_stream_ordering_and_success = __func__(
+        DataStore.update_pusher_last_stream_ordering_and_success
     )
 
-    update_pusher_failing_since = (
-        __func__(DataStore.update_pusher_failing_since)
-    )
+    update_pusher_failing_since = __func__(DataStore.update_pusher_failing_since)
 
-    update_pusher_last_stream_ordering = (
-        __func__(DataStore.update_pusher_last_stream_ordering)
+    update_pusher_last_stream_ordering = __func__(
+        DataStore.update_pusher_last_stream_ordering
     )
 
-    get_throttle_params_by_room = (
-        __func__(DataStore.get_throttle_params_by_room)
-    )
+    get_throttle_params_by_room = __func__(DataStore.get_throttle_params_by_room)
 
-    set_throttle_params = (
-        __func__(DataStore.set_throttle_params)
-    )
+    set_throttle_params = __func__(DataStore.set_throttle_params)
 
-    get_time_of_last_push_action_before = (
-        __func__(DataStore.get_time_of_last_push_action_before)
-    )
+    get_time_of_last_push_action_before = __func__(
+        DataStore.get_time_of_last_push_action_before
+    )
 
-    get_profile_displayname = (
-        __func__(DataStore.get_profile_displayname)
-    )
+    get_profile_displayname = __func__(DataStore.get_profile_displayname)
 
 
 class PusherServer(HomeServer):
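The ``PusherSlaveStore`` rewrite above drops the redundant wrapping
parentheses around each ``__func__(...)`` assignment; where the right-hand
side is still too long, the split moves inside the call instead. A rough
standalone sketch, in which ``__func__`` is a trivial identity function
standing in for the helper this module defines on Python 3::

    def __func__(inp):
        # On Python 3 an unbound method is just the function itself.
        return inp

    class DataStore(object):
        @staticmethod
        def get_profile_displayname():
            return "example"

    # No wrapping parentheses are needed once the line fits.
    get_profile_displayname = __func__(DataStore.get_profile_displayname)
    print(get_profile_displayname())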
@@ -105,7 +96,7 @@ class PusherServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )
 
         logger.info("Synapse pusher now listening on port %d", port)
@@ -119,18 +110,19 @@ class PusherServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])
@@ -161,9 +153,7 @@ class PusherReplicationHandler(ReplicationClientHandler):
             else:
                 yield self.start_pusher(row.user_id, row.app_id, row.pushkey)
         elif stream_name == "events":
-            yield self.pusher_pool.on_new_notifications(
-                token, token,
-            )
+            yield self.pusher_pool.on_new_notifications(token, token)
         elif stream_name == "receipts":
             yield self.pusher_pool.on_new_receipts(
                 token, token, set(row.room_id for row in rows)
@@ -188,9 +178,7 @@ class PusherReplicationHandler(ReplicationClientHandler):
 
 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse pusher", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse pusher", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -234,6 +222,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-pusher", config)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         ps = start(sys.argv[1:])
@@ -98,10 +98,7 @@ class SynchrotronPresence(object):
         self.notifier = hs.get_notifier()

         active_presence = self.store.take_presence_startup_info()
-        self.user_to_current_state = {
-            state.user_id: state
-            for state in active_presence
-        }
+        self.user_to_current_state = {state.user_id: state for state in active_presence}

         # user_id -> last_sync_ms. Lists the users that have stopped syncing
         # but we haven't notified the master of that yet
@@ -196,17 +193,26 @@ class SynchrotronPresence(object):
         room_ids_to_states, users_to_states = parties

         self.notifier.on_new_event(
-            "presence_key", stream_id, rooms=room_ids_to_states.keys(),
-            users=users_to_states.keys()
+            "presence_key",
+            stream_id,
+            rooms=room_ids_to_states.keys(),
+            users=users_to_states.keys(),
         )

     @defer.inlineCallbacks
     def process_replication_rows(self, token, rows):
-        states = [UserPresenceState(
-            row.user_id, row.state, row.last_active_ts,
-            row.last_federation_update_ts, row.last_user_sync_ts, row.status_msg,
-            row.currently_active
-        ) for row in rows]
+        states = [
+            UserPresenceState(
+                row.user_id,
+                row.state,
+                row.last_active_ts,
+                row.last_federation_update_ts,
+                row.last_user_sync_ts,
+                row.status_msg,
+                row.currently_active,
+            )
+            for row in rows
+        ]

         for state in states:
             self.user_to_current_state[state.user_id] = state
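The hunk above shows the complementary rule: a call that exceeds the line limit is "exploded" to one argument per line with a trailing comma, while a comprehension that fits is collapsed onto one line. A self-contained sketch of both forms (names invented for illustration):

    def build_index(rows):
        # One argument per line, trailing comma: the exploded form.
        keys = sorted(
            rows,
            key=len,
            reverse=True,
        )
        # A short dict comprehension stays on a single line.
        return {name: len(name) for name in keys}

    print(build_index(["alice", "bob"]))  # -> {'alice': 5, 'bob': 3}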
@@ -217,7 +223,8 @@ class SynchrotronPresence(object):
     def get_currently_syncing_users(self):
         if self.hs.config.use_presence:
             return [
-                user_id for user_id, count in iteritems(self.user_to_num_current_syncs)
+                user_id
+                for user_id, count in iteritems(self.user_to_num_current_syncs)
                 if count > 0
             ]
         else:
@@ -281,12 +288,14 @@ class SynchrotronServer(HomeServer):
                     events.register_servlets(self, resource)
                     InitialSyncRestServlet(self).register(resource)
                     RoomInitialSyncRestServlet(self).register(resource)
-                    resources.update({
-                        "/_matrix/client/r0": resource,
-                        "/_matrix/client/unstable": resource,
-                        "/_matrix/client/v2_alpha": resource,
-                        "/_matrix/client/api/v1": resource,
-                    })
+                    resources.update(
+                        {
+                            "/_matrix/client/r0": resource,
+                            "/_matrix/client/unstable": resource,
+                            "/_matrix/client/v2_alpha": resource,
+                            "/_matrix/client/api/v1": resource,
+                        }
+                    )

         root_resource = create_resource_tree(resources, NoResource())

@@ -299,7 +308,7 @@ class SynchrotronServer(HomeServer):
                     listener_config,
                     root_resource,
                     self.version_string,
-                )
+                ),
             )

         logger.info("Synapse synchrotron now listening on port %d", port)
@@ -313,18 +322,19 @@ class SynchrotronServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -382,40 +392,36 @@ class SyncReplicationHandler(ReplicationClientHandler):
             )
         elif stream_name == "push_rules":
             self.notifier.on_new_event(
-                "push_rules_key", token, users=[row.user_id for row in rows],
+                "push_rules_key", token, users=[row.user_id for row in rows]
             )
-        elif stream_name in ("account_data", "tag_account_data",):
+        elif stream_name in ("account_data", "tag_account_data"):
             self.notifier.on_new_event(
-                "account_data_key", token, users=[row.user_id for row in rows],
+                "account_data_key", token, users=[row.user_id for row in rows]
             )
         elif stream_name == "receipts":
             self.notifier.on_new_event(
-                "receipt_key", token, rooms=[row.room_id for row in rows],
+                "receipt_key", token, rooms=[row.room_id for row in rows]
             )
         elif stream_name == "typing":
             self.typing_handler.process_replication_rows(token, rows)
             self.notifier.on_new_event(
-                "typing_key", token, rooms=[row.room_id for row in rows],
+                "typing_key", token, rooms=[row.room_id for row in rows]
             )
         elif stream_name == "to_device":
             entities = [row.entity for row in rows if row.entity.startswith("@")]
             if entities:
-                self.notifier.on_new_event(
-                    "to_device_key", token, users=entities,
-                )
+                self.notifier.on_new_event("to_device_key", token, users=entities)
         elif stream_name == "device_lists":
             all_room_ids = set()
             for row in rows:
                 room_ids = yield self.store.get_rooms_for_user(row.user_id)
                 all_room_ids.update(room_ids)
-            self.notifier.on_new_event(
-                "device_list_key", token, rooms=all_room_ids,
-            )
+            self.notifier.on_new_event("device_list_key", token, rooms=all_room_ids)
         elif stream_name == "presence":
             yield self.presence_handler.process_replication_rows(token, rows)
         elif stream_name == "groups":
             self.notifier.on_new_event(
-                "groups_key", token, users=[row.user_id for row in rows],
+                "groups_key", token, users=[row.user_id for row in rows]
             )
         except Exception:
             logger.exception("Error processing replication")
@@ -423,9 +429,7 @@ class SyncReplicationHandler(ReplicationClientHandler):

 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse synchrotron", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse synchrotron", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -453,6 +457,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-synchrotron", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
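The handler above fans replication rows out to the notifier keyed by stream name: each stream maps to a notifier key plus the set of users or rooms whose pending sync requests should be woken. A simplified standalone sketch of that dispatch shape (names invented; this is not Synapse's actual API):

    class Notifier:
        def on_new_event(self, key, token, users=(), rooms=()):
            print("wake %s listeners at token %s" % (key, token))

    def process_rows(notifier, stream_name, token, rows):
        # Map each replication stream to a notifier key and the affected entities.
        if stream_name == "push_rules":
            notifier.on_new_event(
                "push_rules_key", token, users=[r["user_id"] for r in rows]
            )
        elif stream_name == "receipts":
            notifier.on_new_event(
                "receipt_key", token, rooms=[r["room_id"] for r in rows]
            )

    process_rows(Notifier(), "receipts", 42, [{"room_id": "!a:example.org"}])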
@@ -66,14 +66,16 @@ class UserDirectorySlaveStore(

         events_max = self._stream_id_gen.get_current_token()
         curr_state_delta_prefill, min_curr_state_delta_id = self._get_cache_dict(
-            db_conn, "current_state_delta_stream",
+            db_conn,
+            "current_state_delta_stream",
             entity_column="room_id",
             stream_column="stream_id",
             max_value=events_max,  # As we share the stream id with events token
             limit=1000,
         )
         self._curr_state_delta_stream_cache = StreamChangeCache(
-            "_curr_state_delta_stream_cache", min_curr_state_delta_id,
+            "_curr_state_delta_stream_cache",
+            min_curr_state_delta_id,
             prefilled_cache=curr_state_delta_prefill,
         )

@@ -110,12 +112,14 @@ class UserDirectoryServer(HomeServer):
                 elif name == "client":
                     resource = JsonResource(self, canonical_json=False)
                     user_directory.register_servlets(self, resource)
-                    resources.update({
-                        "/_matrix/client/r0": resource,
-                        "/_matrix/client/unstable": resource,
-                        "/_matrix/client/v2_alpha": resource,
-                        "/_matrix/client/api/v1": resource,
-                    })
+                    resources.update(
+                        {
+                            "/_matrix/client/r0": resource,
+                            "/_matrix/client/unstable": resource,
+                            "/_matrix/client/v2_alpha": resource,
+                            "/_matrix/client/api/v1": resource,
+                        }
+                    )

         root_resource = create_resource_tree(resources, NoResource())

@@ -128,7 +132,7 @@ class UserDirectoryServer(HomeServer):
                     listener_config,
                     root_resource,
                     self.version_string,
-                )
+                ),
             )

         logger.info("Synapse user_dir now listening on port %d", port)
@@ -142,18 +146,19 @@ class UserDirectoryServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -186,9 +191,7 @@ class UserDirectoryReplicationHandler(ReplicationClientHandler):

 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse user directory", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse user directory", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -227,6 +230,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-user-dir", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])
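The slave store above prefills a stream-change cache from the database at startup, so the worker can answer "has this room changed since token X?" without assuming everything changed. A hedged standalone sketch of the idea (Synapse's actual StreamChangeCache has a richer interface; this is illustrative only):

    class StreamChangeCache:
        def __init__(self, name, min_stream_id, prefilled_cache=None):
            self._name = name
            self._min_stream_id = min_stream_id  # nothing older is known
            self._entity_to_stream = dict(prefilled_cache or {})

        def has_entity_changed(self, entity, since_id):
            if since_id < self._min_stream_id:
                return True  # cache cannot answer; assume changed
            return self._entity_to_stream.get(entity, self._min_stream_id) > since_id

    cache = StreamChangeCache("rooms", 100, prefilled_cache={"!room:hs": 120})
    print(cache.has_entity_changed("!room:hs", 110))   # -> True
    print(cache.has_entity_changed("!other:hs", 110))  # -> False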
@@ -48,9 +48,7 @@ class AppServiceTransaction(object):
             A Deferred which resolves to True if the transaction was sent.
         """
         return as_api.push_bulk(
-            service=self.service,
-            events=self.events,
-            txn_id=self.id
+            service=self.service, events=self.events, txn_id=self.id
         )

     def complete(self, store):
@@ -64,10 +62,7 @@ class AppServiceTransaction(object):
         Returns:
             A Deferred which resolves to True if the transaction was completed.
         """
-        return store.complete_appservice_txn(
-            service=self.service,
-            txn_id=self.id
-        )
+        return store.complete_appservice_txn(service=self.service, txn_id=self.id)


 class ApplicationService(object):
@@ -76,6 +71,7 @@ class ApplicationService(object):

     Provides methods to check if this service is "interested" in events.
     """
+
     NS_USERS = "users"
     NS_ALIASES = "aliases"
     NS_ROOMS = "rooms"
@@ -84,9 +80,19 @@ class ApplicationService(object):
     # values.
     NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS]

-    def __init__(self, token, hostname, url=None, namespaces=None, hs_token=None,
-                 sender=None, id=None, protocols=None, rate_limited=True,
-                 ip_range_whitelist=None):
+    def __init__(
+        self,
+        token,
+        hostname,
+        url=None,
+        namespaces=None,
+        hs_token=None,
+        sender=None,
+        id=None,
+        protocols=None,
+        rate_limited=True,
+        ip_range_whitelist=None,
+    ):
         self.token = token
         self.url = url
         self.hs_token = hs_token
@@ -128,9 +134,7 @@ class ApplicationService(object):
             if not isinstance(regex_obj, dict):
                 raise ValueError("Expected dict regex for ns '%s'" % ns)
             if not isinstance(regex_obj.get("exclusive"), bool):
-                raise ValueError(
-                    "Expected bool for 'exclusive' in ns '%s'" % ns
-                )
+                raise ValueError("Expected bool for 'exclusive' in ns '%s'" % ns)
             group_id = regex_obj.get("group_id")
             if group_id:
                 if not isinstance(group_id, str):
@@ -153,9 +157,7 @@ class ApplicationService(object):
             if isinstance(regex, string_types):
                 regex_obj["regex"] = re.compile(regex)  # Pre-compile regex
             else:
-                raise ValueError(
-                    "Expected string for 'regex' in ns '%s'" % ns
-                )
+                raise ValueError("Expected string for 'regex' in ns '%s'" % ns)
         return namespaces

     def _matches_regex(self, test_string, namespace_key):
@@ -178,8 +180,9 @@ class ApplicationService(object):
         if self.is_interested_in_user(event.sender):
             defer.returnValue(True)
         # also check m.room.member state key
-        if (event.type == EventTypes.Member and
-                self.is_interested_in_user(event.state_key)):
+        if event.type == EventTypes.Member and self.is_interested_in_user(
+            event.state_key
+        ):
             defer.returnValue(True)

         if not store:
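The namespace-checking code above validates each registration entry and pre-compiles its regex exactly once at load time, so every later interest check is a cheap match. A hedged standalone sketch of that validate-and-compile step (simplified from the pattern above; not Synapse's full logic):

    import re

    def compile_namespaces(namespaces):
        # Validate and pre-compile every regex once, at load time.
        for ns, regex_objs in namespaces.items():
            for regex_obj in regex_objs:
                if not isinstance(regex_obj.get("exclusive"), bool):
                    raise ValueError("Expected bool for 'exclusive' in ns '%s'" % ns)
                regex = regex_obj.get("regex")
                if not isinstance(regex, str):
                    raise ValueError("Expected string for 'regex' in ns '%s'" % ns)
                regex_obj["regex"] = re.compile(regex)
        return namespaces

    ns = compile_namespaces(
        {"users": [{"regex": "@irc_.*:example\\.org", "exclusive": True}]}
    )
    print(bool(ns["users"][0]["regex"].match("@irc_alice:example.org")))  # -> True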
@@ -32,19 +32,17 @@ logger = logging.getLogger(__name__)
 sent_transactions_counter = Counter(
     "synapse_appservice_api_sent_transactions",
     "Number of /transactions/ requests sent",
-    ["service"]
+    ["service"],
 )

 failed_transactions_counter = Counter(
     "synapse_appservice_api_failed_transactions",
     "Number of /transactions/ requests that failed to send",
-    ["service"]
+    ["service"],
 )

 sent_events_counter = Counter(
-    "synapse_appservice_api_sent_events",
-    "Number of events sent to the AS",
-    ["service"]
+    "synapse_appservice_api_sent_events", "Number of events sent to the AS", ["service"]
 )

 HOUR_IN_MS = 60 * 60 * 1000
@@ -92,8 +90,9 @@ class ApplicationServiceApi(SimpleHttpClient):
         super(ApplicationServiceApi, self).__init__(hs)
         self.clock = hs.get_clock()

-        self.protocol_meta_cache = ResponseCache(hs, "as_protocol_meta",
-                                                 timeout_ms=HOUR_IN_MS)
+        self.protocol_meta_cache = ResponseCache(
+            hs, "as_protocol_meta", timeout_ms=HOUR_IN_MS
+        )

     @defer.inlineCallbacks
     def query_user(self, service, user_id):
@@ -102,9 +101,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
         response = None
         try:
-            response = yield self.get_json(uri, {
-                "access_token": service.hs_token
-            })
+            response = yield self.get_json(uri, {"access_token": service.hs_token})
             if response is not None:  # just an empty json object
                 defer.returnValue(True)
         except CodeMessageException as e:
@@ -123,9 +120,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
         response = None
         try:
-            response = yield self.get_json(uri, {
-                "access_token": service.hs_token
-            })
+            response = yield self.get_json(uri, {"access_token": service.hs_token})
             if response is not None:  # just an empty json object
                 defer.returnValue(True)
         except CodeMessageException as e:
@@ -144,9 +139,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         elif kind == ThirdPartyEntityKind.LOCATION:
             required_field = "alias"
         else:
-            raise ValueError(
-                "Unrecognised 'kind' argument %r to query_3pe()", kind
-            )
+            raise ValueError("Unrecognised 'kind' argument %r to query_3pe()", kind)
         if service.url is None:
             defer.returnValue([])

@@ -154,14 +147,13 @@ class ApplicationServiceApi(SimpleHttpClient):
             service.url,
             APP_SERVICE_PREFIX,
             kind,
-            urllib.parse.quote(protocol)
+            urllib.parse.quote(protocol),
         )
         try:
             response = yield self.get_json(uri, fields)
             if not isinstance(response, list):
                 logger.warning(
-                    "query_3pe to %s returned an invalid response %r",
-                    uri, response
+                    "query_3pe to %s returned an invalid response %r", uri, response
                 )
                 defer.returnValue([])

@@ -171,8 +163,7 @@ class ApplicationServiceApi(SimpleHttpClient):
                     ret.append(r)
                 else:
                     logger.warning(
-                        "query_3pe to %s returned an invalid result %r",
-                        uri, r
+                        "query_3pe to %s returned an invalid result %r", uri, r
                     )

             defer.returnValue(ret)
@@ -189,27 +180,27 @@ class ApplicationServiceApi(SimpleHttpClient):
         uri = "%s%s/thirdparty/protocol/%s" % (
             service.url,
             APP_SERVICE_PREFIX,
-            urllib.parse.quote(protocol)
+            urllib.parse.quote(protocol),
         )
         try:
             info = yield self.get_json(uri, {})

             if not _is_valid_3pe_metadata(info):
-                logger.warning("query_3pe_protocol to %s did not return a"
-                               " valid result", uri)
+                logger.warning(
+                    "query_3pe_protocol to %s did not return a" " valid result", uri
+                )
                 defer.returnValue(None)

             for instance in info.get("instances", []):
                 network_id = instance.get("network_id", None)
                 if network_id is not None:
                     instance["instance_id"] = ThirdPartyInstanceID(
-                        service.id, network_id,
+                        service.id, network_id
                     ).to_string()

             defer.returnValue(info)
         except Exception as ex:
-            logger.warning("query_3pe_protocol to %s threw exception %s",
-                           uri, ex)
+            logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex)
             defer.returnValue(None)

         key = (service.id, protocol)
@@ -223,22 +214,19 @@ class ApplicationServiceApi(SimpleHttpClient):
         events = self._serialize(events)

         if txn_id is None:
-            logger.warning("push_bulk: Missing txn ID sending events to %s",
-                           service.url)
+            logger.warning(
+                "push_bulk: Missing txn ID sending events to %s", service.url
+            )
             txn_id = str(0)
         txn_id = str(txn_id)

-        uri = service.url + ("/transactions/%s" %
-                             urllib.parse.quote(txn_id))
+        uri = service.url + ("/transactions/%s" % urllib.parse.quote(txn_id))
         try:
             yield self.put_json(
                 uri=uri,
-                json_body={
-                    "events": events
-                },
-                args={
-                    "access_token": service.hs_token
-                })
+                json_body={"events": events},
+                args={"access_token": service.hs_token},
+            )
             sent_transactions_counter.labels(service.id).inc()
             sent_events_counter.labels(service.id).inc(len(events))
             defer.returnValue(True)
@@ -252,6 +240,4 @@ class ApplicationServiceApi(SimpleHttpClient):

     def _serialize(self, events):
         time_now = self.clock.time_msec()
-        return [
-            serialize_event(e, time_now, as_client_event=True) for e in events
-        ]
+        return [serialize_event(e, time_now, as_client_event=True) for e in events]
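One artifact of mechanical reformatting is visible above: `"query_3pe_protocol to %s did not return a" " valid result"` keeps Python's implicit adjacent-string concatenation, even though the two pieces would now fit as a single literal. A short runnable demonstration of why the reformatted logger call still produces one message string:

    # Adjacent string literals are concatenated at compile time.
    msg = "query_3pe_protocol to %s did not return a" " valid result"
    print(msg % "http://as.example.com")
    # -> query_3pe_protocol to http://as.example.com did not return a valid result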
@@ -112,15 +112,14 @@ class _ServiceQueuer(object):
             return

         run_as_background_process(
-            "as-sender-%s" % (service.id, ),
-            self._send_request, service,
+            "as-sender-%s" % (service.id,), self._send_request, service
         )

     @defer.inlineCallbacks
     def _send_request(self, service):
         # sanity-check: we shouldn't get here if this service already has a sender
         # running.
-        assert(service.id not in self.requests_in_flight)
+        assert service.id not in self.requests_in_flight

         self.requests_in_flight.add(service.id)
         try:
@@ -137,7 +136,6 @@ class _ServiceQueuer(object):


 class _TransactionController(object):
-
     def __init__(self, clock, store, as_api, recoverer_fn):
         self.clock = clock
         self.store = store
@@ -149,10 +147,7 @@ class _TransactionController(object):
     @defer.inlineCallbacks
     def send(self, service, events):
         try:
-            txn = yield self.store.create_appservice_txn(
-                service=service,
-                events=events
-            )
+            txn = yield self.store.create_appservice_txn(service=service, events=events)
             service_is_up = yield self._is_service_up(service)
             if service_is_up:
                 sent = yield txn.send(self.as_api)
@@ -167,12 +162,12 @@ class _TransactionController(object):
     @defer.inlineCallbacks
     def on_recovered(self, recoverer):
         self.recoverers.remove(recoverer)
-        logger.info("Successfully recovered application service AS ID %s",
-                    recoverer.service.id)
+        logger.info(
+            "Successfully recovered application service AS ID %s", recoverer.service.id
+        )
         logger.info("Remaining active recoverers: %s", len(self.recoverers))
         yield self.store.set_appservice_state(
-            recoverer.service,
-            ApplicationServiceState.UP
+            recoverer.service, ApplicationServiceState.UP
         )

     def add_recoverers(self, recoverers):
@@ -184,13 +179,10 @@ class _TransactionController(object):
     @defer.inlineCallbacks
     def _start_recoverer(self, service):
         try:
-            yield self.store.set_appservice_state(
-                service,
-                ApplicationServiceState.DOWN
-            )
+            yield self.store.set_appservice_state(service, ApplicationServiceState.DOWN)
             logger.info(
                 "Application service falling behind. Starting recoverer. AS ID %s",
-                service.id
+                service.id,
             )
             recoverer = self.recoverer_fn(service, self.on_recovered)
             self.add_recoverers([recoverer])
@@ -205,19 +197,16 @@ class _TransactionController(object):


 class _Recoverer(object):
-
     @staticmethod
     @defer.inlineCallbacks
     def start(clock, store, as_api, callback):
-        services = yield store.get_appservices_by_state(
-            ApplicationServiceState.DOWN
-        )
-        recoverers = [
-            _Recoverer(clock, store, as_api, s, callback) for s in services
-        ]
+        services = yield store.get_appservices_by_state(ApplicationServiceState.DOWN)
+        recoverers = [_Recoverer(clock, store, as_api, s, callback) for s in services]
         for r in recoverers:
-            logger.info("Starting recoverer for AS ID %s which was marked as "
-                        "DOWN", r.service.id)
+            logger.info(
+                "Starting recoverer for AS ID %s which was marked as " "DOWN",
+                r.service.id,
+            )
             r.recover()
         defer.returnValue(recoverers)

@@ -232,9 +221,9 @@ class _Recoverer(object):
     def recover(self):
         def _retry():
             run_as_background_process(
-                "as-recoverer-%s" % (self.service.id,),
-                self.retry,
+                "as-recoverer-%s" % (self.service.id,), self.retry
             )

         self.clock.call_later((2 ** self.backoff_counter), _retry)

     def _backoff(self):
@@ -248,8 +237,9 @@ class _Recoverer(object):
         try:
             txn = yield self.store.get_oldest_unsent_txn(self.service)
             if txn:
-                logger.info("Retrying transaction %s for AS ID %s",
-                            txn.id, txn.service.id)
+                logger.info(
+                    "Retrying transaction %s for AS ID %s", txn.id, txn.service.id
+                )
                 sent = yield txn.send(self.as_api)
                 if sent:
                     yield txn.complete(self.store)
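The `_Recoverer` above retries the oldest unsent transaction with `2 ** backoff_counter` seconds between attempts. A hedged, self-contained sketch of that policy, using a synchronous `send()` and `time.sleep` instead of Twisted's clock and deferreds (illustrative only):

    import time

    def retry_with_backoff(send, max_attempts=5):
        backoff_counter = 1
        for _ in range(max_attempts):
            if send():
                return True
            delay = 2 ** backoff_counter  # 2s, then 4s, 8s, ... between attempts
            backoff_counter += 1
            time.sleep(delay / 100.0)  # scaled down so the demo finishes quickly
        return False

    attempts = iter([False, False, True])
    print(retry_with_backoff(lambda: next(attempts)))  # -> True after two failures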
@@ -284,8 +284,8 @@ class Config(object):
         if not config_files:
             config_parser.error(
                 "Must supply a config file.\nA config file can be automatically"
-                " generated using \"--generate-config -H SERVER_NAME"
-                " -c CONFIG-FILE\""
+                ' generated using "--generate-config -H SERVER_NAME'
+                ' -c CONFIG-FILE"'
             )
         (config_path,) = config_files
         if not cls.path_exists(config_path):
@@ -313,9 +313,7 @@ class Config(object):
             if not cls.path_exists(config_dir_path):
                 os.makedirs(config_dir_path)
             with open(config_path, "w") as config_file:
-                config_file.write(
-                    "# vim:ft=yaml\n\n"
-                )
+                config_file.write("# vim:ft=yaml\n\n")
                 config_file.write(config_str)

             config = yaml.safe_load(config_str)
@@ -352,8 +350,8 @@ class Config(object):
         if not config_files:
             config_parser.error(
                 "Must supply a config file.\nA config file can be automatically"
-                " generated using \"--generate-config -H SERVER_NAME"
-                " -c CONFIG-FILE\""
+                ' generated using "--generate-config -H SERVER_NAME'
+                ' -c CONFIG-FILE"'
             )

         obj.read_config_files(
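The two hunks above show the formatter's quote-normalization rule in its exceptional case: string literals are normalized to double quotes, except where the text itself contains `"` characters, in which case single quotes avoid backslash escapes. Both spellings denote the same string:

    # The single-quoted form avoids escapes; the strings are identical.
    a = " generated using \"--generate-config -H SERVER_NAME"
    b = ' generated using "--generate-config -H SERVER_NAME'
    print(a == b)  # -> True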
@@ -18,15 +18,17 @@ from ._base import Config


 class ApiConfig(Config):
-
     def read_config(self, config):
-        self.room_invite_state_types = config.get("room_invite_state_types", [
-            EventTypes.JoinRules,
-            EventTypes.CanonicalAlias,
-            EventTypes.RoomAvatar,
-            EventTypes.RoomEncryption,
-            EventTypes.Name,
-        ])
+        self.room_invite_state_types = config.get(
+            "room_invite_state_types",
+            [
+                EventTypes.JoinRules,
+                EventTypes.CanonicalAlias,
+                EventTypes.RoomAvatar,
+                EventTypes.RoomEncryption,
+                EventTypes.Name,
+            ],
+        )

     def default_config(cls, **kwargs):
         return """\
@@ -40,4 +42,6 @@ class ApiConfig(Config):
        # - "{RoomAvatar}"
        # - "{RoomEncryption}"
        # - "{Name}"
-        """.format(**vars(EventTypes))
+        """.format(
+            **vars(EventTypes)
+        )
@@ -29,7 +29,6 @@ logger = logging.getLogger(__name__)


 class AppServiceConfig(Config):
-
     def read_config(self, config):
         self.app_service_config_files = config.get("app_service_config_files", [])
         self.notify_appservices = config.get("notify_appservices", True)
@@ -53,9 +52,7 @@ class AppServiceConfig(Config):
 def load_appservices(hostname, config_files):
     """Returns a list of Application Services from the config files."""
     if not isinstance(config_files, list):
-        logger.warning(
-            "Expected %s to be a list of AS config files.", config_files
-        )
+        logger.warning("Expected %s to be a list of AS config files.", config_files)
         return []

     # Dicts of value -> filename
@@ -66,22 +63,20 @@ def load_appservices(hostname, config_files):

     for config_file in config_files:
         try:
-            with open(config_file, 'r') as f:
-                appservice = _load_appservice(
-                    hostname, yaml.safe_load(f), config_file
-                )
+            with open(config_file, "r") as f:
+                appservice = _load_appservice(hostname, yaml.safe_load(f), config_file)
                 if appservice.id in seen_ids:
                     raise ConfigError(
                         "Cannot reuse ID across application services: "
-                        "%s (files: %s, %s)" % (
-                            appservice.id, config_file, seen_ids[appservice.id],
-                        )
+                        "%s (files: %s, %s)"
+                        % (appservice.id, config_file, seen_ids[appservice.id])
                     )
                 seen_ids[appservice.id] = config_file
                 if appservice.token in seen_as_tokens:
                     raise ConfigError(
                         "Cannot reuse as_token across application services: "
-                        "%s (files: %s, %s)" % (
+                        "%s (files: %s, %s)"
+                        % (
                             appservice.token,
                             config_file,
                             seen_as_tokens[appservice.token],
@@ -98,28 +93,26 @@ def load_appservices(hostname, config_files):


 def _load_appservice(hostname, as_info, config_filename):
-    required_string_fields = [
-        "id", "as_token", "hs_token", "sender_localpart"
-    ]
+    required_string_fields = ["id", "as_token", "hs_token", "sender_localpart"]
     for field in required_string_fields:
         if not isinstance(as_info.get(field), string_types):
-            raise KeyError("Required string field: '%s' (%s)" % (
-                field, config_filename,
-            ))
+            raise KeyError(
+                "Required string field: '%s' (%s)" % (field, config_filename)
+            )

     # 'url' must either be a string or explicitly null, not missing
     # to avoid accidentally turning off push for ASes.
-    if (not isinstance(as_info.get("url"), string_types) and
-            as_info.get("url", "") is not None):
+    if (
+        not isinstance(as_info.get("url"), string_types)
+        and as_info.get("url", "") is not None
+    ):
         raise KeyError(
             "Required string field or explicit null: 'url' (%s)" % (config_filename,)
         )

     localpart = as_info["sender_localpart"]
     if urlparse.quote(localpart) != localpart:
-        raise ValueError(
-            "sender_localpart needs characters which are not URL encoded."
-        )
+        raise ValueError("sender_localpart needs characters which are not URL encoded.")
     user = UserID(localpart, hostname)
     user_id = user.to_string()

@@ -138,13 +131,12 @@ def _load_appservice(hostname, as_info, config_filename):
         for regex_obj in as_info["namespaces"][ns]:
             if not isinstance(regex_obj, dict):
                 raise ValueError(
-                    "Expected namespace entry in %s to be an object,"
-                    " but got %s", ns, regex_obj
+                    "Expected namespace entry in %s to be an object," " but got %s",
+                    ns,
+                    regex_obj,
                 )
             if not isinstance(regex_obj.get("regex"), string_types):
-                raise ValueError(
-                    "Missing/bad type 'regex' key in %s", regex_obj
-                )
+                raise ValueError("Missing/bad type 'regex' key in %s", regex_obj)
             if not isinstance(regex_obj.get("exclusive"), bool):
                 raise ValueError(
                     "Missing/bad type 'exclusive' key in %s", regex_obj
@@ -167,10 +159,8 @@ def _load_appservice(hostname, as_info, config_filename):
     )

     ip_range_whitelist = None
-    if as_info.get('ip_range_whitelist'):
-        ip_range_whitelist = IPSet(
-            as_info.get('ip_range_whitelist')
-        )
+    if as_info.get("ip_range_whitelist"):
+        ip_range_whitelist = IPSet(as_info.get("ip_range_whitelist"))

     return ApplicationService(
         token=as_info["as_token"],
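The loader above insists that `url` be either a string or an explicit null, so that a merely-missing key cannot silently disable push for an application service. A standalone sketch of just that check (simplified; Synapse validates several other fields too, and the filename is a made-up example):

    def check_url(as_info, config_filename="registration.yaml"):
        if not isinstance(as_info.get("url"), str) and as_info.get("url", "") is not None:
            raise KeyError(
                "Required string field or explicit null: 'url' (%s)" % (config_filename,)
            )

    check_url({"url": "http://localhost:9000"})  # ok: a string
    check_url({"url": None})                     # ok: explicitly null
    try:
        check_url({})                            # missing entirely -> rejected
    except KeyError as e:
        print(e)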
@@ -16,7 +16,6 @@ from ._base import Config


 class CaptchaConfig(Config):
-
     def read_config(self, config):
         self.recaptcha_private_key = config.get("recaptcha_private_key")
         self.recaptcha_public_key = config.get("recaptcha_public_key")
@@ -89,29 +89,26 @@ class ConsentConfig(Config):
         if consent_config is None:
             return
         self.user_consent_version = str(consent_config["version"])
-        self.user_consent_template_dir = self.abspath(
-            consent_config["template_dir"]
-        )
+        self.user_consent_template_dir = self.abspath(consent_config["template_dir"])
         if not path.isdir(self.user_consent_template_dir):
             raise ConfigError(
-                "Could not find template directory '%s'" % (
-                    self.user_consent_template_dir,
-                ),
+                "Could not find template directory '%s'"
+                % (self.user_consent_template_dir,)
             )
         self.user_consent_server_notice_content = consent_config.get(
-            "server_notice_content",
+            "server_notice_content"
         )
         self.block_events_without_consent_error = consent_config.get(
-            "block_events_error",
-        )
-        self.user_consent_server_notice_to_guests = bool(consent_config.get(
-            "send_server_notice_to_guests", False,
-        ))
-        self.user_consent_at_registration = bool(consent_config.get(
-            "require_at_registration", False,
-        ))
+            "block_events_error"
+        )
+        self.user_consent_server_notice_to_guests = bool(
+            consent_config.get("send_server_notice_to_guests", False)
+        )
+        self.user_consent_at_registration = bool(
+            consent_config.get("require_at_registration", False)
+        )
         self.user_consent_policy_name = consent_config.get(
-            "policy_name", "Privacy Policy",
+            "policy_name", "Privacy Policy"
         )

     def default_config(self, **kwargs):
@@ -18,29 +18,21 @@ from ._base import Config


 class DatabaseConfig(Config):
-
     def read_config(self, config):
-        self.event_cache_size = self.parse_size(
-            config.get("event_cache_size", "10K")
-        )
+        self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K"))

         self.database_config = config.get("database")

         if self.database_config is None:
-            self.database_config = {
-                "name": "sqlite3",
-                "args": {},
-            }
+            self.database_config = {"name": "sqlite3", "args": {}}

         name = self.database_config.get("name", None)
         if name == "psycopg2":
             pass
         elif name == "sqlite3":
-            self.database_config.setdefault("args", {}).update({
-                "cp_min": 1,
-                "cp_max": 1,
-                "check_same_thread": False,
-            })
+            self.database_config.setdefault("args", {}).update(
+                {"cp_min": 1, "cp_max": 1, "check_same_thread": False}
+            )
         else:
             raise RuntimeError("Unsupported database type '%s'" % (name,))

@@ -48,7 +40,8 @@ class DatabaseConfig(Config):

     def default_config(self, data_dir_path, **kwargs):
         database_path = os.path.join(data_dir_path, "homeserver.db")
-        return """\
+        return (
+            """\
 ## Database ##

 database:
@@ -62,7 +55,9 @@ class DatabaseConfig(Config):
 # Number of events to cache in memory.
 #
 #event_cache_size: 10K
-""" % locals()
+"""
+            % locals()
+        )

     def read_arguments(self, args):
         self.set_databasepath(args.database_path)
@@ -77,6 +72,8 @@ class DatabaseConfig(Config):
     def add_arguments(self, parser):
         db_group = parser.add_argument_group("database")
         db_group.add_argument(
-            "-d", "--database-path", metavar="SQLITE_DATABASE_PATH",
-            help="The path to a sqlite database to use."
+            "-d",
+            "--database-path",
+            metavar="SQLITE_DATABASE_PATH",
+            help="The path to a sqlite database to use.",
         )
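The sqlite3 defaults above pin the connection pool to a single connection (`cp_min`/`cp_max` of 1, which belong to Twisted's adbapi connection pool) while `check_same_thread=False` lets that connection be used from a pool thread. A hedged sketch of the driver-level half of that, using the stdlib `sqlite3` module directly rather than Twisted:

    import sqlite3

    # "check_same_thread" is a real sqlite3.connect() argument; cp_min/cp_max
    # are consumed by Twisted's pool, not the driver, so they are omitted here.
    args = {"check_same_thread": False}
    conn = sqlite3.connect(":memory:", **args)
    conn.execute("CREATE TABLE t (x INTEGER)")
    conn.execute("INSERT INTO t VALUES (1)")
    print(conn.execute("SELECT x FROM t").fetchone())  # -> (1,)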
@@ -56,7 +56,7 @@ class EmailConfig(Config):
         if self.email_notif_from is not None:
             # make sure it's valid
             parsed = email.utils.parseaddr(self.email_notif_from)
-            if parsed[1] == '':
+            if parsed[1] == "":
                 raise RuntimeError("Invalid notif_from address")

         template_dir = email_config.get("template_dir")
@@ -65,19 +65,17 @@ class EmailConfig(Config):
         # (Note that loading as package_resources with jinja.PackageLoader doesn't
         # work for the same reason.)
         if not template_dir:
-            template_dir = pkg_resources.resource_filename(
-                'synapse', 'res/templates'
-            )
+            template_dir = pkg_resources.resource_filename("synapse", "res/templates")

         self.email_template_dir = os.path.abspath(template_dir)

         self.email_enable_notifs = email_config.get("enable_notifs", False)
-        account_validity_renewal_enabled = config.get(
-            "account_validity", {},
-        ).get("renew_at")
+        account_validity_renewal_enabled = config.get("account_validity", {}).get(
+            "renew_at"
+        )

         email_trust_identity_server_for_password_resets = email_config.get(
-            "trust_identity_server_for_password_resets", False,
+            "trust_identity_server_for_password_resets", False
         )
         self.email_password_reset_behaviour = (
             "remote" if email_trust_identity_server_for_password_resets else "local"
@@ -103,62 +101,59 @@ class EmailConfig(Config):
             # make sure we can import the required deps
             import jinja2
             import bleach

             # prevent unused warnings
             jinja2
             bleach

         if self.email_password_reset_behaviour == "local":
-            required = [
-                "smtp_host",
-                "smtp_port",
-                "notif_from",
-            ]
+            required = ["smtp_host", "smtp_port", "notif_from"]

             missing = []
             for k in required:
                 if k not in email_config:
                     missing.append(k)

-            if (len(missing) > 0):
+            if len(missing) > 0:
                 raise RuntimeError(
                     "email.password_reset_behaviour is set to 'local' "
-                    "but required keys are missing: %s" %
-                    (", ".join(["email." + k for k in missing]),)
+                    "but required keys are missing: %s"
+                    % (", ".join(["email." + k for k in missing]),)
                 )

             # Templates for password reset emails
             self.email_password_reset_template_html = email_config.get(
-                "password_reset_template_html", "password_reset.html",
+                "password_reset_template_html", "password_reset.html"
             )
             self.email_password_reset_template_text = email_config.get(
-                "password_reset_template_text", "password_reset.txt",
+                "password_reset_template_text", "password_reset.txt"
             )
             self.email_password_reset_failure_template = email_config.get(
-                "password_reset_failure_template", "password_reset_failure.html",
+                "password_reset_failure_template", "password_reset_failure.html"
             )
             # This template does not support any replaceable variables, so we will
             # read it from the disk once during setup
             email_password_reset_success_template = email_config.get(
-                "password_reset_success_template", "password_reset_success.html",
+                "password_reset_success_template", "password_reset_success.html"
             )

             # Check templates exist
-            for f in [self.email_password_reset_template_html,
-                      self.email_password_reset_template_text,
-                      self.email_password_reset_failure_template,
-                      email_password_reset_success_template]:
+            for f in [
+                self.email_password_reset_template_html,
+                self.email_password_reset_template_text,
+                self.email_password_reset_failure_template,
+                email_password_reset_success_template,
+            ]:
                 p = os.path.join(self.email_template_dir, f)
                 if not os.path.isfile(p):
-                    raise ConfigError("Unable to find template file %s" % (p, ))
+                    raise ConfigError("Unable to find template file %s" % (p,))

             # Retrieve content of web templates
             filepath = os.path.join(
-                self.email_template_dir,
-                email_password_reset_success_template,
+                self.email_template_dir, email_password_reset_success_template
             )
             self.email_password_reset_success_html_content = self.read_file(
-                filepath,
-                "email.password_reset_template_success_html",
+                filepath, "email.password_reset_template_success_html"
             )

             if config.get("public_baseurl") is None:
@@ -182,10 +177,10 @@ class EmailConfig(Config):
                 if k not in email_config:
                     missing.append(k)

-            if (len(missing) > 0):
+            if len(missing) > 0:
                 raise RuntimeError(
-                    "email.enable_notifs is True but required keys are missing: %s" %
-                    (", ".join(["email." + k for k in missing]),)
+                    "email.enable_notifs is True but required keys are missing: %s"
+                    % (", ".join(["email." + k for k in missing]),)
                 )

             if config.get("public_baseurl") is None:
@@ -199,27 +194,25 @@ class EmailConfig(Config):
             for f in self.email_notif_template_text, self.email_notif_template_html:
                 p = os.path.join(self.email_template_dir, f)
                 if not os.path.isfile(p):
-                    raise ConfigError("Unable to find email template file %s" % (p, ))
+                    raise ConfigError("Unable to find email template file %s" % (p,))

             self.email_notif_for_new_users = email_config.get(
                 "notif_for_new_users", True
             )
-            self.email_riot_base_url = email_config.get(
-                "riot_base_url", None
-            )
+            self.email_riot_base_url = email_config.get("riot_base_url", None)

         if account_validity_renewal_enabled:
             self.email_expiry_template_html = email_config.get(
-                "expiry_template_html", "notice_expiry.html",
+                "expiry_template_html", "notice_expiry.html"
             )
             self.email_expiry_template_text = email_config.get(
-                "expiry_template_text", "notice_expiry.txt",
+                "expiry_template_text", "notice_expiry.txt"
             )

             for f in self.email_expiry_template_text, self.email_expiry_template_html:
                 p = os.path.join(self.email_template_dir, f)
                 if not os.path.isfile(p):
-                    raise ConfigError("Unable to find email template file %s" % (p, ))
+                    raise ConfigError("Unable to find email template file %s" % (p,))

     def default_config(self, config_dir_path, server_name, **kwargs):
         return """
@@ -15,13 +15,11 @@

 from ._base import Config, ConfigError

-MISSING_JWT = (
-    """Missing jwt library. This is required for jwt login.
+MISSING_JWT = """Missing jwt library. This is required for jwt login.

     Install by running:
         pip install pyjwt
     """
-)


 class JWTConfig(Config):
@@ -34,6 +32,7 @@ class JWTConfig(Config):

         try:
             import jwt
+
             jwt  # To stop unused lint.
         except ImportError:
             raise ConfigError(MISSING_JWT)
@@ -348,9 +348,8 @@ def _parse_key_servers(key_servers, federation_verify_certificates):

             result.verify_keys[key_id] = verify_key

-        if (
-            not federation_verify_certificates and
-            not server.get("accept_keys_insecurely")
-        ):
+        if not federation_verify_certificates and not server.get(
+            "accept_keys_insecurely"
+        ):
             _assert_keyserver_has_verify_keys(result)

@@ -29,7 +29,8 @@ from synapse.util.versionstring import get_version_string

 from ._base import Config

-DEFAULT_LOG_CONFIG = Template("""
+DEFAULT_LOG_CONFIG = Template(
+    """
 version: 1

 formatters:
@@ -68,11 +69,11 @@ loggers:
 root:
     level: INFO
     handlers: [file, console]
-""")
+"""
+)


 class LoggingConfig(Config):
-
     def read_config(self, config):
         self.verbosity = config.get("verbose", 0)
         self.no_redirect_stdio = config.get("no_redirect_stdio", False)
@@ -81,13 +82,16 @@ class LoggingConfig(Config):

     def default_config(self, config_dir_path, server_name, **kwargs):
         log_config = os.path.join(config_dir_path, server_name + ".log.config")
-        return """\
+        return (
+            """\
 ## Logging ##

 # A yaml python logging config file
 #
 log_config: "%(log_config)s"
-""" % locals()
+"""
+            % locals()
+        )

     def read_arguments(self, args):
         if args.verbose is not None:
|
|||||||
def add_arguments(cls, parser):
|
def add_arguments(cls, parser):
|
||||||
logging_group = parser.add_argument_group("logging")
|
logging_group = parser.add_argument_group("logging")
|
||||||
logging_group.add_argument(
|
logging_group.add_argument(
|
||||||
'-v', '--verbose', dest="verbose", action='count',
|
"-v",
|
||||||
|
"--verbose",
|
||||||
|
dest="verbose",
|
||||||
|
action="count",
|
||||||
help="The verbosity level. Specify multiple times to increase "
|
help="The verbosity level. Specify multiple times to increase "
|
||||||
"verbosity. (Ignored if --log-config is specified.)"
|
"verbosity. (Ignored if --log-config is specified.)",
|
||||||
)
|
)
|
||||||
logging_group.add_argument(
|
logging_group.add_argument(
|
||||||
'-f', '--log-file', dest="log_file",
|
"-f",
|
||||||
help="File to log to. (Ignored if --log-config is specified.)"
|
"--log-file",
|
||||||
|
dest="log_file",
|
||||||
|
help="File to log to. (Ignored if --log-config is specified.)",
|
||||||
)
|
)
|
||||||
logging_group.add_argument(
|
logging_group.add_argument(
|
||||||
'--log-config', dest="log_config", default=None,
|
"--log-config",
|
||||||
help="Python logging config file"
|
dest="log_config",
|
||||||
|
default=None,
|
||||||
|
help="Python logging config file",
|
||||||
)
|
)
|
||||||
logging_group.add_argument(
|
logging_group.add_argument(
|
||||||
'-n', '--no-redirect-stdio',
|
"-n",
|
||||||
action='store_true', default=None,
|
"--no-redirect-stdio",
|
||||||
help="Do not redirect stdout/stderr to the log"
|
action="store_true",
|
||||||
|
default=None,
|
||||||
|
help="Do not redirect stdout/stderr to the log",
|
||||||
)
|
)
|
||||||
|
|
||||||
def generate_files(self, config):
|
def generate_files(self, config):
|
||||||
@ -125,9 +138,7 @@ class LoggingConfig(Config):
|
|||||||
if log_config and not os.path.exists(log_config):
|
if log_config and not os.path.exists(log_config):
|
||||||
log_file = self.abspath("homeserver.log")
|
log_file = self.abspath("homeserver.log")
|
||||||
with open(log_config, "w") as log_config_file:
|
with open(log_config, "w") as log_config_file:
|
||||||
log_config_file.write(
|
log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))
|
||||||
DEFAULT_LOG_CONFIG.substitute(log_file=log_file)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def setup_logging(config, use_worker_options=False):
|
def setup_logging(config, use_worker_options=False):
|
||||||
@ -143,10 +154,8 @@ def setup_logging(config, use_worker_options=False):
|
|||||||
register_sighup (func | None): Function to call to register a
|
register_sighup (func | None): Function to call to register a
|
||||||
sighup handler.
|
sighup handler.
|
||||||
"""
|
"""
|
||||||
log_config = (config.worker_log_config if use_worker_options
|
log_config = config.worker_log_config if use_worker_options else config.log_config
|
||||||
else config.log_config)
|
log_file = config.worker_log_file if use_worker_options else config.log_file
|
||||||
log_file = (config.worker_log_file if use_worker_options
|
|
||||||
else config.log_file)
|
|
||||||
|
|
||||||
log_format = (
|
log_format = (
|
||||||
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
|
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
|
||||||
@ -164,23 +173,23 @@ def setup_logging(config, use_worker_options=False):
|
|||||||
if config.verbosity > 1:
|
if config.verbosity > 1:
|
||||||
level_for_storage = logging.DEBUG
|
level_for_storage = logging.DEBUG
|
||||||
|
|
||||||
logger = logging.getLogger('')
|
logger = logging.getLogger("")
|
||||||
logger.setLevel(level)
|
logger.setLevel(level)
|
||||||
|
|
||||||
logging.getLogger('synapse.storage.SQL').setLevel(level_for_storage)
|
logging.getLogger("synapse.storage.SQL").setLevel(level_for_storage)
|
||||||
|
|
||||||
formatter = logging.Formatter(log_format)
|
formatter = logging.Formatter(log_format)
|
||||||
if log_file:
|
if log_file:
|
||||||
# TODO: Customisable file size / backup count
|
# TODO: Customisable file size / backup count
|
||||||
handler = logging.handlers.RotatingFileHandler(
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
log_file, maxBytes=(1000 * 1000 * 100), backupCount=3,
|
log_file, maxBytes=(1000 * 1000 * 100), backupCount=3, encoding="utf8"
|
||||||
encoding='utf8'
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def sighup(signum, stack):
|
def sighup(signum, stack):
|
||||||
logger.info("Closing log file due to SIGHUP")
|
logger.info("Closing log file due to SIGHUP")
|
||||||
handler.doRollover()
|
handler.doRollover()
|
||||||
logger.info("Opened new log file due to SIGHUP")
|
logger.info("Opened new log file due to SIGHUP")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
handler = logging.StreamHandler()
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
@ -193,8 +202,9 @@ def setup_logging(config, use_worker_options=False):
|
|||||||
|
|
||||||
logger.addHandler(handler)
|
logger.addHandler(handler)
|
||||||
else:
|
else:
|
||||||
|
|
||||||
def load_log_config():
|
def load_log_config():
|
||||||
with open(log_config, 'r') as f:
|
with open(log_config, "r") as f:
|
||||||
logging.config.dictConfig(yaml.safe_load(f))
|
logging.config.dictConfig(yaml.safe_load(f))
|
||||||
|
|
||||||
def sighup(*args):
|
def sighup(*args):
|
||||||
@ -209,10 +219,7 @@ def setup_logging(config, use_worker_options=False):
|
|||||||
# make sure that the first thing we log is a thing we can grep backwards
|
# make sure that the first thing we log is a thing we can grep backwards
|
||||||
# for
|
# for
|
||||||
logging.warn("***** STARTING SERVER *****")
|
logging.warn("***** STARTING SERVER *****")
|
||||||
logging.warn(
|
logging.warn("Server %s version %s", sys.argv[0], get_version_string(synapse))
|
||||||
"Server %s version %s",
|
|
||||||
sys.argv[0], get_version_string(synapse),
|
|
||||||
)
|
|
||||||
logging.info("Server hostname: %s", config.server_name)
|
logging.info("Server hostname: %s", config.server_name)
|
||||||
|
|
||||||
# It's critical to point twisted's internal logging somewhere, otherwise it
|
# It's critical to point twisted's internal logging somewhere, otherwise it
|
||||||
@ -242,8 +249,7 @@ def setup_logging(config, use_worker_options=False):
|
|||||||
return observer(event)
|
return observer(event)
|
||||||
|
|
||||||
globalLogBeginner.beginLoggingTo(
|
globalLogBeginner.beginLoggingTo(
|
||||||
[_log],
|
[_log], redirectStandardIO=not config.no_redirect_stdio
|
||||||
redirectStandardIO=not config.no_redirect_stdio,
|
|
||||||
)
|
)
|
||||||
if not config.no_redirect_stdio:
|
if not config.no_redirect_stdio:
|
||||||
print("Redirected stdout/stderr to logs")
|
print("Redirected stdout/stderr to logs")
|
||||||
|
@@ -15,11 +15,9 @@

 from ._base import Config, ConfigError

-MISSING_SENTRY = (
-    """Missing sentry-sdk library. This is required to enable sentry
+MISSING_SENTRY = """Missing sentry-sdk library. This is required to enable sentry
     integration.
     """
-)


 class MetricsConfig(Config):
@@ -39,7 +37,7 @@ class MetricsConfig(Config):
             self.sentry_dsn = config["sentry"].get("dsn")
             if not self.sentry_dsn:
                 raise ConfigError(
-                    "sentry.dsn field is required when sentry integration is enabled",
+                    "sentry.dsn field is required when sentry integration is enabled"
                 )

     def default_config(self, report_stats=None, **kwargs):
@@ -66,6 +64,6 @@ class MetricsConfig(Config):
         if report_stats is None:
             res += "# report_stats: true|false\n"
         else:
-            res += "report_stats: %s\n" % ('true' if report_stats else 'false')
+            res += "report_stats: %s\n" % ("true" if report_stats else "false")

         return res
@@ -17,7 +17,7 @@ from synapse.util.module_loader import load_module

 from ._base import Config

-LDAP_PROVIDER = 'ldap_auth_provider.LdapAuthProvider'
+LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider"


 class PasswordAuthProviderConfig(Config):
@@ -29,24 +29,20 @@ class PasswordAuthProviderConfig(Config):
         # param.
         ldap_config = config.get("ldap_config", {})
         if ldap_config.get("enabled", False):
-            providers.append({
-                'module': LDAP_PROVIDER,
-                'config': ldap_config,
-            })
+            providers.append({"module": LDAP_PROVIDER, "config": ldap_config})

         providers.extend(config.get("password_providers", []))
         for provider in providers:
-            mod_name = provider['module']
+            mod_name = provider["module"]

             # This is for backwards compat when the ldap auth provider resided
             # in this package.
             if mod_name == "synapse.util.ldap_auth_provider.LdapAuthProvider":
                 mod_name = LDAP_PROVIDER

-            (provider_class, provider_config) = load_module({
-                "module": mod_name,
-                "config": provider['config'],
-            })
+            (provider_class, provider_config) = load_module(
+                {"module": mod_name, "config": provider["config"]}
+            )

             self.password_providers.append((provider_class, provider_config))

@@ -23,7 +23,7 @@ from synapse.util.stringutils import random_string_with_symbols
 class AccountValidityConfig(Config):
     def __init__(self, config, synapse_config):
         self.enabled = config.get("enabled", False)
-        self.renew_by_email_enabled = ("renew_at" in config)
+        self.renew_by_email_enabled = "renew_at" in config

         if self.enabled:
             if "period" in config:
@@ -39,14 +39,13 @@ class AccountValidityConfig(Config):
         else:
             self.renew_email_subject = "Renew your %(app)s account"

-        self.startup_job_max_delta = self.period * 10. / 100.
+        self.startup_job_max_delta = self.period * 10.0 / 100.0

         if self.renew_by_email_enabled and "public_baseurl" not in synapse_config:
             raise ConfigError("Can't send renewal emails without 'public_baseurl'")


 class RegistrationConfig(Config):
-
     def read_config(self, config):
         self.enable_registration = bool(
             strtobool(str(config.get("enable_registration", False)))
@@ -57,7 +56,7 @@ class RegistrationConfig(Config):
         )

         self.account_validity = AccountValidityConfig(
-            config.get("account_validity", {}), config,
+            config.get("account_validity", {}), config
         )

         self.registrations_require_3pid = config.get("registrations_require_3pid", [])
@@ -67,24 +66,23 @@ class RegistrationConfig(Config):

         self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
         self.trusted_third_party_id_servers = config.get(
-            "trusted_third_party_id_servers",
-            ["matrix.org", "vector.im"],
+            "trusted_third_party_id_servers", ["matrix.org", "vector.im"]
         )
         self.default_identity_server = config.get("default_identity_server")
         self.allow_guest_access = config.get("allow_guest_access", False)

-        self.invite_3pid_guest = (
-            self.allow_guest_access and config.get("invite_3pid_guest", False)
+        self.invite_3pid_guest = self.allow_guest_access and config.get(
+            "invite_3pid_guest", False
         )

         self.auto_join_rooms = config.get("auto_join_rooms", [])
         for room_alias in self.auto_join_rooms:
             if not RoomAlias.is_valid(room_alias):
-                raise ConfigError('Invalid auto_join_rooms entry %s' % (room_alias,))
+                raise ConfigError("Invalid auto_join_rooms entry %s" % (room_alias,))
         self.autocreate_auto_join_rooms = config.get("autocreate_auto_join_rooms", True)

-        self.disable_msisdn_registration = (
-            config.get("disable_msisdn_registration", False)
-        )
+        self.disable_msisdn_registration = config.get(
+            "disable_msisdn_registration", False
+        )

     def default_config(self, generate_secrets=False, **kwargs):
@@ -93,9 +91,12 @@ class RegistrationConfig(Config):
                 random_string_with_symbols(50),
             )
         else:
-            registration_shared_secret = '# registration_shared_secret: <PRIVATE STRING>'
+            registration_shared_secret = (
+                "# registration_shared_secret: <PRIVATE STRING>"
+            )

-        return """\
+        return (
+            """\
 ## Registration ##
 #
 # Registration can be rate-limited using the parameters in the "Ratelimiting"
@@ -217,17 +218,19 @@ class RegistrationConfig(Config):
 # users cannot be auto-joined since they do not exist.
 #
 #autocreate_auto_join_rooms: true
-""" % locals()
+"""
+            % locals()
+        )

     def add_arguments(self, parser):
         reg_group = parser.add_argument_group("registration")
         reg_group.add_argument(
-            "--enable-registration", action="store_true", default=None,
-            help="Enable registration for new users."
+            "--enable-registration",
+            action="store_true",
+            default=None,
+            help="Enable registration for new users.",
         )

     def read_arguments(self, args):
         if args.enable_registration is not None:
-            self.enable_registration = bool(
-                strtobool(str(args.enable_registration))
-            )
+            self.enable_registration = bool(strtobool(str(args.enable_registration)))
@@ -20,27 +20,11 @@ from synapse.util.module_loader import load_module
 from ._base import Config, ConfigError

 DEFAULT_THUMBNAIL_SIZES = [
-    {
-        "width": 32,
-        "height": 32,
-        "method": "crop",
-    }, {
-        "width": 96,
-        "height": 96,
-        "method": "crop",
-    }, {
-        "width": 320,
-        "height": 240,
-        "method": "scale",
-    }, {
-        "width": 640,
-        "height": 480,
-        "method": "scale",
-    }, {
-        "width": 800,
-        "height": 600,
-        "method": "scale"
-    },
+    {"width": 32, "height": 32, "method": "crop"},
+    {"width": 96, "height": 96, "method": "crop"},
+    {"width": 320, "height": 240, "method": "scale"},
+    {"width": 640, "height": 480, "method": "scale"},
+    {"width": 800, "height": 600, "method": "scale"},
 ]

 THUMBNAIL_SIZE_YAML = """\
@@ -49,19 +33,15 @@ THUMBNAIL_SIZE_YAML = """\
 #        method: %(method)s
 """

-MISSING_NETADDR = (
-    "Missing netaddr library. This is required for URL preview API."
-)
+MISSING_NETADDR = "Missing netaddr library. This is required for URL preview API."

-MISSING_LXML = (
-    """Missing lxml library. This is required for URL preview API.
+MISSING_LXML = """Missing lxml library. This is required for URL preview API.

     Install by running:
         pip install lxml

     Requires libxslt1-dev system package.
     """
-)


 ThumbnailRequirement = namedtuple(
@@ -69,7 +49,8 @@ ThumbnailRequirement = namedtuple(
 )

 MediaStorageProviderConfig = namedtuple(
-    "MediaStorageProviderConfig", (
+    "MediaStorageProviderConfig",
+    (
         "store_local",  # Whether to store newly uploaded local files
         "store_remote",  # Whether to store newly downloaded remote files
         "store_synchronous",  # Whether to wait for successful storage for local uploads
@@ -100,8 +81,7 @@ def parse_thumbnail_requirements(thumbnail_sizes):
         requirements.setdefault("image/gif", []).append(png_thumbnail)
         requirements.setdefault("image/png", []).append(png_thumbnail)
     return {
-        media_type: tuple(thumbnails)
-        for media_type, thumbnails in requirements.items()
+        media_type: tuple(thumbnails) for media_type, thumbnails in requirements.items()
     }

@@ -127,15 +107,15 @@ class ContentRepositoryConfig(Config):
                 "Cannot use both 'backup_media_store_path' and 'storage_providers'"
             )

-            storage_providers = [{
-                "module": "file_system",
-                "store_local": True,
-                "store_synchronous": synchronous_backup_media_store,
-                "store_remote": True,
-                "config": {
-                    "directory": backup_media_store_path,
-                }
-            }]
+            storage_providers = [
+                {
+                    "module": "file_system",
+                    "store_local": True,
+                    "store_synchronous": synchronous_backup_media_store,
+                    "store_remote": True,
+                    "config": {"directory": backup_media_store_path},
+                }
+            ]

         # This is a list of config that can be used to create the storage
         # providers. The entries are tuples of (Class, class_config,
@@ -165,18 +145,19 @@ class ContentRepositoryConfig(Config):
             )

             self.media_storage_providers.append(
-                (provider_class, parsed_config, wrapper_config,)
+                (provider_class, parsed_config, wrapper_config)
             )

         self.uploads_path = self.ensure_directory(config["uploads_path"])
         self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
         self.thumbnail_requirements = parse_thumbnail_requirements(
-            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES),
+            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES)
         )
         self.url_preview_enabled = config.get("url_preview_enabled", False)
         if self.url_preview_enabled:
             try:
                 import lxml
+
                 lxml  # To stop unused lint.
             except ImportError:
                 raise ConfigError(MISSING_LXML)
@@ -199,15 +180,13 @@ class ContentRepositoryConfig(Config):

             # we always blacklist '0.0.0.0' and '::', which are supposed to be
             # unroutable addresses.
-            self.url_preview_ip_range_blacklist.update(['0.0.0.0', '::'])
+            self.url_preview_ip_range_blacklist.update(["0.0.0.0", "::"])

             self.url_preview_ip_range_whitelist = IPSet(
                 config.get("url_preview_ip_range_whitelist", ())
             )

-            self.url_preview_url_blacklist = config.get(
-                "url_preview_url_blacklist", ()
-            )
+            self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())

     def default_config(self, data_dir_path, **kwargs):
         media_store = os.path.join(data_dir_path, "media_store")
@@ -219,7 +198,8 @@ class ContentRepositoryConfig(Config):
         # strip final NL
         formatted_thumbnail_sizes = formatted_thumbnail_sizes[:-1]

-        return r"""
+        return (
+            r"""
 # Directory where uploaded images and attachments are stored.
 #
 media_store_path: "%(media_store)s"
@@ -342,4 +322,6 @@ class ContentRepositoryConfig(Config):
 # The largest allowed URL preview spidering size in bytes
 #
 #max_spider_size: 10M
-""" % locals()
+"""
+            % locals()
+        )
@@ -20,9 +20,7 @@ from ._base import Config, ConfigError

 class RoomDirectoryConfig(Config):
     def read_config(self, config):
-        self.enable_room_list_search = config.get(
-            "enable_room_list_search", True,
-        )
+        self.enable_room_list_search = config.get("enable_room_list_search", True)

         alias_creation_rules = config.get("alias_creation_rules")

@@ -33,11 +31,7 @@ class RoomDirectoryConfig(Config):
             ]
         else:
             self._alias_creation_rules = [
-                _RoomDirectoryRule(
-                    "alias_creation_rules", {
-                        "action": "allow",
-                    }
-                )
+                _RoomDirectoryRule("alias_creation_rules", {"action": "allow"})
             ]

         room_list_publication_rules = config.get("room_list_publication_rules")
@@ -49,11 +43,7 @@ class RoomDirectoryConfig(Config):
             ]
         else:
             self._room_list_publication_rules = [
-                _RoomDirectoryRule(
-                    "room_list_publication_rules", {
-                        "action": "allow",
-                    }
-                )
+                _RoomDirectoryRule("room_list_publication_rules", {"action": "allow"})
             ]

     def default_config(self, config_dir_path, server_name, **kwargs):
@@ -178,8 +168,7 @@ class _RoomDirectoryRule(object):
             self.action = action
         else:
             raise ConfigError(
-                "%s rules can only have action of 'allow'"
-                " or 'deny'" % (option_name,)
+                "%s rules can only have action of 'allow'" " or 'deny'" % (option_name,)
             )

         self._alias_matches_all = alias == "*"
@@ -28,6 +28,7 @@ class SAML2Config(Config):
         self.saml2_enabled = True

         import saml2.config
+
         self.saml2_sp_config = saml2.config.SPConfig()
         self.saml2_sp_config.load(self._default_saml_config_dict())
         self.saml2_sp_config.load(saml2_config.get("sp_config", {}))
@@ -41,26 +42,23 @@ class SAML2Config(Config):

         public_baseurl = self.public_baseurl
         if public_baseurl is None:
-            raise ConfigError(
-                "saml2_config requires a public_baseurl to be set"
-            )
+            raise ConfigError("saml2_config requires a public_baseurl to be set")

         metadata_url = public_baseurl + "_matrix/saml2/metadata.xml"
         response_url = public_baseurl + "_matrix/saml2/authn_response"
         return {
             "entityid": metadata_url,
-
             "service": {
                 "sp": {
                     "endpoints": {
                         "assertion_consumer_service": [
-                            (response_url, saml2.BINDING_HTTP_POST),
-                        ],
+                            (response_url, saml2.BINDING_HTTP_POST)
+                        ]
                     },
                     "required_attributes": ["uid"],
                     "optional_attributes": ["mail", "surname", "givenname"],
-                },
                 }
+            },
         }

     def default_config(self, config_dir_path, server_name, **kwargs):
@@ -106,4 +104,6 @@ class SAML2Config(Config):
 # # separate pysaml2 configuration file:
 # #
 #   config_path: "%(config_dir_path)s/sp_conf.py"
-""" % {"config_dir_path": config_dir_path}
+""" % {
+            "config_dir_path": config_dir_path
+        }
@@ -34,13 +34,12 @@ logger = logging.Logger(__name__)
 #
 # We later check for errors when binding to 0.0.0.0 and ignore them if :: is also in
 # in the list.
-DEFAULT_BIND_ADDRESSES = ['::', '0.0.0.0']
+DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]

 DEFAULT_ROOM_VERSION = "4"


 class ServerConfig(Config):
-
     def read_config(self, config):
         self.server_name = config["server_name"]
         self.server_context = config.get("server_context", None)
@@ -81,27 +80,25 @@ class ServerConfig(Config):
         # Whether to require authentication to retrieve profile data (avatars,
         # display names) of other users through the client API.
         self.require_auth_for_profile_requests = config.get(
-            "require_auth_for_profile_requests", False,
+            "require_auth_for_profile_requests", False
         )

         # If set to 'True', requires authentication to access the server's
         # public rooms directory through the client API, and forbids any other
         # homeserver to fetch it via federation.
         self.restrict_public_rooms_to_local_users = config.get(
-            "restrict_public_rooms_to_local_users", False,
+            "restrict_public_rooms_to_local_users", False
         )

-        default_room_version = config.get(
-            "default_room_version", DEFAULT_ROOM_VERSION,
-        )
+        default_room_version = config.get("default_room_version", DEFAULT_ROOM_VERSION)

         # Ensure room version is a str
         default_room_version = str(default_room_version)

         if default_room_version not in KNOWN_ROOM_VERSIONS:
             raise ConfigError(
-                "Unknown default_room_version: %s, known room versions: %s" %
-                (default_room_version, list(KNOWN_ROOM_VERSIONS.keys()))
+                "Unknown default_room_version: %s, known room versions: %s"
+                % (default_room_version, list(KNOWN_ROOM_VERSIONS.keys()))
             )

         # Get the actual room version object rather than just the identifier
@@ -116,31 +113,25 @@ class ServerConfig(Config):

         # Whether we should block invites sent to users on this server
         # (other than those sent by local server admins)
-        self.block_non_admin_invites = config.get(
-            "block_non_admin_invites", False,
-        )
+        self.block_non_admin_invites = config.get("block_non_admin_invites", False)

         # Whether to enable experimental MSC1849 (aka relations) support
         self.experimental_msc1849_support_enabled = config.get(
-            "experimental_msc1849_support_enabled", False,
+            "experimental_msc1849_support_enabled", False
         )

         # Options to control access by tracking MAU
         self.limit_usage_by_mau = config.get("limit_usage_by_mau", False)
         self.max_mau_value = 0
         if self.limit_usage_by_mau:
-            self.max_mau_value = config.get(
-                "max_mau_value", 0,
-            )
+            self.max_mau_value = config.get("max_mau_value", 0)
         self.mau_stats_only = config.get("mau_stats_only", False)

         self.mau_limits_reserved_threepids = config.get(
             "mau_limit_reserved_threepids", []
         )

-        self.mau_trial_days = config.get(
-            "mau_trial_days", 0,
-        )
+        self.mau_trial_days = config.get("mau_trial_days", 0)

         # Options to disable HS
         self.hs_disabled = config.get("hs_disabled", False)
@@ -153,9 +144,7 @@ class ServerConfig(Config):

         # FIXME: federation_domain_whitelist needs sytests
         self.federation_domain_whitelist = None
-        federation_domain_whitelist = config.get(
-            "federation_domain_whitelist", None,
-        )
+        federation_domain_whitelist = config.get("federation_domain_whitelist", None)

         if federation_domain_whitelist is not None:
             # turn the whitelist into a hash for speed of lookup
@@ -165,7 +154,7 @@ class ServerConfig(Config):
                 self.federation_domain_whitelist[domain] = True

         self.federation_ip_range_blacklist = config.get(
-            "federation_ip_range_blacklist", [],
+            "federation_ip_range_blacklist", []
         )

         # Attempt to create an IPSet from the given ranges
@@ -178,13 +167,12 @@ class ServerConfig(Config):
             self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
         except Exception as e:
             raise ConfigError(
-                "Invalid range(s) provided in "
-                "federation_ip_range_blacklist: %s" % e
+                "Invalid range(s) provided in " "federation_ip_range_blacklist: %s" % e
             )

         if self.public_baseurl is not None:
-            if self.public_baseurl[-1] != '/':
-                self.public_baseurl += '/'
+            if self.public_baseurl[-1] != "/":
+                self.public_baseurl += "/"
         self.start_pushers = config.get("start_pushers", True)

         # (undocumented) option for torturing the worker-mode replication a bit,
@@ -195,7 +183,7 @@ class ServerConfig(Config):
         # Whether to require a user to be in the room to add an alias to it.
         # Defaults to True.
         self.require_membership_for_aliases = config.get(
-            "require_membership_for_aliases", True,
+            "require_membership_for_aliases", True
         )

         # Whether to allow per-room membership profiles through the send of membership
@@ -227,9 +215,9 @@ class ServerConfig(Config):

             # if we still have an empty list of addresses, use the default list
             if not bind_addresses:
-                if listener['type'] == 'metrics':
+                if listener["type"] == "metrics":
                     # the metrics listener doesn't support IPv6
-                    bind_addresses.append('0.0.0.0')
+                    bind_addresses.append("0.0.0.0")
                 else:
                     bind_addresses.extend(DEFAULT_BIND_ADDRESSES)

|
|||||||
bind_host = config.get("bind_host", "")
|
bind_host = config.get("bind_host", "")
|
||||||
gzip_responses = config.get("gzip_responses", True)
|
gzip_responses = config.get("gzip_responses", True)
|
||||||
|
|
||||||
self.listeners.append({
|
self.listeners.append(
|
||||||
|
{
|
||||||
"port": bind_port,
|
"port": bind_port,
|
||||||
"bind_addresses": [bind_host],
|
"bind_addresses": [bind_host],
|
||||||
"tls": True,
|
"tls": True,
|
||||||
"type": "http",
|
"type": "http",
|
||||||
"resources": [
|
"resources": [
|
||||||
{
|
{"names": ["client"], "compress": gzip_responses},
|
||||||
"names": ["client"],
|
{"names": ["federation"], "compress": False},
|
||||||
"compress": gzip_responses,
|
],
|
||||||
},
|
|
||||||
{
|
|
||||||
"names": ["federation"],
|
|
||||||
"compress": False,
|
|
||||||
}
|
}
|
||||||
]
|
)
|
||||||
})
|
|
||||||
|
|
||||||
unsecure_port = config.get("unsecure_port", bind_port - 400)
|
unsecure_port = config.get("unsecure_port", bind_port - 400)
|
||||||
if unsecure_port:
|
if unsecure_port:
|
||||||
self.listeners.append({
|
self.listeners.append(
|
||||||
|
{
|
||||||
"port": unsecure_port,
|
"port": unsecure_port,
|
||||||
"bind_addresses": [bind_host],
|
"bind_addresses": [bind_host],
|
||||||
"tls": False,
|
"tls": False,
|
||||||
"type": "http",
|
"type": "http",
|
||||||
"resources": [
|
"resources": [
|
||||||
{
|
{"names": ["client"], "compress": gzip_responses},
|
||||||
"names": ["client"],
|
{"names": ["federation"], "compress": False},
|
||||||
"compress": gzip_responses,
|
],
|
||||||
},
|
|
||||||
{
|
|
||||||
"names": ["federation"],
|
|
||||||
"compress": False,
|
|
||||||
}
|
}
|
||||||
]
|
)
|
||||||
})
|
|
||||||
|
|
||||||
manhole = config.get("manhole")
|
manhole = config.get("manhole")
|
||||||
if manhole:
|
if manhole:
|
||||||
self.listeners.append({
|
self.listeners.append(
|
||||||
|
{
|
||||||
"port": manhole,
|
"port": manhole,
|
||||||
"bind_addresses": ["127.0.0.1"],
|
"bind_addresses": ["127.0.0.1"],
|
||||||
"type": "manhole",
|
"type": "manhole",
|
||||||
"tls": False,
|
"tls": False,
|
||||||
})
|
}
|
||||||
|
)
|
||||||
|
|
||||||
metrics_port = config.get("metrics_port")
|
metrics_port = config.get("metrics_port")
|
||||||
if metrics_port:
|
if metrics_port:
|
||||||
logger.warn(
|
logger.warn(
|
||||||
("The metrics_port configuration option is deprecated in Synapse 0.31 "
|
(
|
||||||
|
"The metrics_port configuration option is deprecated in Synapse 0.31 "
|
||||||
"in favour of a listener. Please see "
|
"in favour of a listener. Please see "
|
||||||
"http://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst"
|
"http://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst"
|
||||||
" on how to configure the new listener."))
|
" on how to configure the new listener."
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
self.listeners.append({
|
self.listeners.append(
|
||||||
|
{
|
||||||
"port": metrics_port,
|
"port": metrics_port,
|
||||||
"bind_addresses": [config.get("metrics_bind_host", "127.0.0.1")],
|
"bind_addresses": [config.get("metrics_bind_host", "127.0.0.1")],
|
||||||
"tls": False,
|
"tls": False,
|
||||||
"type": "http",
|
"type": "http",
|
||||||
"resources": [
|
"resources": [{"names": ["metrics"], "compress": False}],
|
||||||
{
|
}
|
||||||
"names": ["metrics"],
|
)
|
||||||
"compress": False,
|
|
||||||
},
|
|
||||||
]
|
|
||||||
})
|
|
||||||
|
|
||||||
_check_resource_config(self.listeners)
|
_check_resource_config(self.listeners)
|
||||||
|
|
||||||
|
# An experimental option to try and periodically clean up extremities
|
||||||
|
# by sending dummy events.
|
||||||
|
self.cleanup_extremities_with_dummy_events = config.get(
|
||||||
|
"cleanup_extremities_with_dummy_events", False
|
||||||
|
)
|
||||||
|
|
||||||
def has_tls_listener(self):
|
def has_tls_listener(self):
|
||||||
return any(l["tls"] for l in self.listeners)
|
return any(l["tls"] for l in self.listeners)
|
||||||
|
|
||||||
@ -333,7 +321,8 @@ class ServerConfig(Config):
|
|||||||
# Bring DEFAULT_ROOM_VERSION into the local-scope for use in the
|
# Bring DEFAULT_ROOM_VERSION into the local-scope for use in the
|
||||||
# default config string
|
# default config string
|
||||||
default_room_version = DEFAULT_ROOM_VERSION
|
default_room_version = DEFAULT_ROOM_VERSION
|
||||||
return """\
|
return (
|
||||||
|
"""\
|
||||||
## Server ##
|
## Server ##
|
||||||
|
|
||||||
# The domain name of the server, with optional explicit port.
|
# The domain name of the server, with optional explicit port.
|
||||||
@ -631,7 +620,9 @@ class ServerConfig(Config):
|
|||||||
# Defaults to 'true'.
|
# Defaults to 'true'.
|
||||||
#
|
#
|
||||||
#allow_per_room_profiles: false
|
#allow_per_room_profiles: false
|
||||||
""" % locals()
|
"""
|
||||||
|
% locals()
|
||||||
|
)
|
||||||
|
|
||||||
def read_arguments(self, args):
|
def read_arguments(self, args):
|
||||||
if args.manhole is not None:
|
if args.manhole is not None:
|
||||||
@ -643,17 +634,26 @@ class ServerConfig(Config):
|
|||||||
|
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
server_group = parser.add_argument_group("server")
|
server_group = parser.add_argument_group("server")
|
||||||
server_group.add_argument("-D", "--daemonize", action='store_true',
|
server_group.add_argument(
|
||||||
|
"-D",
|
||||||
|
"--daemonize",
|
||||||
|
action="store_true",
|
||||||
default=None,
|
default=None,
|
||||||
help="Daemonize the home server")
|
help="Daemonize the home server",
|
||||||
server_group.add_argument("--print-pidfile", action='store_true',
|
)
|
||||||
|
server_group.add_argument(
|
||||||
|
"--print-pidfile",
|
||||||
|
action="store_true",
|
||||||
default=None,
|
default=None,
|
||||||
help="Print the path to the pidfile just"
|
help="Print the path to the pidfile just" " before daemonizing",
|
||||||
" before daemonizing")
|
)
|
||||||
server_group.add_argument("--manhole", metavar="PORT", dest="manhole",
|
server_group.add_argument(
|
||||||
|
"--manhole",
|
||||||
|
metavar="PORT",
|
||||||
|
dest="manhole",
|
||||||
type=int,
|
type=int,
|
||||||
help="Turn on the twisted telnet manhole"
|
help="Turn on the twisted telnet manhole" " service on the given port.",
|
||||||
" service on the given port.")
|
)
|
||||||
|
|
||||||
|
|
||||||
def is_threepid_reserved(reserved_threepids, threepid):
|
def is_threepid_reserved(reserved_threepids, threepid):
|
||||||
@ -667,7 +667,7 @@ def is_threepid_reserved(reserved_threepids, threepid):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
for tp in reserved_threepids:
|
for tp in reserved_threepids:
|
||||||
if (threepid['medium'] == tp['medium'] and threepid['address'] == tp['address']):
|
if threepid["medium"] == tp["medium"] and threepid["address"] == tp["address"]:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -680,9 +680,7 @@ def read_gc_thresholds(thresholds):
|
|||||||
return None
|
return None
|
||||||
try:
|
try:
|
||||||
assert len(thresholds) == 3
|
assert len(thresholds) == 3
|
||||||
return (
|
return (int(thresholds[0]), int(thresholds[1]), int(thresholds[2]))
|
||||||
int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
|
|
||||||
)
|
|
||||||
except Exception:
|
except Exception:
|
||||||
raise ConfigError(
|
raise ConfigError(
|
||||||
"Value of `gc_threshold` must be a list of three integers if set"
|
"Value of `gc_threshold` must be a list of three integers if set"
|
||||||
@ -700,22 +698,22 @@ def _warn_if_webclient_configured(listeners):
|
|||||||
for listener in listeners:
|
for listener in listeners:
|
||||||
for res in listener.get("resources", []):
|
for res in listener.get("resources", []):
|
||||||
for name in res.get("names", []):
|
for name in res.get("names", []):
|
||||||
if name == 'webclient':
|
if name == "webclient":
|
||||||
logger.warning(NO_MORE_WEB_CLIENT_WARNING)
|
logger.warning(NO_MORE_WEB_CLIENT_WARNING)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
KNOWN_RESOURCES = (
|
KNOWN_RESOURCES = (
|
||||||
'client',
|
"client",
|
||||||
'consent',
|
"consent",
|
||||||
'federation',
|
"federation",
|
||||||
'keys',
|
"keys",
|
||||||
'media',
|
"media",
|
||||||
'metrics',
|
"metrics",
|
||||||
'openid',
|
"openid",
|
||||||
'replication',
|
"replication",
|
||||||
'static',
|
"static",
|
||||||
'webclient',
|
"webclient",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -729,11 +727,9 @@ def _check_resource_config(listeners):
|
|||||||
|
|
||||||
for resource in resource_names:
|
for resource in resource_names:
|
||||||
if resource not in KNOWN_RESOURCES:
|
if resource not in KNOWN_RESOURCES:
|
||||||
raise ConfigError(
|
raise ConfigError("Unknown listener resource '%s'" % (resource,))
|
||||||
"Unknown listener resource '%s'" % (resource, )
|
|
||||||
)
|
|
||||||
if resource == "consent":
|
if resource == "consent":
|
||||||
try:
|
try:
|
||||||
check_requirements('resources.consent')
|
check_requirements("resources.consent")
|
||||||
except DependencyException as e:
|
except DependencyException as e:
|
||||||
raise ConfigError(e.message)
|
raise ConfigError(e.message)
|
||||||
|
@@ -58,6 +58,7 @@ class ServerNoticesConfig(Config):
         The name to use for the server notices room.
         None if server notices are not enabled.
     """
+
     def __init__(self):
         super(ServerNoticesConfig, self).__init__()
         self.server_notices_mxid = None
@@ -70,18 +71,12 @@ class ServerNoticesConfig(Config):
         if c is None:
             return

-        mxid_localpart = c['system_mxid_localpart']
-        self.server_notices_mxid = UserID(
-            mxid_localpart, self.server_name,
-        ).to_string()
-        self.server_notices_mxid_display_name = c.get(
-            'system_mxid_display_name', None,
-        )
-        self.server_notices_mxid_avatar_url = c.get(
-            'system_mxid_avatar_url', None,
-        )
+        mxid_localpart = c["system_mxid_localpart"]
+        self.server_notices_mxid = UserID(mxid_localpart, self.server_name).to_string()
+        self.server_notices_mxid_display_name = c.get("system_mxid_display_name", None)
+        self.server_notices_mxid_avatar_url = c.get("system_mxid_avatar_url", None)
         # todo: i18n
-        self.server_notices_room_name = c.get('room_name', "Server Notices")
+        self.server_notices_room_name = c.get("room_name", "Server Notices")

     def default_config(self, **kwargs):
         return DEFAULT_CONFIG
@@ -42,11 +42,11 @@ class TlsConfig(Config):
         self.acme_enabled = acme_config.get("enabled", False)

         # hyperlink complains on py2 if this is not a Unicode
-        self.acme_url = six.text_type(acme_config.get(
-            "url", u"https://acme-v01.api.letsencrypt.org/directory"
-        ))
+        self.acme_url = six.text_type(
+            acme_config.get("url", "https://acme-v01.api.letsencrypt.org/directory")
+        )
         self.acme_port = acme_config.get("port", 80)
-        self.acme_bind_addresses = acme_config.get("bind_addresses", ['::', '0.0.0.0'])
+        self.acme_bind_addresses = acme_config.get("bind_addresses", ["::", "0.0.0.0"])
         self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30)
         self.acme_domain = acme_config.get("domain", config.get("server_name"))

@@ -74,12 +74,12 @@ class TlsConfig(Config):

         # Whether to verify certificates on outbound federation traffic
         self.federation_verify_certificates = config.get(
-            "federation_verify_certificates", True,
+            "federation_verify_certificates", True
         )

         # Whitelist of domains to not verify certificates for
         fed_whitelist_entries = config.get(
-            "federation_certificate_verification_whitelist", [],
+            "federation_certificate_verification_whitelist", []
         )

         # Support globs (*) in whitelist values
@@ -90,9 +90,7 @@ class TlsConfig(Config):
             self.federation_certificate_verification_whitelist.append(entry_regex)

         # List of custom certificate authorities for federation traffic validation
-        custom_ca_list = config.get(
-            "federation_custom_ca_list", None,
-        )
+        custom_ca_list = config.get("federation_custom_ca_list", None)

         # Read in and parse custom CA certificates
         self.federation_ca_trust_root = None
@@ -101,8 +99,10 @@ class TlsConfig(Config):
                 # A trustroot cannot be generated without any CA certificates.
                 # Raise an error if this option has been specified without any
                 # corresponding certificates.
-                raise ConfigError("federation_custom_ca_list specified without "
-                                  "any certificate files")
+                raise ConfigError(
+                    "federation_custom_ca_list specified without "
+                    "any certificate files"
+                )

             certs = []
             for ca_file in custom_ca_list:
@@ -114,8 +114,9 @@ class TlsConfig(Config):
                     cert_base = Certificate.loadPEM(content)
                     certs.append(cert_base)
                 except Exception as e:
-                    raise ConfigError("Error parsing custom CA certificate file %s: %s"
-                                      % (ca_file, e))
+                    raise ConfigError(
+                        "Error parsing custom CA certificate file %s: %s" % (ca_file, e)
+                    )

             self.federation_ca_trust_root = trustRootFromCertificates(certs)

@@ -146,17 +147,21 @@ class TlsConfig(Config):
             return None

         try:
-            with open(self.tls_certificate_file, 'rb') as f:
+            with open(self.tls_certificate_file, "rb") as f:
                 cert_pem = f.read()
         except Exception as e:
-            raise ConfigError("Failed to read existing certificate file %s: %s"
-                              % (self.tls_certificate_file, e))
+            raise ConfigError(
+                "Failed to read existing certificate file %s: %s"
+                % (self.tls_certificate_file, e)
+            )

         try:
             tls_certificate = crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem)
         except Exception as e:
-            raise ConfigError("Failed to parse existing certificate file %s: %s"
-                              % (self.tls_certificate_file, e))
+            raise ConfigError(
+                "Failed to parse existing certificate file %s: %s"
+                % (self.tls_certificate_file, e)
+            )

         if not allow_self_signed:
             if tls_certificate.get_subject() == tls_certificate.get_issuer():
@@ -166,7 +171,7 @@ class TlsConfig(Config):

             # YYYYMMDDhhmmssZ -- in UTC
             expires_on = datetime.strptime(
-                tls_certificate.get_notAfter().decode('ascii'), "%Y%m%d%H%M%SZ"
+                tls_certificate.get_notAfter().decode("ascii"), "%Y%m%d%H%M%SZ"
             )
             now = datetime.utcnow()
             days_remaining = (expires_on - now).days
@@ -191,7 +196,8 @@ class TlsConfig(Config):
         except Exception as e:
             logger.info(
                 "Unable to read TLS certificate (%s). Ignoring as no "
-                "tls listeners enabled.", e,
+                "tls listeners enabled.",
+                e,
             )

         self.tls_fingerprints = list(self._original_tls_fingerprints)
@@ -205,7 +211,7 @@ class TlsConfig(Config):
             sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest())
             sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints)
             if sha256_fingerprint not in sha256_fingerprints:
-                self.tls_fingerprints.append({u"sha256": sha256_fingerprint})
+                self.tls_fingerprints.append({"sha256": sha256_fingerprint})

     def default_config(self, config_dir_path, server_name, **kwargs):
         base_key_name = os.path.join(config_dir_path, server_name)
@@ -215,8 +221,8 @@ class TlsConfig(Config):

         # this is to avoid the max line length. Sorrynotsorry
         proxypassline = (
-            'ProxyPass /.well-known/acme-challenge '
-            'http://localhost:8009/.well-known/acme-challenge'
+            "ProxyPass /.well-known/acme-challenge "
+            "http://localhost:8009/.well-known/acme-challenge"
         )

         return (
@ -26,11 +26,11 @@ class UserDirectoryConfig(Config):
|
|||||||
self.user_directory_search_all_users = False
|
self.user_directory_search_all_users = False
|
||||||
user_directory_config = config.get("user_directory", None)
|
user_directory_config = config.get("user_directory", None)
|
||||||
if user_directory_config:
|
if user_directory_config:
|
||||||
self.user_directory_search_enabled = (
|
self.user_directory_search_enabled = user_directory_config.get(
|
||||||
user_directory_config.get("enabled", True)
|
"enabled", True
|
||||||
)
|
)
|
||||||
self.user_directory_search_all_users = (
|
self.user_directory_search_all_users = user_directory_config.get(
|
||||||
user_directory_config.get("search_all_users", False)
|
"search_all_users", False
|
||||||
)
|
)
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, **kwargs):
|
def default_config(self, config_dir_path, server_name, **kwargs):
|
||||||
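The behaviour is unchanged by the reflow: both lookups read an optional `user_directory` section with the same defaults. A toy illustration (the sample config dict is invented):

```python
# Invented sample of the parsed "user_directory" section; the key names and
# defaults come from the hunk above.
config = {"user_directory": {"enabled": True, "search_all_users": True}}

user_directory_config = config.get("user_directory", None) or {}
search_enabled = user_directory_config.get("enabled", True)
search_all_users = user_directory_config.get("search_all_users", False)
assert (search_enabled, search_all_users) == (True, True)
```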
synapse/config/voip.py

@@ -16,14 +16,13 @@ from ._base import Config


 class VoipConfig(Config):
-
     def read_config(self, config):
         self.turn_uris = config.get("turn_uris", [])
         self.turn_shared_secret = config.get("turn_shared_secret")
         self.turn_username = config.get("turn_username")
         self.turn_password = config.get("turn_password")
         self.turn_user_lifetime = self.parse_duration(
-            config.get("turn_user_lifetime", "1h"),
+            config.get("turn_user_lifetime", "1h")
         )
         self.turn_allow_guests = config.get("turn_allow_guests", True)

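`turn_user_lifetime` goes through `parse_duration`, which returns milliseconds from either an integer or a suffixed string like the `"1h"` default above. A rough sketch of that parsing, assuming the s/m/h/d/w/y suffix table used by Synapse's config base class:

```python
# Rough sketch of duration parsing; the unit table is an assumption based on
# Synapse's Config.parse_duration, not copied from it.
def parse_duration(value):
    """Return a duration in milliseconds from an int or an '<n><unit>' string."""
    if isinstance(value, int):
        return value
    second = 1000
    units = {
        "s": second,
        "m": 60 * second,
        "h": 60 * 60 * second,
        "d": 24 * 60 * 60 * second,
        "w": 7 * 24 * 60 * 60 * second,
        "y": 365 * 24 * 60 * 60 * second,
    }
    size = 1
    suffix = value[-1]
    if suffix in units:
        value = value[:-1]
        size = units[suffix]
    return int(value) * size


assert parse_duration("1h") == 60 * 60 * 1000
assert parse_duration(5000) == 5000
```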
synapse/config/workers.py

@@ -52,12 +52,14 @@ class WorkerConfig(Config):
         # argument.
         manhole = config.get("worker_manhole")
         if manhole:
-            self.worker_listeners.append({
-                "port": manhole,
-                "bind_addresses": ["127.0.0.1"],
-                "type": "manhole",
-                "tls": False,
-            })
+            self.worker_listeners.append(
+                {
+                    "port": manhole,
+                    "bind_addresses": ["127.0.0.1"],
+                    "type": "manhole",
+                    "tls": False,
+                }
+            )

         if self.worker_listeners:
             for listener in self.worker_listeners:
@@ -67,7 +69,7 @@ class WorkerConfig(Config):
                 if bind_address:
                     bind_addresses.append(bind_address)
                 elif not bind_addresses:
-                    bind_addresses.append('')
+                    bind_addresses.append("")

     def read_arguments(self, args):
         # We support a bunch of command line arguments that override options in
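The manhole ends up as an ordinary entry in `worker_listeners`, bound to loopback only and never TLS. A sketch of what the code above appends (the port number is invented):

```python
# Invented worker config; the listener dict fields match the hunk above.
config = {"worker_manhole": 9092}
worker_listeners = []

manhole = config.get("worker_manhole")
if manhole:
    worker_listeners.append(
        {
            "port": manhole,
            "bind_addresses": ["127.0.0.1"],  # loopback only
            "type": "manhole",
            "tls": False,
        }
    )

assert worker_listeners[0]["port"] == 9092
```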
synapse/crypto/event_signing.py

@@ -46,9 +46,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     if name not in hashes:
         raise SynapseError(
             400,
-            "Algorithm %s not in hashes %s" % (
-                name, list(hashes),
-            ),
+            "Algorithm %s not in hashes %s" % (name, list(hashes)),
             Codes.UNAUTHORIZED,
         )
     message_hash_base64 = hashes[name]
@@ -56,9 +54,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
         message_hash_bytes = decode_base64(message_hash_base64)
     except Exception:
         raise SynapseError(
-            400,
-            "Invalid base64: %s" % (message_hash_base64,),
-            Codes.UNAUTHORIZED,
+            400, "Invalid base64: %s" % (message_hash_base64,), Codes.UNAUTHORIZED
         )
     return message_hash_bytes == expected_hash

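For context, the expected hash being compared here is a SHA-256 over the event serialised as canonical JSON with the `signatures`, `unsigned` and `hashes` keys stripped first, which is how Matrix content hashes are specified. A hedged sketch of that computation (the sample event is invented):

```python
import hashlib

from canonicaljson import encode_canonical_json
from unpaddedbase64 import encode_base64


def compute_content_hash(event_dict):
    """Sketch of a Matrix content hash; Synapse's real helper strips a few
    more transient keys than shown here."""
    event_dict = dict(event_dict)  # shallow copy is enough for the sketch
    event_dict.pop("signatures", None)
    event_dict.pop("unsigned", None)
    event_dict.pop("hashes", None)
    return "sha256", hashlib.sha256(encode_canonical_json(event_dict)).digest()


name, digest = compute_content_hash({"type": "m.room.message", "content": {}})
print(name, encode_base64(digest))
```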
@@ -135,8 +131,9 @@ def compute_event_signature(event_dict, signature_name, signing_key):
     return redact_json["signatures"]


-def add_hashes_and_signatures(event_dict, signature_name, signing_key,
-                              hash_algorithm=hashlib.sha256):
+def add_hashes_and_signatures(
+    event_dict, signature_name, signing_key, hash_algorithm=hashlib.sha256
+):
     """Add content hash and sign the event

     Args:
@@ -153,7 +150,5 @@ def add_hashes_and_signatures(event_dict, signature_name, signing_key,
     event_dict.setdefault("hashes", {})[name] = encode_base64(digest)

     event_dict["signatures"] = compute_event_signature(
-        event_dict,
-        signature_name=signature_name,
-        signing_key=signing_key,
+        event_dict, signature_name=signature_name, signing_key=signing_key
     )
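A usage sketch for the reflowed signature (the server name and event are invented; requires `signedjson` and this module importable):

```python
from signedjson.key import generate_signing_key

from synapse.crypto.event_signing import add_hashes_and_signatures

# Invented event and server name, purely for illustration.
event_dict = {"type": "m.room.message", "content": {"body": "hi"}}
signing_key = generate_signing_key("key1")

add_hashes_and_signatures(
    event_dict, signature_name="example.com", signing_key=signing_key
)
print(event_dict["hashes"])      # {"sha256": "..."}
print(event_dict["signatures"])  # {"example.com": {"ed25519:key1": "..."}}
```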
synapse/crypto/keyring.py

@@ -505,7 +505,7 @@ class BaseV2KeyFetcher(object):
         Returns:
             Deferred[dict[str, FetchKeyResult]]: map from key_id to result object
         """
-        ts_valid_until_ms = response_json[u"valid_until_ts"]
+        ts_valid_until_ms = response_json["valid_until_ts"]

         # start by extracting the keys from the response, since they may be required
         # to validate the signature on the response.
@@ -614,10 +614,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):

         results = yield logcontext.make_deferred_yieldable(
             defer.gatherResults(
-                [
-                    run_in_background(get_key, server)
-                    for server in self.key_servers
-                ],
+                [run_in_background(get_key, server) for server in self.key_servers],
                 consumeErrors=True,
             ).addErrback(unwrapFirstError)
         )
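The collapsed list comprehension is the usual Twisted fan-out: kick off one Deferred per key server and gather them. A self-contained sketch of the same shape, without Synapse's logcontext machinery (`get_key` and the server list are stand-ins):

```python
from twisted.internet import defer, task


@defer.inlineCallbacks
def fetch_all(reactor):
    servers = ["s1", "s2", "s3"]  # stand-in for self.key_servers

    @defer.inlineCallbacks
    def get_key(server):
        yield task.deferLater(reactor, 0, lambda: None)  # pretend network I/O
        defer.returnValue((server, "key"))

    results = yield defer.gatherResults(
        [get_key(server) for server in servers], consumeErrors=True
    )
    defer.returnValue(dict(results))


if __name__ == "__main__":
    task.react(lambda reactor: fetch_all(reactor).addCallback(print))
```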
@@ -630,9 +627,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
         defer.returnValue(union_of_keys)

     @defer.inlineCallbacks
-    def get_server_verify_key_v2_indirect(
-        self, keys_to_fetch, key_server
-    ):
+    def get_server_verify_key_v2_indirect(self, keys_to_fetch, key_server):
         """
         Args:
             keys_to_fetch (dict[str, dict[str, int]]):
@@ -661,9 +656,9 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
                 destination=perspective_name,
                 path="/_matrix/key/v2/query",
                 data={
-                    u"server_keys": {
+                    "server_keys": {
                         server_name: {
-                            key_id: {u"minimum_valid_until_ts": min_valid_ts}
+                            key_id: {"minimum_valid_until_ts": min_valid_ts}
                             for key_id, min_valid_ts in server_keys.items()
                         }
                         for server_name, server_keys in keys_to_fetch.items()
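For one server and one key, the nested comprehensions above expand into a `POST /_matrix/key/v2/query` body like the following (server name, key id and timestamp are invented):

```python
# keys_to_fetch maps server name -> key id -> minimum_valid_until_ts;
# all concrete values below are invented for illustration.
keys_to_fetch = {"remote.example.com": {"ed25519:abc123": 1560000000000}}

body = {
    "server_keys": {
        server_name: {
            key_id: {"minimum_valid_until_ts": min_valid_ts}
            for key_id, min_valid_ts in server_keys.items()
        }
        for server_name, server_keys in keys_to_fetch.items()
    }
}
assert body == {
    "server_keys": {
        "remote.example.com": {
            "ed25519:abc123": {"minimum_valid_until_ts": 1560000000000}
        }
    }
}
```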
@@ -690,10 +685,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             )

         try:
-            self._validate_perspectives_response(
-                key_server,
-                response,
-            )
+            self._validate_perspectives_response(key_server, response)

             processed_response = yield self.process_v2_response(
                 perspective_name, response, time_added_ms=time_now_ms
@@ -720,9 +712,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):

         defer.returnValue(keys)

-    def _validate_perspectives_response(
-        self, key_server, response,
-    ):
+    def _validate_perspectives_response(self, key_server, response):
         """Optionally check the signature on the result of a /key/query request

         Args:
@@ -739,13 +729,13 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             return

         if (
-            u"signatures" not in response
-            or perspective_name not in response[u"signatures"]
+            "signatures" not in response
+            or perspective_name not in response["signatures"]
         ):
             raise KeyLookupError("Response not signed by the notary server")

         verified = False
-        for key_id in response[u"signatures"][perspective_name]:
+        for key_id in response["signatures"][perspective_name]:
             if key_id in perspective_keys:
                 verify_signed_json(response, perspective_name, perspective_keys[key_id])
                 verified = True
@@ -754,7 +744,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             raise KeyLookupError(
                 "Response not signed with a known key: signed with: %r, known keys: %r"
                 % (
-                    list(response[u"signatures"][perspective_name].keys()),
+                    list(response["signatures"][perspective_name].keys()),
                     list(perspective_keys.keys()),
                 )
             )
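The loop accepts the notary response as soon as any configured key verifies it. A standalone sketch of that check using `signedjson` directly (keys generated on the fly, notary name invented):

```python
from signedjson.key import generate_signing_key, get_verify_key
from signedjson.sign import sign_json, verify_signed_json

perspective_name = "notary.example.com"  # invented
signing_key = generate_signing_key("auto")
perspective_keys = {"ed25519:auto": get_verify_key(signing_key)}

# Stand-in for a /key/query response, signed the way a notary would sign it.
response = sign_json({"server_keys": []}, perspective_name, signing_key)

verified = False
for key_id in response["signatures"][perspective_name]:
    if key_id in perspective_keys:
        verify_signed_json(response, perspective_name, perspective_keys[key_id])
        verified = True

if not verified:
    raise RuntimeError("Response not signed by the notary server")
```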
@@ -826,7 +816,6 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
                 path="/_matrix/key/v2/server/"
                 + urllib.parse.quote(requested_key_id),
                 ignore_backoff=True,
-
                 # we only give the remote server 10s to respond. It should be an
                 # easy request to handle, so if it doesn't reply within 10s, it's
                 # probably not going to.
synapse/event_auth.py

@@ -85,17 +85,14 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru
         room_id_domain = get_domain_from_id(event.room_id)
         if room_id_domain != sender_domain:
             raise AuthError(
-                403,
-                "Creation event's room_id domain does not match sender's"
+                403, "Creation event's room_id domain does not match sender's"
             )

         room_version = event.content.get("room_version", "1")
         if room_version not in KNOWN_ROOM_VERSIONS:
             raise AuthError(
-                403,
-                "room appears to have unsupported version %s" % (
-                    room_version,
-                ))
+                403, "room appears to have unsupported version %s" % (room_version,)
+            )
         # FIXME
         logger.debug("Allowing! %s", event)
         return
@@ -103,46 +100,30 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru
     creation_event = auth_events.get((EventTypes.Create, ""), None)

     if not creation_event:
-        raise AuthError(
-            403,
-            "No create event in auth events",
-        )
+        raise AuthError(403, "No create event in auth events")

     creating_domain = get_domain_from_id(event.room_id)
     originating_domain = get_domain_from_id(event.sender)
     if creating_domain != originating_domain:
         if not _can_federate(event, auth_events):
-            raise AuthError(
-                403,
-                "This room has been marked as unfederatable."
-            )
+            raise AuthError(403, "This room has been marked as unfederatable.")

     # FIXME: Temp hack
     if event.type == EventTypes.Aliases:
         if not event.is_state():
-            raise AuthError(
-                403,
-                "Alias event must be a state event",
-            )
+            raise AuthError(403, "Alias event must be a state event")
         if not event.state_key:
-            raise AuthError(
-                403,
-                "Alias event must have non-empty state_key"
-            )
+            raise AuthError(403, "Alias event must have non-empty state_key")
         sender_domain = get_domain_from_id(event.sender)
         if event.state_key != sender_domain:
             raise AuthError(
-                403,
-                "Alias event's state_key does not match sender's domain"
+                403, "Alias event's state_key does not match sender's domain"
             )
         logger.debug("Allowing! %s", event)
         return

     if logger.isEnabledFor(logging.DEBUG):
-        logger.debug(
-            "Auth events: %s",
-            [a.event_id for a in auth_events.values()]
-        )
+        logger.debug("Auth events: %s", [a.event_id for a in auth_events.values()])

     if event.type == EventTypes.Member:
         _is_membership_change_allowed(event, auth_events)
@@ -159,9 +140,7 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru
         invite_level = _get_named_level(auth_events, "invite", 0)

         if user_level < invite_level:
-            raise AuthError(
-                403, "You don't have permission to invite users",
-            )
+            raise AuthError(403, "You don't have permission to invite users")
         else:
             logger.debug("Allowing! %s", event)
             return
@@ -207,7 +186,7 @@ def _is_membership_change_allowed(event, auth_events):
     # Check if this is the room creator joining:
     if len(event.prev_event_ids()) == 1 and Membership.JOIN == membership:
         # Get room creation event:
-        key = (EventTypes.Create, "", )
+        key = (EventTypes.Create, "")
         create = auth_events.get(key)
         if create and event.prev_event_ids()[0] == create.event_id:
             if create.content["creator"] == event.state_key:
@@ -219,38 +198,31 @@ def _is_membership_change_allowed(event, auth_events):
         target_domain = get_domain_from_id(target_user_id)
         if creating_domain != target_domain:
             if not _can_federate(event, auth_events):
-                raise AuthError(
-                    403,
-                    "This room has been marked as unfederatable."
-                )
+                raise AuthError(403, "This room has been marked as unfederatable.")

     # get info about the caller
-    key = (EventTypes.Member, event.user_id, )
+    key = (EventTypes.Member, event.user_id)
     caller = auth_events.get(key)

     caller_in_room = caller and caller.membership == Membership.JOIN
     caller_invited = caller and caller.membership == Membership.INVITE

     # get info about the target
-    key = (EventTypes.Member, target_user_id, )
+    key = (EventTypes.Member, target_user_id)
     target = auth_events.get(key)

     target_in_room = target and target.membership == Membership.JOIN
     target_banned = target and target.membership == Membership.BAN

-    key = (EventTypes.JoinRules, "", )
+    key = (EventTypes.JoinRules, "")
     join_rule_event = auth_events.get(key)
     if join_rule_event:
-        join_rule = join_rule_event.content.get(
-            "join_rule", JoinRules.INVITE
-        )
+        join_rule = join_rule_event.content.get("join_rule", JoinRules.INVITE)
     else:
         join_rule = JoinRules.INVITE

     user_level = get_user_power_level(event.user_id, auth_events)
-    target_level = get_user_power_level(
-        target_user_id, auth_events
-    )
+    target_level = get_user_power_level(target_user_id, auth_events)

     # FIXME (erikj): What should we do here as the default?
     ban_level = _get_named_level(auth_events, "ban", 50)
@@ -266,29 +238,26 @@ def _is_membership_change_allowed(event, auth_events):
             "join_rule": join_rule,
             "target_user_id": target_user_id,
             "event.user_id": event.user_id,
-        }
+        },
     )

     if Membership.INVITE == membership and "third_party_invite" in event.content:
         if not _verify_third_party_invite(event, auth_events):
             raise AuthError(403, "You are not invited to this room.")
         if target_banned:
-            raise AuthError(
-                403, "%s is banned from the room" % (target_user_id,)
-            )
+            raise AuthError(403, "%s is banned from the room" % (target_user_id,))
         return

     if Membership.JOIN != membership:
-        if (caller_invited
+        if (
+            caller_invited
             and Membership.LEAVE == membership
-            and target_user_id == event.user_id):
+            and target_user_id == event.user_id
+        ):
             return

         if not caller_in_room:  # caller isn't joined
-            raise AuthError(
-                403,
-                "%s not in room %s." % (event.user_id, event.room_id,)
-            )
+            raise AuthError(403, "%s not in room %s." % (event.user_id, event.room_id))

     if Membership.INVITE == membership:
         # TODO (erikj): We should probably handle this more intelligently
@@ -296,19 +265,14 @@ def _is_membership_change_allowed(event, auth_events):

         # Invites are valid iff caller is in the room and target isn't.
         if target_banned:
-            raise AuthError(
-                403, "%s is banned from the room" % (target_user_id,)
-            )
+            raise AuthError(403, "%s is banned from the room" % (target_user_id,))
         elif target_in_room:  # the target is already in the room.
-            raise AuthError(403, "%s is already in the room." %
-                            target_user_id)
+            raise AuthError(403, "%s is already in the room." % target_user_id)
         else:
             invite_level = _get_named_level(auth_events, "invite", 0)

             if user_level < invite_level:
-                raise AuthError(
-                    403, "You don't have permission to invite users",
-                )
+                raise AuthError(403, "You don't have permission to invite users")
     elif Membership.JOIN == membership:
         # Joins are valid iff caller == target and they were:
         # invited: They are accepting the invitation
@@ -329,16 +293,12 @@ def _is_membership_change_allowed(event, auth_events):
     elif Membership.LEAVE == membership:
         # TODO (erikj): Implement kicks.
         if target_banned and user_level < ban_level:
-            raise AuthError(
-                403, "You cannot unban user %s." % (target_user_id,)
-            )
+            raise AuthError(403, "You cannot unban user %s." % (target_user_id,))
         elif target_user_id != event.user_id:
             kick_level = _get_named_level(auth_events, "kick", 50)

             if user_level < kick_level or user_level <= target_level:
-                raise AuthError(
-                    403, "You cannot kick user %s." % target_user_id
-                )
+                raise AuthError(403, "You cannot kick user %s." % target_user_id)
     elif Membership.BAN == membership:
         if user_level < ban_level or user_level <= target_level:
             raise AuthError(403, "You don't have permission to ban")
@@ -347,21 +307,17 @@ def _is_membership_change_allowed(event, auth_events):


 def _check_event_sender_in_room(event, auth_events):
-    key = (EventTypes.Member, event.user_id, )
+    key = (EventTypes.Member, event.user_id)
     member_event = auth_events.get(key)

-    return _check_joined_room(
-        member_event,
-        event.user_id,
-        event.room_id
-    )
+    return _check_joined_room(member_event, event.user_id, event.room_id)


 def _check_joined_room(member, user_id, room_id):
     if not member or member.membership != Membership.JOIN:
-        raise AuthError(403, "User %s not in room %s (%s)" % (
-            user_id, room_id, repr(member)
-        ))
+        raise AuthError(
+            403, "User %s not in room %s (%s)" % (user_id, room_id, repr(member))
+        )


 def get_send_level(etype, state_key, power_levels_event):
@@ -402,26 +358,21 @@ def get_send_level(etype, state_key, power_levels_event):
 def _can_send_event(event, auth_events):
     power_levels_event = _get_power_level_event(auth_events)

-    send_level = get_send_level(
-        event.type, event.get("state_key"), power_levels_event,
-    )
+    send_level = get_send_level(event.type, event.get("state_key"), power_levels_event)
     user_level = get_user_power_level(event.user_id, auth_events)

     if user_level < send_level:
         raise AuthError(
             403,
-            "You don't have permission to post that to the room. " +
-            "user_level (%d) < send_level (%d)" % (user_level, send_level)
+            "You don't have permission to post that to the room. "
+            + "user_level (%d) < send_level (%d)" % (user_level, send_level),
         )

     # Check state_key
     if hasattr(event, "state_key"):
         if event.state_key.startswith("@"):
             if event.state_key != event.user_id:
-                raise AuthError(
-                    403,
-                    "You are not allowed to set others state"
-                )
+                raise AuthError(403, "You are not allowed to set others state")

     return True

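`get_send_level` resolves the threshold used above from the room's `m.room.power_levels` content: a per-event-type override first, otherwise `state_default` (50) for state events and `events_default` (0) for message events. A compact sketch of that lookup order (the content dict is an invented power-levels content):

```python
# Sketch of the lookup behind the user_level < send_level check above; the
# defaults (state_default=50, events_default=0) follow the Matrix auth rules.
def get_send_level(etype, state_key, power_levels_content):
    send_level = power_levels_content.get("events", {}).get(etype)
    if send_level is None:
        if state_key is not None:
            send_level = power_levels_content.get("state_default", 50)
        else:
            send_level = power_levels_content.get("events_default", 0)
    return int(send_level)


content = {"events": {"m.room.name": 50}, "events_default": 0}  # invented
assert get_send_level("m.room.name", None, content) == 50    # explicit override
assert get_send_level("m.room.message", None, content) == 0  # events_default
assert get_send_level("m.room.topic", "", content) == 50     # state_default
```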
@@ -459,10 +410,7 @@ def check_redaction(room_version, event, auth_events):
             event.internal_metadata.recheck_redaction = True
             return True

-    raise AuthError(
-        403,
-        "You don't have permission to redact events"
-    )
+    raise AuthError(403, "You don't have permission to redact events")


 def _check_power_levels(event, auth_events):
@@ -479,7 +427,7 @@ def _check_power_levels(event, auth_events):
         except Exception:
             raise SynapseError(400, "Not a valid power level: %s" % (v,))

-    key = (event.type, event.state_key, )
+    key = (event.type, event.state_key)
     current_state = auth_events.get(key)

     if not current_state:
@@ -500,16 +448,12 @@ def _check_power_levels(event, auth_events):

     old_list = current_state.content.get("users", {})
     for user in set(list(old_list) + list(user_list)):
-        levels_to_check.append(
-            (user, "users")
-        )
+        levels_to_check.append((user, "users"))

     old_list = current_state.content.get("events", {})
     new_list = event.content.get("events", {})
     for ev_id in set(list(old_list) + list(new_list)):
-        levels_to_check.append(
-            (ev_id, "events")
-        )
+        levels_to_check.append((ev_id, "events"))

     old_state = current_state.content
     new_state = event.content
@@ -540,7 +484,7 @@ def _check_power_levels(event, auth_events):
                 raise AuthError(
                     403,
                     "You don't have permission to remove ops level equal "
-                    "to your own"
+                    "to your own",
                 )

         # Check if the old and new levels are greater than the user level
@@ -550,8 +494,7 @@ def _check_power_levels(event, auth_events):
         if old_level_too_big or new_level_too_big:
             raise AuthError(
                 403,
-                "You don't have permission to add ops level greater "
-                "than your own"
+                "You don't have permission to add ops level greater " "than your own",
             )


@@ -587,10 +530,9 @@ def get_user_power_level(user_id, auth_events):

         # some things which call this don't pass the create event: hack around
         # that.
-        key = (EventTypes.Create, "", )
+        key = (EventTypes.Create, "")
         create_event = auth_events.get(key)
-        if (create_event is not None and
-                create_event.content["creator"] == user_id):
+        if create_event is not None and create_event.content["creator"] == user_id:
             return 100
         else:
             return 0
@@ -636,9 +578,7 @@ def _verify_third_party_invite(event, auth_events):

     token = signed["token"]

-    invite_event = auth_events.get(
-        (EventTypes.ThirdPartyInvite, token,)
-    )
+    invite_event = auth_events.get((EventTypes.ThirdPartyInvite, token))
     if not invite_event:
         return False

@@ -661,8 +601,7 @@ def _verify_third_party_invite(event, auth_events):
                     if not key_name.startswith("ed25519:"):
                         continue
                     verify_key = decode_verify_key_bytes(
-                        key_name,
-                        decode_base64(public_key)
+                        key_name, decode_base64(public_key)
                     )
                     verify_signed_json(signed, server, verify_key)

@@ -671,7 +610,7 @@ def _verify_third_party_invite(event, auth_events):
             # The caller is responsible for checking that the signing
             # server has not revoked that public key.
             return True
-        except (KeyError, SignatureVerifyException,):
+        except (KeyError, SignatureVerifyException):
             continue
     return False

@@ -679,9 +618,7 @@ def _verify_third_party_invite(event, auth_events):
 def get_public_keys(invite_event):
     public_keys = []
     if "public_key" in invite_event.content:
-        o = {
-            "public_key": invite_event.content["public_key"],
-        }
+        o = {"public_key": invite_event.content["public_key"]}
         if "key_validity_url" in invite_event.content:
             o["key_validity_url"] = invite_event.content["key_validity_url"]
         public_keys.append(o)
@@ -702,22 +639,22 @@ def auth_types_for_event(event):

     auth_types = []

-    auth_types.append((EventTypes.PowerLevels, "", ))
-    auth_types.append((EventTypes.Member, event.sender, ))
-    auth_types.append((EventTypes.Create, "", ))
+    auth_types.append((EventTypes.PowerLevels, ""))
+    auth_types.append((EventTypes.Member, event.sender))
+    auth_types.append((EventTypes.Create, ""))

     if event.type == EventTypes.Member:
         membership = event.content["membership"]
         if membership in [Membership.JOIN, Membership.INVITE]:
-            auth_types.append((EventTypes.JoinRules, "", ))
+            auth_types.append((EventTypes.JoinRules, ""))

-        auth_types.append((EventTypes.Member, event.state_key, ))
+        auth_types.append((EventTypes.Member, event.state_key))

         if membership == Membership.INVITE:
             if "third_party_invite" in event.content:
                 key = (
                     EventTypes.ThirdPartyInvite,
-                    event.content["third_party_invite"]["signed"]["token"]
+                    event.content["third_party_invite"]["signed"]["token"],
                 )
                 auth_types.append(key)

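`auth_types_for_event` returns the `(event type, state_key)` pairs whose state events must appear in an event's auth chain; for a membership event carrying a third-party invite, the reflowed code yields a list like the following (user ids and token are invented, the string constants being the Matrix event types behind `EventTypes`):

```python
# Expected output shape for an m.room.member invite with a
# third_party_invite; all concrete ids below are invented.
expected_auth_types = [
    ("m.room.power_levels", ""),
    ("m.room.member", "@alice:example.com"),    # sender
    ("m.room.create", ""),
    ("m.room.join_rules", ""),                  # join/invite only
    ("m.room.member", "@bob:example.com"),      # state_key (target)
    ("m.room.third_party_invite", "sometoken"),
]
```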
Some files were not shown because too many files have changed in this diff.