mirror of
https://git.anonymousland.org/anonymousland/synapse.git
synced 2024-12-25 20:39:24 -05:00
Merge branch 'develop' into rav/saml2_client
This commit is contained in:
commit
a4daa899ec
@ -1,21 +0,0 @@
|
|||||||
version: '3.1'
|
|
||||||
|
|
||||||
services:
|
|
||||||
|
|
||||||
postgres:
|
|
||||||
image: postgres:9.4
|
|
||||||
environment:
|
|
||||||
POSTGRES_PASSWORD: postgres
|
|
||||||
|
|
||||||
testenv:
|
|
||||||
image: python:2.7
|
|
||||||
depends_on:
|
|
||||||
- postgres
|
|
||||||
env_file: .env
|
|
||||||
environment:
|
|
||||||
SYNAPSE_POSTGRES_HOST: postgres
|
|
||||||
SYNAPSE_POSTGRES_USER: postgres
|
|
||||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
|
||||||
working_dir: /app
|
|
||||||
volumes:
|
|
||||||
- ..:/app
|
|
@ -1,21 +0,0 @@
|
|||||||
version: '3.1'
|
|
||||||
|
|
||||||
services:
|
|
||||||
|
|
||||||
postgres:
|
|
||||||
image: postgres:9.5
|
|
||||||
environment:
|
|
||||||
POSTGRES_PASSWORD: postgres
|
|
||||||
|
|
||||||
testenv:
|
|
||||||
image: python:2.7
|
|
||||||
depends_on:
|
|
||||||
- postgres
|
|
||||||
env_file: .env
|
|
||||||
environment:
|
|
||||||
SYNAPSE_POSTGRES_HOST: postgres
|
|
||||||
SYNAPSE_POSTGRES_USER: postgres
|
|
||||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
|
||||||
working_dir: /app
|
|
||||||
volumes:
|
|
||||||
- ..:/app
|
|
@ -1,21 +0,0 @@
|
|||||||
version: '3.1'
|
|
||||||
|
|
||||||
services:
|
|
||||||
|
|
||||||
postgres:
|
|
||||||
image: postgres:9.4
|
|
||||||
environment:
|
|
||||||
POSTGRES_PASSWORD: postgres
|
|
||||||
|
|
||||||
testenv:
|
|
||||||
image: python:3.5
|
|
||||||
depends_on:
|
|
||||||
- postgres
|
|
||||||
env_file: .env
|
|
||||||
environment:
|
|
||||||
SYNAPSE_POSTGRES_HOST: postgres
|
|
||||||
SYNAPSE_POSTGRES_USER: postgres
|
|
||||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
|
||||||
working_dir: /app
|
|
||||||
volumes:
|
|
||||||
- ..:/app
|
|
33
.buildkite/format_tap.py
Normal file
33
.buildkite/format_tap.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
import sys
|
||||||
|
from tap.parser import Parser
|
||||||
|
from tap.line import Result, Unknown, Diagnostic
|
||||||
|
|
||||||
|
out = ["### TAP Output for " + sys.argv[2]]
|
||||||
|
|
||||||
|
p = Parser()
|
||||||
|
|
||||||
|
in_error = False
|
||||||
|
|
||||||
|
for line in p.parse_file(sys.argv[1]):
|
||||||
|
if isinstance(line, Result):
|
||||||
|
if in_error:
|
||||||
|
out.append("")
|
||||||
|
out.append("</pre></code></details>")
|
||||||
|
out.append("")
|
||||||
|
out.append("----")
|
||||||
|
out.append("")
|
||||||
|
in_error = False
|
||||||
|
|
||||||
|
if not line.ok and not line.todo:
|
||||||
|
in_error = True
|
||||||
|
|
||||||
|
out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
|
||||||
|
out.append("")
|
||||||
|
out.append("<details><summary>Show log</summary><code><pre>")
|
||||||
|
|
||||||
|
elif isinstance(line, Diagnostic) and in_error:
|
||||||
|
out.append(line.text)
|
||||||
|
|
||||||
|
if out:
|
||||||
|
for line in out[:-3]:
|
||||||
|
print(line)
|
@ -1,22 +1,21 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
set -e
|
set -ex
|
||||||
|
|
||||||
# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
|
if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
|
||||||
# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL.
|
echo "Not merging forward, as this is a release branch"
|
||||||
echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV
|
exit 0
|
||||||
source $BASH_ENV
|
fi
|
||||||
|
|
||||||
if [[ -z "${CIRCLE_PR_NUMBER}" ]]
|
if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then
|
||||||
then
|
echo "Not a pull request, or hasn't had a PR opened yet..."
|
||||||
echo "Can't figure out what the PR number is! Assuming merge target is develop."
|
|
||||||
|
|
||||||
# It probably hasn't had a PR opened yet. Since all PRs land on develop, we
|
# It probably hasn't had a PR opened yet. Since all PRs land on develop, we
|
||||||
# can probably assume it's based on it and will be merged into it.
|
# can probably assume it's based on it and will be merged into it.
|
||||||
GITBASE="develop"
|
GITBASE="develop"
|
||||||
else
|
else
|
||||||
# Get the reference, using the GitHub API
|
# Get the reference, using the GitHub API
|
||||||
GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
|
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Show what we are before
|
# Show what we are before
|
@ -2,10 +2,11 @@ env:
|
|||||||
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
|
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- command:
|
- command:
|
||||||
- "python -m pip install tox"
|
- "python -m pip install tox"
|
||||||
- "tox -e pep8"
|
- "tox -e check_codestyle"
|
||||||
label: "\U0001F9F9 PEP-8"
|
label: "\U0001F9F9 Check Style"
|
||||||
plugins:
|
plugins:
|
||||||
- docker#v3.0.1:
|
- docker#v3.0.1:
|
||||||
image: "python:3.6"
|
image: "python:3.6"
|
||||||
@ -46,15 +47,16 @@ steps:
|
|||||||
|
|
||||||
- wait
|
- wait
|
||||||
|
|
||||||
|
|
||||||
- command:
|
- command:
|
||||||
- "python -m pip install tox"
|
- "python -m pip install tox"
|
||||||
- "tox -e py27,codecov"
|
- "tox -e py35-old,codecov"
|
||||||
label: ":python: 2.7 / SQLite"
|
label: ":python: 3.5 / SQLite / Old Deps"
|
||||||
env:
|
env:
|
||||||
TRIAL_FLAGS: "-j 2"
|
TRIAL_FLAGS: "-j 2"
|
||||||
plugins:
|
plugins:
|
||||||
- docker#v3.0.1:
|
- docker#v3.0.1:
|
||||||
image: "python:2.7"
|
image: "python:3.5"
|
||||||
propagate-environment: true
|
propagate-environment: true
|
||||||
retry:
|
retry:
|
||||||
automatic:
|
automatic:
|
||||||
@ -114,74 +116,6 @@ steps:
|
|||||||
- exit_status: 2
|
- exit_status: 2
|
||||||
limit: 2
|
limit: 2
|
||||||
|
|
||||||
- command:
|
|
||||||
- "python -m pip install tox"
|
|
||||||
- "tox -e py27-old,codecov"
|
|
||||||
label: ":python: 2.7 / SQLite / Old Deps"
|
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "-j 2"
|
|
||||||
plugins:
|
|
||||||
- docker#v3.0.1:
|
|
||||||
image: "python:2.7"
|
|
||||||
propagate-environment: true
|
|
||||||
retry:
|
|
||||||
automatic:
|
|
||||||
- exit_status: -1
|
|
||||||
limit: 2
|
|
||||||
- exit_status: 2
|
|
||||||
limit: 2
|
|
||||||
|
|
||||||
- label: ":python: 2.7 / :postgres: 9.4"
|
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "-j 4"
|
|
||||||
command:
|
|
||||||
- "bash -c 'python -m pip install tox && python -m tox -e py27-postgres,codecov'"
|
|
||||||
plugins:
|
|
||||||
- docker-compose#v2.1.0:
|
|
||||||
run: testenv
|
|
||||||
config:
|
|
||||||
- .buildkite/docker-compose.py27.pg94.yaml
|
|
||||||
retry:
|
|
||||||
automatic:
|
|
||||||
- exit_status: -1
|
|
||||||
limit: 2
|
|
||||||
- exit_status: 2
|
|
||||||
limit: 2
|
|
||||||
|
|
||||||
- label: ":python: 2.7 / :postgres: 9.5"
|
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "-j 4"
|
|
||||||
command:
|
|
||||||
- "bash -c 'python -m pip install tox && python -m tox -e py27-postgres,codecov'"
|
|
||||||
plugins:
|
|
||||||
- docker-compose#v2.1.0:
|
|
||||||
run: testenv
|
|
||||||
config:
|
|
||||||
- .buildkite/docker-compose.py27.pg95.yaml
|
|
||||||
retry:
|
|
||||||
automatic:
|
|
||||||
- exit_status: -1
|
|
||||||
limit: 2
|
|
||||||
- exit_status: 2
|
|
||||||
limit: 2
|
|
||||||
|
|
||||||
- label: ":python: 3.5 / :postgres: 9.4"
|
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "-j 4"
|
|
||||||
command:
|
|
||||||
- "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
|
|
||||||
plugins:
|
|
||||||
- docker-compose#v2.1.0:
|
|
||||||
run: testenv
|
|
||||||
config:
|
|
||||||
- .buildkite/docker-compose.py35.pg94.yaml
|
|
||||||
retry:
|
|
||||||
automatic:
|
|
||||||
- exit_status: -1
|
|
||||||
limit: 2
|
|
||||||
- exit_status: 2
|
|
||||||
limit: 2
|
|
||||||
|
|
||||||
- label: ":python: 3.5 / :postgres: 9.5"
|
- label: ":python: 3.5 / :postgres: 9.5"
|
||||||
env:
|
env:
|
||||||
TRIAL_FLAGS: "-j 4"
|
TRIAL_FLAGS: "-j 4"
|
||||||
@ -232,3 +166,61 @@ steps:
|
|||||||
limit: 2
|
limit: 2
|
||||||
- exit_status: 2
|
- exit_status: 2
|
||||||
limit: 2
|
limit: 2
|
||||||
|
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / SQLite / Monolith"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash .buildkite/synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
env:
|
||||||
|
POSTGRES: "1"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash .buildkite/synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
env:
|
||||||
|
POSTGRES: "1"
|
||||||
|
WORKERS: "1"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash .buildkite/synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
soft_fail: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
145
.buildkite/synapse_sytest.sh
Normal file
145
.buildkite/synapse_sytest.sh
Normal file
@ -0,0 +1,145 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Fetch sytest, and then run the tests for synapse. The entrypoint for the
|
||||||
|
# sytest-synapse docker images.
|
||||||
|
|
||||||
|
set -ex
|
||||||
|
|
||||||
|
if [ -n "$BUILDKITE" ]
|
||||||
|
then
|
||||||
|
SYNAPSE_DIR=`pwd`
|
||||||
|
else
|
||||||
|
SYNAPSE_DIR="/src"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Attempt to find a sytest to use.
|
||||||
|
# If /sytest exists, it means that a SyTest checkout has been mounted into the Docker image.
|
||||||
|
if [ -d "/sytest" ]; then
|
||||||
|
# If the user has mounted in a SyTest checkout, use that.
|
||||||
|
echo "Using local sytests..."
|
||||||
|
|
||||||
|
# create ourselves a working directory and dos2unix some scripts therein
|
||||||
|
mkdir -p /work/jenkins
|
||||||
|
for i in install-deps.pl run-tests.pl tap-to-junit-xml.pl jenkins/prep_sytest_for_postgres.sh; do
|
||||||
|
dos2unix -n "/sytest/$i" "/work/$i"
|
||||||
|
done
|
||||||
|
ln -sf /sytest/tests /work
|
||||||
|
ln -sf /sytest/keys /work
|
||||||
|
SYTEST_LIB="/sytest/lib"
|
||||||
|
else
|
||||||
|
if [ -n "BUILDKITE_BRANCH" ]
|
||||||
|
then
|
||||||
|
branch_name=$BUILDKITE_BRANCH
|
||||||
|
else
|
||||||
|
# Otherwise, try and find out what the branch that the Synapse checkout is using. Fall back to develop if it's not a branch.
|
||||||
|
branch_name="$(git --git-dir=/src/.git symbolic-ref HEAD 2>/dev/null)" || branch_name="develop"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Try and fetch the branch
|
||||||
|
echo "Trying to get same-named sytest branch..."
|
||||||
|
wget -q https://github.com/matrix-org/sytest/archive/$branch_name.tar.gz -O sytest.tar.gz || {
|
||||||
|
# Probably a 404, fall back to develop
|
||||||
|
echo "Using develop instead..."
|
||||||
|
wget -q https://github.com/matrix-org/sytest/archive/develop.tar.gz -O sytest.tar.gz
|
||||||
|
}
|
||||||
|
|
||||||
|
mkdir -p /work
|
||||||
|
tar -C /work --strip-components=1 -xf sytest.tar.gz
|
||||||
|
SYTEST_LIB="/work/lib"
|
||||||
|
fi
|
||||||
|
|
||||||
|
cd /work
|
||||||
|
|
||||||
|
# PostgreSQL setup
|
||||||
|
if [ -n "$POSTGRES" ]
|
||||||
|
then
|
||||||
|
export PGUSER=postgres
|
||||||
|
export POSTGRES_DB_1=pg1
|
||||||
|
export POSTGRES_DB_2=pg2
|
||||||
|
|
||||||
|
# Start the database
|
||||||
|
su -c 'eatmydata /usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres
|
||||||
|
|
||||||
|
# Use the Jenkins script to write out the configuration for a PostgreSQL using Synapse
|
||||||
|
jenkins/prep_sytest_for_postgres.sh
|
||||||
|
|
||||||
|
# Make the test databases for the two Synapse servers that will be spun up
|
||||||
|
su -c 'psql -c "CREATE DATABASE pg1;"' postgres
|
||||||
|
su -c 'psql -c "CREATE DATABASE pg2;"' postgres
|
||||||
|
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "$OFFLINE" ]; then
|
||||||
|
# if we're in offline mode, just put synapse into the virtualenv, and
|
||||||
|
# hope that the deps are up-to-date.
|
||||||
|
#
|
||||||
|
# (`pip install -e` likes to reinstall setuptools even if it's already installed,
|
||||||
|
# so we just run setup.py explicitly.)
|
||||||
|
#
|
||||||
|
(cd $SYNAPSE_DIR && /venv/bin/python setup.py -q develop)
|
||||||
|
else
|
||||||
|
# We've already created the virtualenv, but lets double check we have all
|
||||||
|
# deps.
|
||||||
|
/venv/bin/pip install -q --upgrade --no-cache-dir -e $SYNAPSE_DIR
|
||||||
|
/venv/bin/pip install -q --upgrade --no-cache-dir \
|
||||||
|
lxml psycopg2 coverage codecov tap.py
|
||||||
|
|
||||||
|
# Make sure all Perl deps are installed -- this is done in the docker build
|
||||||
|
# so will only install packages added since the last Docker build
|
||||||
|
./install-deps.pl
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
# Run the tests
|
||||||
|
>&2 echo "+++ Running tests"
|
||||||
|
|
||||||
|
RUN_TESTS=(
|
||||||
|
perl -I "$SYTEST_LIB" ./run-tests.pl --python=/venv/bin/python --synapse-directory=$SYNAPSE_DIR --coverage -O tap --all
|
||||||
|
)
|
||||||
|
|
||||||
|
TEST_STATUS=0
|
||||||
|
|
||||||
|
if [ -n "$WORKERS" ]; then
|
||||||
|
RUN_TESTS+=(-I Synapse::ViaHaproxy --dendron-binary=/pydron.py)
|
||||||
|
else
|
||||||
|
RUN_TESTS+=(-I Synapse)
|
||||||
|
fi
|
||||||
|
|
||||||
|
"${RUN_TESTS[@]}" "$@" > results.tap || TEST_STATUS=$?
|
||||||
|
|
||||||
|
if [ $TEST_STATUS -ne 0 ]; then
|
||||||
|
>&2 echo -e "run-tests \e[31mFAILED\e[0m: exit code $TEST_STATUS"
|
||||||
|
else
|
||||||
|
>&2 echo -e "run-tests \e[32mPASSED\e[0m"
|
||||||
|
fi
|
||||||
|
|
||||||
|
>&2 echo "--- Copying assets"
|
||||||
|
|
||||||
|
# Copy out the logs
|
||||||
|
mkdir -p /logs
|
||||||
|
cp results.tap /logs/results.tap
|
||||||
|
rsync --ignore-missing-args --min-size=1B -av server-0 server-1 /logs --include "*/" --include="*.log.*" --include="*.log" --exclude="*"
|
||||||
|
|
||||||
|
# Upload coverage to codecov and upload files, if running on Buildkite
|
||||||
|
if [ -n "$BUILDKITE" ]
|
||||||
|
then
|
||||||
|
/venv/bin/coverage combine || true
|
||||||
|
/venv/bin/coverage xml || true
|
||||||
|
/venv/bin/codecov -X gcov -f coverage.xml
|
||||||
|
|
||||||
|
wget -O buildkite.tar.gz https://github.com/buildkite/agent/releases/download/v3.13.0/buildkite-agent-linux-amd64-3.13.0.tar.gz
|
||||||
|
tar xvf buildkite.tar.gz
|
||||||
|
chmod +x ./buildkite-agent
|
||||||
|
|
||||||
|
# Upload the files
|
||||||
|
./buildkite-agent artifact upload "/logs/**/*.log*"
|
||||||
|
./buildkite-agent artifact upload "/logs/results.tap"
|
||||||
|
|
||||||
|
if [ $TEST_STATUS -ne 0 ]; then
|
||||||
|
# Annotate, if failure
|
||||||
|
/venv/bin/python $SYNAPSE_DIR/.buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
exit $TEST_STATUS
|
@ -4,160 +4,23 @@ jobs:
|
|||||||
machine: true
|
machine: true
|
||||||
steps:
|
steps:
|
||||||
- checkout
|
- checkout
|
||||||
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG}-py2 .
|
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 .
|
||||||
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
|
|
||||||
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
||||||
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
|
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
|
||||||
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py2
|
|
||||||
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
|
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
|
||||||
dockerhubuploadlatest:
|
dockerhubuploadlatest:
|
||||||
machine: true
|
machine: true
|
||||||
steps:
|
steps:
|
||||||
- checkout
|
- checkout
|
||||||
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest-py2 .
|
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest -t matrixdotorg/synapse:latest-py3 .
|
||||||
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest -t matrixdotorg/synapse:latest-py3 --build-arg PYTHON_VERSION=3.6 .
|
|
||||||
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
||||||
- run: docker push matrixdotorg/synapse:latest
|
- run: docker push matrixdotorg/synapse:latest
|
||||||
- run: docker push matrixdotorg/synapse:latest-py2
|
|
||||||
- run: docker push matrixdotorg/synapse:latest-py3
|
- run: docker push matrixdotorg/synapse:latest-py3
|
||||||
sytestpy2:
|
|
||||||
docker:
|
|
||||||
- image: matrixdotorg/sytest-synapsepy2
|
|
||||||
working_directory: /src
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: /synapse_sytest.sh
|
|
||||||
- store_artifacts:
|
|
||||||
path: /logs
|
|
||||||
destination: logs
|
|
||||||
- store_test_results:
|
|
||||||
path: /logs
|
|
||||||
sytestpy2postgres:
|
|
||||||
docker:
|
|
||||||
- image: matrixdotorg/sytest-synapsepy2
|
|
||||||
working_directory: /src
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: POSTGRES=1 /synapse_sytest.sh
|
|
||||||
- store_artifacts:
|
|
||||||
path: /logs
|
|
||||||
destination: logs
|
|
||||||
- store_test_results:
|
|
||||||
path: /logs
|
|
||||||
sytestpy2merged:
|
|
||||||
docker:
|
|
||||||
- image: matrixdotorg/sytest-synapsepy2
|
|
||||||
working_directory: /src
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: bash .circleci/merge_base_branch.sh
|
|
||||||
- run: /synapse_sytest.sh
|
|
||||||
- store_artifacts:
|
|
||||||
path: /logs
|
|
||||||
destination: logs
|
|
||||||
- store_test_results:
|
|
||||||
path: /logs
|
|
||||||
sytestpy2postgresmerged:
|
|
||||||
docker:
|
|
||||||
- image: matrixdotorg/sytest-synapsepy2
|
|
||||||
working_directory: /src
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: bash .circleci/merge_base_branch.sh
|
|
||||||
- run: POSTGRES=1 /synapse_sytest.sh
|
|
||||||
- store_artifacts:
|
|
||||||
path: /logs
|
|
||||||
destination: logs
|
|
||||||
- store_test_results:
|
|
||||||
path: /logs
|
|
||||||
|
|
||||||
sytestpy3:
|
|
||||||
docker:
|
|
||||||
- image: matrixdotorg/sytest-synapsepy3
|
|
||||||
working_directory: /src
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: /synapse_sytest.sh
|
|
||||||
- store_artifacts:
|
|
||||||
path: /logs
|
|
||||||
destination: logs
|
|
||||||
- store_test_results:
|
|
||||||
path: /logs
|
|
||||||
sytestpy3postgres:
|
|
||||||
docker:
|
|
||||||
- image: matrixdotorg/sytest-synapsepy3
|
|
||||||
working_directory: /src
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: POSTGRES=1 /synapse_sytest.sh
|
|
||||||
- store_artifacts:
|
|
||||||
path: /logs
|
|
||||||
destination: logs
|
|
||||||
- store_test_results:
|
|
||||||
path: /logs
|
|
||||||
sytestpy3merged:
|
|
||||||
docker:
|
|
||||||
- image: matrixdotorg/sytest-synapsepy3
|
|
||||||
working_directory: /src
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: bash .circleci/merge_base_branch.sh
|
|
||||||
- run: /synapse_sytest.sh
|
|
||||||
- store_artifacts:
|
|
||||||
path: /logs
|
|
||||||
destination: logs
|
|
||||||
- store_test_results:
|
|
||||||
path: /logs
|
|
||||||
sytestpy3postgresmerged:
|
|
||||||
docker:
|
|
||||||
- image: matrixdotorg/sytest-synapsepy3
|
|
||||||
working_directory: /src
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: bash .circleci/merge_base_branch.sh
|
|
||||||
- run: POSTGRES=1 /synapse_sytest.sh
|
|
||||||
- store_artifacts:
|
|
||||||
path: /logs
|
|
||||||
destination: logs
|
|
||||||
- store_test_results:
|
|
||||||
path: /logs
|
|
||||||
|
|
||||||
workflows:
|
workflows:
|
||||||
version: 2
|
version: 2
|
||||||
build:
|
build:
|
||||||
jobs:
|
jobs:
|
||||||
- sytestpy2:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
only: /develop|master|release-.*/
|
|
||||||
- sytestpy2postgres:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
only: /develop|master|release-.*/
|
|
||||||
- sytestpy3:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
only: /develop|master|release-.*/
|
|
||||||
- sytestpy3postgres:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
only: /develop|master|release-.*/
|
|
||||||
- sytestpy2merged:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
ignore: /develop|master|release-.*/
|
|
||||||
- sytestpy2postgresmerged:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
ignore: /develop|master|release-.*/
|
|
||||||
- sytestpy3merged:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
ignore: /develop|master|release-.*/
|
|
||||||
- sytestpy3postgresmerged:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
ignore: /develop|master|release-.*/
|
|
||||||
- dockerhubuploadrelease:
|
- dockerhubuploadrelease:
|
||||||
filters:
|
filters:
|
||||||
tags:
|
tags:
|
||||||
|
@ -72,3 +72,6 @@ Jason Robinson <jasonr at matrix.org>
|
|||||||
|
|
||||||
Joseph Weston <joseph at weston.cloud>
|
Joseph Weston <joseph at weston.cloud>
|
||||||
+ Add admin API for querying HS version
|
+ Add admin API for querying HS version
|
||||||
|
|
||||||
|
Benjamin Saunders <ben.e.saunders at gmail dot com>
|
||||||
|
* Documentation improvements
|
||||||
|
27
CHANGES.md
27
CHANGES.md
@ -1,3 +1,24 @@
|
|||||||
|
Synapse 1.0.0 (2019-06-11)
|
||||||
|
==========================
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix bug where attempting to send transactions with large number of EDUs can fail. ([\#5418](https://github.com/matrix-org/synapse/issues/5418))
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Expand the federation guide to include relevant content from the MSC1711 FAQ ([\#5419](https://github.com/matrix-org/synapse/issues/5419))
|
||||||
|
|
||||||
|
|
||||||
|
Internal Changes
|
||||||
|
----------------
|
||||||
|
|
||||||
|
- Move password reset links to /_matrix/client/unstable namespace. ([\#5424](https://github.com/matrix-org/synapse/issues/5424))
|
||||||
|
|
||||||
|
|
||||||
Synapse 1.0.0rc3 (2019-06-10)
|
Synapse 1.0.0rc3 (2019-06-10)
|
||||||
=============================
|
=============================
|
||||||
|
|
||||||
@ -31,7 +52,11 @@ Features
|
|||||||
- Add a script to generate new signing-key files. ([\#5361](https://github.com/matrix-org/synapse/issues/5361))
|
- Add a script to generate new signing-key files. ([\#5361](https://github.com/matrix-org/synapse/issues/5361))
|
||||||
- Update upgrade and installation guides ahead of 1.0. ([\#5371](https://github.com/matrix-org/synapse/issues/5371))
|
- Update upgrade and installation guides ahead of 1.0. ([\#5371](https://github.com/matrix-org/synapse/issues/5371))
|
||||||
- Replace the `perspectives` configuration section with `trusted_key_servers`, and make validating the signatures on responses optional (since TLS will do this job for us). ([\#5374](https://github.com/matrix-org/synapse/issues/5374))
|
- Replace the `perspectives` configuration section with `trusted_key_servers`, and make validating the signatures on responses optional (since TLS will do this job for us). ([\#5374](https://github.com/matrix-org/synapse/issues/5374))
|
||||||
- Add ability to perform password reset via email without trusting the identity server. ([\#5377](https://github.com/matrix-org/synapse/issues/5377))
|
- Add ability to perform password reset via email without trusting the identity server. **As a result of this PR, password resets will now be disabled on the default configuration.**
|
||||||
|
|
||||||
|
Password reset emails are now sent from the homeserver by default, instead of the identity server. To enable this functionality, ensure `email` and `public_baseurl` config options are filled out.
|
||||||
|
|
||||||
|
If you would like to re-enable password resets being sent from the identity server (warning: this is dangerous! See [#5345](https://github.com/matrix-org/synapse/pull/5345)), set `email.trust_identity_server_for_password_resets` to true. ([\#5377](https://github.com/matrix-org/synapse/issues/5377))
|
||||||
- Set default room version to v4. ([\#5379](https://github.com/matrix-org/synapse/issues/5379))
|
- Set default room version to v4. ([\#5379](https://github.com/matrix-org/synapse/issues/5379))
|
||||||
|
|
||||||
|
|
||||||
|
@ -30,21 +30,20 @@ use github's pull request workflow to review the contribution, and either ask
|
|||||||
you to make any refinements needed or merge it and make them ourselves. The
|
you to make any refinements needed or merge it and make them ourselves. The
|
||||||
changes will then land on master when we next do a release.
|
changes will then land on master when we next do a release.
|
||||||
|
|
||||||
We use `CircleCI <https://circleci.com/gh/matrix-org>`_ and `Travis CI
|
We use `CircleCI <https://circleci.com/gh/matrix-org>`_ and `Buildkite
|
||||||
<https://travis-ci.org/matrix-org/synapse>`_ for continuous integration. All
|
<https://buildkite.com/matrix-dot-org/synapse>`_ for continuous integration.
|
||||||
pull requests to synapse get automatically tested by Travis and CircleCI.
|
Buildkite builds need to be authorised by a maintainer. If your change breaks
|
||||||
If your change breaks the build, this will be shown in GitHub, so please
|
the build, this will be shown in GitHub, so please keep an eye on the pull
|
||||||
keep an eye on the pull request for feedback.
|
request for feedback.
|
||||||
|
|
||||||
To run unit tests in a local development environment, you can use:
|
To run unit tests in a local development environment, you can use:
|
||||||
|
|
||||||
- ``tox -e py27`` (requires tox to be installed by ``pip install tox``) for
|
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
||||||
SQLite-backed Synapse on Python 2.7.
|
for SQLite-backed Synapse on Python 3.5.
|
||||||
- ``tox -e py35`` for SQLite-backed Synapse on Python 3.5.
|
|
||||||
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
||||||
- ``tox -e py27-postgres`` for PostgreSQL-backed Synapse on Python 2.7
|
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
||||||
(requires a running local PostgreSQL with access to create databases).
|
(requires a running local PostgreSQL with access to create databases).
|
||||||
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 2.7
|
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
||||||
(requires Docker). Entirely self-contained, recommended if you don't want to
|
(requires Docker). Entirely self-contained, recommended if you don't want to
|
||||||
set up PostgreSQL yourself.
|
set up PostgreSQL yourself.
|
||||||
|
|
||||||
|
58
INSTALL.md
58
INSTALL.md
@ -1,14 +1,31 @@
|
|||||||
* [Installing Synapse](#installing-synapse)
|
- [Choosing your server name](#choosing-your-server-name)
|
||||||
* [Installing from source](#installing-from-source)
|
- [Installing Synapse](#installing-synapse)
|
||||||
* [Platform-Specific Instructions](#platform-specific-instructions)
|
- [Installing from source](#installing-from-source)
|
||||||
* [Troubleshooting Installation](#troubleshooting-installation)
|
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||||
* [Prebuilt packages](#prebuilt-packages)
|
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||||
* [Setting up Synapse](#setting-up-synapse)
|
- [Prebuilt packages](#prebuilt-packages)
|
||||||
* [TLS certificates](#tls-certificates)
|
- [Setting up Synapse](#setting-up-synapse)
|
||||||
* [Email](#email)
|
- [TLS certificates](#tls-certificates)
|
||||||
* [Registering a user](#registering-a-user)
|
- [Email](#email)
|
||||||
* [Setting up a TURN server](#setting-up-a-turn-server)
|
- [Registering a user](#registering-a-user)
|
||||||
* [URL previews](#url-previews)
|
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||||
|
- [URL previews](#url-previews)
|
||||||
|
|
||||||
|
# Choosing your server name
|
||||||
|
|
||||||
|
It is important to choose the name for your server before you install Synapse,
|
||||||
|
because it cannot be changed later.
|
||||||
|
|
||||||
|
The server name determines the "domain" part of user-ids for users on your
|
||||||
|
server: these will all be of the format `@user:my.domain.name`. It also
|
||||||
|
determines how other matrix servers will reach yours for federation.
|
||||||
|
|
||||||
|
For a test configuration, set this to the hostname of your server. For a more
|
||||||
|
production-ready setup, you will probably want to specify your domain
|
||||||
|
(`example.com`) rather than a matrix-specific hostname here (in the same way
|
||||||
|
that your email address is probably `user@example.com` rather than
|
||||||
|
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||||
|
[Setting up Federation](docs/federate.md).
|
||||||
|
|
||||||
# Installing Synapse
|
# Installing Synapse
|
||||||
|
|
||||||
@ -64,16 +81,7 @@ python -m synapse.app.homeserver \
|
|||||||
--report-stats=[yes|no]
|
--report-stats=[yes|no]
|
||||||
```
|
```
|
||||||
|
|
||||||
... substituting an appropriate value for `--server-name`. The server name
|
... substituting an appropriate value for `--server-name`.
|
||||||
determines the "domain" part of user-ids for users on your server: these will
|
|
||||||
all be of the format `@user:my.domain.name`. It also determines how other
|
|
||||||
matrix servers will reach yours for Federation. For a test configuration,
|
|
||||||
set this to the hostname of your server. For a more production-ready setup, you
|
|
||||||
will probably want to specify your domain (`example.com`) rather than a
|
|
||||||
matrix-specific hostname here (in the same way that your email address is
|
|
||||||
probably `user@example.com` rather than `user@email.example.com`) - but
|
|
||||||
doing so may require more advanced setup: see [Setting up Federation](docs/federate.md).
|
|
||||||
Beware that the server name cannot be changed later.
|
|
||||||
|
|
||||||
This command will generate you a config file that you can then customise, but it will
|
This command will generate you a config file that you can then customise, but it will
|
||||||
also generate a set of keys for you. These keys will allow your Home Server to
|
also generate a set of keys for you. These keys will allow your Home Server to
|
||||||
@ -86,9 +94,6 @@ different. See the
|
|||||||
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
||||||
for more information on key management.)
|
for more information on key management.)
|
||||||
|
|
||||||
You will need to give Synapse a TLS certificate before it will start - see [TLS
|
|
||||||
certificates](#tls-certificates).
|
|
||||||
|
|
||||||
To actually run your new homeserver, pick a working directory for Synapse to
|
To actually run your new homeserver, pick a working directory for Synapse to
|
||||||
run (e.g. `~/synapse`), and::
|
run (e.g. `~/synapse`), and::
|
||||||
|
|
||||||
@ -395,8 +400,9 @@ To configure Synapse to expose an HTTPS port, you will need to edit
|
|||||||
instance, if using certbot, use `fullchain.pem` as your certificate, not
|
instance, if using certbot, use `fullchain.pem` as your certificate, not
|
||||||
`cert.pem`).
|
`cert.pem`).
|
||||||
|
|
||||||
For those of you upgrading your TLS certificate for Synapse 1.0 compliance,
|
For a more detailed guide to configuring your server for federation, see
|
||||||
please take a look at [our guide](docs/MSC1711_certificates_FAQ.md#configuring-certificates-for-compatibility-with-synapse-100).
|
[federate.md](docs/federate.md)
|
||||||
|
|
||||||
|
|
||||||
## Email
|
## Email
|
||||||
|
|
||||||
|
32
README.rst
32
README.rst
@ -340,8 +340,11 @@ log lines and looking for any 'Processed request' lines which take more than
|
|||||||
a few seconds to execute. Please let us know at #synapse:matrix.org if
|
a few seconds to execute. Please let us know at #synapse:matrix.org if
|
||||||
you see this failure mode so we can help debug it, however.
|
you see this failure mode so we can help debug it, however.
|
||||||
|
|
||||||
Help!! Synapse eats all my RAM!
|
Help!! Synapse is slow and eats all my RAM/CPU!
|
||||||
-------------------------------
|
-----------------------------------------------
|
||||||
|
|
||||||
|
First, ensure you are running the latest version of Synapse, using Python 3
|
||||||
|
with a PostgreSQL database.
|
||||||
|
|
||||||
Synapse's architecture is quite RAM hungry currently - we deliberately
|
Synapse's architecture is quite RAM hungry currently - we deliberately
|
||||||
cache a lot of recent room data and metadata in RAM in order to speed up
|
cache a lot of recent room data and metadata in RAM in order to speed up
|
||||||
@ -352,14 +355,29 @@ variable. The default is 0.5, which can be decreased to reduce RAM usage
|
|||||||
in memory constrained environments, or increased if performance starts to
|
in memory constrained environments, or increased if performance starts to
|
||||||
degrade.
|
degrade.
|
||||||
|
|
||||||
|
However, degraded performance due to a low cache factor, common on
|
||||||
|
machines with slow disks, often leads to explosions in memory use due
|
||||||
|
backlogged requests. In this case, reducing the cache factor will make
|
||||||
|
things worse. Instead, try increasing it drastically. 2.0 is a good
|
||||||
|
starting value.
|
||||||
|
|
||||||
Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
|
Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
|
||||||
improvement in overall amount, and especially in terms of giving back RAM
|
improvement in overall memory use, and especially in terms of giving back
|
||||||
to the OS. To use it, the library must simply be put in the LD_PRELOAD
|
RAM to the OS. To use it, the library must simply be put in the
|
||||||
environment variable when launching Synapse. On Debian, this can be done
|
LD_PRELOAD environment variable when launching Synapse. On Debian, this
|
||||||
by installing the ``libjemalloc1`` package and adding this line to
|
can be done by installing the ``libjemalloc1`` package and adding this
|
||||||
``/etc/default/matrix-synapse``::
|
line to ``/etc/default/matrix-synapse``::
|
||||||
|
|
||||||
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
|
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
|
||||||
|
|
||||||
This can make a significant difference on Python 2.7 - it's unclear how
|
This can make a significant difference on Python 2.7 - it's unclear how
|
||||||
much of an improvement it provides on Python 3.x.
|
much of an improvement it provides on Python 3.x.
|
||||||
|
|
||||||
|
If you're encountering high CPU use by the Synapse process itself, you
|
||||||
|
may be affected by a bug with presence tracking that leads to a
|
||||||
|
massive excess of outgoing federation requests (see `discussion
|
||||||
|
<https://github.com/matrix-org/synapse/issues/3971>`_). If metrics
|
||||||
|
indicate that your server is also issuing far more outgoing federation
|
||||||
|
requests than can be accounted for by your users' activity, this is a
|
||||||
|
likely cause. The misbehavior can be worked around by setting
|
||||||
|
``use_presence: false`` in the Synapse config file.
|
||||||
|
31
UPGRADE.rst
31
UPGRADE.rst
@ -49,6 +49,33 @@ returned by the Client-Server API:
|
|||||||
# configured on port 443.
|
# configured on port 443.
|
||||||
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
|
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
|
||||||
|
|
||||||
|
Upgrading to v1.1
|
||||||
|
=================
|
||||||
|
|
||||||
|
Synapse 1.1 removes support for older Python and PostgreSQL versions, as
|
||||||
|
outlined in `our deprecation notice <https://matrix.org/blog/2019/04/08/synapse-deprecating-postgres-9-4-and-python-2-x>`_.
|
||||||
|
|
||||||
|
Minimum Python Version
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
Synapse v1.1 has a minimum Python requirement of Python 3.5. Python 3.6 or
|
||||||
|
Python 3.7 are recommended as they have improved internal string handling,
|
||||||
|
significantly reducing memory usage.
|
||||||
|
|
||||||
|
If you use current versions of the Matrix.org-distributed Debian packages or
|
||||||
|
Docker images, action is not required.
|
||||||
|
|
||||||
|
If you install Synapse in a Python virtual environment, please see "Upgrading to
|
||||||
|
v0.34.0" for notes on setting up a new virtualenv under Python 3.
|
||||||
|
|
||||||
|
Minimum PostgreSQL Version
|
||||||
|
--------------------------
|
||||||
|
|
||||||
|
If using PostgreSQL under Synapse, you will need to use PostgreSQL 9.5 or above.
|
||||||
|
Please see the
|
||||||
|
`PostgreSQL documentation <https://www.postgresql.org/docs/11/upgrading.html>`_
|
||||||
|
for more details on upgrading your database.
|
||||||
|
|
||||||
Upgrading to v1.0
|
Upgrading to v1.0
|
||||||
=================
|
=================
|
||||||
|
|
||||||
@ -71,11 +98,11 @@ server in a closed federation. This can be done in one of two ways:-
|
|||||||
* Configure a whitelist of server domains to trust via ``federation_certificate_verification_whitelist``.
|
* Configure a whitelist of server domains to trust via ``federation_certificate_verification_whitelist``.
|
||||||
|
|
||||||
See the `sample configuration file <docs/sample_config.yaml>`_
|
See the `sample configuration file <docs/sample_config.yaml>`_
|
||||||
for more details on these settings.
|
for more details on these settings.
|
||||||
|
|
||||||
Email
|
Email
|
||||||
-----
|
-----
|
||||||
When a user requests a password reset, Synapse will send an email to the
|
When a user requests a password reset, Synapse will send an email to the
|
||||||
user to confirm the request.
|
user to confirm the request.
|
||||||
|
|
||||||
Previous versions of Synapse delegated the job of sending this email to an
|
Previous versions of Synapse delegated the job of sending this email to an
|
||||||
|
1
changelog.d/4276.misc
Normal file
1
changelog.d/4276.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Improve README section on performance troubleshooting.
|
1
changelog.d/5015.misc
Normal file
1
changelog.d/5015.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add logging to 3pid invite signature verification.
|
1
changelog.d/5042.bugfix
Normal file
1
changelog.d/5042.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix bug processing incoming events over federation if call to `/get_missing_events` fails.
|
1
changelog.d/5051.bugfix
Normal file
1
changelog.d/5051.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Prevent >1 room upgrades happening simultaneously on the same room.
|
1
changelog.d/5252.feature
Normal file
1
changelog.d/5252.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add monthly active users to phonehome stats.
|
1
changelog.d/5378.misc
Normal file
1
changelog.d/5378.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Track deactivated accounts in the database.
|
1
changelog.d/5381.misc
Normal file
1
changelog.d/5381.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Clean up code for sending federation EDUs.
|
1
changelog.d/5383.misc
Normal file
1
changelog.d/5383.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Don't log non-200 responses from federation queries as exceptions.
|
1
changelog.d/5384.feature
Normal file
1
changelog.d/5384.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Statistics on forward extremities per room are now exposed via Prometheus.
|
1
changelog.d/5387.bugfix
Normal file
1
changelog.d/5387.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Warn about disabling email-based password resets when a reset occurs, and remove warning when someone attempts a phone-based reset.
|
1
changelog.d/5388.bugfix
Normal file
1
changelog.d/5388.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix email notifications for unnamed rooms with multiple people.
|
1
changelog.d/5389.bugfix
Normal file
1
changelog.d/5389.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix exceptions in federation reader worker caused by attempting to renew attestations, which should only happen on master worker.
|
1
changelog.d/5390.bugfix
Normal file
1
changelog.d/5390.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix handling of failures fetching remote content to not log failures as exceptions.
|
1
changelog.d/5394.bugfix
Normal file
1
changelog.d/5394.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix a bug where deactivated users could receive renewal emails if the account validity feature is on.
|
@ -1 +0,0 @@
|
|||||||
Fix bug where attempting to send transactions with large number of EDUs can fail.
|
|
1
changelog.d/5425.removal
Normal file
1
changelog.d/5425.removal
Normal file
@ -0,0 +1 @@
|
|||||||
|
Python 2.7 is no longer a supported platform. Synapse now requires Python 3.5+ to run.
|
1
changelog.d/5440.feature
Normal file
1
changelog.d/5440.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Allow server admins to define implementations of extra rules for allowing or denying incoming events.
|
1
changelog.d/5446.misc
Normal file
1
changelog.d/5446.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Update Python syntax in contrib/ to Python 3.
|
1
changelog.d/5447.misc
Normal file
1
changelog.d/5447.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Update federation_client dev script to support `.well-known` and work with python3.
|
1
changelog.d/5448.removal
Normal file
1
changelog.d/5448.removal
Normal file
@ -0,0 +1 @@
|
|||||||
|
PostgreSQL 9.4 is no longer supported. Synapse requires Postgres 9.5+ or above for Postgres support.
|
1
changelog.d/5458.feature
Normal file
1
changelog.d/5458.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Statistics on forward extremities per room are now exposed via Prometheus.
|
1
changelog.d/5459.misc
Normal file
1
changelog.d/5459.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
SyTest has been moved to Buildkite.
|
1
changelog.d/5460.misc
Normal file
1
changelog.d/5460.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Demo script now uses python3.
|
1
changelog.d/5461.feature
Normal file
1
changelog.d/5461.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Statistics on forward extremities per room are now exposed via Prometheus.
|
1
changelog.d/5464.bugfix
Normal file
1
changelog.d/5464.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix missing invite state after exchanging 3PID invites over federation.
|
2
changelog.d/5465.misc
Normal file
2
changelog.d/5465.misc
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
Track deactivated accounts in the database.
|
||||||
|
|
1
changelog.d/5474.feature
Normal file
1
changelog.d/5474.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Allow server admins to define implementations of extra rules for allowing or denying incoming events.
|
1
changelog.d/5476.misc
Normal file
1
changelog.d/5476.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add new metrics for number of forward extremities being persisted and number of state groups involved in resolution.
|
1
changelog.d/5477.feature
Normal file
1
changelog.d/5477.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Allow server admins to define implementations of extra rules for allowing or denying incoming events.
|
1
changelog.d/5478.misc
Normal file
1
changelog.d/5478.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
The demo servers talk to each other again.
|
1
changelog.d/5480.misc
Normal file
1
changelog.d/5480.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add an EXPERIMENTAL config option to try and periodically clean up extremities by sending dummy events.
|
1
changelog.d/5482.misc
Normal file
1
changelog.d/5482.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Synapse's codebase is now formatted by `black`.
|
1
changelog.d/5490.bugfix
Normal file
1
changelog.d/5490.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix failure to start under docker with SAML support enabled.
|
1
changelog.d/5493.misc
Normal file
1
changelog.d/5493.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Track deactivated accounts in the database.
|
1
changelog.d/5498.bugfix
Normal file
1
changelog.d/5498.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines.
|
1
changelog.d/5499.misc
Normal file
1
changelog.d/5499.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Some cleanups and sanity-checking in the CPU and database metrics.
|
1
changelog.d/5500.bugfix
Normal file
1
changelog.d/5500.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix logging error when a tampered event is detected.
|
1
changelog.d/5502.misc
Normal file
1
changelog.d/5502.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Improve email notification logging.
|
1
changelog.d/5505.feature
Normal file
1
changelog.d/5505.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add support for handling pagination APIs on client reader worker.
|
1
changelog.d/5509.misc
Normal file
1
changelog.d/5509.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix "Unexpected entry in 'full_schemas'" log warning.
|
1
changelog.d/5510.misc
Normal file
1
changelog.d/5510.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Improve logging when generating config files.
|
1
changelog.d/5511.misc
Normal file
1
changelog.d/5511.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Refactor and clean up Config parser for maintainability.
|
1
changelog.d/5512.feature
Normal file
1
changelog.d/5512.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Improve help and cmdline option names for --generate-config options.
|
1
changelog.d/5513.feature
Normal file
1
changelog.d/5513.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add support for handling pagination APIs on client reader worker.
|
1
changelog.d/5514.bugfix
Normal file
1
changelog.d/5514.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`.
|
1
changelog.d/5516.feature
Normal file
1
changelog.d/5516.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Allow configuration of the path used for ACME account keys.
|
1
changelog.d/5521.feature
Normal file
1
changelog.d/5521.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Allow configuration of the path used for ACME account keys.
|
1
changelog.d/5522.feature
Normal file
1
changelog.d/5522.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Allow configuration of the path used for ACME account keys.
|
1
changelog.d/5523.bugfix
Normal file
1
changelog.d/5523.bugfix
Normal file
@ -0,0 +1 @@
|
|||||||
|
Fix a regression where homeservers on private IP addresses were incorrectly blacklisted.
|
1
changelog.d/5524.feature
Normal file
1
changelog.d/5524.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add --data-dir and --open-private-ports options.
|
1
changelog.d/5525.removal
Normal file
1
changelog.d/5525.removal
Normal file
@ -0,0 +1 @@
|
|||||||
|
Remove support for cpu_affinity setting.
|
1
changelog.d/5531.feature
Normal file
1
changelog.d/5531.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add support for handling pagination APIs on client reader worker.
|
1
changelog.d/5534.feature
Normal file
1
changelog.d/5534.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Split public rooms directory auth config in two settings, in order to manage client auth independently from the federation part of it. Obsoletes the "restrict_public_rooms_to_local_users" configuration setting. If "restrict_public_rooms_to_local_users" is set in the config, Synapse will act as if both new options are enabled, i.e. require authentication through the client API and deny federation requests.
|
1
changelog.d/5537.misc
Normal file
1
changelog.d/5537.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Add information about how to install and run `black` on the codebase to code_style.rst.
|
1
changelog.d/5545.misc
Normal file
1
changelog.d/5545.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Update v1.0.0 release changelog to include more information about changes to password resets.
|
1
changelog.d/5546.feature
Normal file
1
changelog.d/5546.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Update docker image to use Python 3.7.
|
1
changelog.d/5547.feature
Normal file
1
changelog.d/5547.feature
Normal file
@ -0,0 +1 @@
|
|||||||
|
Increase default log level for docker image to INFO. It can still be changed by editing the generated log.config file.
|
1
changelog.d/5548.misc
Normal file
1
changelog.d/5548.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Remove non-functioning check_event_hash.py dev script.
|
1
changelog.d/5558.misc
Normal file
1
changelog.d/5558.misc
Normal file
@ -0,0 +1 @@
|
|||||||
|
Improve install docs on choosing server_name.
|
@ -15,6 +15,7 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
""" Starts a synapse client console. """
|
""" Starts a synapse client console. """
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
from twisted.internet import reactor, defer, threads
|
from twisted.internet import reactor, defer, threads
|
||||||
from http import TwistedHttpClient
|
from http import TwistedHttpClient
|
||||||
@ -36,9 +37,8 @@ from signedjson.sign import verify_signed_json, SignatureVerifyException
|
|||||||
|
|
||||||
CONFIG_JSON = "cmdclient_config.json"
|
CONFIG_JSON = "cmdclient_config.json"
|
||||||
|
|
||||||
TRUSTED_ID_SERVERS = [
|
TRUSTED_ID_SERVERS = ["localhost:8001"]
|
||||||
'localhost:8001'
|
|
||||||
]
|
|
||||||
|
|
||||||
class SynapseCmd(cmd.Cmd):
|
class SynapseCmd(cmd.Cmd):
|
||||||
|
|
||||||
@ -58,7 +58,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"token": token,
|
"token": token,
|
||||||
"verbose": "on",
|
"verbose": "on",
|
||||||
"complete_usernames": "on",
|
"complete_usernames": "on",
|
||||||
"send_delivery_receipts": "on"
|
"send_delivery_receipts": "on",
|
||||||
}
|
}
|
||||||
self.path_prefix = "/_matrix/client/api/v1"
|
self.path_prefix = "/_matrix/client/api/v1"
|
||||||
self.event_stream_token = "END"
|
self.event_stream_token = "END"
|
||||||
@ -109,7 +109,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
by using $. E.g. 'config roomid room1' then 'raw get /rooms/$roomid'.
|
by using $. E.g. 'config roomid room1' then 'raw get /rooms/$roomid'.
|
||||||
"""
|
"""
|
||||||
if len(line) == 0:
|
if len(line) == 0:
|
||||||
print json.dumps(self.config, indent=4)
|
print(json.dumps(self.config, indent=4))
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@ -119,12 +119,11 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
config_rules = [ # key, valid_values
|
config_rules = [ # key, valid_values
|
||||||
("verbose", ["on", "off"]),
|
("verbose", ["on", "off"]),
|
||||||
("complete_usernames", ["on", "off"]),
|
("complete_usernames", ["on", "off"]),
|
||||||
("send_delivery_receipts", ["on", "off"])
|
("send_delivery_receipts", ["on", "off"]),
|
||||||
]
|
]
|
||||||
for key, valid_vals in config_rules:
|
for key, valid_vals in config_rules:
|
||||||
if key == args["key"] and args["val"] not in valid_vals:
|
if key == args["key"] and args["val"] not in valid_vals:
|
||||||
print "%s value must be one of %s" % (args["key"],
|
print("%s value must be one of %s" % (args["key"], valid_vals))
|
||||||
valid_vals)
|
|
||||||
return
|
return
|
||||||
|
|
||||||
# toggle the http client verbosity
|
# toggle the http client verbosity
|
||||||
@ -133,11 +132,11 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
# assign the new config
|
# assign the new config
|
||||||
self.config[args["key"]] = args["val"]
|
self.config[args["key"]] = args["val"]
|
||||||
print json.dumps(self.config, indent=4)
|
print(json.dumps(self.config, indent=4))
|
||||||
|
|
||||||
save_config(self.config)
|
save_config(self.config)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print e
|
print(e)
|
||||||
|
|
||||||
def do_register(self, line):
|
def do_register(self, line):
|
||||||
"""Registers for a new account: "register <userid> <noupdate>"
|
"""Registers for a new account: "register <userid> <noupdate>"
|
||||||
@ -153,33 +152,32 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
pwd = getpass.getpass("Type a password for this user: ")
|
pwd = getpass.getpass("Type a password for this user: ")
|
||||||
pwd2 = getpass.getpass("Retype the password: ")
|
pwd2 = getpass.getpass("Retype the password: ")
|
||||||
if pwd != pwd2 or len(pwd) == 0:
|
if pwd != pwd2 or len(pwd) == 0:
|
||||||
print "Password mismatch."
|
print("Password mismatch.")
|
||||||
pwd = None
|
pwd = None
|
||||||
else:
|
else:
|
||||||
password = pwd
|
password = pwd
|
||||||
|
|
||||||
body = {
|
body = {"type": "m.login.password"}
|
||||||
"type": "m.login.password"
|
|
||||||
}
|
|
||||||
if "userid" in args:
|
if "userid" in args:
|
||||||
body["user"] = args["userid"]
|
body["user"] = args["userid"]
|
||||||
if password:
|
if password:
|
||||||
body["password"] = password
|
body["password"] = password
|
||||||
|
|
||||||
reactor.callFromThread(self._do_register, body,
|
reactor.callFromThread(self._do_register, body, "noupdate" not in args)
|
||||||
"noupdate" not in args)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_register(self, data, update_config):
|
def _do_register(self, data, update_config):
|
||||||
# check the registration flows
|
# check the registration flows
|
||||||
url = self._url() + "/register"
|
url = self._url() + "/register"
|
||||||
json_res = yield self.http_client.do_request("GET", url)
|
json_res = yield self.http_client.do_request("GET", url)
|
||||||
print json.dumps(json_res, indent=4)
|
print(json.dumps(json_res, indent=4))
|
||||||
|
|
||||||
passwordFlow = None
|
passwordFlow = None
|
||||||
for flow in json_res["flows"]:
|
for flow in json_res["flows"]:
|
||||||
if flow["type"] == "m.login.recaptcha" or ("stages" in flow and "m.login.recaptcha" in flow["stages"]):
|
if flow["type"] == "m.login.recaptcha" or (
|
||||||
print "Unable to register: Home server requires captcha."
|
"stages" in flow and "m.login.recaptcha" in flow["stages"]
|
||||||
|
):
|
||||||
|
print("Unable to register: Home server requires captcha.")
|
||||||
return
|
return
|
||||||
if flow["type"] == "m.login.password" and "stages" not in flow:
|
if flow["type"] == "m.login.password" and "stages" not in flow:
|
||||||
passwordFlow = flow
|
passwordFlow = flow
|
||||||
@ -189,7 +187,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
return
|
return
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request("POST", url, data=data)
|
json_res = yield self.http_client.do_request("POST", url, data=data)
|
||||||
print json.dumps(json_res, indent=4)
|
print(json.dumps(json_res, indent=4))
|
||||||
if update_config and "user_id" in json_res:
|
if update_config and "user_id" in json_res:
|
||||||
self.config["user"] = json_res["user_id"]
|
self.config["user"] = json_res["user_id"]
|
||||||
self.config["token"] = json_res["access_token"]
|
self.config["token"] = json_res["access_token"]
|
||||||
@ -201,9 +199,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
args = self._parse(line, ["user_id"], force_keys=True)
|
args = self._parse(line, ["user_id"], force_keys=True)
|
||||||
can_login = threads.blockingCallFromThread(
|
can_login = threads.blockingCallFromThread(reactor, self._check_can_login)
|
||||||
reactor,
|
|
||||||
self._check_can_login)
|
|
||||||
if can_login:
|
if can_login:
|
||||||
p = getpass.getpass("Enter your password: ")
|
p = getpass.getpass("Enter your password: ")
|
||||||
user = args["user_id"]
|
user = args["user_id"]
|
||||||
@ -211,29 +207,25 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
domain = self._domain()
|
domain = self._domain()
|
||||||
if domain:
|
if domain:
|
||||||
user = "@" + user + ":" + domain
|
user = "@" + user + ":" + domain
|
||||||
|
|
||||||
reactor.callFromThread(self._do_login, user, p)
|
reactor.callFromThread(self._do_login, user, p)
|
||||||
#print " got %s " % p
|
# print " got %s " % p
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print e
|
print(e)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_login(self, user, password):
|
def _do_login(self, user, password):
|
||||||
path = "/login"
|
path = "/login"
|
||||||
data = {
|
data = {"user": user, "password": password, "type": "m.login.password"}
|
||||||
"user": user,
|
|
||||||
"password": password,
|
|
||||||
"type": "m.login.password"
|
|
||||||
}
|
|
||||||
url = self._url() + path
|
url = self._url() + path
|
||||||
json_res = yield self.http_client.do_request("POST", url, data=data)
|
json_res = yield self.http_client.do_request("POST", url, data=data)
|
||||||
print json_res
|
print(json_res)
|
||||||
|
|
||||||
if "access_token" in json_res:
|
if "access_token" in json_res:
|
||||||
self.config["user"] = user
|
self.config["user"] = user
|
||||||
self.config["token"] = json_res["access_token"]
|
self.config["token"] = json_res["access_token"]
|
||||||
save_config(self.config)
|
save_config(self.config)
|
||||||
print "Login successful."
|
print("Login successful.")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _check_can_login(self):
|
def _check_can_login(self):
|
||||||
@ -242,18 +234,19 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
# submitting!
|
# submitting!
|
||||||
url = self._url() + path
|
url = self._url() + path
|
||||||
json_res = yield self.http_client.do_request("GET", url)
|
json_res = yield self.http_client.do_request("GET", url)
|
||||||
print json_res
|
print(json_res)
|
||||||
|
|
||||||
if "flows" not in json_res:
|
if "flows" not in json_res:
|
||||||
print "Failed to find any login flows."
|
print("Failed to find any login flows.")
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
|
|
||||||
flow = json_res["flows"][0] # assume first is the one we want.
|
flow = json_res["flows"][0] # assume first is the one we want.
|
||||||
if ("type" not in flow or "m.login.password" != flow["type"] or
|
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
|
||||||
"stages" in flow):
|
|
||||||
fallback_url = self._url() + "/login/fallback"
|
fallback_url = self._url() + "/login/fallback"
|
||||||
print ("Unable to login via the command line client. Please visit "
|
print(
|
||||||
"%s to login." % fallback_url)
|
"Unable to login via the command line client. Please visit "
|
||||||
|
"%s to login." % fallback_url
|
||||||
|
)
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
defer.returnValue(True)
|
defer.returnValue(True)
|
||||||
|
|
||||||
@ -263,21 +256,33 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
<clientSecret> A string of characters generated when requesting an email that you'll supply in subsequent calls to identify yourself
|
<clientSecret> A string of characters generated when requesting an email that you'll supply in subsequent calls to identify yourself
|
||||||
<sendAttempt> The number of times the user has requested an email. Leave this the same between requests to retry the request at the transport level. Increment it to request that the email be sent again.
|
<sendAttempt> The number of times the user has requested an email. Leave this the same between requests to retry the request at the transport level. Increment it to request that the email be sent again.
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ['address', 'clientSecret', 'sendAttempt'])
|
args = self._parse(line, ["address", "clientSecret", "sendAttempt"])
|
||||||
|
|
||||||
postArgs = {'email': args['address'], 'clientSecret': args['clientSecret'], 'sendAttempt': args['sendAttempt']}
|
postArgs = {
|
||||||
|
"email": args["address"],
|
||||||
|
"clientSecret": args["clientSecret"],
|
||||||
|
"sendAttempt": args["sendAttempt"],
|
||||||
|
}
|
||||||
|
|
||||||
reactor.callFromThread(self._do_emailrequest, postArgs)
|
reactor.callFromThread(self._do_emailrequest, postArgs)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_emailrequest(self, args):
|
def _do_emailrequest(self, args):
|
||||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/requestToken"
|
url = (
|
||||||
|
self._identityServerUrl()
|
||||||
|
+ "/_matrix/identity/api/v1/validate/email/requestToken"
|
||||||
|
)
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
json_res = yield self.http_client.do_request(
|
||||||
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
"POST",
|
||||||
print json_res
|
url,
|
||||||
if 'sid' in json_res:
|
data=urllib.urlencode(args),
|
||||||
print "Token sent. Your session ID is %s" % (json_res['sid'])
|
jsonreq=False,
|
||||||
|
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
||||||
|
)
|
||||||
|
print(json_res)
|
||||||
|
if "sid" in json_res:
|
||||||
|
print("Token sent. Your session ID is %s" % (json_res["sid"]))
|
||||||
|
|
||||||
def do_emailvalidate(self, line):
|
def do_emailvalidate(self, line):
|
||||||
"""Validate and associate a third party ID
|
"""Validate and associate a third party ID
|
||||||
@ -285,39 +290,56 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
<token> The token sent to your third party identifier address
|
<token> The token sent to your third party identifier address
|
||||||
<clientSecret> The same clientSecret you supplied in requestToken
|
<clientSecret> The same clientSecret you supplied in requestToken
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ['sid', 'token', 'clientSecret'])
|
args = self._parse(line, ["sid", "token", "clientSecret"])
|
||||||
|
|
||||||
postArgs = { 'sid' : args['sid'], 'token' : args['token'], 'clientSecret': args['clientSecret'] }
|
postArgs = {
|
||||||
|
"sid": args["sid"],
|
||||||
|
"token": args["token"],
|
||||||
|
"clientSecret": args["clientSecret"],
|
||||||
|
}
|
||||||
|
|
||||||
reactor.callFromThread(self._do_emailvalidate, postArgs)
|
reactor.callFromThread(self._do_emailvalidate, postArgs)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_emailvalidate(self, args):
|
def _do_emailvalidate(self, args):
|
||||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/submitToken"
|
url = (
|
||||||
|
self._identityServerUrl()
|
||||||
|
+ "/_matrix/identity/api/v1/validate/email/submitToken"
|
||||||
|
)
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
json_res = yield self.http_client.do_request(
|
||||||
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
"POST",
|
||||||
print json_res
|
url,
|
||||||
|
data=urllib.urlencode(args),
|
||||||
|
jsonreq=False,
|
||||||
|
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
||||||
|
)
|
||||||
|
print(json_res)
|
||||||
|
|
||||||
def do_3pidbind(self, line):
|
def do_3pidbind(self, line):
|
||||||
"""Validate and associate a third party ID
|
"""Validate and associate a third party ID
|
||||||
<sid> The session ID (sid) given to you in the response to requestToken
|
<sid> The session ID (sid) given to you in the response to requestToken
|
||||||
<clientSecret> The same clientSecret you supplied in requestToken
|
<clientSecret> The same clientSecret you supplied in requestToken
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ['sid', 'clientSecret'])
|
args = self._parse(line, ["sid", "clientSecret"])
|
||||||
|
|
||||||
postArgs = { 'sid' : args['sid'], 'clientSecret': args['clientSecret'] }
|
postArgs = {"sid": args["sid"], "clientSecret": args["clientSecret"]}
|
||||||
postArgs['mxid'] = self.config["user"]
|
postArgs["mxid"] = self.config["user"]
|
||||||
|
|
||||||
reactor.callFromThread(self._do_3pidbind, postArgs)
|
reactor.callFromThread(self._do_3pidbind, postArgs)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_3pidbind(self, args):
|
def _do_3pidbind(self, args):
|
||||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/3pid/bind"
|
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
json_res = yield self.http_client.do_request(
|
||||||
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
"POST",
|
||||||
print json_res
|
url,
|
||||||
|
data=urllib.urlencode(args),
|
||||||
|
jsonreq=False,
|
||||||
|
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
||||||
|
)
|
||||||
|
print(json_res)
|
||||||
|
|
||||||
def do_join(self, line):
|
def do_join(self, line):
|
||||||
"""Joins a room: "join <roomid>" """
|
"""Joins a room: "join <roomid>" """
|
||||||
@ -325,7 +347,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
args = self._parse(line, ["roomid"], force_keys=True)
|
args = self._parse(line, ["roomid"], force_keys=True)
|
||||||
self._do_membership_change(args["roomid"], "join", self._usr())
|
self._do_membership_change(args["roomid"], "join", self._usr())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print e
|
print(e)
|
||||||
|
|
||||||
def do_joinalias(self, line):
|
def do_joinalias(self, line):
|
||||||
try:
|
try:
|
||||||
@ -333,7 +355,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
path = "/join/%s" % urllib.quote(args["roomname"])
|
path = "/join/%s" % urllib.quote(args["roomname"])
|
||||||
reactor.callFromThread(self._run_and_pprint, "POST", path, {})
|
reactor.callFromThread(self._run_and_pprint, "POST", path, {})
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print e
|
print(e)
|
||||||
|
|
||||||
def do_topic(self, line):
|
def do_topic(self, line):
|
||||||
""""topic [set|get] <roomid> [<newtopic>]"
|
""""topic [set|get] <roomid> [<newtopic>]"
|
||||||
@ -343,26 +365,24 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
try:
|
try:
|
||||||
args = self._parse(line, ["action", "roomid", "topic"])
|
args = self._parse(line, ["action", "roomid", "topic"])
|
||||||
if "action" not in args or "roomid" not in args:
|
if "action" not in args or "roomid" not in args:
|
||||||
print "Must specify set|get and a room ID."
|
print("Must specify set|get and a room ID.")
|
||||||
return
|
return
|
||||||
if args["action"].lower() not in ["set", "get"]:
|
if args["action"].lower() not in ["set", "get"]:
|
||||||
print "Must specify set|get, not %s" % args["action"]
|
print("Must specify set|get, not %s" % args["action"])
|
||||||
return
|
return
|
||||||
|
|
||||||
path = "/rooms/%s/topic" % urllib.quote(args["roomid"])
|
path = "/rooms/%s/topic" % urllib.quote(args["roomid"])
|
||||||
|
|
||||||
if args["action"].lower() == "set":
|
if args["action"].lower() == "set":
|
||||||
if "topic" not in args:
|
if "topic" not in args:
|
||||||
print "Must specify a new topic."
|
print("Must specify a new topic.")
|
||||||
return
|
return
|
||||||
body = {
|
body = {"topic": args["topic"]}
|
||||||
"topic": args["topic"]
|
|
||||||
}
|
|
||||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, body)
|
reactor.callFromThread(self._run_and_pprint, "PUT", path, body)
|
||||||
elif args["action"].lower() == "get":
|
elif args["action"].lower() == "get":
|
||||||
reactor.callFromThread(self._run_and_pprint, "GET", path)
|
reactor.callFromThread(self._run_and_pprint, "GET", path)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print e
|
print(e)
|
||||||
|
|
||||||
def do_invite(self, line):
|
def do_invite(self, line):
|
||||||
"""Invite a user to a room: "invite <userid> <roomid>" """
|
"""Invite a user to a room: "invite <userid> <roomid>" """
|
||||||
@ -373,49 +393,64 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
reactor.callFromThread(self._do_invite, args["roomid"], user_id)
|
reactor.callFromThread(self._do_invite, args["roomid"], user_id)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print e
|
print(e)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_invite(self, roomid, userstring):
|
def _do_invite(self, roomid, userstring):
|
||||||
if (not userstring.startswith('@') and
|
if not userstring.startswith("@") and self._is_on("complete_usernames"):
|
||||||
self._is_on("complete_usernames")):
|
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
|
||||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/lookup"
|
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request("GET", url, qparams={'medium':'email','address':userstring})
|
json_res = yield self.http_client.do_request(
|
||||||
|
"GET", url, qparams={"medium": "email", "address": userstring}
|
||||||
|
)
|
||||||
|
|
||||||
mxid = None
|
mxid = None
|
||||||
|
|
||||||
if 'mxid' in json_res and 'signatures' in json_res:
|
if "mxid" in json_res and "signatures" in json_res:
|
||||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/pubkey/ed25519"
|
url = (
|
||||||
|
self._identityServerUrl()
|
||||||
|
+ "/_matrix/identity/api/v1/pubkey/ed25519"
|
||||||
|
)
|
||||||
|
|
||||||
pubKey = None
|
pubKey = None
|
||||||
pubKeyObj = yield self.http_client.do_request("GET", url)
|
pubKeyObj = yield self.http_client.do_request("GET", url)
|
||||||
if 'public_key' in pubKeyObj:
|
if "public_key" in pubKeyObj:
|
||||||
pubKey = nacl.signing.VerifyKey(pubKeyObj['public_key'], encoder=nacl.encoding.HexEncoder)
|
pubKey = nacl.signing.VerifyKey(
|
||||||
|
pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
print "No public key found in pubkey response!"
|
print("No public key found in pubkey response!")
|
||||||
|
|
||||||
sigValid = False
|
sigValid = False
|
||||||
|
|
||||||
if pubKey:
|
if pubKey:
|
||||||
for signame in json_res['signatures']:
|
for signame in json_res["signatures"]:
|
||||||
if signame not in TRUSTED_ID_SERVERS:
|
if signame not in TRUSTED_ID_SERVERS:
|
||||||
print "Ignoring signature from untrusted server %s" % (signame)
|
print(
|
||||||
|
"Ignoring signature from untrusted server %s"
|
||||||
|
% (signame)
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
verify_signed_json(json_res, signame, pubKey)
|
verify_signed_json(json_res, signame, pubKey)
|
||||||
sigValid = True
|
sigValid = True
|
||||||
print "Mapping %s -> %s correctly signed by %s" % (userstring, json_res['mxid'], signame)
|
print(
|
||||||
|
"Mapping %s -> %s correctly signed by %s"
|
||||||
|
% (userstring, json_res["mxid"], signame)
|
||||||
|
)
|
||||||
break
|
break
|
||||||
except SignatureVerifyException as e:
|
except SignatureVerifyException as e:
|
||||||
print "Invalid signature from %s" % (signame)
|
print("Invalid signature from %s" % (signame))
|
||||||
print e
|
print(e)
|
||||||
|
|
||||||
if sigValid:
|
if sigValid:
|
||||||
print "Resolved 3pid %s to %s" % (userstring, json_res['mxid'])
|
print("Resolved 3pid %s to %s" % (userstring, json_res["mxid"]))
|
||||||
mxid = json_res['mxid']
|
mxid = json_res["mxid"]
|
||||||
else:
|
else:
|
||||||
print "Got association for %s but couldn't verify signature" % (userstring)
|
print(
|
||||||
|
"Got association for %s but couldn't verify signature"
|
||||||
|
% (userstring)
|
||||||
|
)
|
||||||
|
|
||||||
if not mxid:
|
if not mxid:
|
||||||
mxid = "@" + userstring + ":" + self._domain()
|
mxid = "@" + userstring + ":" + self._domain()
|
||||||
@ -428,18 +463,17 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
args = self._parse(line, ["roomid"], force_keys=True)
|
args = self._parse(line, ["roomid"], force_keys=True)
|
||||||
self._do_membership_change(args["roomid"], "leave", self._usr())
|
self._do_membership_change(args["roomid"], "leave", self._usr())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print e
|
print(e)
|
||||||
|
|
||||||
def do_send(self, line):
|
def do_send(self, line):
|
||||||
"""Sends a message. "send <roomid> <body>" """
|
"""Sends a message. "send <roomid> <body>" """
|
||||||
args = self._parse(line, ["roomid", "body"])
|
args = self._parse(line, ["roomid", "body"])
|
||||||
txn_id = "txn%s" % int(time.time())
|
txn_id = "txn%s" % int(time.time())
|
||||||
path = "/rooms/%s/send/m.room.message/%s" % (urllib.quote(args["roomid"]),
|
path = "/rooms/%s/send/m.room.message/%s" % (
|
||||||
txn_id)
|
urllib.quote(args["roomid"]),
|
||||||
body_json = {
|
txn_id,
|
||||||
"msgtype": "m.text",
|
)
|
||||||
"body": args["body"]
|
body_json = {"msgtype": "m.text", "body": args["body"]}
|
||||||
}
|
|
||||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, body_json)
|
reactor.callFromThread(self._run_and_pprint, "PUT", path, body_json)
|
||||||
|
|
||||||
def do_list(self, line):
|
def do_list(self, line):
|
||||||
@ -453,10 +487,10 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"""
|
"""
|
||||||
args = self._parse(line, ["type", "roomid", "qp"])
|
args = self._parse(line, ["type", "roomid", "qp"])
|
||||||
if not "type" in args or not "roomid" in args:
|
if not "type" in args or not "roomid" in args:
|
||||||
print "Must specify type and room ID."
|
print("Must specify type and room ID.")
|
||||||
return
|
return
|
||||||
if args["type"] not in ["members", "messages"]:
|
if args["type"] not in ["members", "messages"]:
|
||||||
print "Unrecognised type: %s" % args["type"]
|
print("Unrecognised type: %s" % args["type"])
|
||||||
return
|
return
|
||||||
room_id = args["roomid"]
|
room_id = args["roomid"]
|
||||||
path = "/rooms/%s/%s" % (urllib.quote(room_id), args["type"])
|
path = "/rooms/%s/%s" % (urllib.quote(room_id), args["type"])
|
||||||
@ -468,11 +502,10 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
key_value = key_value_str.split("=")
|
key_value = key_value_str.split("=")
|
||||||
qp[key_value[0]] = key_value[1]
|
qp[key_value[0]] = key_value[1]
|
||||||
except:
|
except:
|
||||||
print "Bad query param: %s" % key_value
|
print("Bad query param: %s" % key_value)
|
||||||
return
|
return
|
||||||
|
|
||||||
reactor.callFromThread(self._run_and_pprint, "GET", path,
|
reactor.callFromThread(self._run_and_pprint, "GET", path, query_params=qp)
|
||||||
query_params=qp)
|
|
||||||
|
|
||||||
def do_create(self, line):
|
def do_create(self, line):
|
||||||
"""Creates a room.
|
"""Creates a room.
|
||||||
@ -508,14 +541,22 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
args = self._parse(line, ["method", "path", "data"])
|
args = self._parse(line, ["method", "path", "data"])
|
||||||
# sanity check
|
# sanity check
|
||||||
if "method" not in args or "path" not in args:
|
if "method" not in args or "path" not in args:
|
||||||
print "Must specify path and method."
|
print("Must specify path and method.")
|
||||||
return
|
return
|
||||||
|
|
||||||
args["method"] = args["method"].upper()
|
args["method"] = args["method"].upper()
|
||||||
valid_methods = ["PUT", "GET", "POST", "DELETE",
|
valid_methods = [
|
||||||
"XPUT", "XGET", "XPOST", "XDELETE"]
|
"PUT",
|
||||||
|
"GET",
|
||||||
|
"POST",
|
||||||
|
"DELETE",
|
||||||
|
"XPUT",
|
||||||
|
"XGET",
|
||||||
|
"XPOST",
|
||||||
|
"XDELETE",
|
||||||
|
]
|
||||||
if args["method"] not in valid_methods:
|
if args["method"] not in valid_methods:
|
||||||
print "Unsupported method: %s" % args["method"]
|
print("Unsupported method: %s" % args["method"])
|
||||||
return
|
return
|
||||||
|
|
||||||
if "data" not in args:
|
if "data" not in args:
|
||||||
@ -524,7 +565,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
try:
|
try:
|
||||||
args["data"] = json.loads(args["data"])
|
args["data"] = json.loads(args["data"])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print "Data is not valid JSON. %s" % e
|
print("Data is not valid JSON. %s" % e)
|
||||||
return
|
return
|
||||||
|
|
||||||
qp = {"access_token": self._tok()}
|
qp = {"access_token": self._tok()}
|
||||||
@ -540,10 +581,13 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
reactor.callFromThread(self._run_and_pprint, args["method"],
|
reactor.callFromThread(
|
||||||
args["path"],
|
self._run_and_pprint,
|
||||||
args["data"],
|
args["method"],
|
||||||
query_params=qp)
|
args["path"],
|
||||||
|
args["data"],
|
||||||
|
query_params=qp,
|
||||||
|
)
|
||||||
|
|
||||||
def do_stream(self, line):
|
def do_stream(self, line):
|
||||||
"""Stream data from the server: "stream <longpoll timeout ms>" """
|
"""Stream data from the server: "stream <longpoll timeout ms>" """
|
||||||
@ -553,26 +597,29 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
try:
|
try:
|
||||||
timeout = int(args["timeout"])
|
timeout = int(args["timeout"])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
print "Timeout must be in milliseconds."
|
print("Timeout must be in milliseconds.")
|
||||||
return
|
return
|
||||||
reactor.callFromThread(self._do_event_stream, timeout)
|
reactor.callFromThread(self._do_event_stream, timeout)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_event_stream(self, timeout):
|
def _do_event_stream(self, timeout):
|
||||||
res = yield self.http_client.get_json(
|
res = yield self.http_client.get_json(
|
||||||
self._url() + "/events",
|
self._url() + "/events",
|
||||||
{
|
{
|
||||||
"access_token": self._tok(),
|
"access_token": self._tok(),
|
||||||
"timeout": str(timeout),
|
"timeout": str(timeout),
|
||||||
"from": self.event_stream_token
|
"from": self.event_stream_token,
|
||||||
})
|
},
|
||||||
print json.dumps(res, indent=4)
|
)
|
||||||
|
print(json.dumps(res, indent=4))
|
||||||
|
|
||||||
if "chunk" in res:
|
if "chunk" in res:
|
||||||
for event in res["chunk"]:
|
for event in res["chunk"]:
|
||||||
if (event["type"] == "m.room.message" and
|
if (
|
||||||
self._is_on("send_delivery_receipts") and
|
event["type"] == "m.room.message"
|
||||||
event["user_id"] != self._usr()): # not sent by us
|
and self._is_on("send_delivery_receipts")
|
||||||
|
and event["user_id"] != self._usr()
|
||||||
|
): # not sent by us
|
||||||
self._send_receipt(event, "d")
|
self._send_receipt(event, "d")
|
||||||
|
|
||||||
# update the position in the stram
|
# update the position in the stram
|
||||||
@ -580,18 +627,28 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
self.event_stream_token = res["end"]
|
self.event_stream_token = res["end"]
|
||||||
|
|
||||||
def _send_receipt(self, event, feedback_type):
|
def _send_receipt(self, event, feedback_type):
|
||||||
path = ("/rooms/%s/messages/%s/%s/feedback/%s/%s" %
|
path = "/rooms/%s/messages/%s/%s/feedback/%s/%s" % (
|
||||||
(urllib.quote(event["room_id"]), event["user_id"], event["msg_id"],
|
urllib.quote(event["room_id"]),
|
||||||
self._usr(), feedback_type))
|
event["user_id"],
|
||||||
|
event["msg_id"],
|
||||||
|
self._usr(),
|
||||||
|
feedback_type,
|
||||||
|
)
|
||||||
data = {}
|
data = {}
|
||||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data,
|
reactor.callFromThread(
|
||||||
alt_text="Sent receipt for %s" % event["msg_id"])
|
self._run_and_pprint,
|
||||||
|
"PUT",
|
||||||
|
path,
|
||||||
|
data=data,
|
||||||
|
alt_text="Sent receipt for %s" % event["msg_id"],
|
||||||
|
)
|
||||||
|
|
||||||
def _do_membership_change(self, roomid, membership, userid):
|
def _do_membership_change(self, roomid, membership, userid):
|
||||||
path = "/rooms/%s/state/m.room.member/%s" % (urllib.quote(roomid), urllib.quote(userid))
|
path = "/rooms/%s/state/m.room.member/%s" % (
|
||||||
data = {
|
urllib.quote(roomid),
|
||||||
"membership": membership
|
urllib.quote(userid),
|
||||||
}
|
)
|
||||||
|
data = {"membership": membership}
|
||||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data)
|
reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data)
|
||||||
|
|
||||||
def do_displayname(self, line):
|
def do_displayname(self, line):
|
||||||
@ -644,15 +701,20 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
for i, arg in enumerate(line_args):
|
for i, arg in enumerate(line_args):
|
||||||
for config_key in self.config:
|
for config_key in self.config:
|
||||||
if ("$" + config_key) in arg:
|
if ("$" + config_key) in arg:
|
||||||
arg = arg.replace("$" + config_key,
|
arg = arg.replace("$" + config_key, self.config[config_key])
|
||||||
self.config[config_key])
|
|
||||||
line_args[i] = arg
|
line_args[i] = arg
|
||||||
|
|
||||||
return dict(zip(keys, line_args))
|
return dict(zip(keys, line_args))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _run_and_pprint(self, method, path, data=None,
|
def _run_and_pprint(
|
||||||
query_params={"access_token": None}, alt_text=None):
|
self,
|
||||||
|
method,
|
||||||
|
path,
|
||||||
|
data=None,
|
||||||
|
query_params={"access_token": None},
|
||||||
|
alt_text=None,
|
||||||
|
):
|
||||||
""" Runs an HTTP request and pretty prints the output.
|
""" Runs an HTTP request and pretty prints the output.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@ -665,31 +727,31 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
if "access_token" in query_params:
|
if "access_token" in query_params:
|
||||||
query_params["access_token"] = self._tok()
|
query_params["access_token"] = self._tok()
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(method, url,
|
json_res = yield self.http_client.do_request(
|
||||||
data=data,
|
method, url, data=data, qparams=query_params
|
||||||
qparams=query_params)
|
)
|
||||||
if alt_text:
|
if alt_text:
|
||||||
print alt_text
|
print(alt_text)
|
||||||
else:
|
else:
|
||||||
print json.dumps(json_res, indent=4)
|
print(json.dumps(json_res, indent=4))
|
||||||
|
|
||||||
|
|
||||||
def save_config(config):
|
def save_config(config):
|
||||||
with open(CONFIG_JSON, 'w') as out:
|
with open(CONFIG_JSON, "w") as out:
|
||||||
json.dump(config, out)
|
json.dump(config, out)
|
||||||
|
|
||||||
|
|
||||||
def main(server_url, identity_server_url, username, token, config_path):
|
def main(server_url, identity_server_url, username, token, config_path):
|
||||||
print "Synapse command line client"
|
print("Synapse command line client")
|
||||||
print "==========================="
|
print("===========================")
|
||||||
print "Server: %s" % server_url
|
print("Server: %s" % server_url)
|
||||||
print "Type 'help' to get started."
|
print("Type 'help' to get started.")
|
||||||
print "Close this console with CTRL+C then CTRL+D."
|
print("Close this console with CTRL+C then CTRL+D.")
|
||||||
if not username or not token:
|
if not username or not token:
|
||||||
print "- 'register <username>' - Register an account"
|
print("- 'register <username>' - Register an account")
|
||||||
print "- 'stream' - Connect to the event stream"
|
print("- 'stream' - Connect to the event stream")
|
||||||
print "- 'create <roomid>' - Create a room"
|
print("- 'create <roomid>' - Create a room")
|
||||||
print "- 'send <roomid> <message>' - Send a message"
|
print("- 'send <roomid> <message>' - Send a message")
|
||||||
http_client = TwistedHttpClient()
|
http_client = TwistedHttpClient()
|
||||||
|
|
||||||
# the command line client
|
# the command line client
|
||||||
@ -699,13 +761,13 @@ def main(server_url, identity_server_url, username, token, config_path):
|
|||||||
global CONFIG_JSON
|
global CONFIG_JSON
|
||||||
CONFIG_JSON = config_path # bit cheeky, but just overwrite the global
|
CONFIG_JSON = config_path # bit cheeky, but just overwrite the global
|
||||||
try:
|
try:
|
||||||
with open(config_path, 'r') as config:
|
with open(config_path, "r") as config:
|
||||||
syn_cmd.config = json.load(config)
|
syn_cmd.config = json.load(config)
|
||||||
try:
|
try:
|
||||||
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
print "Loaded config from %s" % config_path
|
print("Loaded config from %s" % config_path)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -716,27 +778,37 @@ def main(server_url, identity_server_url, username, token, config_path):
|
|||||||
reactor.run()
|
reactor.run()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == "__main__":
|
||||||
parser = argparse.ArgumentParser("Starts a synapse client.")
|
parser = argparse.ArgumentParser("Starts a synapse client.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-s", "--server", dest="server", default="http://localhost:8008",
|
"-s",
|
||||||
help="The URL of the home server to talk to.")
|
"--server",
|
||||||
|
dest="server",
|
||||||
|
default="http://localhost:8008",
|
||||||
|
help="The URL of the home server to talk to.",
|
||||||
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-i", "--identity-server", dest="identityserver", default="http://localhost:8090",
|
"-i",
|
||||||
help="The URL of the identity server to talk to.")
|
"--identity-server",
|
||||||
|
dest="identityserver",
|
||||||
|
default="http://localhost:8090",
|
||||||
|
help="The URL of the identity server to talk to.",
|
||||||
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-u", "--username", dest="username",
|
"-u", "--username", dest="username", help="Your username on the server."
|
||||||
help="Your username on the server.")
|
)
|
||||||
|
parser.add_argument("-t", "--token", dest="token", help="Your access token.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-t", "--token", dest="token",
|
"-c",
|
||||||
help="Your access token.")
|
"--config",
|
||||||
parser.add_argument(
|
dest="config",
|
||||||
"-c", "--config", dest="config", default=CONFIG_JSON,
|
default=CONFIG_JSON,
|
||||||
help="The location of the config.json file to read from.")
|
help="The location of the config.json file to read from.",
|
||||||
|
)
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
if not args.server:
|
if not args.server:
|
||||||
print "You must supply a server URL to communicate with."
|
print("You must supply a server URL to communicate with.")
|
||||||
parser.print_help()
|
parser.print_help()
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
@ -13,6 +13,7 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
from twisted.web.client import Agent, readBody
|
from twisted.web.client import Agent, readBody
|
||||||
from twisted.web.http_headers import Headers
|
from twisted.web.http_headers import Headers
|
||||||
from twisted.internet import defer, reactor
|
from twisted.internet import defer, reactor
|
||||||
@ -72,9 +73,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def put_json(self, url, data):
|
def put_json(self, url, data):
|
||||||
response = yield self._create_put_request(
|
response = yield self._create_put_request(
|
||||||
url,
|
url, data, headers_dict={"Content-Type": ["application/json"]}
|
||||||
data,
|
|
||||||
headers_dict={"Content-Type": ["application/json"]}
|
|
||||||
)
|
)
|
||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
defer.returnValue((response.code, body))
|
defer.returnValue((response.code, body))
|
||||||
@ -94,40 +93,34 @@ class TwistedHttpClient(HttpClient):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
if "Content-Type" not in headers_dict:
|
if "Content-Type" not in headers_dict:
|
||||||
raise defer.error(
|
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
||||||
RuntimeError("Must include Content-Type header for PUTs"))
|
|
||||||
|
|
||||||
return self._create_request(
|
return self._create_request(
|
||||||
"PUT",
|
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
||||||
url,
|
|
||||||
producer=_JsonProducer(json_data),
|
|
||||||
headers_dict=headers_dict
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def _create_get_request(self, url, headers_dict={}):
|
def _create_get_request(self, url, headers_dict={}):
|
||||||
""" Wrapper of _create_request to issue a GET request
|
""" Wrapper of _create_request to issue a GET request
|
||||||
"""
|
"""
|
||||||
return self._create_request(
|
return self._create_request("GET", url, headers_dict=headers_dict)
|
||||||
"GET",
|
|
||||||
url,
|
|
||||||
headers_dict=headers_dict
|
|
||||||
)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def do_request(self, method, url, data=None, qparams=None, jsonreq=True, headers={}):
|
def do_request(
|
||||||
|
self, method, url, data=None, qparams=None, jsonreq=True, headers={}
|
||||||
|
):
|
||||||
if qparams:
|
if qparams:
|
||||||
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
|
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
|
||||||
|
|
||||||
if jsonreq:
|
if jsonreq:
|
||||||
prod = _JsonProducer(data)
|
prod = _JsonProducer(data)
|
||||||
headers['Content-Type'] = ["application/json"];
|
headers["Content-Type"] = ["application/json"]
|
||||||
else:
|
else:
|
||||||
prod = _RawProducer(data)
|
prod = _RawProducer(data)
|
||||||
|
|
||||||
if method in ["POST", "PUT"]:
|
if method in ["POST", "PUT"]:
|
||||||
response = yield self._create_request(method, url,
|
response = yield self._create_request(
|
||||||
producer=prod,
|
method, url, producer=prod, headers_dict=headers
|
||||||
headers_dict=headers)
|
)
|
||||||
else:
|
else:
|
||||||
response = yield self._create_request(method, url)
|
response = yield self._create_request(method, url)
|
||||||
|
|
||||||
@ -141,27 +134,24 @@ class TwistedHttpClient(HttpClient):
|
|||||||
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
||||||
|
|
||||||
retries_left = 5
|
retries_left = 5
|
||||||
print "%s to %s with headers %s" % (method, url, headers_dict)
|
print("%s to %s with headers %s" % (method, url, headers_dict))
|
||||||
if self.verbose and producer:
|
if self.verbose and producer:
|
||||||
if "password" in producer.data:
|
if "password" in producer.data:
|
||||||
temp = producer.data["password"]
|
temp = producer.data["password"]
|
||||||
producer.data["password"] = "[REDACTED]"
|
producer.data["password"] = "[REDACTED]"
|
||||||
print json.dumps(producer.data, indent=4)
|
print(json.dumps(producer.data, indent=4))
|
||||||
producer.data["password"] = temp
|
producer.data["password"] = temp
|
||||||
else:
|
else:
|
||||||
print json.dumps(producer.data, indent=4)
|
print(json.dumps(producer.data, indent=4))
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
response = yield self.agent.request(
|
response = yield self.agent.request(
|
||||||
method,
|
method, url.encode("UTF8"), Headers(headers_dict), producer
|
||||||
url.encode("UTF8"),
|
|
||||||
Headers(headers_dict),
|
|
||||||
producer
|
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print "uh oh: %s" % e
|
print("uh oh: %s" % e)
|
||||||
if retries_left:
|
if retries_left:
|
||||||
yield self.sleep(2 ** (5 - retries_left))
|
yield self.sleep(2 ** (5 - retries_left))
|
||||||
retries_left -= 1
|
retries_left -= 1
|
||||||
@ -169,8 +159,8 @@ class TwistedHttpClient(HttpClient):
|
|||||||
raise e
|
raise e
|
||||||
|
|
||||||
if self.verbose:
|
if self.verbose:
|
||||||
print "Status %s %s" % (response.code, response.phrase)
|
print("Status %s %s" % (response.code, response.phrase))
|
||||||
print pformat(list(response.headers.getAllRawHeaders()))
|
print(pformat(list(response.headers.getAllRawHeaders())))
|
||||||
defer.returnValue(response)
|
defer.returnValue(response)
|
||||||
|
|
||||||
def sleep(self, seconds):
|
def sleep(self, seconds):
|
||||||
@ -178,6 +168,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
reactor.callLater(seconds, d.callback, seconds)
|
reactor.callLater(seconds, d.callback, seconds)
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
class _RawProducer(object):
|
class _RawProducer(object):
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self.data = data
|
self.data = data
|
||||||
@ -194,9 +185,11 @@ class _RawProducer(object):
|
|||||||
def stopProducing(self):
|
def stopProducing(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class _JsonProducer(object):
|
class _JsonProducer(object):
|
||||||
""" Used by the twisted http client to create the HTTP body from json
|
""" Used by the twisted http client to create the HTTP body from json
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, jsn):
|
def __init__(self, jsn):
|
||||||
self.data = jsn
|
self.data = jsn
|
||||||
self.body = json.dumps(jsn).encode("utf8")
|
self.body = json.dumps(jsn).encode("utf8")
|
||||||
|
@ -19,13 +19,13 @@ from curses.ascii import isprint
|
|||||||
from twisted.internet import reactor
|
from twisted.internet import reactor
|
||||||
|
|
||||||
|
|
||||||
class CursesStdIO():
|
class CursesStdIO:
|
||||||
def __init__(self, stdscr, callback=None):
|
def __init__(self, stdscr, callback=None):
|
||||||
self.statusText = "Synapse test app -"
|
self.statusText = "Synapse test app -"
|
||||||
self.searchText = ''
|
self.searchText = ""
|
||||||
self.stdscr = stdscr
|
self.stdscr = stdscr
|
||||||
|
|
||||||
self.logLine = ''
|
self.logLine = ""
|
||||||
|
|
||||||
self.callback = callback
|
self.callback = callback
|
||||||
|
|
||||||
@ -71,8 +71,7 @@ class CursesStdIO():
|
|||||||
i = 0
|
i = 0
|
||||||
index = len(self.lines) - 1
|
index = len(self.lines) - 1
|
||||||
while i < (self.rows - 3) and index >= 0:
|
while i < (self.rows - 3) and index >= 0:
|
||||||
self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index],
|
self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index], curses.A_NORMAL)
|
||||||
curses.A_NORMAL)
|
|
||||||
i = i + 1
|
i = i + 1
|
||||||
index = index - 1
|
index = index - 1
|
||||||
|
|
||||||
@ -85,15 +84,13 @@ class CursesStdIO():
|
|||||||
raise RuntimeError("TextTooLongError")
|
raise RuntimeError("TextTooLongError")
|
||||||
|
|
||||||
self.stdscr.addstr(
|
self.stdscr.addstr(
|
||||||
self.rows - 2, 0,
|
self.rows - 2, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
|
||||||
text + ' ' * (self.cols - len(text)),
|
)
|
||||||
curses.A_STANDOUT)
|
|
||||||
|
|
||||||
def printLogLine(self, text):
|
def printLogLine(self, text):
|
||||||
self.stdscr.addstr(
|
self.stdscr.addstr(
|
||||||
0, 0,
|
0, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
|
||||||
text + ' ' * (self.cols - len(text)),
|
)
|
||||||
curses.A_STANDOUT)
|
|
||||||
|
|
||||||
def doRead(self):
|
def doRead(self):
|
||||||
""" Input is ready! """
|
""" Input is ready! """
|
||||||
@ -105,7 +102,7 @@ class CursesStdIO():
|
|||||||
|
|
||||||
elif c == curses.KEY_ENTER or c == 10:
|
elif c == curses.KEY_ENTER or c == 10:
|
||||||
text = self.searchText
|
text = self.searchText
|
||||||
self.searchText = ''
|
self.searchText = ""
|
||||||
|
|
||||||
self.print_line(">> %s" % text)
|
self.print_line(">> %s" % text)
|
||||||
|
|
||||||
@ -122,11 +119,13 @@ class CursesStdIO():
|
|||||||
return
|
return
|
||||||
self.searchText = self.searchText + chr(c)
|
self.searchText = self.searchText + chr(c)
|
||||||
|
|
||||||
self.stdscr.addstr(self.rows - 1, 0,
|
self.stdscr.addstr(
|
||||||
self.searchText + (' ' * (
|
self.rows - 1,
|
||||||
self.cols - len(self.searchText) - 2)))
|
0,
|
||||||
|
self.searchText + (" " * (self.cols - len(self.searchText) - 2)),
|
||||||
|
)
|
||||||
|
|
||||||
self.paintStatus(self.statusText + ' %d' % len(self.searchText))
|
self.paintStatus(self.statusText + " %d" % len(self.searchText))
|
||||||
self.stdscr.move(self.rows - 1, len(self.searchText))
|
self.stdscr.move(self.rows - 1, len(self.searchText))
|
||||||
self.stdscr.refresh()
|
self.stdscr.refresh()
|
||||||
|
|
||||||
@ -143,7 +142,6 @@ class CursesStdIO():
|
|||||||
|
|
||||||
|
|
||||||
class Callback(object):
|
class Callback(object):
|
||||||
|
|
||||||
def __init__(self, stdio):
|
def __init__(self, stdio):
|
||||||
self.stdio = stdio
|
self.stdio = stdio
|
||||||
|
|
||||||
@ -152,7 +150,7 @@ class Callback(object):
|
|||||||
|
|
||||||
|
|
||||||
def main(stdscr):
|
def main(stdscr):
|
||||||
screen = CursesStdIO(stdscr) # create Screen object
|
screen = CursesStdIO(stdscr) # create Screen object
|
||||||
|
|
||||||
callback = Callback(screen)
|
callback = Callback(screen)
|
||||||
|
|
||||||
@ -164,5 +162,5 @@ def main(stdscr):
|
|||||||
screen.close()
|
screen.close()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == "__main__":
|
||||||
curses.wrapper(main)
|
curses.wrapper(main)
|
||||||
|
@ -28,9 +28,7 @@ Currently assumes the local address is localhost:<port>
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
from synapse.federation import (
|
from synapse.federation import ReplicationHandler
|
||||||
ReplicationHandler
|
|
||||||
)
|
|
||||||
|
|
||||||
from synapse.federation.units import Pdu
|
from synapse.federation.units import Pdu
|
||||||
|
|
||||||
@ -38,7 +36,7 @@ from synapse.util import origin_from_ucid
|
|||||||
|
|
||||||
from synapse.app.homeserver import SynapseHomeServer
|
from synapse.app.homeserver import SynapseHomeServer
|
||||||
|
|
||||||
#from synapse.util.logutils import log_function
|
# from synapse.util.logutils import log_function
|
||||||
|
|
||||||
from twisted.internet import reactor, defer
|
from twisted.internet import reactor, defer
|
||||||
from twisted.python import log
|
from twisted.python import log
|
||||||
@ -83,7 +81,7 @@ class InputOutput(object):
|
|||||||
room_name, = m.groups()
|
room_name, = m.groups()
|
||||||
self.print_line("%s joining %s" % (self.user, room_name))
|
self.print_line("%s joining %s" % (self.user, room_name))
|
||||||
self.server.join_room(room_name, self.user, self.user)
|
self.server.join_room(room_name, self.user, self.user)
|
||||||
#self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match("^invite (\S+) (\S+)$", line)
|
m = re.match("^invite (\S+) (\S+)$", line)
|
||||||
@ -92,7 +90,7 @@ class InputOutput(object):
|
|||||||
room_name, invitee = m.groups()
|
room_name, invitee = m.groups()
|
||||||
self.print_line("%s invited to %s" % (invitee, room_name))
|
self.print_line("%s invited to %s" % (invitee, room_name))
|
||||||
self.server.invite_to_room(room_name, self.user, invitee)
|
self.server.invite_to_room(room_name, self.user, invitee)
|
||||||
#self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match("^send (\S+) (.*)$", line)
|
m = re.match("^send (\S+) (.*)$", line)
|
||||||
@ -101,7 +99,7 @@ class InputOutput(object):
|
|||||||
room_name, body = m.groups()
|
room_name, body = m.groups()
|
||||||
self.print_line("%s send to %s" % (self.user, room_name))
|
self.print_line("%s send to %s" % (self.user, room_name))
|
||||||
self.server.send_message(room_name, self.user, body)
|
self.server.send_message(room_name, self.user, body)
|
||||||
#self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match("^backfill (\S+)$", line)
|
m = re.match("^backfill (\S+)$", line)
|
||||||
@ -125,7 +123,6 @@ class InputOutput(object):
|
|||||||
|
|
||||||
|
|
||||||
class IOLoggerHandler(logging.Handler):
|
class IOLoggerHandler(logging.Handler):
|
||||||
|
|
||||||
def __init__(self, io):
|
def __init__(self, io):
|
||||||
logging.Handler.__init__(self)
|
logging.Handler.__init__(self)
|
||||||
self.io = io
|
self.io = io
|
||||||
@ -142,6 +139,7 @@ class Room(object):
|
|||||||
""" Used to store (in memory) the current membership state of a room, and
|
""" Used to store (in memory) the current membership state of a room, and
|
||||||
which home servers we should send PDUs associated with the room to.
|
which home servers we should send PDUs associated with the room to.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, room_name):
|
def __init__(self, room_name):
|
||||||
self.room_name = room_name
|
self.room_name = room_name
|
||||||
self.invited = set()
|
self.invited = set()
|
||||||
@ -175,6 +173,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
""" A very basic home server implentation that allows people to join a
|
""" A very basic home server implentation that allows people to join a
|
||||||
room and then invite other people.
|
room and then invite other people.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, server_name, replication_layer, output):
|
def __init__(self, server_name, replication_layer, output):
|
||||||
self.server_name = server_name
|
self.server_name = server_name
|
||||||
self.replication_layer = replication_layer
|
self.replication_layer = replication_layer
|
||||||
@ -197,26 +196,27 @@ class HomeServer(ReplicationHandler):
|
|||||||
elif pdu.content["membership"] == "invite":
|
elif pdu.content["membership"] == "invite":
|
||||||
self._on_invite(pdu.origin, pdu.context, pdu.state_key)
|
self._on_invite(pdu.origin, pdu.context, pdu.state_key)
|
||||||
else:
|
else:
|
||||||
self.output.print_line("#%s (unrec) %s = %s" %
|
self.output.print_line(
|
||||||
(pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
"#%s (unrec) %s = %s"
|
||||||
|
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||||
)
|
)
|
||||||
|
|
||||||
#def on_state_change(self, pdu):
|
# def on_state_change(self, pdu):
|
||||||
##self.output.print_line("#%s (state) %s *** %s" %
|
##self.output.print_line("#%s (state) %s *** %s" %
|
||||||
##(pdu.context, pdu.state_key, pdu.pdu_type)
|
##(pdu.context, pdu.state_key, pdu.pdu_type)
|
||||||
##)
|
##)
|
||||||
|
|
||||||
#if "joinee" in pdu.content:
|
# if "joinee" in pdu.content:
|
||||||
#self._on_join(pdu.context, pdu.content["joinee"])
|
# self._on_join(pdu.context, pdu.content["joinee"])
|
||||||
#elif "invitee" in pdu.content:
|
# elif "invitee" in pdu.content:
|
||||||
#self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
|
# self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
|
||||||
|
|
||||||
def _on_message(self, pdu):
|
def _on_message(self, pdu):
|
||||||
""" We received a message
|
""" We received a message
|
||||||
"""
|
"""
|
||||||
self.output.print_line("#%s %s %s" %
|
self.output.print_line(
|
||||||
(pdu.context, pdu.content["sender"], pdu.content["body"])
|
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||||
)
|
)
|
||||||
|
|
||||||
def _on_join(self, context, joinee):
|
def _on_join(self, context, joinee):
|
||||||
""" Someone has joined a room, either a remote user or a local user
|
""" Someone has joined a room, either a remote user or a local user
|
||||||
@ -224,9 +224,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
room = self._get_or_create_room(context)
|
room = self._get_or_create_room(context)
|
||||||
room.add_participant(joinee)
|
room.add_participant(joinee)
|
||||||
|
|
||||||
self.output.print_line("#%s %s %s" %
|
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
||||||
(context, joinee, "*** JOINED")
|
|
||||||
)
|
|
||||||
|
|
||||||
def _on_invite(self, origin, context, invitee):
|
def _on_invite(self, origin, context, invitee):
|
||||||
""" Someone has been invited
|
""" Someone has been invited
|
||||||
@ -234,9 +232,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
room = self._get_or_create_room(context)
|
room = self._get_or_create_room(context)
|
||||||
room.add_invited(invitee)
|
room.add_invited(invitee)
|
||||||
|
|
||||||
self.output.print_line("#%s %s %s" %
|
self.output.print_line("#%s %s %s" % (context, invitee, "*** INVITED"))
|
||||||
(context, invitee, "*** INVITED")
|
|
||||||
)
|
|
||||||
|
|
||||||
if not room.have_got_metadata and origin is not self.server_name:
|
if not room.have_got_metadata and origin is not self.server_name:
|
||||||
logger.debug("Get room state")
|
logger.debug("Get room state")
|
||||||
@ -272,14 +268,14 @@ class HomeServer(ReplicationHandler):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
pdu = Pdu.create_new(
|
pdu = Pdu.create_new(
|
||||||
context=room_name,
|
context=room_name,
|
||||||
pdu_type="sy.room.member",
|
pdu_type="sy.room.member",
|
||||||
is_state=True,
|
is_state=True,
|
||||||
state_key=joinee,
|
state_key=joinee,
|
||||||
content={"membership": "join"},
|
content={"membership": "join"},
|
||||||
origin=self.server_name,
|
origin=self.server_name,
|
||||||
destinations=destinations,
|
destinations=destinations,
|
||||||
)
|
)
|
||||||
yield self.replication_layer.send_pdu(pdu)
|
yield self.replication_layer.send_pdu(pdu)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.exception(e)
|
logger.exception(e)
|
||||||
@ -318,21 +314,21 @@ class HomeServer(ReplicationHandler):
|
|||||||
return self.replication_layer.backfill(dest, room_name, limit)
|
return self.replication_layer.backfill(dest, room_name, limit)
|
||||||
|
|
||||||
def _get_room_remote_servers(self, room_name):
|
def _get_room_remote_servers(self, room_name):
|
||||||
return [i for i in self.joined_rooms.setdefault(room_name,).servers]
|
return [i for i in self.joined_rooms.setdefault(room_name).servers]
|
||||||
|
|
||||||
def _get_or_create_room(self, room_name):
|
def _get_or_create_room(self, room_name):
|
||||||
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
||||||
|
|
||||||
def get_servers_for_context(self, context):
|
def get_servers_for_context(self, context):
|
||||||
return defer.succeed(
|
return defer.succeed(
|
||||||
self.joined_rooms.setdefault(context, Room(context)).servers
|
self.joined_rooms.setdefault(context, Room(context)).servers
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def main(stdscr):
|
def main(stdscr):
|
||||||
parser = argparse.ArgumentParser()
|
parser = argparse.ArgumentParser()
|
||||||
parser.add_argument('user', type=str)
|
parser.add_argument("user", type=str)
|
||||||
parser.add_argument('-v', '--verbose', action='count')
|
parser.add_argument("-v", "--verbose", action="count")
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
user = args.user
|
user = args.user
|
||||||
@ -342,8 +338,9 @@ def main(stdscr):
|
|||||||
|
|
||||||
root_logger = logging.getLogger()
|
root_logger = logging.getLogger()
|
||||||
|
|
||||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(lineno)d - '
|
formatter = logging.Formatter(
|
||||||
'%(levelname)s - %(message)s')
|
"%(asctime)s - %(name)s - %(lineno)d - " "%(levelname)s - %(message)s"
|
||||||
|
)
|
||||||
if not os.path.exists("logs"):
|
if not os.path.exists("logs"):
|
||||||
os.makedirs("logs")
|
os.makedirs("logs")
|
||||||
fh = logging.FileHandler("logs/%s" % user)
|
fh = logging.FileHandler("logs/%s" % user)
|
||||||
|
File diff suppressed because one or more lines are too long
@ -1,3 +1,5 @@
|
|||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -48,7 +50,7 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
c = colors.pop()
|
c = colors.pop()
|
||||||
color_map[o] = c
|
color_map[o] = c
|
||||||
except:
|
except:
|
||||||
print "Run out of colours!"
|
print("Run out of colours!")
|
||||||
color_map[o] = "black"
|
color_map[o] = "black"
|
||||||
|
|
||||||
graph = pydot.Dot(graph_name="Test")
|
graph = pydot.Dot(graph_name="Test")
|
||||||
@ -57,9 +59,9 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
name = make_name(pdu.get("pdu_id"), pdu.get("origin"))
|
name = make_name(pdu.get("pdu_id"), pdu.get("origin"))
|
||||||
pdu_map[name] = pdu
|
pdu_map[name] = pdu
|
||||||
|
|
||||||
t = datetime.datetime.fromtimestamp(
|
t = datetime.datetime.fromtimestamp(float(pdu["ts"]) / 1000).strftime(
|
||||||
float(pdu["ts"]) / 1000
|
"%Y-%m-%d %H:%M:%S,%f"
|
||||||
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
)
|
||||||
|
|
||||||
label = (
|
label = (
|
||||||
"<"
|
"<"
|
||||||
@ -79,11 +81,7 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
"depth": pdu.get("depth"),
|
"depth": pdu.get("depth"),
|
||||||
}
|
}
|
||||||
|
|
||||||
node = pydot.Node(
|
node = pydot.Node(name=name, label=label, color=color_map[pdu.get("origin")])
|
||||||
name=name,
|
|
||||||
label=label,
|
|
||||||
color=color_map[pdu.get("origin")]
|
|
||||||
)
|
|
||||||
node_map[name] = node
|
node_map[name] = node
|
||||||
graph.add_node(node)
|
graph.add_node(node)
|
||||||
|
|
||||||
@ -93,7 +91,7 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
end_name = make_name(i, o)
|
end_name = make_name(i, o)
|
||||||
|
|
||||||
if end_name not in node_map:
|
if end_name not in node_map:
|
||||||
print "%s not in nodes" % end_name
|
print("%s not in nodes" % end_name)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
edge = pydot.Edge(node_map[start_name], node_map[end_name])
|
edge = pydot.Edge(node_map[start_name], node_map[end_name])
|
||||||
@ -107,14 +105,13 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
|
|
||||||
if prev_state_name in node_map:
|
if prev_state_name in node_map:
|
||||||
state_edge = pydot.Edge(
|
state_edge = pydot.Edge(
|
||||||
node_map[start_name], node_map[prev_state_name],
|
node_map[start_name], node_map[prev_state_name], style="dotted"
|
||||||
style='dotted'
|
|
||||||
)
|
)
|
||||||
graph.add_edge(state_edge)
|
graph.add_edge(state_edge)
|
||||||
|
|
||||||
graph.write('%s.dot' % filename_prefix, format='raw', prog='dot')
|
graph.write("%s.dot" % filename_prefix, format="raw", prog="dot")
|
||||||
# graph.write_png("%s.png" % filename_prefix, prog='dot')
|
# graph.write_png("%s.png" % filename_prefix, prog='dot')
|
||||||
graph.write_svg("%s.svg" % filename_prefix, prog='dot')
|
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
|
||||||
|
|
||||||
|
|
||||||
def get_pdus(host, room):
|
def get_pdus(host, room):
|
||||||
@ -130,15 +127,14 @@ def get_pdus(host, room):
|
|||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description="Generate a PDU graph for a given room by talking "
|
description="Generate a PDU graph for a given room by talking "
|
||||||
"to the given homeserver to get the list of PDUs. \n"
|
"to the given homeserver to get the list of PDUs. \n"
|
||||||
"Requires pydot."
|
"Requires pydot."
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-p", "--prefix", dest="prefix",
|
"-p", "--prefix", dest="prefix", help="String to prefix output files with"
|
||||||
help="String to prefix output files with"
|
|
||||||
)
|
)
|
||||||
parser.add_argument('host')
|
parser.add_argument("host")
|
||||||
parser.add_argument('room')
|
parser.add_argument("room")
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
@ -36,10 +36,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
args = [room_id]
|
args = [room_id]
|
||||||
|
|
||||||
if limit:
|
if limit:
|
||||||
sql += (
|
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC " "LIMIT ?"
|
||||||
" ORDER BY topological_ordering DESC, stream_ordering DESC "
|
|
||||||
"LIMIT ?"
|
|
||||||
)
|
|
||||||
|
|
||||||
args.append(limit)
|
args.append(limit)
|
||||||
|
|
||||||
@ -56,9 +53,8 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
|
|
||||||
for event in events:
|
for event in events:
|
||||||
c = conn.execute(
|
c = conn.execute(
|
||||||
"SELECT state_group FROM event_to_state_groups "
|
"SELECT state_group FROM event_to_state_groups " "WHERE event_id = ?",
|
||||||
"WHERE event_id = ?",
|
(event.event_id,),
|
||||||
(event.event_id,)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
res = c.fetchone()
|
res = c.fetchone()
|
||||||
@ -69,7 +65,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
|
|
||||||
t = datetime.datetime.fromtimestamp(
|
t = datetime.datetime.fromtimestamp(
|
||||||
float(event.origin_server_ts) / 1000
|
float(event.origin_server_ts) / 1000
|
||||||
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
).strftime("%Y-%m-%d %H:%M:%S,%f")
|
||||||
|
|
||||||
content = json.dumps(unfreeze(event.get_dict()["content"]))
|
content = json.dumps(unfreeze(event.get_dict()["content"]))
|
||||||
|
|
||||||
@ -93,10 +89,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
"state_group": state_group,
|
"state_group": state_group,
|
||||||
}
|
}
|
||||||
|
|
||||||
node = pydot.Node(
|
node = pydot.Node(name=event.event_id, label=label)
|
||||||
name=event.event_id,
|
|
||||||
label=label,
|
|
||||||
)
|
|
||||||
|
|
||||||
node_map[event.event_id] = node
|
node_map[event.event_id] = node
|
||||||
graph.add_node(node)
|
graph.add_node(node)
|
||||||
@ -106,10 +99,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except:
|
except:
|
||||||
end_node = pydot.Node(
|
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||||
name=prev_id,
|
|
||||||
label="<<b>%s</b>>" % (prev_id,),
|
|
||||||
)
|
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
graph.add_node(end_node)
|
graph.add_node(end_node)
|
||||||
@ -121,36 +111,33 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
if len(event_ids) <= 1:
|
if len(event_ids) <= 1:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
cluster = pydot.Cluster(
|
cluster = pydot.Cluster(str(group), label="<State Group: %s>" % (str(group),))
|
||||||
str(group),
|
|
||||||
label="<State Group: %s>" % (str(group),)
|
|
||||||
)
|
|
||||||
|
|
||||||
for event_id in event_ids:
|
for event_id in event_ids:
|
||||||
cluster.add_node(node_map[event_id])
|
cluster.add_node(node_map[event_id])
|
||||||
|
|
||||||
graph.add_subgraph(cluster)
|
graph.add_subgraph(cluster)
|
||||||
|
|
||||||
graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
|
graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
|
||||||
graph.write_svg("%s.svg" % file_prefix, prog='dot')
|
graph.write_svg("%s.svg" % file_prefix, prog="dot")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description="Generate a PDU graph for a given room by talking "
|
description="Generate a PDU graph for a given room by talking "
|
||||||
"to the given homeserver to get the list of PDUs. \n"
|
"to the given homeserver to get the list of PDUs. \n"
|
||||||
"Requires pydot."
|
"Requires pydot."
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-p", "--prefix", dest="prefix",
|
"-p",
|
||||||
|
"--prefix",
|
||||||
|
dest="prefix",
|
||||||
help="String to prefix output files with",
|
help="String to prefix output files with",
|
||||||
default="graph_output"
|
default="graph_output",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
||||||
"-l", "--limit",
|
parser.add_argument("db")
|
||||||
help="Only retrieve the last N events.",
|
parser.add_argument("room")
|
||||||
)
|
|
||||||
parser.add_argument('db')
|
|
||||||
parser.add_argument('room')
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
# Copyright 2016 OpenMarket Ltd
|
# Copyright 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@ -26,22 +28,22 @@ from six import string_types
|
|||||||
|
|
||||||
|
|
||||||
def make_graph(file_name, room_id, file_prefix, limit):
|
def make_graph(file_name, room_id, file_prefix, limit):
|
||||||
print "Reading lines"
|
print("Reading lines")
|
||||||
with open(file_name) as f:
|
with open(file_name) as f:
|
||||||
lines = f.readlines()
|
lines = f.readlines()
|
||||||
|
|
||||||
print "Read lines"
|
print("Read lines")
|
||||||
|
|
||||||
events = [FrozenEvent(json.loads(line)) for line in lines]
|
events = [FrozenEvent(json.loads(line)) for line in lines]
|
||||||
|
|
||||||
print "Loaded events."
|
print("Loaded events.")
|
||||||
|
|
||||||
events.sort(key=lambda e: e.depth)
|
events.sort(key=lambda e: e.depth)
|
||||||
|
|
||||||
print "Sorted events"
|
print("Sorted events")
|
||||||
|
|
||||||
if limit:
|
if limit:
|
||||||
events = events[-int(limit):]
|
events = events[-int(limit) :]
|
||||||
|
|
||||||
node_map = {}
|
node_map = {}
|
||||||
|
|
||||||
@ -50,12 +52,12 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
|||||||
for event in events:
|
for event in events:
|
||||||
t = datetime.datetime.fromtimestamp(
|
t = datetime.datetime.fromtimestamp(
|
||||||
float(event.origin_server_ts) / 1000
|
float(event.origin_server_ts) / 1000
|
||||||
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
).strftime("%Y-%m-%d %H:%M:%S,%f")
|
||||||
|
|
||||||
content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
|
content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
|
||||||
content = content.replace("\n", "<br/>\n")
|
content = content.replace("\n", "<br/>\n")
|
||||||
|
|
||||||
print content
|
print(content)
|
||||||
content = []
|
content = []
|
||||||
for key, value in unfreeze(event.get_dict()["content"]).items():
|
for key, value in unfreeze(event.get_dict()["content"]).items():
|
||||||
if value is None:
|
if value is None:
|
||||||
@ -66,15 +68,16 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
|||||||
value = json.dumps(value)
|
value = json.dumps(value)
|
||||||
|
|
||||||
content.append(
|
content.append(
|
||||||
"<b>%s</b>: %s," % (
|
"<b>%s</b>: %s,"
|
||||||
cgi.escape(key, quote=True).encode("ascii", 'xmlcharrefreplace'),
|
% (
|
||||||
cgi.escape(value, quote=True).encode("ascii", 'xmlcharrefreplace'),
|
cgi.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||||
|
cgi.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
content = "<br/>\n".join(content)
|
content = "<br/>\n".join(content)
|
||||||
|
|
||||||
print content
|
print(content)
|
||||||
|
|
||||||
label = (
|
label = (
|
||||||
"<"
|
"<"
|
||||||
@ -94,25 +97,19 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
|||||||
"depth": event.depth,
|
"depth": event.depth,
|
||||||
}
|
}
|
||||||
|
|
||||||
node = pydot.Node(
|
node = pydot.Node(name=event.event_id, label=label)
|
||||||
name=event.event_id,
|
|
||||||
label=label,
|
|
||||||
)
|
|
||||||
|
|
||||||
node_map[event.event_id] = node
|
node_map[event.event_id] = node
|
||||||
graph.add_node(node)
|
graph.add_node(node)
|
||||||
|
|
||||||
print "Created Nodes"
|
print("Created Nodes")
|
||||||
|
|
||||||
for event in events:
|
for event in events:
|
||||||
for prev_id, _ in event.prev_events:
|
for prev_id, _ in event.prev_events:
|
||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except:
|
except:
|
||||||
end_node = pydot.Node(
|
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||||
name=prev_id,
|
|
||||||
label="<<b>%s</b>>" % (prev_id,),
|
|
||||||
)
|
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
graph.add_node(end_node)
|
graph.add_node(end_node)
|
||||||
@ -120,33 +117,33 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
|||||||
edge = pydot.Edge(node_map[event.event_id], end_node)
|
edge = pydot.Edge(node_map[event.event_id], end_node)
|
||||||
graph.add_edge(edge)
|
graph.add_edge(edge)
|
||||||
|
|
||||||
print "Created edges"
|
print("Created edges")
|
||||||
|
|
||||||
graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
|
graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
|
||||||
|
|
||||||
print "Created Dot"
|
print("Created Dot")
|
||||||
|
|
||||||
graph.write_svg("%s.svg" % file_prefix, prog='dot')
|
graph.write_svg("%s.svg" % file_prefix, prog="dot")
|
||||||
|
|
||||||
|
print("Created svg")
|
||||||
|
|
||||||
print "Created svg"
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description="Generate a PDU graph for a given room by reading "
|
description="Generate a PDU graph for a given room by reading "
|
||||||
"from a file with line deliminated events. \n"
|
"from a file with line deliminated events. \n"
|
||||||
"Requires pydot."
|
"Requires pydot."
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-p", "--prefix", dest="prefix",
|
"-p",
|
||||||
|
"--prefix",
|
||||||
|
dest="prefix",
|
||||||
help="String to prefix output files with",
|
help="String to prefix output files with",
|
||||||
default="graph_output"
|
default="graph_output",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
||||||
"-l", "--limit",
|
parser.add_argument("event_file")
|
||||||
help="Only retrieve the last N events.",
|
parser.add_argument("room")
|
||||||
)
|
|
||||||
parser.add_argument('event_file')
|
|
||||||
parser.add_argument('room')
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
@ -8,8 +8,9 @@ we set the remote SDP at which point the stream ends. Our video never gets to
|
|||||||
the bridge.
|
the bridge.
|
||||||
|
|
||||||
Requires:
|
Requires:
|
||||||
npm install jquery jsdom
|
npm install jquery jsdom
|
||||||
"""
|
"""
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import gevent
|
import gevent
|
||||||
import grequests
|
import grequests
|
||||||
@ -19,24 +20,25 @@ import urllib
|
|||||||
import subprocess
|
import subprocess
|
||||||
import time
|
import time
|
||||||
|
|
||||||
#ACCESS_TOKEN="" #
|
# ACCESS_TOKEN="" #
|
||||||
|
|
||||||
MATRIXBASE = 'https://matrix.org/_matrix/client/api/v1/'
|
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
||||||
MYUSERNAME = '@davetest:matrix.org'
|
MYUSERNAME = "@davetest:matrix.org"
|
||||||
|
|
||||||
HTTPBIND = 'https://meet.jit.si/http-bind'
|
HTTPBIND = "https://meet.jit.si/http-bind"
|
||||||
#HTTPBIND = 'https://jitsi.vuc.me/http-bind'
|
# HTTPBIND = 'https://jitsi.vuc.me/http-bind'
|
||||||
#ROOMNAME = "matrix"
|
# ROOMNAME = "matrix"
|
||||||
ROOMNAME = "pibble"
|
ROOMNAME = "pibble"
|
||||||
|
|
||||||
HOST="guest.jit.si"
|
HOST = "guest.jit.si"
|
||||||
#HOST="jitsi.vuc.me"
|
# HOST="jitsi.vuc.me"
|
||||||
|
|
||||||
TURNSERVER="turn.guest.jit.si"
|
TURNSERVER = "turn.guest.jit.si"
|
||||||
#TURNSERVER="turn.jitsi.vuc.me"
|
# TURNSERVER="turn.jitsi.vuc.me"
|
||||||
|
|
||||||
|
ROOMDOMAIN = "meet.jit.si"
|
||||||
|
# ROOMDOMAIN="conference.jitsi.vuc.me"
|
||||||
|
|
||||||
ROOMDOMAIN="meet.jit.si"
|
|
||||||
#ROOMDOMAIN="conference.jitsi.vuc.me"
|
|
||||||
|
|
||||||
class TrivialMatrixClient:
|
class TrivialMatrixClient:
|
||||||
def __init__(self, access_token):
|
def __init__(self, access_token):
|
||||||
@ -45,38 +47,50 @@ class TrivialMatrixClient:
|
|||||||
|
|
||||||
def getEvent(self):
|
def getEvent(self):
|
||||||
while True:
|
while True:
|
||||||
url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
|
url = (
|
||||||
|
MATRIXBASE
|
||||||
|
+ "events?access_token="
|
||||||
|
+ self.access_token
|
||||||
|
+ "&timeout=60000"
|
||||||
|
)
|
||||||
if self.token:
|
if self.token:
|
||||||
url += "&from="+self.token
|
url += "&from=" + self.token
|
||||||
req = grequests.get(url)
|
req = grequests.get(url)
|
||||||
resps = grequests.map([req])
|
resps = grequests.map([req])
|
||||||
obj = json.loads(resps[0].content)
|
obj = json.loads(resps[0].content)
|
||||||
print "incoming from matrix",obj
|
print("incoming from matrix", obj)
|
||||||
if 'end' not in obj:
|
if "end" not in obj:
|
||||||
continue
|
continue
|
||||||
self.token = obj['end']
|
self.token = obj["end"]
|
||||||
if len(obj['chunk']):
|
if len(obj["chunk"]):
|
||||||
return obj['chunk'][0]
|
return obj["chunk"][0]
|
||||||
|
|
||||||
def joinRoom(self, roomId):
|
def joinRoom(self, roomId):
|
||||||
url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
|
url = MATRIXBASE + "rooms/" + roomId + "/join?access_token=" + self.access_token
|
||||||
print url
|
print(url)
|
||||||
headers={ 'Content-Type': 'application/json' }
|
headers = {"Content-Type": "application/json"}
|
||||||
req = grequests.post(url, headers=headers, data='{}')
|
req = grequests.post(url, headers=headers, data="{}")
|
||||||
resps = grequests.map([req])
|
resps = grequests.map([req])
|
||||||
obj = json.loads(resps[0].content)
|
obj = json.loads(resps[0].content)
|
||||||
print "response: ",obj
|
print("response: ", obj)
|
||||||
|
|
||||||
def sendEvent(self, roomId, evType, event):
|
def sendEvent(self, roomId, evType, event):
|
||||||
url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
|
url = (
|
||||||
print url
|
MATRIXBASE
|
||||||
print json.dumps(event)
|
+ "rooms/"
|
||||||
headers={ 'Content-Type': 'application/json' }
|
+ roomId
|
||||||
|
+ "/send/"
|
||||||
|
+ evType
|
||||||
|
+ "?access_token="
|
||||||
|
+ self.access_token
|
||||||
|
)
|
||||||
|
print(url)
|
||||||
|
print(json.dumps(event))
|
||||||
|
headers = {"Content-Type": "application/json"}
|
||||||
req = grequests.post(url, headers=headers, data=json.dumps(event))
|
req = grequests.post(url, headers=headers, data=json.dumps(event))
|
||||||
resps = grequests.map([req])
|
resps = grequests.map([req])
|
||||||
obj = json.loads(resps[0].content)
|
obj = json.loads(resps[0].content)
|
||||||
print "response: ",obj
|
print("response: ", obj)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
xmppClients = {}
|
xmppClients = {}
|
||||||
@ -85,39 +99,40 @@ xmppClients = {}
|
|||||||
def matrixLoop():
|
def matrixLoop():
|
||||||
while True:
|
while True:
|
||||||
ev = matrixCli.getEvent()
|
ev = matrixCli.getEvent()
|
||||||
print ev
|
print(ev)
|
||||||
if ev['type'] == 'm.room.member':
|
if ev["type"] == "m.room.member":
|
||||||
print 'membership event'
|
print("membership event")
|
||||||
if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
|
if ev["membership"] == "invite" and ev["state_key"] == MYUSERNAME:
|
||||||
roomId = ev['room_id']
|
roomId = ev["room_id"]
|
||||||
print "joining room %s" % (roomId)
|
print("joining room %s" % (roomId))
|
||||||
matrixCli.joinRoom(roomId)
|
matrixCli.joinRoom(roomId)
|
||||||
elif ev['type'] == 'm.room.message':
|
elif ev["type"] == "m.room.message":
|
||||||
if ev['room_id'] in xmppClients:
|
if ev["room_id"] in xmppClients:
|
||||||
print "already have a bridge for that user, ignoring"
|
print("already have a bridge for that user, ignoring")
|
||||||
continue
|
continue
|
||||||
print "got message, connecting"
|
print("got message, connecting")
|
||||||
xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
xmppClients[ev["room_id"]] = TrivialXmppClient(ev["room_id"], ev["user_id"])
|
||||||
gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
gevent.spawn(xmppClients[ev["room_id"]].xmppLoop)
|
||||||
elif ev['type'] == 'm.call.invite':
|
elif ev["type"] == "m.call.invite":
|
||||||
print "Incoming call"
|
print("Incoming call")
|
||||||
#sdp = ev['content']['offer']['sdp']
|
# sdp = ev['content']['offer']['sdp']
|
||||||
#print "sdp: %s" % (sdp)
|
# print "sdp: %s" % (sdp)
|
||||||
#xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
# xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||||
#gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
# gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||||
elif ev['type'] == 'm.call.answer':
|
elif ev["type"] == "m.call.answer":
|
||||||
print "Call answered"
|
print("Call answered")
|
||||||
sdp = ev['content']['answer']['sdp']
|
sdp = ev["content"]["answer"]["sdp"]
|
||||||
if ev['room_id'] not in xmppClients:
|
if ev["room_id"] not in xmppClients:
|
||||||
print "We didn't have a call for that room"
|
print("We didn't have a call for that room")
|
||||||
continue
|
continue
|
||||||
# should probably check call ID too
|
# should probably check call ID too
|
||||||
xmppCli = xmppClients[ev['room_id']]
|
xmppCli = xmppClients[ev["room_id"]]
|
||||||
xmppCli.sendAnswer(sdp)
|
xmppCli.sendAnswer(sdp)
|
||||||
elif ev['type'] == 'm.call.hangup':
|
elif ev["type"] == "m.call.hangup":
|
||||||
if ev['room_id'] in xmppClients:
|
if ev["room_id"] in xmppClients:
|
||||||
xmppClients[ev['room_id']].stop()
|
xmppClients[ev["room_id"]].stop()
|
||||||
del xmppClients[ev['room_id']]
|
del xmppClients[ev["room_id"]]
|
||||||
|
|
||||||
|
|
||||||
class TrivialXmppClient:
|
class TrivialXmppClient:
|
||||||
def __init__(self, matrixRoom, userId):
|
def __init__(self, matrixRoom, userId):
|
||||||
@ -131,130 +146,155 @@ class TrivialXmppClient:
|
|||||||
|
|
||||||
def nextRid(self):
|
def nextRid(self):
|
||||||
self.rid += 1
|
self.rid += 1
|
||||||
return '%d' % (self.rid)
|
return "%d" % (self.rid)
|
||||||
|
|
||||||
def sendIq(self, xml):
|
def sendIq(self, xml):
|
||||||
fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
|
fullXml = (
|
||||||
#print "\t>>>%s" % (fullXml)
|
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>"
|
||||||
|
% (self.nextRid(), self.sid, xml)
|
||||||
|
)
|
||||||
|
# print "\t>>>%s" % (fullXml)
|
||||||
return self.xmppPoke(fullXml)
|
return self.xmppPoke(fullXml)
|
||||||
|
|
||||||
def xmppPoke(self, xml):
|
def xmppPoke(self, xml):
|
||||||
headers = {'Content-Type': 'application/xml'}
|
headers = {"Content-Type": "application/xml"}
|
||||||
req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
|
req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
|
||||||
resps = grequests.map([req])
|
resps = grequests.map([req])
|
||||||
obj = BeautifulSoup(resps[0].content)
|
obj = BeautifulSoup(resps[0].content)
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
def sendAnswer(self, answer):
|
def sendAnswer(self, answer):
|
||||||
print "sdp from matrix client",answer
|
print("sdp from matrix client", answer)
|
||||||
p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
p = subprocess.Popen(
|
||||||
|
["node", "unjingle/unjingle.js", "--sdp"],
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
)
|
||||||
jingle, out_err = p.communicate(answer)
|
jingle, out_err = p.communicate(answer)
|
||||||
jingle = jingle % {
|
jingle = jingle % {
|
||||||
'tojid': self.callfrom,
|
"tojid": self.callfrom,
|
||||||
'action': 'session-accept',
|
"action": "session-accept",
|
||||||
'initiator': self.callfrom,
|
"initiator": self.callfrom,
|
||||||
'responder': self.jid,
|
"responder": self.jid,
|
||||||
'sid': self.callsid
|
"sid": self.callsid,
|
||||||
}
|
}
|
||||||
print "answer jingle from sdp",jingle
|
print("answer jingle from sdp", jingle)
|
||||||
res = self.sendIq(jingle)
|
res = self.sendIq(jingle)
|
||||||
print "reply from answer: ",res
|
print("reply from answer: ", res)
|
||||||
|
|
||||||
self.ssrcs = {}
|
self.ssrcs = {}
|
||||||
jingleSoup = BeautifulSoup(jingle)
|
jingleSoup = BeautifulSoup(jingle)
|
||||||
for cont in jingleSoup.iq.jingle.findAll('content'):
|
for cont in jingleSoup.iq.jingle.findAll("content"):
|
||||||
if cont.description:
|
if cont.description:
|
||||||
self.ssrcs[cont['name']] = cont.description['ssrc']
|
self.ssrcs[cont["name"]] = cont.description["ssrc"]
|
||||||
print "my ssrcs:",self.ssrcs
|
print("my ssrcs:", self.ssrcs)
|
||||||
|
|
||||||
gevent.joinall([
|
gevent.joinall([gevent.spawn(self.advertiseSsrcs)])
|
||||||
gevent.spawn(self.advertiseSsrcs)
|
|
||||||
])
|
|
||||||
|
|
||||||
def advertiseSsrcs(self):
|
def advertiseSsrcs(self):
|
||||||
time.sleep(7)
|
time.sleep(7)
|
||||||
print "SSRC spammer started"
|
print("SSRC spammer started")
|
||||||
while self.running:
|
while self.running:
|
||||||
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
|
ssrcMsg = (
|
||||||
|
"<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
|
||||||
|
% {
|
||||||
|
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||||
|
"nick": self.userId,
|
||||||
|
"assrc": self.ssrcs["audio"],
|
||||||
|
"vssrc": self.ssrcs["video"],
|
||||||
|
}
|
||||||
|
)
|
||||||
res = self.sendIq(ssrcMsg)
|
res = self.sendIq(ssrcMsg)
|
||||||
print "reply from ssrc announce: ",res
|
print("reply from ssrc announce: ", res)
|
||||||
time.sleep(10)
|
time.sleep(10)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def xmppLoop(self):
|
def xmppLoop(self):
|
||||||
self.matrixCallId = time.time()
|
self.matrixCallId = time.time()
|
||||||
res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))
|
res = self.xmppPoke(
|
||||||
|
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>"
|
||||||
|
% (self.nextRid(), HOST)
|
||||||
|
)
|
||||||
|
|
||||||
print res
|
print(res)
|
||||||
self.sid = res.body['sid']
|
self.sid = res.body["sid"]
|
||||||
print "sid %s" % (self.sid)
|
print("sid %s" % (self.sid))
|
||||||
|
|
||||||
res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")
|
res = self.sendIq(
|
||||||
|
"<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>"
|
||||||
|
)
|
||||||
|
|
||||||
res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))
|
res = self.xmppPoke(
|
||||||
|
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>"
|
||||||
|
% (self.nextRid(), self.sid, HOST)
|
||||||
|
)
|
||||||
|
|
||||||
res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
|
res = self.sendIq(
|
||||||
print res
|
"<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>"
|
||||||
|
)
|
||||||
|
print(res)
|
||||||
|
|
||||||
self.jid = res.body.iq.bind.jid.string
|
self.jid = res.body.iq.bind.jid.string
|
||||||
print "jid: %s" % (self.jid)
|
print("jid: %s" % (self.jid))
|
||||||
self.shortJid = self.jid.split('-')[0]
|
self.shortJid = self.jid.split("-")[0]
|
||||||
|
|
||||||
res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")
|
res = self.sendIq(
|
||||||
|
"<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>"
|
||||||
|
)
|
||||||
|
|
||||||
#randomthing = res.body.iq['to']
|
# randomthing = res.body.iq['to']
|
||||||
#whatsitpart = randomthing.split('-')[0]
|
# whatsitpart = randomthing.split('-')[0]
|
||||||
|
|
||||||
#print "other random bind thing: %s" % (randomthing)
|
# print "other random bind thing: %s" % (randomthing)
|
||||||
|
|
||||||
# advertise preence to the jitsi room, with our nick
|
# advertise preence to the jitsi room, with our nick
|
||||||
res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
|
res = self.sendIq(
|
||||||
self.muc = {'users': []}
|
"<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>"
|
||||||
for p in res.body.findAll('presence'):
|
% (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId)
|
||||||
|
)
|
||||||
|
self.muc = {"users": []}
|
||||||
|
for p in res.body.findAll("presence"):
|
||||||
u = {}
|
u = {}
|
||||||
u['shortJid'] = p['from'].split('/')[1]
|
u["shortJid"] = p["from"].split("/")[1]
|
||||||
if p.c and p.c.nick:
|
if p.c and p.c.nick:
|
||||||
u['nick'] = p.c.nick.string
|
u["nick"] = p.c.nick.string
|
||||||
self.muc['users'].append(u)
|
self.muc["users"].append(u)
|
||||||
print "muc: ",self.muc
|
print("muc: ", self.muc)
|
||||||
|
|
||||||
# wait for stuff
|
# wait for stuff
|
||||||
while True:
|
while True:
|
||||||
print "waiting..."
|
print("waiting...")
|
||||||
res = self.sendIq("")
|
res = self.sendIq("")
|
||||||
print "got from stream: ",res
|
print("got from stream: ", res)
|
||||||
if res.body.iq:
|
if res.body.iq:
|
||||||
jingles = res.body.iq.findAll('jingle')
|
jingles = res.body.iq.findAll("jingle")
|
||||||
if len(jingles):
|
if len(jingles):
|
||||||
self.callfrom = res.body.iq['from']
|
self.callfrom = res.body.iq["from"]
|
||||||
self.handleInvite(jingles[0])
|
self.handleInvite(jingles[0])
|
||||||
elif 'type' in res.body and res.body['type'] == 'terminate':
|
elif "type" in res.body and res.body["type"] == "terminate":
|
||||||
self.running = False
|
self.running = False
|
||||||
del xmppClients[self.matrixRoom]
|
del xmppClients[self.matrixRoom]
|
||||||
return
|
return
|
||||||
|
|
||||||
def handleInvite(self, jingle):
|
def handleInvite(self, jingle):
|
||||||
self.initiator = jingle['initiator']
|
self.initiator = jingle["initiator"]
|
||||||
self.callsid = jingle['sid']
|
self.callsid = jingle["sid"]
|
||||||
p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
p = subprocess.Popen(
|
||||||
print "raw jingle invite",str(jingle)
|
["node", "unjingle/unjingle.js", "--jingle"],
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
)
|
||||||
|
print("raw jingle invite", str(jingle))
|
||||||
sdp, out_err = p.communicate(str(jingle))
|
sdp, out_err = p.communicate(str(jingle))
|
||||||
print "transformed remote offer sdp",sdp
|
print("transformed remote offer sdp", sdp)
|
||||||
inviteEvent = {
|
inviteEvent = {
|
||||||
'offer': {
|
"offer": {"type": "offer", "sdp": sdp},
|
||||||
'type': 'offer',
|
"call_id": self.matrixCallId,
|
||||||
'sdp': sdp
|
"version": 0,
|
||||||
},
|
"lifetime": 30000,
|
||||||
'call_id': self.matrixCallId,
|
|
||||||
'version': 0,
|
|
||||||
'lifetime': 30000
|
|
||||||
}
|
}
|
||||||
matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)
|
matrixCli.sendEvent(self.matrixRoom, "m.call.invite", inviteEvent)
|
||||||
|
|
||||||
matrixCli = TrivialMatrixClient(ACCESS_TOKEN)
|
|
||||||
|
|
||||||
gevent.joinall([
|
matrixCli = TrivialMatrixClient(ACCESS_TOKEN) # Undefined name
|
||||||
gevent.spawn(matrixLoop)
|
|
||||||
])
|
|
||||||
|
|
||||||
|
gevent.joinall([gevent.spawn(matrixLoop)])
|
||||||
|
@ -1,34 +1,40 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
from __future__ import print_function
|
||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
import json
|
import json
|
||||||
import requests
|
import requests
|
||||||
import sys
|
import sys
|
||||||
import urllib
|
import urllib
|
||||||
|
|
||||||
|
try:
|
||||||
|
raw_input
|
||||||
|
except NameError: # Python 3
|
||||||
|
raw_input = input
|
||||||
|
|
||||||
|
|
||||||
def _mkurl(template, kws):
|
def _mkurl(template, kws):
|
||||||
for key in kws:
|
for key in kws:
|
||||||
template = template.replace(key, kws[key])
|
template = template.replace(key, kws[key])
|
||||||
return template
|
return template
|
||||||
|
|
||||||
|
|
||||||
def main(hs, room_id, access_token, user_id_prefix, why):
|
def main(hs, room_id, access_token, user_id_prefix, why):
|
||||||
if not why:
|
if not why:
|
||||||
why = "Automated kick."
|
why = "Automated kick."
|
||||||
print "Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
|
print(
|
||||||
|
"Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
|
||||||
|
)
|
||||||
room_state_url = _mkurl(
|
room_state_url = _mkurl(
|
||||||
"$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
|
"$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
|
||||||
{
|
{"$HS": hs, "$ROOM": room_id, "$TOKEN": access_token},
|
||||||
"$HS": hs,
|
|
||||||
"$ROOM": room_id,
|
|
||||||
"$TOKEN": access_token
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
print "Getting room state => %s" % room_state_url
|
print("Getting room state => %s" % room_state_url)
|
||||||
res = requests.get(room_state_url)
|
res = requests.get(room_state_url)
|
||||||
print "HTTP %s" % res.status_code
|
print("HTTP %s" % res.status_code)
|
||||||
state_events = res.json()
|
state_events = res.json()
|
||||||
if "error" in state_events:
|
if "error" in state_events:
|
||||||
print "FATAL"
|
print("FATAL")
|
||||||
print state_events
|
print(state_events)
|
||||||
return
|
return
|
||||||
|
|
||||||
kick_list = []
|
kick_list = []
|
||||||
@ -44,47 +50,40 @@ def main(hs, room_id, access_token, user_id_prefix, why):
|
|||||||
kick_list.append(event["state_key"])
|
kick_list.append(event["state_key"])
|
||||||
|
|
||||||
if len(kick_list) == 0:
|
if len(kick_list) == 0:
|
||||||
print "No user IDs match the prefix '%s'" % user_id_prefix
|
print("No user IDs match the prefix '%s'" % user_id_prefix)
|
||||||
return
|
return
|
||||||
|
|
||||||
print "The following user IDs will be kicked from %s" % room_name
|
print("The following user IDs will be kicked from %s" % room_name)
|
||||||
for uid in kick_list:
|
for uid in kick_list:
|
||||||
print uid
|
print(uid)
|
||||||
doit = raw_input("Continue? [Y]es\n")
|
doit = raw_input("Continue? [Y]es\n")
|
||||||
if len(doit) > 0 and doit.lower() == 'y':
|
if len(doit) > 0 and doit.lower() == "y":
|
||||||
print "Kicking members..."
|
print("Kicking members...")
|
||||||
# encode them all
|
# encode them all
|
||||||
kick_list = [urllib.quote(uid) for uid in kick_list]
|
kick_list = [urllib.quote(uid) for uid in kick_list]
|
||||||
for uid in kick_list:
|
for uid in kick_list:
|
||||||
kick_url = _mkurl(
|
kick_url = _mkurl(
|
||||||
"$HS/_matrix/client/api/v1/rooms/$ROOM/state/m.room.member/$UID?access_token=$TOKEN",
|
"$HS/_matrix/client/api/v1/rooms/$ROOM/state/m.room.member/$UID?access_token=$TOKEN",
|
||||||
{
|
{"$HS": hs, "$UID": uid, "$ROOM": room_id, "$TOKEN": access_token},
|
||||||
"$HS": hs,
|
|
||||||
"$UID": uid,
|
|
||||||
"$ROOM": room_id,
|
|
||||||
"$TOKEN": access_token
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
kick_body = {
|
kick_body = {"membership": "leave", "reason": why}
|
||||||
"membership": "leave",
|
print("Kicking %s" % uid)
|
||||||
"reason": why
|
|
||||||
}
|
|
||||||
print "Kicking %s" % uid
|
|
||||||
res = requests.put(kick_url, data=json.dumps(kick_body))
|
res = requests.put(kick_url, data=json.dumps(kick_body))
|
||||||
if res.status_code != 200:
|
if res.status_code != 200:
|
||||||
print "ERROR: HTTP %s" % res.status_code
|
print("ERROR: HTTP %s" % res.status_code)
|
||||||
if res.json().get("error"):
|
if res.json().get("error"):
|
||||||
print "ERROR: JSON %s" % res.json()
|
print("ERROR: JSON %s" % res.json())
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
parser = ArgumentParser("Kick members in a room matching a certain user ID prefix.")
|
parser = ArgumentParser("Kick members in a room matching a certain user ID prefix.")
|
||||||
parser.add_argument("-u","--user-id",help="The user ID prefix e.g. '@irc_'")
|
parser.add_argument("-u", "--user-id", help="The user ID prefix e.g. '@irc_'")
|
||||||
parser.add_argument("-t","--token",help="Your access_token")
|
parser.add_argument("-t", "--token", help="Your access_token")
|
||||||
parser.add_argument("-r","--room",help="The room ID to kick members in")
|
parser.add_argument("-r", "--room", help="The room ID to kick members in")
|
||||||
parser.add_argument("-s","--homeserver",help="The base HS url e.g. http://matrix.org")
|
parser.add_argument(
|
||||||
parser.add_argument("-w","--why",help="Reason for the kick. Optional.")
|
"-s", "--homeserver", help="The base HS url e.g. http://matrix.org"
|
||||||
|
)
|
||||||
|
parser.add_argument("-w", "--why", help="Reason for the kick. Optional.")
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
if not args.room or not args.token or not args.user_id or not args.homeserver:
|
if not args.room or not args.token or not args.user_id or not args.homeserver:
|
||||||
parser.print_help()
|
parser.print_help()
|
||||||
|
6
debian/changelog
vendored
6
debian/changelog
vendored
@ -1,3 +1,9 @@
|
|||||||
|
matrix-synapse-py3 (1.0.0) stable; urgency=medium
|
||||||
|
|
||||||
|
* New synapse release 1.0.0.
|
||||||
|
|
||||||
|
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Jun 2019 17:09:53 +0100
|
||||||
|
|
||||||
matrix-synapse-py3 (0.99.5.2) stable; urgency=medium
|
matrix-synapse-py3 (0.99.5.2) stable; urgency=medium
|
||||||
|
|
||||||
* New synapse release 0.99.5.2.
|
* New synapse release 0.99.5.2.
|
||||||
|
@ -1,9 +1,13 @@
|
|||||||
|
DO NOT USE THESE DEMO SERVERS IN PRODUCTION
|
||||||
|
|
||||||
Requires you to have done:
|
Requires you to have done:
|
||||||
python setup.py develop
|
python setup.py develop
|
||||||
|
|
||||||
|
|
||||||
The demo start.sh will start three synapse servers on ports 8080, 8081 and 8082, with host names localhost:$port. This can be easily changed to `hostname`:$port in start.sh if required.
|
The demo start.sh will start three synapse servers on ports 8080, 8081 and 8082, with host names localhost:$port. This can be easily changed to `hostname`:$port in start.sh if required.
|
||||||
It will also start a web server on port 8000 pointed at the webclient.
|
|
||||||
|
To enable the servers to communicate untrusted ssl certs are used. In order to do this the servers do not check the certs
|
||||||
|
and are configured in a highly insecure way. Do not use these configuration files in production.
|
||||||
|
|
||||||
stop.sh will stop the synapse servers and the webclient.
|
stop.sh will stop the synapse servers and the webclient.
|
||||||
|
|
||||||
|
@ -21,14 +21,76 @@ for port in 8080 8081 8082; do
|
|||||||
pushd demo/$port
|
pushd demo/$port
|
||||||
|
|
||||||
#rm $DIR/etc/$port.config
|
#rm $DIR/etc/$port.config
|
||||||
python -m synapse.app.homeserver \
|
python3 -m synapse.app.homeserver \
|
||||||
--generate-config \
|
--generate-config \
|
||||||
-H "localhost:$https_port" \
|
-H "localhost:$https_port" \
|
||||||
--config-path "$DIR/etc/$port.config" \
|
--config-path "$DIR/etc/$port.config" \
|
||||||
--report-stats no
|
--report-stats no
|
||||||
|
|
||||||
printf '\n\n# Customisation made by demo/start.sh\n' >> $DIR/etc/$port.config
|
if ! grep -F "Customisation made by demo/start.sh" -q $DIR/etc/$port.config; then
|
||||||
echo 'enable_registration: true' >> $DIR/etc/$port.config
|
printf '\n\n# Customisation made by demo/start.sh\n' >> $DIR/etc/$port.config
|
||||||
|
|
||||||
|
echo 'enable_registration: true' >> $DIR/etc/$port.config
|
||||||
|
|
||||||
|
# Warning, this heredoc depends on the interaction of tabs and spaces. Please don't
|
||||||
|
# accidentaly bork me with your fancy settings.
|
||||||
|
listeners=$(cat <<-PORTLISTENERS
|
||||||
|
# Configure server to listen on both $https_port and $port
|
||||||
|
# This overides some of the default settings above
|
||||||
|
listeners:
|
||||||
|
- port: $https_port
|
||||||
|
type: http
|
||||||
|
tls: true
|
||||||
|
resources:
|
||||||
|
- names: [client, federation]
|
||||||
|
|
||||||
|
- port: $port
|
||||||
|
tls: false
|
||||||
|
bind_addresses: ['::1', '127.0.0.1']
|
||||||
|
type: http
|
||||||
|
x_forwarded: true
|
||||||
|
resources:
|
||||||
|
- names: [client, federation]
|
||||||
|
compress: false
|
||||||
|
PORTLISTENERS
|
||||||
|
)
|
||||||
|
echo "${listeners}" >> $DIR/etc/$port.config
|
||||||
|
|
||||||
|
# Disable tls for the servers
|
||||||
|
printf '\n\n# Disable tls on the servers.' >> $DIR/etc/$port.config
|
||||||
|
echo '# DO NOT USE IN PRODUCTION' >> $DIR/etc/$port.config
|
||||||
|
echo 'use_insecure_ssl_client_just_for_testing_do_not_use: true' >> $DIR/etc/$port.config
|
||||||
|
echo 'federation_verify_certificates: false' >> $DIR/etc/$port.config
|
||||||
|
|
||||||
|
# Set tls paths
|
||||||
|
echo "tls_certificate_path: \"$DIR/etc/localhost:$https_port.tls.crt\"" >> $DIR/etc/$port.config
|
||||||
|
echo "tls_private_key_path: \"$DIR/etc/localhost:$https_port.tls.key\"" >> $DIR/etc/$port.config
|
||||||
|
|
||||||
|
# Generate tls keys
|
||||||
|
openssl req -x509 -newkey rsa:4096 -keyout $DIR/etc/localhost\:$https_port.tls.key -out $DIR/etc/localhost\:$https_port.tls.crt -days 365 -nodes -subj "/O=matrix"
|
||||||
|
|
||||||
|
# Ignore keys from the trusted keys server
|
||||||
|
echo '# Ignore keys from the trusted keys server' >> $DIR/etc/$port.config
|
||||||
|
echo 'trusted_key_servers:' >> $DIR/etc/$port.config
|
||||||
|
echo ' - server_name: "matrix.org"' >> $DIR/etc/$port.config
|
||||||
|
echo ' accept_keys_insecurely: true' >> $DIR/etc/$port.config
|
||||||
|
|
||||||
|
# Reduce the blacklist
|
||||||
|
blacklist=$(cat <<-BLACK
|
||||||
|
# Set the blacklist so that it doesn't include 127.0.0.1
|
||||||
|
federation_ip_range_blacklist:
|
||||||
|
- '10.0.0.0/8'
|
||||||
|
- '172.16.0.0/12'
|
||||||
|
- '192.168.0.0/16'
|
||||||
|
- '100.64.0.0/10'
|
||||||
|
- '169.254.0.0/16'
|
||||||
|
- '::1/128'
|
||||||
|
- 'fe80::/64'
|
||||||
|
- 'fc00::/7'
|
||||||
|
BLACK
|
||||||
|
)
|
||||||
|
echo "${blacklist}" >> $DIR/etc/$port.config
|
||||||
|
fi
|
||||||
|
|
||||||
# Check script parameters
|
# Check script parameters
|
||||||
if [ $# -eq 1 ]; then
|
if [ $# -eq 1 ]; then
|
||||||
@ -55,7 +117,7 @@ for port in 8080 8081 8082; do
|
|||||||
echo "report_stats: false" >> $DIR/etc/$port.config
|
echo "report_stats: false" >> $DIR/etc/$port.config
|
||||||
fi
|
fi
|
||||||
|
|
||||||
python -m synapse.app.homeserver \
|
python3 -m synapse.app.homeserver \
|
||||||
--config-path "$DIR/etc/$port.config" \
|
--config-path "$DIR/etc/$port.config" \
|
||||||
-D \
|
-D \
|
||||||
-vv \
|
-vv \
|
||||||
|
@ -6,23 +6,25 @@ import cgi, logging
|
|||||||
|
|
||||||
from daemonize import Daemonize
|
from daemonize import Daemonize
|
||||||
|
|
||||||
|
|
||||||
class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
|
class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
|
||||||
UPLOAD_PATH = "upload"
|
UPLOAD_PATH = "upload"
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Accept all post request as file upload
|
Accept all post request as file upload
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def do_POST(self):
|
def do_POST(self):
|
||||||
|
|
||||||
path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
|
path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
|
||||||
length = self.headers['content-length']
|
length = self.headers["content-length"]
|
||||||
data = self.rfile.read(int(length))
|
data = self.rfile.read(int(length))
|
||||||
|
|
||||||
with open(path, 'wb') as fh:
|
with open(path, "wb") as fh:
|
||||||
fh.write(data)
|
fh.write(data)
|
||||||
|
|
||||||
self.send_response(200)
|
self.send_response(200)
|
||||||
self.send_header('Content-Type', 'application/json')
|
self.send_header("Content-Type", "application/json")
|
||||||
self.end_headers()
|
self.end_headers()
|
||||||
|
|
||||||
# Return the absolute path of the uploaded file
|
# Return the absolute path of the uploaded file
|
||||||
@ -33,30 +35,25 @@ def setup():
|
|||||||
parser = argparse.ArgumentParser()
|
parser = argparse.ArgumentParser()
|
||||||
parser.add_argument("directory")
|
parser.add_argument("directory")
|
||||||
parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
|
parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
|
||||||
parser.add_argument('-P', "--pid-file", dest="pid", default="web.pid")
|
parser.add_argument("-P", "--pid-file", dest="pid", default="web.pid")
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
# Get absolute path to directory to serve, as daemonize changes to '/'
|
# Get absolute path to directory to serve, as daemonize changes to '/'
|
||||||
os.chdir(args.directory)
|
os.chdir(args.directory)
|
||||||
dr = os.getcwd()
|
dr = os.getcwd()
|
||||||
|
|
||||||
httpd = BaseHTTPServer.HTTPServer(
|
httpd = BaseHTTPServer.HTTPServer(("", args.port), SimpleHTTPRequestHandlerWithPOST)
|
||||||
('', args.port),
|
|
||||||
SimpleHTTPRequestHandlerWithPOST
|
|
||||||
)
|
|
||||||
|
|
||||||
def run():
|
def run():
|
||||||
os.chdir(dr)
|
os.chdir(dr)
|
||||||
httpd.serve_forever()
|
httpd.serve_forever()
|
||||||
|
|
||||||
daemon = Daemonize(
|
daemon = Daemonize(
|
||||||
app="synapse-webclient",
|
app="synapse-webclient", pid=args.pid, action=run, auto_close_fds=False
|
||||||
pid=args.pid,
|
)
|
||||||
action=run,
|
|
||||||
auto_close_fds=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
daemon.start()
|
daemon.start()
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
|
if __name__ == "__main__":
|
||||||
setup()
|
setup()
|
||||||
|
@ -11,7 +11,7 @@
|
|||||||
# docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.6 .
|
# docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.6 .
|
||||||
#
|
#
|
||||||
|
|
||||||
ARG PYTHON_VERSION=2
|
ARG PYTHON_VERSION=3.7
|
||||||
|
|
||||||
###
|
###
|
||||||
### Stage 0: builder
|
### Stage 0: builder
|
||||||
@ -57,6 +57,7 @@ RUN pip install --prefix="/install" --no-warn-script-location \
|
|||||||
|
|
||||||
FROM docker.io/python:${PYTHON_VERSION}-alpine3.8
|
FROM docker.io/python:${PYTHON_VERSION}-alpine3.8
|
||||||
|
|
||||||
|
# xmlsec is required for saml support
|
||||||
RUN apk add --no-cache --virtual .runtime_deps \
|
RUN apk add --no-cache --virtual .runtime_deps \
|
||||||
libffi \
|
libffi \
|
||||||
libjpeg-turbo \
|
libjpeg-turbo \
|
||||||
@ -64,7 +65,8 @@ RUN apk add --no-cache --virtual .runtime_deps \
|
|||||||
libxslt \
|
libxslt \
|
||||||
libpq \
|
libpq \
|
||||||
zlib \
|
zlib \
|
||||||
su-exec
|
su-exec \
|
||||||
|
xmlsec
|
||||||
|
|
||||||
COPY --from=builder /install /usr/local
|
COPY --from=builder /install /usr/local
|
||||||
COPY ./docker/start.py /start.py
|
COPY ./docker/start.py /start.py
|
||||||
|
@ -3,10 +3,10 @@
|
|||||||
FROM matrixdotorg/sytest:latest
|
FROM matrixdotorg/sytest:latest
|
||||||
|
|
||||||
# The Sytest image doesn't come with python, so install that
|
# The Sytest image doesn't come with python, so install that
|
||||||
RUN apt-get -qq install -y python python-dev python-pip
|
RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip
|
||||||
|
|
||||||
# We need tox to run the tests in run_pg_tests.sh
|
# We need tox to run the tests in run_pg_tests.sh
|
||||||
RUN pip install tox
|
RUN python3 -m pip install tox
|
||||||
|
|
||||||
ADD run_pg_tests.sh /pg_tests.sh
|
ADD run_pg_tests.sh /pg_tests.sh
|
||||||
ENTRYPOINT /pg_tests.sh
|
ENTRYPOINT /pg_tests.sh
|
||||||
|
@ -14,7 +14,7 @@ This image is designed to run either with an automatically generated
|
|||||||
configuration file or with a custom configuration that requires manual editing.
|
configuration file or with a custom configuration that requires manual editing.
|
||||||
|
|
||||||
An easy way to make use of this image is via docker-compose. See the
|
An easy way to make use of this image is via docker-compose. See the
|
||||||
[contrib/docker](../contrib/docker) section of the synapse project for
|
[contrib/docker](https://github.com/matrix-org/synapse/tree/master/contrib/docker) section of the synapse project for
|
||||||
examples.
|
examples.
|
||||||
|
|
||||||
### Without Compose (harder)
|
### Without Compose (harder)
|
||||||
|
@ -16,14 +16,11 @@ handlers:
|
|||||||
filters: [context]
|
filters: [context]
|
||||||
|
|
||||||
loggers:
|
loggers:
|
||||||
synapse:
|
|
||||||
level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
|
|
||||||
|
|
||||||
synapse.storage.SQL:
|
synapse.storage.SQL:
|
||||||
# beware: increasing this to DEBUG will make synapse log sensitive
|
# beware: increasing this to DEBUG will make synapse log sensitive
|
||||||
# information such as access tokens.
|
# information such as access tokens.
|
||||||
level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
|
level: INFO
|
||||||
|
|
||||||
root:
|
root:
|
||||||
level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
|
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
|
||||||
handlers: [console]
|
handlers: [console]
|
||||||
|
@ -17,4 +17,4 @@ su -c '/usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start'
|
|||||||
# Run the tests
|
# Run the tests
|
||||||
cd /src
|
cd /src
|
||||||
export TRIAL_FLAGS="-j 4"
|
export TRIAL_FLAGS="-j 4"
|
||||||
tox --workdir=/tmp -e py27-postgres
|
tox --workdir=/tmp -e py35-postgres
|
||||||
|
@ -8,7 +8,10 @@ import glob
|
|||||||
import codecs
|
import codecs
|
||||||
|
|
||||||
# Utility functions
|
# Utility functions
|
||||||
convert = lambda src, dst, environ: open(dst, "w").write(jinja2.Template(open(src).read()).render(**environ))
|
convert = lambda src, dst, environ: open(dst, "w").write(
|
||||||
|
jinja2.Template(open(src).read()).render(**environ)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def check_arguments(environ, args):
|
def check_arguments(environ, args):
|
||||||
for argument in args:
|
for argument in args:
|
||||||
@ -16,18 +19,22 @@ def check_arguments(environ, args):
|
|||||||
print("Environment variable %s is mandatory, exiting." % argument)
|
print("Environment variable %s is mandatory, exiting." % argument)
|
||||||
sys.exit(2)
|
sys.exit(2)
|
||||||
|
|
||||||
|
|
||||||
def generate_secrets(environ, secrets):
|
def generate_secrets(environ, secrets):
|
||||||
for name, secret in secrets.items():
|
for name, secret in secrets.items():
|
||||||
if secret not in environ:
|
if secret not in environ:
|
||||||
filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)
|
filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)
|
||||||
if os.path.exists(filename):
|
if os.path.exists(filename):
|
||||||
with open(filename) as handle: value = handle.read()
|
with open(filename) as handle:
|
||||||
|
value = handle.read()
|
||||||
else:
|
else:
|
||||||
print("Generating a random secret for {}".format(name))
|
print("Generating a random secret for {}".format(name))
|
||||||
value = codecs.encode(os.urandom(32), "hex").decode()
|
value = codecs.encode(os.urandom(32), "hex").decode()
|
||||||
with open(filename, "w") as handle: handle.write(value)
|
with open(filename, "w") as handle:
|
||||||
|
handle.write(value)
|
||||||
environ[secret] = value
|
environ[secret] = value
|
||||||
|
|
||||||
|
|
||||||
# Prepare the configuration
|
# Prepare the configuration
|
||||||
mode = sys.argv[1] if len(sys.argv) > 1 else None
|
mode = sys.argv[1] if len(sys.argv) > 1 else None
|
||||||
environ = os.environ.copy()
|
environ = os.environ.copy()
|
||||||
@ -36,12 +43,17 @@ args = ["python", "-m", "synapse.app.homeserver"]
|
|||||||
|
|
||||||
# In generate mode, generate a configuration, missing keys, then exit
|
# In generate mode, generate a configuration, missing keys, then exit
|
||||||
if mode == "generate":
|
if mode == "generate":
|
||||||
check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH"))
|
check_arguments(
|
||||||
|
environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH")
|
||||||
|
)
|
||||||
args += [
|
args += [
|
||||||
"--server-name", environ["SYNAPSE_SERVER_NAME"],
|
"--server-name",
|
||||||
"--report-stats", environ["SYNAPSE_REPORT_STATS"],
|
environ["SYNAPSE_SERVER_NAME"],
|
||||||
"--config-path", environ["SYNAPSE_CONFIG_PATH"],
|
"--report-stats",
|
||||||
"--generate-config"
|
environ["SYNAPSE_REPORT_STATS"],
|
||||||
|
"--config-path",
|
||||||
|
environ["SYNAPSE_CONFIG_PATH"],
|
||||||
|
"--generate-config",
|
||||||
]
|
]
|
||||||
os.execv("/usr/local/bin/python", args)
|
os.execv("/usr/local/bin/python", args)
|
||||||
|
|
||||||
@ -51,15 +63,19 @@ else:
|
|||||||
config_path = environ["SYNAPSE_CONFIG_PATH"]
|
config_path = environ["SYNAPSE_CONFIG_PATH"]
|
||||||
else:
|
else:
|
||||||
check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"))
|
check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"))
|
||||||
generate_secrets(environ, {
|
generate_secrets(
|
||||||
"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
|
environ,
|
||||||
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY"
|
{
|
||||||
})
|
"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
|
||||||
|
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
|
||||||
|
},
|
||||||
|
)
|
||||||
environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
|
environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
|
||||||
if not os.path.exists("/compiled"): os.mkdir("/compiled")
|
if not os.path.exists("/compiled"):
|
||||||
|
os.mkdir("/compiled")
|
||||||
|
|
||||||
config_path = "/compiled/homeserver.yaml"
|
config_path = "/compiled/homeserver.yaml"
|
||||||
|
|
||||||
# Convert SYNAPSE_NO_TLS to boolean if exists
|
# Convert SYNAPSE_NO_TLS to boolean if exists
|
||||||
if "SYNAPSE_NO_TLS" in environ:
|
if "SYNAPSE_NO_TLS" in environ:
|
||||||
tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"])
|
tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"])
|
||||||
@ -69,19 +85,23 @@ else:
|
|||||||
if tlsanswerstring in ("false", "off", "0", "no"):
|
if tlsanswerstring in ("false", "off", "0", "no"):
|
||||||
environ["SYNAPSE_NO_TLS"] = False
|
environ["SYNAPSE_NO_TLS"] = False
|
||||||
else:
|
else:
|
||||||
print("Environment variable \"SYNAPSE_NO_TLS\" found but value \"" + tlsanswerstring + "\" unrecognized; exiting.")
|
print(
|
||||||
|
'Environment variable "SYNAPSE_NO_TLS" found but value "'
|
||||||
|
+ tlsanswerstring
|
||||||
|
+ '" unrecognized; exiting.'
|
||||||
|
)
|
||||||
sys.exit(2)
|
sys.exit(2)
|
||||||
|
|
||||||
convert("/conf/homeserver.yaml", config_path, environ)
|
convert("/conf/homeserver.yaml", config_path, environ)
|
||||||
convert("/conf/log.config", "/compiled/log.config", environ)
|
convert("/conf/log.config", "/compiled/log.config", environ)
|
||||||
subprocess.check_output(["chown", "-R", ownership, "/data"])
|
subprocess.check_output(["chown", "-R", ownership, "/data"])
|
||||||
|
|
||||||
|
|
||||||
args += [
|
args += [
|
||||||
"--config-path", config_path,
|
"--config-path",
|
||||||
|
config_path,
|
||||||
# tell synapse to put any generated keys in /data rather than /compiled
|
# tell synapse to put any generated keys in /data rather than /compiled
|
||||||
"--keys-directory", "/data",
|
"--keys-directory",
|
||||||
|
"/data",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Generate missing keys and start synapse
|
# Generate missing keys and start synapse
|
||||||
|
@ -1,5 +1,22 @@
|
|||||||
# MSC1711 Certificates FAQ
|
# MSC1711 Certificates FAQ
|
||||||
|
|
||||||
|
## Historical Note
|
||||||
|
This document was originally written to guide server admins through the upgrade
|
||||||
|
path towards Synapse 1.0. Specifically,
|
||||||
|
[MSC1711](https://github.com/matrix-org/matrix-doc/blob/master/proposals/1711-x509-for-federation.md)
|
||||||
|
required that all servers present valid TLS certificates on their federation
|
||||||
|
API. Admins were encouraged to achieve compliance from version 0.99.0 (released
|
||||||
|
in February 2019) ahead of version 1.0 (released June 2019) enforcing the
|
||||||
|
certificate checks.
|
||||||
|
|
||||||
|
Much of what follows is now outdated since most admins will have already
|
||||||
|
upgraded, however it may be of use to those with old installs returning to the
|
||||||
|
project.
|
||||||
|
|
||||||
|
If you are setting up a server from scratch you almost certainly should look at
|
||||||
|
the [installation guide](../INSTALL.md) instead.
|
||||||
|
|
||||||
|
## Introduction
|
||||||
The goal of Synapse 0.99.0 is to act as a stepping stone to Synapse 1.0.0. It
|
The goal of Synapse 0.99.0 is to act as a stepping stone to Synapse 1.0.0. It
|
||||||
supports the r0.1 release of the server to server specification, but is
|
supports the r0.1 release of the server to server specification, but is
|
||||||
compatible with both the legacy Matrix federation behaviour (pre-r0.1) as well
|
compatible with both the legacy Matrix federation behaviour (pre-r0.1) as well
|
||||||
|
@ -1,43 +1,60 @@
|
|||||||
- Everything should comply with PEP8. Code should pass
|
# Code Style
|
||||||
``pep8 --max-line-length=100`` without any warnings.
|
|
||||||
|
|
||||||
- **Indenting**:
|
The Synapse codebase uses a number of code formatting tools in order to
|
||||||
|
quickly and automatically check for formatting (and sometimes logical) errors
|
||||||
|
in code.
|
||||||
|
|
||||||
- NEVER tabs. 4 spaces to indent.
|
The necessary tools are detailed below.
|
||||||
|
|
||||||
- follow PEP8; either hanging indent or multiline-visual indent depending
|
## Formatting tools
|
||||||
on the size and shape of the arguments and what makes more sense to the
|
|
||||||
author. In other words, both this::
|
|
||||||
|
|
||||||
print("I am a fish %s" % "moo")
|
The Synapse codebase uses [black](https://pypi.org/project/black/) as an
|
||||||
|
opinionated code formatter, ensuring all comitted code is properly
|
||||||
|
formatted.
|
||||||
|
|
||||||
and this::
|
First install ``black`` with::
|
||||||
|
|
||||||
print("I am a fish %s" %
|
pip install --upgrade black
|
||||||
"moo")
|
|
||||||
|
|
||||||
and this::
|
Have ``black`` auto-format your code (it shouldn't change any
|
||||||
|
functionality) with::
|
||||||
|
|
||||||
print(
|
black . --exclude="\.tox|build|env"
|
||||||
"I am a fish %s" %
|
|
||||||
"moo",
|
|
||||||
)
|
|
||||||
|
|
||||||
...are valid, although given each one takes up 2x more vertical space than
|
- **flake8**
|
||||||
the previous, it's up to the author's discretion as to which layout makes
|
|
||||||
most sense for their function invocation. (e.g. if they want to add
|
|
||||||
comments per-argument, or put expressions in the arguments, or group
|
|
||||||
related arguments together, or want to deliberately extend or preserve
|
|
||||||
vertical/horizontal space)
|
|
||||||
|
|
||||||
- **Line length**:
|
``flake8`` is a code checking tool. We require code to pass ``flake8`` before being merged into the codebase.
|
||||||
|
|
||||||
Max line length is 79 chars (with flexibility to overflow by a "few chars" if
|
Install ``flake8`` with::
|
||||||
the overflowing content is not semantically significant and avoids an
|
|
||||||
explosion of vertical whitespace).
|
|
||||||
|
|
||||||
Use parentheses instead of ``\`` for line continuation where ever possible
|
pip install --upgrade flake8
|
||||||
(which is pretty much everywhere).
|
|
||||||
|
Check all application and test code with::
|
||||||
|
|
||||||
|
flake8 synapse tests
|
||||||
|
|
||||||
|
- **isort**
|
||||||
|
|
||||||
|
``isort`` ensures imports are nicely formatted, and can suggest and
|
||||||
|
auto-fix issues such as double-importing.
|
||||||
|
|
||||||
|
Install ``isort`` with::
|
||||||
|
|
||||||
|
pip install --upgrade isort
|
||||||
|
|
||||||
|
Auto-fix imports with::
|
||||||
|
|
||||||
|
isort -rc synapse tests
|
||||||
|
|
||||||
|
``-rc`` means to recursively search the given directories.
|
||||||
|
|
||||||
|
It's worth noting that modern IDEs and text editors can run these tools
|
||||||
|
automatically on save. It may be worth looking into whether this
|
||||||
|
functionality is supported in your editor for a more convenient development
|
||||||
|
workflow. It is not, however, recommended to run ``flake8`` on save as it
|
||||||
|
takes a while and is very resource intensive.
|
||||||
|
|
||||||
|
## General rules
|
||||||
|
|
||||||
- **Naming**:
|
- **Naming**:
|
||||||
|
|
||||||
@ -46,26 +63,6 @@
|
|||||||
|
|
||||||
- Use double quotes ``"foo"`` rather than single quotes ``'foo'``.
|
- Use double quotes ``"foo"`` rather than single quotes ``'foo'``.
|
||||||
|
|
||||||
- **Blank lines**:
|
|
||||||
|
|
||||||
- There should be max a single new line between:
|
|
||||||
|
|
||||||
- statements
|
|
||||||
- functions in a class
|
|
||||||
|
|
||||||
- There should be two new lines between:
|
|
||||||
|
|
||||||
- definitions in a module (e.g., between different classes)
|
|
||||||
|
|
||||||
- **Whitespace**:
|
|
||||||
|
|
||||||
There should be spaces where spaces should be and not where there shouldn't
|
|
||||||
be:
|
|
||||||
|
|
||||||
- a single space after a comma
|
|
||||||
- a single space before and after for '=' when used as assignment
|
|
||||||
- no spaces before and after for '=' for default values and keyword arguments.
|
|
||||||
|
|
||||||
- **Comments**: should follow the `google code style
|
- **Comments**: should follow the `google code style
|
||||||
<http://google.github.io/styleguide/pyguide.html?showone=Comments#Comments>`_.
|
<http://google.github.io/styleguide/pyguide.html?showone=Comments#Comments>`_.
|
||||||
This is so that we can generate documentation with `sphinx
|
This is so that we can generate documentation with `sphinx
|
||||||
@ -76,7 +73,7 @@
|
|||||||
|
|
||||||
- **Imports**:
|
- **Imports**:
|
||||||
|
|
||||||
- Prefer to import classes and functions than packages or modules.
|
- Prefer to import classes and functions rather than packages or modules.
|
||||||
|
|
||||||
Example::
|
Example::
|
||||||
|
|
||||||
|
@ -14,9 +14,9 @@ up and will work provided you set the ``server_name`` to match your
|
|||||||
machine's public DNS hostname, and provide Synapse with a TLS certificate
|
machine's public DNS hostname, and provide Synapse with a TLS certificate
|
||||||
which is valid for your ``server_name``.
|
which is valid for your ``server_name``.
|
||||||
|
|
||||||
Once you have completed the steps necessary to federate, you should be able to
|
Once federation has been configured, you should be able to join a room over
|
||||||
join a room via federation. (A good place to start is ``#synapse:matrix.org`` - a
|
federation. A good place to start is ``#synapse:matrix.org`` - a room for
|
||||||
room for Synapse admins.)
|
Synapse admins.
|
||||||
|
|
||||||
|
|
||||||
## Delegation
|
## Delegation
|
||||||
@ -98,6 +98,77 @@ _matrix._tcp.<server_name>``. In our example, we would expect this:
|
|||||||
Note that the target of a SRV record cannot be an alias (CNAME record): it has to point
|
Note that the target of a SRV record cannot be an alias (CNAME record): it has to point
|
||||||
directly to the server hosting the synapse instance.
|
directly to the server hosting the synapse instance.
|
||||||
|
|
||||||
|
### Delegation FAQ
|
||||||
|
#### When do I need a SRV record or .well-known URI?
|
||||||
|
|
||||||
|
If your homeserver listens on the default federation port (8448), and your
|
||||||
|
`server_name` points to the host that your homeserver runs on, you do not need an SRV
|
||||||
|
record or `.well-known/matrix/server` URI.
|
||||||
|
|
||||||
|
For instance, if you registered `example.com` and pointed its DNS A record at a
|
||||||
|
fresh server, you could install Synapse on that host,
|
||||||
|
giving it a `server_name` of `example.com`, and once [ACME](acme.md) support is enabled,
|
||||||
|
it would automatically generate a valid TLS certificate for you via Let's Encrypt
|
||||||
|
and no SRV record or .well-known URI would be needed.
|
||||||
|
|
||||||
|
This is the common case, although you can add an SRV record or
|
||||||
|
`.well-known/matrix/server` URI for completeness if you wish.
|
||||||
|
|
||||||
|
**However**, if your server does not listen on port 8448, or if your `server_name`
|
||||||
|
does not point to the host that your homeserver runs on, you will need to let
|
||||||
|
other servers know how to find it. The way to do this is via .well-known or an
|
||||||
|
SRV record.
|
||||||
|
|
||||||
|
#### I have created a .well-known URI. Do I still need an SRV record?
|
||||||
|
|
||||||
|
As of Synapse 0.99, Synapse will first check for the existence of a .well-known
|
||||||
|
URI and follow any delegation it suggests. It will only then check for the
|
||||||
|
existence of an SRV record.
|
||||||
|
|
||||||
|
That means that the SRV record will often be redundant. However, you should
|
||||||
|
remember that there may still be older versions of Synapse in the federation
|
||||||
|
which do not understand .well-known URIs, so if you removed your SRV record
|
||||||
|
you would no longer be able to federate with them.
|
||||||
|
|
||||||
|
It is therefore best to leave the SRV record in place for now. Synapse 0.34 and
|
||||||
|
earlier will follow the SRV record (and not care about the invalid
|
||||||
|
certificate). Synapse 0.99 and later will follow the .well-known URI, with the
|
||||||
|
correct certificate chain.
|
||||||
|
|
||||||
|
#### Can I manage my own certificates rather than having Synapse renew certificates itself?
|
||||||
|
|
||||||
|
Yes, you are welcome to manage your certificates yourself. Synapse will only
|
||||||
|
attempt to obtain certificates from Let's Encrypt if you configure it to do
|
||||||
|
so.The only requirement is that there is a valid TLS cert present for
|
||||||
|
federation end points.
|
||||||
|
|
||||||
|
#### Do you still recommend against using a reverse proxy on the federation port?
|
||||||
|
|
||||||
|
We no longer actively recommend against using a reverse proxy. Many admins will
|
||||||
|
find it easier to direct federation traffic to a reverse proxy and manage their
|
||||||
|
own TLS certificates, and this is a supported configuration.
|
||||||
|
|
||||||
|
See [reverse_proxy.rst](reverse_proxy.rst) for information on setting up a
|
||||||
|
reverse proxy.
|
||||||
|
|
||||||
|
#### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?
|
||||||
|
|
||||||
|
Practically speaking, this is no longer necessary.
|
||||||
|
|
||||||
|
If you are using a reverse proxy for all of your TLS traffic, then you can set
|
||||||
|
`no_tls: True` in the Synapse config. In that case, the only reason Synapse
|
||||||
|
needs the certificate is to populate a legacy `tls_fingerprints` field in the
|
||||||
|
federation API. This is ignored by Synapse 0.99.0 and later, and the only time
|
||||||
|
pre-0.99 Synapses will check it is when attempting to fetch the server keys -
|
||||||
|
and generally this is delegated via `matrix.org`, which will be running a modern
|
||||||
|
version of Synapse.
|
||||||
|
|
||||||
|
#### Do I need the same certificate for the client and federation port?
|
||||||
|
|
||||||
|
No. There is nothing stopping you from using different certificates,
|
||||||
|
particularly if you are using a reverse proxy. However, Synapse will use the
|
||||||
|
same certificate on any ports where TLS is configured.
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
You can use the [federation tester](
|
You can use the [federation tester](
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
Using Postgres
|
Using Postgres
|
||||||
--------------
|
--------------
|
||||||
|
|
||||||
Postgres version 9.4 or later is known to work.
|
Postgres version 9.5 or later is known to work.
|
||||||
|
|
||||||
Install postgres client libraries
|
Install postgres client libraries
|
||||||
=================================
|
=================================
|
||||||
@ -16,7 +16,7 @@ a postgres database.
|
|||||||
* For other pre-built packages, please consult the documentation from the
|
* For other pre-built packages, please consult the documentation from the
|
||||||
relevant package.
|
relevant package.
|
||||||
|
|
||||||
* If you installed synapse `in a virtualenv
|
* If you installed synapse `in a virtualenv
|
||||||
<../INSTALL.md#installing-from-source>`_, you can install the library with::
|
<../INSTALL.md#installing-from-source>`_, you can install the library with::
|
||||||
|
|
||||||
~/synapse/env/bin/pip install matrix-synapse[postgres]
|
~/synapse/env/bin/pip install matrix-synapse[postgres]
|
||||||
|
@ -23,29 +23,6 @@ server_name: "SERVERNAME"
|
|||||||
#
|
#
|
||||||
pid_file: DATADIR/homeserver.pid
|
pid_file: DATADIR/homeserver.pid
|
||||||
|
|
||||||
# CPU affinity mask. Setting this restricts the CPUs on which the
|
|
||||||
# process will be scheduled. It is represented as a bitmask, with the
|
|
||||||
# lowest order bit corresponding to the first logical CPU and the
|
|
||||||
# highest order bit corresponding to the last logical CPU. Not all CPUs
|
|
||||||
# may exist on a given system but a mask may specify more CPUs than are
|
|
||||||
# present.
|
|
||||||
#
|
|
||||||
# For example:
|
|
||||||
# 0x00000001 is processor #0,
|
|
||||||
# 0x00000003 is processors #0 and #1,
|
|
||||||
# 0xFFFFFFFF is all processors (#0 through #31).
|
|
||||||
#
|
|
||||||
# Pinning a Python process to a single CPU is desirable, because Python
|
|
||||||
# is inherently single-threaded due to the GIL, and can suffer a
|
|
||||||
# 30-40% slowdown due to cache blow-out and thread context switching
|
|
||||||
# if the scheduler happens to schedule the underlying threads across
|
|
||||||
# different cores. See
|
|
||||||
# https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
|
|
||||||
#
|
|
||||||
# This setting requires the affinity package to be installed!
|
|
||||||
#
|
|
||||||
#cpu_affinity: 0xFFFFFFFF
|
|
||||||
|
|
||||||
# The path to the web client which will be served at /_matrix/client/
|
# The path to the web client which will be served at /_matrix/client/
|
||||||
# if 'webclient' is configured under the 'listeners' configuration.
|
# if 'webclient' is configured under the 'listeners' configuration.
|
||||||
#
|
#
|
||||||
@ -77,11 +54,15 @@ pid_file: DATADIR/homeserver.pid
|
|||||||
#
|
#
|
||||||
#require_auth_for_profile_requests: true
|
#require_auth_for_profile_requests: true
|
||||||
|
|
||||||
# If set to 'true', requires authentication to access the server's
|
# If set to 'false', requires authentication to access the server's public rooms
|
||||||
# public rooms directory through the client API, and forbids any other
|
# directory through the client API. Defaults to 'true'.
|
||||||
# homeserver to fetch it via federation. Defaults to 'false'.
|
|
||||||
#
|
#
|
||||||
#restrict_public_rooms_to_local_users: true
|
#allow_public_rooms_without_auth: false
|
||||||
|
|
||||||
|
# If set to 'false', forbids any other homeserver to fetch the server's public
|
||||||
|
# rooms directory via federation. Defaults to 'true'.
|
||||||
|
#
|
||||||
|
#allow_public_rooms_over_federation: false
|
||||||
|
|
||||||
# The default room version for newly created rooms.
|
# The default room version for newly created rooms.
|
||||||
#
|
#
|
||||||
@ -232,7 +213,7 @@ listeners:
|
|||||||
- names: [client, federation]
|
- names: [client, federation]
|
||||||
compress: false
|
compress: false
|
||||||
|
|
||||||
# example additonal_resources:
|
# example additional_resources:
|
||||||
#
|
#
|
||||||
#additional_resources:
|
#additional_resources:
|
||||||
# "/_matrix/my/custom/endpoint":
|
# "/_matrix/my/custom/endpoint":
|
||||||
@ -425,6 +406,13 @@ acme:
|
|||||||
#
|
#
|
||||||
#domain: matrix.example.com
|
#domain: matrix.example.com
|
||||||
|
|
||||||
|
# file to use for the account key. This will be generated if it doesn't
|
||||||
|
# exist.
|
||||||
|
#
|
||||||
|
# If unspecified, we will use CONFDIR/client.key.
|
||||||
|
#
|
||||||
|
account_key_file: DATADIR/acme_account.key
|
||||||
|
|
||||||
# List of allowed TLS fingerprints for this server to publish along
|
# List of allowed TLS fingerprints for this server to publish along
|
||||||
# with the signing keys for this server. Other matrix servers that
|
# with the signing keys for this server. Other matrix servers that
|
||||||
# make HTTPS requests to this server will check that the TLS
|
# make HTTPS requests to this server will check that the TLS
|
||||||
@ -1351,3 +1339,16 @@ password_config:
|
|||||||
# alias: "*"
|
# alias: "*"
|
||||||
# room_id: "*"
|
# room_id: "*"
|
||||||
# action: allow
|
# action: allow
|
||||||
|
|
||||||
|
|
||||||
|
# Server admins can define a Python module that implements extra rules for
|
||||||
|
# allowing or denying incoming events. In order to work, this module needs to
|
||||||
|
# override the methods defined in synapse/events/third_party_rules.py.
|
||||||
|
#
|
||||||
|
# This feature is designed to be used in closed federations only, where each
|
||||||
|
# participating server enforces the same rules.
|
||||||
|
#
|
||||||
|
#third_party_event_rules:
|
||||||
|
# module: "my_custom_project.SuperRulesSet"
|
||||||
|
# config:
|
||||||
|
# example_option: 'things'
|
||||||
|
@ -18,226 +18,220 @@ import os
|
|||||||
# If extensions (or modules to document with autodoc) are in another directory,
|
# If extensions (or modules to document with autodoc) are in another directory,
|
||||||
# add these directories to sys.path here. If the directory is relative to the
|
# add these directories to sys.path here. If the directory is relative to the
|
||||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||||
sys.path.insert(0, os.path.abspath('..'))
|
sys.path.insert(0, os.path.abspath(".."))
|
||||||
|
|
||||||
# -- General configuration ------------------------------------------------
|
# -- General configuration ------------------------------------------------
|
||||||
|
|
||||||
# If your documentation needs a minimal Sphinx version, state it here.
|
# If your documentation needs a minimal Sphinx version, state it here.
|
||||||
#needs_sphinx = '1.0'
|
# needs_sphinx = '1.0'
|
||||||
|
|
||||||
# Add any Sphinx extension module names here, as strings. They can be
|
# Add any Sphinx extension module names here, as strings. They can be
|
||||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||||
# ones.
|
# ones.
|
||||||
extensions = [
|
extensions = [
|
||||||
'sphinx.ext.autodoc',
|
"sphinx.ext.autodoc",
|
||||||
'sphinx.ext.intersphinx',
|
"sphinx.ext.intersphinx",
|
||||||
'sphinx.ext.coverage',
|
"sphinx.ext.coverage",
|
||||||
'sphinx.ext.ifconfig',
|
"sphinx.ext.ifconfig",
|
||||||
'sphinxcontrib.napoleon',
|
"sphinxcontrib.napoleon",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
templates_path = ['_templates']
|
templates_path = ["_templates"]
|
||||||
|
|
||||||
# The suffix of source filenames.
|
# The suffix of source filenames.
|
||||||
source_suffix = '.rst'
|
source_suffix = ".rst"
|
||||||
|
|
||||||
# The encoding of source files.
|
# The encoding of source files.
|
||||||
#source_encoding = 'utf-8-sig'
|
# source_encoding = 'utf-8-sig'
|
||||||
|
|
||||||
# The master toctree document.
|
# The master toctree document.
|
||||||
master_doc = 'index'
|
master_doc = "index"
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
project = u'Synapse'
|
project = "Synapse"
|
||||||
copyright = u'Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd'
|
copyright = (
|
||||||
|
"Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd"
|
||||||
|
)
|
||||||
|
|
||||||
# The version info for the project you're documenting, acts as replacement for
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
# |version| and |release|, also used in various other places throughout the
|
# |version| and |release|, also used in various other places throughout the
|
||||||
# built documents.
|
# built documents.
|
||||||
#
|
#
|
||||||
# The short X.Y version.
|
# The short X.Y version.
|
||||||
version = '1.0'
|
version = "1.0"
|
||||||
# The full version, including alpha/beta/rc tags.
|
# The full version, including alpha/beta/rc tags.
|
||||||
release = '1.0'
|
release = "1.0"
|
||||||
|
|
||||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
# for a list of supported languages.
|
# for a list of supported languages.
|
||||||
#language = None
|
# language = None
|
||||||
|
|
||||||
# There are two options for replacing |today|: either, you set today to some
|
# There are two options for replacing |today|: either, you set today to some
|
||||||
# non-false value, then it is used:
|
# non-false value, then it is used:
|
||||||
#today = ''
|
# today = ''
|
||||||
# Else, today_fmt is used as the format for a strftime call.
|
# Else, today_fmt is used as the format for a strftime call.
|
||||||
#today_fmt = '%B %d, %Y'
|
# today_fmt = '%B %d, %Y'
|
||||||
|
|
||||||
# List of patterns, relative to source directory, that match files and
|
# List of patterns, relative to source directory, that match files and
|
||||||
# directories to ignore when looking for source files.
|
# directories to ignore when looking for source files.
|
||||||
exclude_patterns = ['_build']
|
exclude_patterns = ["_build"]
|
||||||
|
|
||||||
# The reST default role (used for this markup: `text`) to use for all
|
# The reST default role (used for this markup: `text`) to use for all
|
||||||
# documents.
|
# documents.
|
||||||
#default_role = None
|
# default_role = None
|
||||||
|
|
||||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||||
#add_function_parentheses = True
|
# add_function_parentheses = True
|
||||||
|
|
||||||
# If true, the current module name will be prepended to all description
|
# If true, the current module name will be prepended to all description
|
||||||
# unit titles (such as .. function::).
|
# unit titles (such as .. function::).
|
||||||
#add_module_names = True
|
# add_module_names = True
|
||||||
|
|
||||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||||
# output. They are ignored by default.
|
# output. They are ignored by default.
|
||||||
#show_authors = False
|
# show_authors = False
|
||||||
|
|
||||||
# The name of the Pygments (syntax highlighting) style to use.
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
pygments_style = 'sphinx'
|
pygments_style = "sphinx"
|
||||||
|
|
||||||
# A list of ignored prefixes for module index sorting.
|
# A list of ignored prefixes for module index sorting.
|
||||||
#modindex_common_prefix = []
|
# modindex_common_prefix = []
|
||||||
|
|
||||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||||
#keep_warnings = False
|
# keep_warnings = False
|
||||||
|
|
||||||
|
|
||||||
# -- Options for HTML output ----------------------------------------------
|
# -- Options for HTML output ----------------------------------------------
|
||||||
|
|
||||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||||
# a list of builtin themes.
|
# a list of builtin themes.
|
||||||
html_theme = 'default'
|
html_theme = "default"
|
||||||
|
|
||||||
# Theme options are theme-specific and customize the look and feel of a theme
|
# Theme options are theme-specific and customize the look and feel of a theme
|
||||||
# further. For a list of options available for each theme, see the
|
# further. For a list of options available for each theme, see the
|
||||||
# documentation.
|
# documentation.
|
||||||
#html_theme_options = {}
|
# html_theme_options = {}
|
||||||
|
|
||||||
# Add any paths that contain custom themes here, relative to this directory.
|
# Add any paths that contain custom themes here, relative to this directory.
|
||||||
#html_theme_path = []
|
# html_theme_path = []
|
||||||
|
|
||||||
# The name for this set of Sphinx documents. If None, it defaults to
|
# The name for this set of Sphinx documents. If None, it defaults to
|
||||||
# "<project> v<release> documentation".
|
# "<project> v<release> documentation".
|
||||||
#html_title = None
|
# html_title = None
|
||||||
|
|
||||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||||
#html_short_title = None
|
# html_short_title = None
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top
|
# The name of an image file (relative to this directory) to place at the top
|
||||||
# of the sidebar.
|
# of the sidebar.
|
||||||
#html_logo = None
|
# html_logo = None
|
||||||
|
|
||||||
# The name of an image file (within the static path) to use as favicon of the
|
# The name of an image file (within the static path) to use as favicon of the
|
||||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||||
# pixels large.
|
# pixels large.
|
||||||
#html_favicon = None
|
# html_favicon = None
|
||||||
|
|
||||||
# Add any paths that contain custom static files (such as style sheets) here,
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
# relative to this directory. They are copied after the builtin static files,
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
html_static_path = ['_static']
|
html_static_path = ["_static"]
|
||||||
|
|
||||||
# Add any extra paths that contain custom files (such as robots.txt or
|
# Add any extra paths that contain custom files (such as robots.txt or
|
||||||
# .htaccess) here, relative to this directory. These files are copied
|
# .htaccess) here, relative to this directory. These files are copied
|
||||||
# directly to the root of the documentation.
|
# directly to the root of the documentation.
|
||||||
#html_extra_path = []
|
# html_extra_path = []
|
||||||
|
|
||||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||||
# using the given strftime format.
|
# using the given strftime format.
|
||||||
#html_last_updated_fmt = '%b %d, %Y'
|
# html_last_updated_fmt = '%b %d, %Y'
|
||||||
|
|
||||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||||
# typographically correct entities.
|
# typographically correct entities.
|
||||||
#html_use_smartypants = True
|
# html_use_smartypants = True
|
||||||
|
|
||||||
# Custom sidebar templates, maps document names to template names.
|
# Custom sidebar templates, maps document names to template names.
|
||||||
#html_sidebars = {}
|
# html_sidebars = {}
|
||||||
|
|
||||||
# Additional templates that should be rendered to pages, maps page names to
|
# Additional templates that should be rendered to pages, maps page names to
|
||||||
# template names.
|
# template names.
|
||||||
#html_additional_pages = {}
|
# html_additional_pages = {}
|
||||||
|
|
||||||
# If false, no module index is generated.
|
# If false, no module index is generated.
|
||||||
#html_domain_indices = True
|
# html_domain_indices = True
|
||||||
|
|
||||||
# If false, no index is generated.
|
# If false, no index is generated.
|
||||||
#html_use_index = True
|
# html_use_index = True
|
||||||
|
|
||||||
# If true, the index is split into individual pages for each letter.
|
# If true, the index is split into individual pages for each letter.
|
||||||
#html_split_index = False
|
# html_split_index = False
|
||||||
|
|
||||||
# If true, links to the reST sources are added to the pages.
|
# If true, links to the reST sources are added to the pages.
|
||||||
#html_show_sourcelink = True
|
# html_show_sourcelink = True
|
||||||
|
|
||||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||||
#html_show_sphinx = True
|
# html_show_sphinx = True
|
||||||
|
|
||||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||||
#html_show_copyright = True
|
# html_show_copyright = True
|
||||||
|
|
||||||
# If true, an OpenSearch description file will be output, and all pages will
|
# If true, an OpenSearch description file will be output, and all pages will
|
||||||
# contain a <link> tag referring to it. The value of this option must be the
|
# contain a <link> tag referring to it. The value of this option must be the
|
||||||
# base URL from which the finished HTML is served.
|
# base URL from which the finished HTML is served.
|
||||||
#html_use_opensearch = ''
|
# html_use_opensearch = ''
|
||||||
|
|
||||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||||
#html_file_suffix = None
|
# html_file_suffix = None
|
||||||
|
|
||||||
# Output file base name for HTML help builder.
|
# Output file base name for HTML help builder.
|
||||||
htmlhelp_basename = 'Synapsedoc'
|
htmlhelp_basename = "Synapsedoc"
|
||||||
|
|
||||||
|
|
||||||
# -- Options for LaTeX output ---------------------------------------------
|
# -- Options for LaTeX output ---------------------------------------------
|
||||||
|
|
||||||
latex_elements = {
|
latex_elements = {
|
||||||
# The paper size ('letterpaper' or 'a4paper').
|
# The paper size ('letterpaper' or 'a4paper').
|
||||||
#'papersize': 'letterpaper',
|
#'papersize': 'letterpaper',
|
||||||
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
# The font size ('10pt', '11pt' or '12pt').
|
#'pointsize': '10pt',
|
||||||
#'pointsize': '10pt',
|
# Additional stuff for the LaTeX preamble.
|
||||||
|
#'preamble': '',
|
||||||
# Additional stuff for the LaTeX preamble.
|
|
||||||
#'preamble': '',
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
# (source start file, target name, title,
|
# (source start file, target name, title,
|
||||||
# author, documentclass [howto, manual, or own class]).
|
# author, documentclass [howto, manual, or own class]).
|
||||||
latex_documents = [
|
latex_documents = [("index", "Synapse.tex", "Synapse Documentation", "TNG", "manual")]
|
||||||
('index', 'Synapse.tex', u'Synapse Documentation',
|
|
||||||
u'TNG', 'manual'),
|
|
||||||
]
|
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top of
|
# The name of an image file (relative to this directory) to place at the top of
|
||||||
# the title page.
|
# the title page.
|
||||||
#latex_logo = None
|
# latex_logo = None
|
||||||
|
|
||||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||||
# not chapters.
|
# not chapters.
|
||||||
#latex_use_parts = False
|
# latex_use_parts = False
|
||||||
|
|
||||||
# If true, show page references after internal links.
|
# If true, show page references after internal links.
|
||||||
#latex_show_pagerefs = False
|
# latex_show_pagerefs = False
|
||||||
|
|
||||||
# If true, show URL addresses after external links.
|
# If true, show URL addresses after external links.
|
||||||
#latex_show_urls = False
|
# latex_show_urls = False
|
||||||
|
|
||||||
# Documents to append as an appendix to all manuals.
|
# Documents to append as an appendix to all manuals.
|
||||||
#latex_appendices = []
|
# latex_appendices = []
|
||||||
|
|
||||||
# If false, no module index is generated.
|
# If false, no module index is generated.
|
||||||
#latex_domain_indices = True
|
# latex_domain_indices = True
|
||||||
|
|
||||||
|
|
||||||
# -- Options for manual page output ---------------------------------------
|
# -- Options for manual page output ---------------------------------------
|
||||||
|
|
||||||
# One entry per manual page. List of tuples
|
# One entry per manual page. List of tuples
|
||||||
# (source start file, name, description, authors, manual section).
|
# (source start file, name, description, authors, manual section).
|
||||||
man_pages = [
|
man_pages = [("index", "synapse", "Synapse Documentation", ["TNG"], 1)]
|
||||||
('index', 'synapse', u'Synapse Documentation',
|
|
||||||
[u'TNG'], 1)
|
|
||||||
]
|
|
||||||
|
|
||||||
# If true, show URL addresses after external links.
|
# If true, show URL addresses after external links.
|
||||||
#man_show_urls = False
|
# man_show_urls = False
|
||||||
|
|
||||||
|
|
||||||
# -- Options for Texinfo output -------------------------------------------
|
# -- Options for Texinfo output -------------------------------------------
|
||||||
@ -246,26 +240,32 @@ man_pages = [
|
|||||||
# (source start file, target name, title, author,
|
# (source start file, target name, title, author,
|
||||||
# dir menu entry, description, category)
|
# dir menu entry, description, category)
|
||||||
texinfo_documents = [
|
texinfo_documents = [
|
||||||
('index', 'Synapse', u'Synapse Documentation',
|
(
|
||||||
u'TNG', 'Synapse', 'One line description of project.',
|
"index",
|
||||||
'Miscellaneous'),
|
"Synapse",
|
||||||
|
"Synapse Documentation",
|
||||||
|
"TNG",
|
||||||
|
"Synapse",
|
||||||
|
"One line description of project.",
|
||||||
|
"Miscellaneous",
|
||||||
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
# Documents to append as an appendix to all manuals.
|
# Documents to append as an appendix to all manuals.
|
||||||
#texinfo_appendices = []
|
# texinfo_appendices = []
|
||||||
|
|
||||||
# If false, no module index is generated.
|
# If false, no module index is generated.
|
||||||
#texinfo_domain_indices = True
|
# texinfo_domain_indices = True
|
||||||
|
|
||||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||||
#texinfo_show_urls = 'footnote'
|
# texinfo_show_urls = 'footnote'
|
||||||
|
|
||||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||||
#texinfo_no_detailmenu = False
|
# texinfo_no_detailmenu = False
|
||||||
|
|
||||||
|
|
||||||
# Example configuration for intersphinx: refer to the Python standard library.
|
# Example configuration for intersphinx: refer to the Python standard library.
|
||||||
intersphinx_mapping = {'http://docs.python.org/': None}
|
intersphinx_mapping = {"http://docs.python.org/": None}
|
||||||
|
|
||||||
napoleon_include_special_with_doc = True
|
napoleon_include_special_with_doc = True
|
||||||
napoleon_use_ivar = True
|
napoleon_use_ivar = True
|
||||||
|
@ -239,6 +239,13 @@ be routed to the same instance::
|
|||||||
|
|
||||||
^/_matrix/client/(r0|unstable)/register$
|
^/_matrix/client/(r0|unstable)/register$
|
||||||
|
|
||||||
|
Pagination requests can also be handled, but all requests with the same path
|
||||||
|
room must be routed to the same instance. Additionally, care must be taken to
|
||||||
|
ensure that the purge history admin API is not used while pagination requests
|
||||||
|
for the room are in flight::
|
||||||
|
|
||||||
|
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/messages$
|
||||||
|
|
||||||
|
|
||||||
``synapse.app.user_dir``
|
``synapse.app.user_dir``
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user