Commit e4a5d5d929 by deathrow, 2022-11-01 18:33:11 -04:00
Signed with GPG Key ID FF39D67A22069F73 (no known key found for this signature in database)
5 changed files with 569 additions and 0 deletions

Dockerfile (new file, 85 lines)
@@ -0,0 +1,85 @@
ARG SYNAPSE_VERSION=1.70.1
ARG HARDENED_MALLOC_VERSION=11
ARG UID=991
ARG GID=991
### Build Hardened Malloc
FROM alpine:latest as build-malloc
ARG HARDENED_MALLOC_VERSION
ARG CONFIG_NATIVE=false
ARG VARIANT=default
RUN apk --no-cache add build-base git gnupg && cd /tmp \
&& wget -q https://github.com/thestinger.gpg && gpg --import thestinger.gpg \
&& git clone --depth 1 --branch ${HARDENED_MALLOC_VERSION} https://github.com/GrapheneOS/hardened_malloc \
&& cd hardened_malloc && git verify-tag $(git describe --tags) \
&& make CONFIG_NATIVE=${CONFIG_NATIVE} VARIANT=${VARIANT}
### Build Synapse
FROM python:alpine as builder
ARG SYNAPSE_VERSION
RUN apk -U upgrade \
    && apk add -t build-deps \
        build-base \
        libffi-dev \
        libjpeg-turbo-dev \
        libressl-dev \
        libxslt-dev \
        linux-headers \
        postgresql-dev \
        rustup \
        zlib-dev \
    && rustup-init -y && source $HOME/.cargo/env \
    && pip install --upgrade pip \
    && pip install --prefix="/install" --no-warn-script-location \
        matrix-synapse[all]==${SYNAPSE_VERSION}
### Build Production
FROM python:alpine
ARG UID
ARG GID
RUN apk -U upgrade \
    && apk add -t run-deps \
        libffi \
        libgcc \
        libjpeg-turbo \
        libressl \
        libstdc++ \
        libxslt \
        libpq \
        zlib \
        tzdata \
        xmlsec \
        git \
        curl \
    && addgroup -g ${GID} synapse \
    && adduser -u ${UID} -G synapse --disabled-password --gecos "" synapse \
    && rm -rf /var/cache/apk/*
RUN pip install --upgrade pip \
&& pip install -e "git+https://github.com/matrix-org/mjolnir.git#egg=mjolnir&subdirectory=synapse_antispam"
COPY --from=build-malloc /tmp/hardened_malloc/out/libhardened_malloc.so /usr/local/lib/
COPY --from=builder /install /usr/local
COPY --chown=synapse:synapse rootfs /
ENV LD_PRELOAD="/usr/local/lib/libhardened_malloc.so"
USER synapse
VOLUME /data
EXPOSE 8008/tcp 8009/tcp 8448/tcp
ENTRYPOINT ["python3", "start.py"]
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD curl -fSs http://localhost:8008/health || exit 1
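
A minimal build sketch (the `synapse-hardened` tag is only a placeholder of my choosing; the `--build-arg` values shown simply repeat the defaults declared at the top of this Dockerfile and can be omitted or overridden):

```
docker build \
  --build-arg SYNAPSE_VERSION=1.70.1 \
  --build-arg HARDENED_MALLOC_VERSION=11 \
  --build-arg UID=991 \
  --build-arg GID=991 \
  -t synapse-hardened .
```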

README.md (new file, 11 lines)
@@ -0,0 +1,11 @@
Hardened Docker image for [Matrix Synapse](https://github.com/matrix-org/synapse) with GrapheneOS [hardened_malloc](https://github.com/GrapheneOS/hardened_malloc), based on [Synapse-Docker](https://github.com/tommytran732/Synapse-Docker).
```
git clone https://codeberg.org/deathrow/synapse-docker/
cd synapse-docker
docker build .
```
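
A hedged usage sketch, assuming the image was tagged `synapse-hardened` and `/opt/synapse/data` is a host directory of your choice (it must be writable by the container UID, 991 by default): the entrypoint's `generate` mode writes `homeserver.yaml`, a log config and signing keys into the `/data` volume, after which the container can be started normally on port 8008.

```
# Generate an initial config into the data volume
docker run --rm \
  -v /opt/synapse/data:/data \
  -e SYNAPSE_SERVER_NAME=matrix.example.com \
  -e SYNAPSE_REPORT_STATS=no \
  synapse-hardened generate

# Run Synapse with the generated config
docker run -d --name synapse \
  -v /opt/synapse/data:/data \
  -p 8008:8008 \
  synapse-hardened
```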

rootfs/conf/homeserver.yaml (new file, 200 lines)
@@ -0,0 +1,200 @@
# vim:ft=yaml
## TLS ##
{% if not SYNAPSE_NO_TLS %}
tls_certificate_path: "/data/{{ SYNAPSE_SERVER_NAME }}.tls.crt"
tls_private_key_path: "/data/{{ SYNAPSE_SERVER_NAME }}.tls.key"
{% if SYNAPSE_ACME %}
acme:
  enabled: true
  port: 8009
{% endif %}
{% endif %}
## Server ##
server_name: "{{ SYNAPSE_SERVER_NAME }}"
pid_file: /homeserver.pid
web_client: False
soft_file_limit: 0
log_config: "{{ SYNAPSE_LOG_CONFIG }}"
## Ports ##
listeners:
{% if not SYNAPSE_NO_TLS %}
  -
    port: 8448
    bind_addresses: ['::']
    type: http
    tls: true
    x_forwarded: false
    resources:
      - names: [client]
        compress: true
      - names: [federation]  # Federation APIs
        compress: false
{% endif %}
  # Allow configuring in case we want to reverse proxy 8008
  # using another process in the same container
  - port: {{ SYNAPSE_HTTP_PORT or 8008 }}
    tls: false
    bind_addresses: ['::']
    type: http
    x_forwarded: false
    resources:
      - names: [client]
        compress: true
      - names: [federation]
        compress: false
## Database ##
{% if POSTGRES_PASSWORD %}
database:
  name: "psycopg2"
  args:
    user: "{{ POSTGRES_USER or "synapse" }}"
    password: "{{ POSTGRES_PASSWORD }}"
    database: "{{ POSTGRES_DB or "synapse" }}"
    host: "{{ POSTGRES_HOST or "db" }}"
    port: "{{ POSTGRES_PORT or "5432" }}"
    cp_min: 5
    cp_max: 10
{% else %}
database:
  name: "sqlite3"
  args:
    database: "/data/homeserver.db"
{% endif %}
## Performance ##
event_cache_size: "{{ SYNAPSE_EVENT_CACHE_SIZE or "10K" }}"
## Ratelimiting ##
rc_messages_per_second: 0.2
rc_message_burst_count: 10.0
federation_rc_window_size: 1000
federation_rc_sleep_limit: 10
federation_rc_sleep_delay: 500
federation_rc_reject_limit: 50
federation_rc_concurrent: 3
## Files ##
media_store_path: "/data/media"
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "50M" }}"
max_image_pixels: "32M"
dynamic_thumbnails: false
# List of thumbnails to precalculate when an image is uploaded.
thumbnail_sizes:
  - width: 32
    height: 32
    method: crop
  - width: 96
    height: 96
    method: crop
  - width: 320
    height: 240
    method: scale
  - width: 640
    height: 480
    method: scale
  - width: 800
    height: 600
    method: scale
url_preview_enabled: False
max_spider_size: "10M"
## Captcha ##
{% if SYNAPSE_RECAPTCHA_PUBLIC_KEY %}
recaptcha_public_key: "{{ SYNAPSE_RECAPTCHA_PUBLIC_KEY }}"
recaptcha_private_key: "{{ SYNAPSE_RECAPTCHA_PRIVATE_KEY }}"
enable_registration_captcha: True
recaptcha_siteverify_api: "https://www.google.com/recaptcha/api/siteverify"
{% else %}
recaptcha_public_key: "YOUR_PUBLIC_KEY"
recaptcha_private_key: "YOUR_PRIVATE_KEY"
enable_registration_captcha: False
recaptcha_siteverify_api: "https://www.google.com/recaptcha/api/siteverify"
{% endif %}
## Turn ##
{% if SYNAPSE_TURN_URIS %}
turn_uris:
{% for uri in SYNAPSE_TURN_URIS.split(',') %} - "{{ uri }}"
{% endfor %}
turn_shared_secret: "{{ SYNAPSE_TURN_SECRET }}"
turn_user_lifetime: "1h"
turn_allow_guests: True
{% else %}
turn_uris: []
turn_shared_secret: "YOUR_SHARED_SECRET"
turn_user_lifetime: "1h"
turn_allow_guests: True
{% endif %}
## Registration ##
enable_registration: {{ "True" if SYNAPSE_ENABLE_REGISTRATION else "False" }}
registration_shared_secret: "{{ SYNAPSE_REGISTRATION_SHARED_SECRET }}"
bcrypt_rounds: 12
allow_guest_access: {{ "True" if SYNAPSE_ALLOW_GUEST else "False" }}
enable_group_creation: true
# The list of identity servers trusted to verify third party
# identifiers by this server.
#
# Also defines the ID server which will be called when an account is
# deactivated (one will be picked arbitrarily).
trusted_third_party_id_servers:
- matrix.org
- vector.im
## Metrics ##
{% if SYNAPSE_REPORT_STATS.lower() == "yes" %}
enable_metrics: True
report_stats: True
{% else %}
enable_metrics: False
report_stats: False
{% endif %}
## API Configuration ##
{% if SYNAPSE_APPSERVICES %}
app_service_config_files:
{% for appservice in SYNAPSE_APPSERVICES %} - "{{ appservice }}"
{% endfor %}
{% endif %}
macaroon_secret_key: "{{ SYNAPSE_MACAROON_SECRET_KEY }}"
expire_access_token: False
## Signing Keys ##
signing_key_path: "/data/{{ SYNAPSE_SERVER_NAME }}.signing.key"
old_signing_keys: {}
key_refresh_interval: "1d" # 1 Day.
# The trusted servers to download signing keys from.
trusted_key_servers:
  - server_name: matrix.org
    verify_keys:
      "ed25519:auto": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
password_config:
  enabled: true

rootfs/conf/log.config (new file, 54 lines)
@@ -0,0 +1,54 @@
version: 1

formatters:
  precise:
{% if worker_name %}
    format: '%(asctime)s - worker:{{ worker_name }} - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% else %}
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% endif %}

handlers:
{% if LOG_FILE_PATH %}
  file:
    class: logging.handlers.TimedRotatingFileHandler
    formatter: precise
    filename: {{ LOG_FILE_PATH }}
    when: "midnight"
    backupCount: 6  # Does not include the current log file.
    encoding: utf8

  # Default to buffering writes to log file for efficiency. This means that
  # there will be a delay for INFO/DEBUG logs to get written, but WARNING/ERROR
  # logs will still be flushed immediately.
  buffer:
    class: logging.handlers.MemoryHandler
    target: file
    # The capacity is the number of log lines that are buffered before
    # being written to disk. Increasing this will lead to better
    # performance, at the expense of it taking longer for log lines to
    # be written to disk.
    capacity: 10
    flushLevel: 30  # Flush for WARNING logs as well
{% endif %}

  console:
    class: logging.StreamHandler
    formatter: precise

loggers:
  synapse.storage.SQL:
    # beware: increasing this to DEBUG will make synapse log sensitive
    # information such as access tokens.
    level: INFO

root:
  level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
{% if LOG_FILE_PATH %}
  handlers: [console, buffer]
{% else %}
  handlers: [console]
{% endif %}

disable_existing_loggers: false

rootfs/start.py (new file, 219 lines)
@@ -0,0 +1,219 @@
#!/usr/local/bin/python
import codecs
import glob
import os
import subprocess
import sys
import jinja2
# Utility functions
def log(txt):
    print(txt, file=sys.stderr)


def error(txt):
    log(txt)
    sys.exit(2)


def convert(src, dst, environ):
    """Generate a file from a template

    Args:
        src (str): path to input file
        dst (str): path to file to write
        environ (dict): environment dictionary, for replacement mappings.
    """
    with open(src) as infile:
        template = infile.read()
    rendered = jinja2.Template(template).render(**environ)
    with open(dst, "w") as outfile:
        outfile.write(rendered)

def generate_config_from_template(config_dir, config_path, environ):
    """Generate a homeserver.yaml from environment variables

    Args:
        config_dir (str): where to put generated config files
        config_path (str): where to put the main config file
        environ (dict): environment dictionary
    """
    for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
        if v not in environ:
            error(
                "Environment variable '%s' is mandatory when generating a config file."
                % (v,)
            )

    # populate some params from data files (if they exist, else create new ones)
    environ = environ.copy()
    secrets = {
        "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
        "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
    }

    for name, secret in secrets.items():
        if secret not in environ:
            filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)

            # if the file already exists, load in the existing value; otherwise,
            # generate a new secret and write it to a file
            if os.path.exists(filename):
                log("Reading %s from %s" % (secret, filename))
                with open(filename) as handle:
                    value = handle.read()
            else:
                log("Generating a random secret for {}".format(secret))
                value = codecs.encode(os.urandom(32), "hex").decode()
                with open(filename, "w") as handle:
                    handle.write(value)
            environ[secret] = value

    environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
    if not os.path.exists(config_dir):
        os.mkdir(config_dir)

    # Convert SYNAPSE_NO_TLS to boolean if exists
    if "SYNAPSE_NO_TLS" in environ:
        tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"])
        if tlsanswerstring in ("true", "on", "1", "yes"):
            environ["SYNAPSE_NO_TLS"] = True
        else:
            if tlsanswerstring in ("false", "off", "0", "no"):
                environ["SYNAPSE_NO_TLS"] = False
            else:
                error(
                    'Environment variable "SYNAPSE_NO_TLS" found but value "'
                    + tlsanswerstring
                    + '" unrecognized; exiting.'
                )

    if "SYNAPSE_LOG_CONFIG" not in environ:
        environ["SYNAPSE_LOG_CONFIG"] = config_dir + "/log.config"

    log("Generating synapse config file " + config_path)
    convert("/conf/homeserver.yaml", config_path, environ)

    log_config_file = environ["SYNAPSE_LOG_CONFIG"]
    log("Generating log config file " + log_config_file)
    convert("/conf/log.config", log_config_file, environ)

    # Hopefully we already have a signing key, but generate one if not.
    args = [
        "python",
        "-m",
        "synapse.app.homeserver",
        "--config-path",
        config_path,
        # tell synapse to put generated keys in /data rather than /compiled
        "--keys-directory",
        config_dir,
        "--generate-keys",
    ]
    subprocess.check_output(args)

def run_generate_config(environ):
    """Run synapse with a --generate-config param to generate a template config file

    Args:
        environ (dict): env var dict

    Never returns.
    """
    for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
        if v not in environ:
            error("Environment variable '%s' is mandatory in `generate` mode." % (v,))

    server_name = environ["SYNAPSE_SERVER_NAME"]
    config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
    config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
    data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")

    # create a suitable log config from our template
    log_config_file = "%s/%s.log.config" % (config_dir, server_name)
    if not os.path.exists(log_config_file):
        log("Creating log config %s" % (log_config_file,))
        convert("/conf/log.config", log_config_file, environ)

    args = [
        "python",
        "-m",
        "synapse.app.homeserver",
        "--server-name",
        server_name,
        "--report-stats",
        environ["SYNAPSE_REPORT_STATS"],
        "--config-path",
        config_path,
        "--config-directory",
        config_dir,
        "--data-directory",
        data_dir,
        "--generate-config",
        "--open-private-ports",
    ]
    # log("running %s" % (args, ))
    os.execv("/usr/local/bin/python", args)

def main(args, environ):
    mode = args[1] if len(args) > 1 else None
    synapse_worker = environ.get("SYNAPSE_WORKER", "synapse.app.homeserver")

    # In generate mode, generate a configuration and missing keys, then exit
    if mode == "generate":
        return run_generate_config(environ)

    if mode == "migrate_config":
        # generate a config based on environment vars.
        config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
        config_path = environ.get(
            "SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
        )
        return generate_config_from_template(config_dir, config_path, environ)

    if mode is not None:
        error("Unknown execution mode '%s'" % (mode,))

    config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
    config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")

    if not os.path.exists(config_path):
        if "SYNAPSE_SERVER_NAME" in environ:
            error(
                """\
Config file '%s' does not exist.
The synapse docker image no longer supports generating a config file on-the-fly
based on environment variables. You can migrate to a static config file by
running with 'migrate_config'. See the README for more details.
"""
                % (config_path,)
            )

        error(
            "Config file '%s' does not exist. You should either create a new "
            "config file by running with the `generate` argument (and then edit "
            "the resulting file before restarting) or specify the path to an "
            "existing config file with the SYNAPSE_CONFIG_PATH variable."
            % (config_path,)
        )

    log("Starting synapse with config file " + config_path)

    args = ["python", "-m", synapse_worker, "--config-path", config_path]
    os.execv("/usr/local/bin/python", args)


if __name__ == "__main__":
    main(sys.argv, os.environ)
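
For completeness, the `migrate_config` mode handled above renders `/conf/homeserver.yaml` and `/conf/log.config` from environment variables into the data volume. A hedged invocation sketch (image tag, host path and values are placeholders; `POSTGRES_PASSWORD` is only needed to select the PostgreSQL branch of the template):

```
docker run --rm \
  -v /opt/synapse/data:/data \
  -e SYNAPSE_SERVER_NAME=matrix.example.com \
  -e SYNAPSE_REPORT_STATS=no \
  -e POSTGRES_PASSWORD=changeme \
  synapse-hardened migrate_config
```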