Get rootfs from upstream

Signed-off-by: Tommy <contact@tommytran.io>
Tommy 2024-05-28 01:42:38 -07:00
parent d4ee3cea6e
commit 8576b14906
No known key found for this signature in database
GPG Key ID: 555C902A34EC968F
4 changed files with 14 additions and 571 deletions

View File

@@ -38,6 +38,18 @@ RUN rustup-init -y && source $HOME/.cargo/env \
    matrix-synapse[all]==${SYNAPSE_VERSION}
+### Get RootFS Files
+FROM alpine:latest as rootfs
+ARG SYNAPSE_VERSION
+RUN apk -U upgrade \
+    && apk --no-cache add git
+RUN cd /tmp \
+    && git clone --depth 1 --branch v${SYNAPSE_VERSION} https://github.com/element-hq/synapse
### Build Production
FROM python:${PYTHON_VERSION}-alpine
@@ -58,7 +70,8 @@ RUN pip install --upgrade pip \
COPY --from=build-malloc /tmp/hardened_malloc/out/libhardened_malloc.so /usr/local/lib/
COPY --from=builder /install /usr/local
-COPY --chown=synapse:synapse rootfs /
+COPY --from=rootfs --chown=synapse:synapse /tmp/synapse/start.py /start.py
+COPY --from=rootfs --chown=synapse:synapse /tmp/synapse/conf /conf
ENV LD_PRELOAD="/usr/local/lib/libhardened_malloc.so"
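For context, building this image requires passing the SYNAPSE_VERSION build argument so the new rootfs stage can clone the matching upstream tag. A minimal, hypothetical sketch (helper name, image tag, and version value are illustrative, assuming a local `docker build`):

import subprocess

def build_image(version: str, tag: str = "synapse-hardened:latest") -> None:
    # SYNAPSE_VERSION must correspond to an upstream release, because the
    # rootfs stage runs `git clone --depth 1 --branch v${SYNAPSE_VERSION}`.
    subprocess.run(
        [
            "docker", "build",
            "--build-arg", f"SYNAPSE_VERSION={version}",
            "-t", tag,
            ".",
        ],
        check=True,
    )

if __name__ == "__main__":
    build_image("1.108.0")  # illustrative version only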

View File

@@ -1,194 +0,0 @@
# vim:ft=yaml
## TLS ##
{% if not SYNAPSE_NO_TLS %}
tls_certificate_path: "/data/{{ SYNAPSE_SERVER_NAME }}.tls.crt"
tls_private_key_path: "/data/{{ SYNAPSE_SERVER_NAME }}.tls.key"
{% endif %}
## Server ##
server_name: "{{ SYNAPSE_SERVER_NAME }}"
pid_file: /homeserver.pid
web_client: False
soft_file_limit: 0
log_config: "{{ SYNAPSE_LOG_CONFIG }}"
## Ports ##
listeners:
{% if not SYNAPSE_NO_TLS %}
  -
    port: 8448
    bind_addresses: ['::']
    type: http
    tls: true
    x_forwarded: false
    resources:
      - names: [client]
        compress: true
      - names: [federation]  # Federation APIs
        compress: false
{% endif %}

  # Allow configuring in case we want to reverse proxy 8008
  # using another process in the same container
{% if SYNAPSE_USE_UNIX_SOCKET %}
  # Unix sockets don't care about TLS or IP addresses or ports
  - path: '/run/main_public.sock'
    type: http
{% else %}
  - port: {{ SYNAPSE_HTTP_PORT or 8008 }}
    tls: false
    bind_addresses: ['::']
    type: http
    x_forwarded: false
{% endif %}
    resources:
      - names: [client]
        compress: true
      - names: [federation]
        compress: false

## Database ##

{% if POSTGRES_PASSWORD %}
database:
  name: "psycopg2"
  args:
    user: "{{ POSTGRES_USER or "synapse" }}"
    password: "{{ POSTGRES_PASSWORD }}"
    database: "{{ POSTGRES_DB or "synapse" }}"
{% if not SYNAPSE_USE_UNIX_SOCKET %}
    {# Synapse will use a default unix socket for Postgres when host/port is not specified (behavior from `psycopg2`). #}
    host: "{{ POSTGRES_HOST or "db" }}"
    port: "{{ POSTGRES_PORT or "5432" }}"
{% endif %}
    cp_min: {{ POSTGRES_CP_MIN or 5 }}
    cp_max: {{ POSTGRES_CP_MAX or 10 }}
{% else %}
database:
  name: "sqlite3"
  args:
    database: "/data/homeserver.db"
{% endif %}
## Performance ##
event_cache_size: "{{ SYNAPSE_EVENT_CACHE_SIZE or "10K" }}"
## Ratelimiting ##
rc_messages_per_second: 0.2
rc_message_burst_count: 10.0
federation_rc_window_size: 1000
federation_rc_sleep_limit: 10
federation_rc_sleep_delay: 500
federation_rc_reject_limit: 50
federation_rc_concurrent: 3
## Files ##
media_store_path: "/data/media"
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "50M" }}"
max_image_pixels: "32M"
dynamic_thumbnails: false
# List of thumbnails to precalculate when an image is uploaded.
thumbnail_sizes:
- width: 32
  height: 32
  method: crop
- width: 96
  height: 96
  method: crop
- width: 320
  height: 240
  method: scale
- width: 640
  height: 480
  method: scale
- width: 800
  height: 600
  method: scale
url_preview_enabled: False
max_spider_size: "10M"
## Captcha ##
{% if SYNAPSE_RECAPTCHA_PUBLIC_KEY %}
recaptcha_public_key: "{{ SYNAPSE_RECAPTCHA_PUBLIC_KEY }}"
recaptcha_private_key: "{{ SYNAPSE_RECAPTCHA_PRIVATE_KEY }}"
enable_registration_captcha: True
recaptcha_siteverify_api: "https://www.google.com/recaptcha/api/siteverify"
{% else %}
recaptcha_public_key: "YOUR_PUBLIC_KEY"
recaptcha_private_key: "YOUR_PRIVATE_KEY"
enable_registration_captcha: False
recaptcha_siteverify_api: "https://www.google.com/recaptcha/api/siteverify"
{% endif %}
## Turn ##
{% if SYNAPSE_TURN_URIS %}
turn_uris:
{% for uri in SYNAPSE_TURN_URIS.split(',') %} - "{{ uri }}"
{% endfor %}
turn_shared_secret: "{{ SYNAPSE_TURN_SECRET }}"
turn_user_lifetime: "1h"
turn_allow_guests: True
{% else %}
turn_uris: []
turn_shared_secret: "YOUR_SHARED_SECRET"
turn_user_lifetime: "1h"
turn_allow_guests: True
{% endif %}
## Registration ##
enable_registration: {{ "True" if SYNAPSE_ENABLE_REGISTRATION else "False" }}
registration_shared_secret: "{{ SYNAPSE_REGISTRATION_SHARED_SECRET }}"
bcrypt_rounds: 12
allow_guest_access: {{ "True" if SYNAPSE_ALLOW_GUEST else "False" }}
enable_group_creation: true
## Metrics ##
{% if SYNAPSE_REPORT_STATS.lower() == "yes" %}
enable_metrics: True
report_stats: True
{% else %}
enable_metrics: False
report_stats: False
{% endif %}
## API Configuration ##
{% if SYNAPSE_APPSERVICES %}
app_service_config_files:
{% for appservice in SYNAPSE_APPSERVICES %} - "{{ appservice }}"
{% endfor %}
{% endif %}
macaroon_secret_key: "{{ SYNAPSE_MACAROON_SECRET_KEY }}"
expire_access_token: False
## Signing Keys ##
signing_key_path: "/data/{{ SYNAPSE_SERVER_NAME }}.signing.key"
old_signing_keys: {}
key_refresh_interval: "1d" # 1 Day.
# The trusted servers to download signing keys from.
trusted_key_servers:
  - server_name: matrix.org
    verify_keys:
      "ed25519:auto": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"

password_config:
  enabled: true
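For reference, the template above is rendered with Jinja2 by the convert() helper in start.py (shown further down). A minimal sketch of that rendering, with illustrative variable values and the template read from the current directory rather than the image's /conf path:

import jinja2

# Illustrative values only; start.py normally passes the container's
# environment (all strings) and converts SYNAPSE_NO_TLS to a bool itself.
environ = {
    "SYNAPSE_SERVER_NAME": "matrix.example.org",
    "SYNAPSE_REPORT_STATS": "no",
    "SYNAPSE_NO_TLS": True,
    "SYNAPSE_LOG_CONFIG": "/compiled/log.config",
    "POSTGRES_PASSWORD": "changeme",
    "SYNAPSE_USE_UNIX_SOCKET": False,
}

# In the image this file lives at /conf/homeserver.yaml.
with open("homeserver.yaml") as infile:
    template = jinja2.Template(infile.read())

# Unset variables render as Jinja2's falsy Undefined, so expressions like
# {{ SYNAPSE_EVENT_CACHE_SIZE or "10K" }} fall back to their defaults.
print(template.render(**environ))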

View File

@@ -1,91 +0,0 @@
version: 1

formatters:
  precise:
{% if include_worker_name_in_log_line %}
    format: '{{ worker_name }} | %(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% else %}
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% endif %}

handlers:
{% if LOG_FILE_PATH %}
  file:
    class: logging.handlers.TimedRotatingFileHandler
    formatter: precise
    filename: {{ LOG_FILE_PATH }}
    when: "midnight"
    backupCount: 6  # Does not include the current log file.
    encoding: utf8

  # Default to buffering writes to log file for efficiency.
  # WARNING/ERROR logs will still be flushed immediately, but there will be a
  # delay (of up to `period` seconds, or until the buffer is full with
  # `capacity` messages) before INFO/DEBUG logs get written.
  buffer:
    class: synapse.logging.handlers.PeriodicallyFlushingMemoryHandler
    target: file

    # The capacity is the maximum number of log lines that are buffered
    # before being written to disk. Increasing this will lead to better
    # performance, at the expense of it taking longer for log lines to
    # be written to disk.
    # This parameter is required.
    capacity: 10

    # Logs with a level at or above the flush level will cause the buffer to
    # be flushed immediately.
    # Default value: 40 (ERROR)
    # Other values: 50 (CRITICAL), 30 (WARNING), 20 (INFO), 10 (DEBUG)
    flushLevel: 30  # Flush immediately for WARNING logs and higher

    # The period of time, in seconds, between forced flushes.
    # Messages will not be delayed for longer than this time.
    # Default value: 5 seconds
    period: 5
{% endif %}

  console:
    class: logging.StreamHandler
    formatter: precise

loggers:
  # This is just here so we can leave `loggers` in the config regardless of whether
  # we configure other loggers below (avoid empty yaml dict error).
  _placeholder:
    level: "INFO"

{% if not SYNAPSE_LOG_SENSITIVE %}
{#
  If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
  so that DEBUG entries (containing sensitive information) are not emitted.
#}
  synapse.storage.SQL:
    # beware: increasing this to DEBUG will make synapse log sensitive
    # information such as access tokens.
    level: INFO
{% endif %}

{% if SYNAPSE_LOG_TESTING %}
{#
  If Synapse is under test, log a few more useful things for a developer
  attempting to debug something particularly tricky.

  With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe
  unexpectedly) filtered out of responses in tests. It's just nice to be able to
  look at the CI log and figure out why an event isn't being returned.
#}
  synapse.visibility.filtered_event_debug:
    level: DEBUG
{% endif %}

root:
  level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}

{% if LOG_FILE_PATH %}
  handlers: [console, buffer]
{% else %}
  handlers: [console]
{% endif %}

disable_existing_loggers: false
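The buffering described in the comments above can be illustrated with the standard library's MemoryHandler, used here as a stand-in for Synapse's PeriodicallyFlushingMemoryHandler (which additionally forces a flush every `period` seconds); the log path and values are illustrative only:

import logging
import logging.handlers

file_handler = logging.FileHandler("/tmp/homeserver.log")  # illustrative path
file_handler.setFormatter(
    logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
)

# capacity=10 buffers up to 10 records; flushLevel=WARNING flushes the buffer
# as soon as a WARNING (or higher) record arrives, matching flushLevel: 30 above.
buffered = logging.handlers.MemoryHandler(
    capacity=10, flushLevel=logging.WARNING, target=file_handler
)

logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)
logger.addHandler(buffered)

logger.info("buffered until capacity or flushLevel is reached")
logger.warning("flushed immediately, together with the buffered INFO record")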

View File

@@ -1,285 +0,0 @@
#!/usr/local/bin/python
import codecs
import glob
import os
import platform
import subprocess
import sys
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
import jinja2
# Utility functions
def log(txt: str) -> None:
    print(txt)


def error(txt: str) -> NoReturn:
    print(txt, file=sys.stderr)
    sys.exit(2)


def flush_buffers() -> None:
    sys.stdout.flush()
    sys.stderr.flush()


def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
    """Generate a file from a template

    Args:
        src: path to input file
        dst: path to file to write
        environ: environment dictionary, for replacement mappings.
    """
    with open(src) as infile:
        template = infile.read()
    rendered = jinja2.Template(template).render(**environ)
    with open(dst, "w") as outfile:
        outfile.write(rendered)


def generate_config_from_template(
    config_dir: str,
    config_path: str,
    os_environ: Mapping[str, str],
    ownership: Optional[str],
) -> None:
    """Generate a homeserver.yaml from environment variables

    Args:
        config_dir: where to put generated config files
        config_path: where to put the main config file
        os_environ: environment mapping
        ownership: "<user>:<group>" string which will be used to set
            ownership of the generated configs. If None, ownership will not change.
    """
    for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
        if v not in os_environ:
            error(
                "Environment variable '%s' is mandatory when generating a config file."
                % (v,)
            )

    # populate some params from data files (if they exist, else create new ones)
    environ: Dict[str, Any] = dict(os_environ)
    secrets = {
        "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
        "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
    }

    for name, secret in secrets.items():
        if secret not in environ:
            filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)

            # if the file already exists, load in the existing value; otherwise,
            # generate a new secret and write it to a file
            if os.path.exists(filename):
                log("Reading %s from %s" % (secret, filename))
                with open(filename) as handle:
                    value = handle.read()
            else:
                log(f"Generating a random secret for {secret}")
                value = codecs.encode(os.urandom(32), "hex").decode()
                with open(filename, "w") as handle:
                    handle.write(value)
            environ[secret] = value

    environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
    if not os.path.exists(config_dir):
        os.mkdir(config_dir)

    # Convert SYNAPSE_NO_TLS to boolean if exists
    if "SYNAPSE_NO_TLS" in environ:
        tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"])
        if tlsanswerstring in ("true", "on", "1", "yes"):
            environ["SYNAPSE_NO_TLS"] = True
        else:
            if tlsanswerstring in ("false", "off", "0", "no"):
                environ["SYNAPSE_NO_TLS"] = False
            else:
                error(
                    'Environment variable "SYNAPSE_NO_TLS" found but value "'
                    + tlsanswerstring
                    + '" unrecognized; exiting.'
                )

    if "SYNAPSE_LOG_CONFIG" not in environ:
        environ["SYNAPSE_LOG_CONFIG"] = config_dir + "/log.config"

    log("Generating synapse config file " + config_path)
    convert("/conf/homeserver.yaml", config_path, environ)

    log_config_file = environ["SYNAPSE_LOG_CONFIG"]
    log("Generating log config file " + log_config_file)
    convert(
        "/conf/log.config",
        log_config_file,
        {**environ, "include_worker_name_in_log_line": False},
    )

    # Hopefully we already have a signing key, but generate one if not.
    args = [
        sys.executable,
        "-m",
        "synapse.app.homeserver",
        "--config-path",
        config_path,
        # tell synapse to put generated keys in /data rather than /compiled
        "--keys-directory",
        config_dir,
        "--generate-keys",
    ]

    if ownership is not None:
        log(f"Setting ownership on /data to {ownership}")
        subprocess.run(["chown", "-R", ownership, "/data"], check=True)
        args = ["gosu", ownership] + args

    subprocess.run(args, check=True)
def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
    """Run synapse with a --generate-config param to generate a template config file

    Args:
        environ: env vars from `os.environ`.
        ownership: "userid:groupid" arg for chown. If None, ownership will not change.

    Never returns.
    """
    for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
        if v not in environ:
            error("Environment variable '%s' is mandatory in `generate` mode." % (v,))

    server_name = environ["SYNAPSE_SERVER_NAME"]
    config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
    config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
    data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")

    # create a suitable log config from our template
    log_config_file = "%s/%s.log.config" % (config_dir, server_name)
    if not os.path.exists(log_config_file):
        log("Creating log config %s" % (log_config_file,))
        convert("/conf/log.config", log_config_file, environ)

    # generate the main config file, and a signing key.
    args = [
        sys.executable,
        "-m",
        "synapse.app.homeserver",
        "--server-name",
        server_name,
        "--report-stats",
        environ["SYNAPSE_REPORT_STATS"],
        "--config-path",
        config_path,
        "--config-directory",
        config_dir,
        "--data-directory",
        data_dir,
        "--generate-config",
        "--open-private-ports",
    ]

    if ownership is not None:
        # make sure that synapse has perms to write to the data dir.
        log(f"Setting ownership on {data_dir} to {ownership}")
        subprocess.run(["chown", ownership, data_dir], check=True)
        args = ["gosu", ownership] + args

    # log("running %s" % (args, ))
    subprocess.run(args, check=True)
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    mode = args[1] if len(args) > 1 else "run"

    # if we were given an explicit user to switch to, do so
    ownership = None
    if "UID" in environ:
        desired_uid = int(environ["UID"])
        desired_gid = int(environ.get("GID", "991"))
        ownership = f"{desired_uid}:{desired_gid}"
    elif os.getuid() == 0:
        # otherwise, if we are running as root, use user 991
        ownership = "991:991"

    synapse_worker = environ.get("SYNAPSE_WORKER", "synapse.app.homeserver")

    # In generate mode, generate a configuration and missing keys, then exit
    if mode == "generate":
        return run_generate_config(environ, ownership)

    if mode == "migrate_config":
        # generate a config based on environment vars.
        config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
        config_path = environ.get(
            "SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
        )
        return generate_config_from_template(
            config_dir, config_path, environ, ownership
        )

    if mode != "run":
        error("Unknown execution mode '%s'" % (mode,))

    args = args[2:]

    if "-m" not in args:
        args = ["-m", synapse_worker] + args

    jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)

    if os.path.isfile(jemallocpath):
        environ["LD_PRELOAD"] = jemallocpath
    else:
        log("Could not find %s, will not use" % (jemallocpath,))

    # if there are no config files passed to synapse, try adding the default file
    if not any(p.startswith(("--config-path", "-c")) for p in args):
        config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
        config_path = environ.get(
            "SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
        )

        if not os.path.exists(config_path):
            if "SYNAPSE_SERVER_NAME" in environ:
                error(
                    """\
Config file '%s' does not exist.

The synapse docker image no longer supports generating a config file on-the-fly
based on environment variables. You can migrate to a static config file by
running with 'migrate_config'. See the README for more details.
"""
                    % (config_path,)
                )

            error(
                "Config file '%s' does not exist. You should either create a new "
                "config file by running with the `generate` argument (and then edit "
                "the resulting file before restarting) or specify the path to an "
                "existing config file with the SYNAPSE_CONFIG_PATH variable."
                % (config_path,)
            )
        args += ["--config-path", config_path]

    log("Starting synapse with args " + " ".join(args))

    args = [sys.executable] + args
    if ownership is not None:
        args = ["gosu", ownership] + args
        flush_buffers()
        os.execve("/usr/sbin/gosu", args, environ)
    else:
        flush_buffers()
        os.execve(sys.executable, args, environ)


if __name__ == "__main__":
    main(sys.argv, os.environ)
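As a usage sketch, the entrypoint's `generate` mode can be driven from the host roughly as follows, assuming the image's entrypoint invokes /start.py; the image tag, volume path, and variable values are hypothetical:

import subprocess

subprocess.run(
    [
        "docker", "run", "--rm",
        "-v", "/srv/synapse-data:/data",                 # persisted config, keys, media
        "-e", "SYNAPSE_SERVER_NAME=matrix.example.org",  # mandatory in generate mode
        "-e", "SYNAPSE_REPORT_STATS=no",                 # mandatory in generate mode
        "synapse-hardened:latest",                       # hypothetical image tag
        "generate",                                      # becomes sys.argv[1] in main()
    ],
    check=True,
)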