Manually generate the default config YAML and remove most of the command-line arguments from synapse, anticipating that people will use the YAML instead. Simplify implementing config options by not requiring the classes to call the super class

Mark Haines 2015-04-30 04:24:44 +01:00
parent 109c8aafd2
commit d624e2a638
17 changed files with 455 additions and 476 deletions
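The core of the simplification is the new invoke_all helper on the base Config class: it walks the class's MRO and calls each class's own copy of a hook (default_config, read_config, generate_keys, add_arguments), so the per-option config classes no longer need to chain to the super class. Below is a minimal sketch of that pattern; the Example* classes and their options are illustrative stand-ins, not the real synapse config classes.

import yaml
from textwrap import dedent

class Config(object):
    def invoke_all(self, name, *args, **kargs):
        # Call `name` on every class in the MRO that defines it directly,
        # so subclasses never have to call super().
        results = []
        for cls in type(self).mro():
            if name in cls.__dict__:
                results.append(getattr(cls, name)(self, *args, **kargs))
        return results

    def generate_config(self, config_dir_path, server_name):
        # Stitch each class's default_config fragment into one YAML document.
        return "\n\n".join(
            dedent(conf)
            for conf in self.invoke_all("default_config", config_dir_path, server_name)
        )

class ExampleServerConfig(Config):
    def read_config(self, config):
        self.bind_port = config["bind_port"]

    def default_config(self, config_dir_path, server_name):
        return """\
        # Port to listen for HTTPS requests on
        bind_port: 8448
        """

class ExampleLoggingConfig(Config):
    def read_config(self, config):
        self.log_file = config.get("log_file")

    def default_config(self, config_dir_path, server_name):
        return """\
        # File to write logging to
        log_file: "homeserver.log"
        """

class ExampleHomeServerConfig(ExampleServerConfig, ExampleLoggingConfig):
    pass

if __name__ == "__main__":
    example = ExampleHomeServerConfig()
    generated = example.generate_config("/tmp", "localhost")
    example.invoke_all("read_config", yaml.safe_load(generated))
    print(generated)

Each default_config fragment is plain YAML text, so the generated string can be written straight to the config file and also parsed back with yaml to drive the generate_keys hooks, which is what the real Config.generate_config in the diff below does.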

View File

@ -16,30 +16,29 @@ if [ $# -eq 1 ]; then
fi
fi
export PYTHONPATH=$(readlink -f $(pwd))
echo $PYTHONPATH
for port in 8080 8081 8082; do
echo "Starting server on port $port... "
https_port=$((port + 400))
mkdir -p demo/$port
pushd demo/$port
rm $DIR/etc/$port.config
python -m synapse.app.homeserver \
--generate-config "localhost:$https_port" \
--config-path "$DIR/etc/$port.config" \
python -m synapse.app.homeserver \
--generate-config \
--config-path "demo/etc/$port.config" \
-p "$https_port" \
--unsecure-port "$port" \
-H "localhost:$https_port" \
-f "$DIR/$port.log" \
-d "$DIR/$port.db" \
-D --pid-file "$DIR/$port.pid" \
--manhole $((port + 1000)) \
--tls-dh-params-path "demo/demo.tls.dh" \
--media-store-path "demo/media_store.$port" \
$PARAMS $SYNAPSE_PARAMS \
--enable-registration
python -m synapse.app.homeserver \
--config-path "demo/etc/$port.config" \
--config-path "$DIR/etc/$port.config" \
-D \
-vv \
popd
done
cd "$CWD"

View File

@ -394,7 +394,6 @@ def setup(config_options):
config.server_name,
domain_with_port=domain_with_port,
upload_dir=os.path.abspath("uploads"),
db_name=config.database_path,
db_config=config.database_config,
tls_context_factory=tls_context_factory,
config=config,
@ -407,9 +406,8 @@ def setup(config_options):
redirect_root_to_web_client=True,
)
db_name = hs.get_db_name()
logger.info("Preparing database: %s...", db_name)
logger.info("Preparing database: %r...", config.database_config)
try:
db_conn = database_engine.module.connect(
@ -431,7 +429,7 @@ def setup(config_options):
)
sys.exit(1)
logger.info("Database prepared in %s.", db_name)
logger.info("Database prepared in %r.", config.database_config)
if config.manhole:
f = twisted.manhole.telnet.ShellFactory()

View File

@ -14,9 +14,10 @@
# limitations under the License.
import argparse
import sys
import os
import yaml
import sys
from textwrap import dedent
class ConfigError(Exception):
@ -24,8 +25,6 @@ class ConfigError(Exception):
class Config(object):
def __init__(self, args):
pass
@staticmethod
def parse_size(string):
@ -37,6 +36,22 @@ class Config(object):
size = sizes[suffix]
return int(string) * size
@staticmethod
def parse_duration(string):
second = 1000
hour = 60 * 60 * second
day = 24 * hour
week = 7 * day
year = 365 * day
sizes = {"s": second, "h": hour, "d": day, "w": week, "y": year}
size = 1
suffix = string[-1]
if suffix in sizes:
string = string[:-1]
size = sizes[suffix]
return int(string) * size
@staticmethod
def abspath(file_path):
return os.path.abspath(file_path) if file_path else file_path
@ -77,17 +92,6 @@ class Config(object):
with open(file_path) as file_stream:
return file_stream.read()
@classmethod
def read_yaml_file(cls, file_path, config_name):
cls.check_file(file_path, config_name)
with open(file_path) as file_stream:
try:
return yaml.load(file_stream)
except:
raise ConfigError(
"Error parsing yaml in file %r" % (file_path,)
)
@staticmethod
def default_path(name):
return os.path.abspath(os.path.join(os.path.curdir, name))
@ -97,16 +101,33 @@ class Config(object):
with open(file_path) as file_stream:
return yaml.load(file_stream)
@classmethod
def add_arguments(cls, parser):
pass
def invoke_all(self, name, *args, **kargs):
results = []
for cls in type(self).mro():
if name in cls.__dict__:
results.append(getattr(cls, name)(self, *args, **kargs))
return results
@classmethod
def generate_config(cls, args, config_dir_path):
pass
def generate_config(self, config_dir_path, server_name):
default_config = "# vim:ft=yaml\n"
default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
"default_config", config_dir_path, server_name
))
config = yaml.load(default_config)
if not os.path.exists(config_dir_path):
os.makedirs(config_dir_path)
self.invoke_all("generate_keys", config)
return default_config
@classmethod
def load_config(cls, description, argv, generate_section=None):
result = cls()
config_parser = argparse.ArgumentParser(add_help=False)
config_parser.add_argument(
"-c", "--config-path",
@ -115,66 +136,56 @@ class Config(object):
)
config_parser.add_argument(
"--generate-config",
action="store_true",
help="Generate config file"
metavar="SERVER_NAME",
help="Generate a config file for the server name"
)
config_args, remaining_args = config_parser.parse_known_args(argv)
if not config_args.config_path:
config_parser.error(
"Must supply a config file.\nA config file can be automatically"
" generated using \"--generate-config SERVER_NAME"
" -c CONFIG-FILE\""
)
if config_args.generate_config:
if not config_args.config_path:
config_parser.error(
"Must specify where to generate the config file"
server_name = config_args.generate_config
config_path = config_args.config_path
if os.path.exists(config_path):
print "Config file %r already exists. Not overwriting" % (
config_args.config_path
)
config_dir_path = os.path.dirname(config_args.config_path)
if os.path.exists(config_args.config_path):
defaults = cls.read_config_file(config_args.config_path)
else:
defaults = {}
else:
if config_args.config_path:
defaults = cls.read_config_file(config_args.config_path)
else:
defaults = {}
parser = argparse.ArgumentParser(
parents=[config_parser],
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
cls.add_arguments(parser)
parser.set_defaults(**defaults)
args = parser.parse_args(remaining_args)
if config_args.generate_config:
sys.exit(0)
config_dir_path = os.path.dirname(config_args.config_path)
config_dir_path = os.path.abspath(config_dir_path)
if not os.path.exists(config_dir_path):
os.makedirs(config_dir_path)
cls.generate_config(args, config_dir_path)
config = {}
for key, value in vars(args).items():
if (key not in set(["config_path", "generate_config"])
and value is not None):
config[key] = value
with open(config_args.config_path, "w") as config_file:
# TODO(mark/paul) We might want to output emacs-style mode
# markers as well as vim-style mode markers into the file,
# to further hint to people this is a YAML file.
config_file.write("# vim:ft=yaml\n")
yaml.dump(config, config_file, default_flow_style=False)
with open(config_path, "wb") as config_file:
config_file.write(
result.generate_config(config_dir_path, server_name)
)
print (
"A config file has been generated in %s for server name"
" '%s' with corresponding SSL keys and self-signed"
" certificates. Please review this file and customise it to"
" your needs."
) % (
config_args.config_path, config['server_name']
)
) % (config_path, server_name)
print (
"If this server name is incorrect, you will need to regenerate"
" the SSL certificates"
)
sys.exit(0)
return cls(args)
config = cls.read_config_file(config_args.config_path)
result.invoke_all("read_config", config)
parser = argparse.ArgumentParser(
parents=[config_parser],
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
result.invoke_all("add_arguments", parser)
args = parser.parse_args(remaining_args)
result.invoke_all("read_arguments", args)
return result

View File

@ -17,15 +17,11 @@ from ._base import Config
class AppServiceConfig(Config):
def __init__(self, args):
super(AppServiceConfig, self).__init__(args)
self.app_service_config_files = args.app_service_config_files
def read_config(self, config):
self.app_service_config_files = config.get("app_service_config_files", [])
@classmethod
def add_arguments(cls, parser):
super(AppServiceConfig, cls).add_arguments(parser)
group = parser.add_argument_group("appservice")
group.add_argument(
"--app-service-config-files", type=str, nargs='+',
help="A list of application service config files to use."
)
def default_config(cls, config_dir_path, server_name):
return """\
# A list of application service config files to use
app_service_config_files: []
"""

View File

@ -17,40 +17,34 @@ from ._base import Config
class CaptchaConfig(Config):
def __init__(self, args):
super(CaptchaConfig, self).__init__(args)
self.recaptcha_private_key = args.recaptcha_private_key
self.recaptcha_public_key = args.recaptcha_public_key
self.enable_registration_captcha = args.enable_registration_captcha
def read_config(self, config):
self.recaptcha_private_key = config["recaptcha_private_key"]
self.recaptcha_public_key = config["recaptcha_public_key"]
self.enable_registration_captcha = config["enable_registration_captcha"]
self.captcha_ip_origin_is_x_forwarded = (
args.captcha_ip_origin_is_x_forwarded
config["captcha_ip_origin_is_x_forwarded"]
)
self.captcha_bypass_secret = args.captcha_bypass_secret
self.captcha_bypass_secret = config.get("captcha_bypass_secret")
@classmethod
def add_arguments(cls, parser):
super(CaptchaConfig, cls).add_arguments(parser)
group = parser.add_argument_group("recaptcha")
group.add_argument(
"--recaptcha-public-key", type=str, default="YOUR_PUBLIC_KEY",
help="This Home Server's ReCAPTCHA public key."
)
group.add_argument(
"--recaptcha-private-key", type=str, default="YOUR_PRIVATE_KEY",
help="This Home Server's ReCAPTCHA private key."
)
group.add_argument(
"--enable-registration-captcha", type=bool, default=False,
help="Enables ReCaptcha checks when registering, preventing signup"
+ " unless a captcha is answered. Requires a valid ReCaptcha "
+ "public/private key."
)
group.add_argument(
"--captcha_ip_origin_is_x_forwarded", type=bool, default=False,
help="When checking captchas, use the X-Forwarded-For (XFF) header"
+ " as the client IP and not the actual client IP."
)
group.add_argument(
"--captcha_bypass_secret", type=str,
help="A secret key used to bypass the captcha test entirely."
)
def default_config(self, config_dir_path, server_name):
return """\
## Captcha ##
# This Home Server's ReCAPTCHA public key.
recaptcha_public_key: "YOUR_PUBLIC_KEY"
# This Home Server's ReCAPTCHA private key.
recaptcha_private_key: "YOUR_PRIVATE_KEY"
# Enables ReCaptcha checks when registering, preventing signup
# unless a captcha is answered. Requires a valid ReCaptcha
# public/private key.
enable_registration_captcha: False
# When checking captchas, use the X-Forwarded-For (XFF) header
# as the client IP and not the actual client IP.
captcha_ip_origin_is_x_forwarded: False
# A secret key used to bypass the captcha test entirely.
captcha_bypass_secret: ~
"""

View File

@ -14,28 +14,21 @@
# limitations under the License.
from ._base import Config
import os
import yaml
class DatabaseConfig(Config):
def __init__(self, args):
super(DatabaseConfig, self).__init__(args)
if args.database_path == ":memory:":
self.database_path = ":memory:"
else:
self.database_path = self.abspath(args.database_path)
self.event_cache_size = self.parse_size(args.event_cache_size)
if args.database_config:
with open(args.database_config) as f:
self.database_config = yaml.safe_load(f)
else:
def read_config(self, config):
self.event_cache_size = self.parse_size(
config.get("event_cache_size", "10K")
)
self.database_config = config.get("database")
if self.database_config is None:
self.database_config = {
"name": "sqlite3",
"args": {
"database": self.database_path,
},
"args": {},
}
name = self.database_config.get("name", None)
@ -50,24 +43,36 @@ class DatabaseConfig(Config):
else:
raise RuntimeError("Unsupported database type '%s'" % (name,))
@classmethod
def add_arguments(cls, parser):
super(DatabaseConfig, cls).add_arguments(parser)
self.set_databasepath(config.get("database_path"))
def default_config(self, config_dir_path, server_name):
database_path = self.abspath("homeserver.db")
return """\
# Database configuration
database:
# The database engine name
name: "sqlite3"
# Arguments to pass to the engine
args:
# Path to the database
database: "%(database_path)s"
# Number of events to cache in memory.
event_cache_size: "10K"
""" % locals()
def read_arguments(self, args):
self.set_databasepath(args.database_path)
def set_databasepath(self, database_path):
if database_path != ":memory:":
database_path = self.abspath(database_path)
if self.database_config.get("name", None) == "sqlite3":
if database_path is not None:
self.database_config["database"] = database_path
def add_arguments(self, parser):
db_group = parser.add_argument_group("database")
db_group.add_argument(
"-d", "--database-path", default="homeserver.db",
metavar="SQLITE_DATABASE_PATH", help="The database name."
"-d", "--database-path", metavar="SQLITE_DATABASE_PATH",
help="The path to a sqlite database to use."
)
db_group.add_argument(
"--event-cache-size", default="100K",
help="Number of events to cache in memory."
)
db_group.add_argument(
"--database-config", default=None,
help="Location of the database configuration file."
)
@classmethod
def generate_config(cls, args, config_dir_path):
super(DatabaseConfig, cls).generate_config(args, config_dir_path)
args.database_path = os.path.abspath(args.database_path)

View File

@ -36,4 +36,6 @@ class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
if __name__ == '__main__':
import sys
HomeServerConfig.load_config("Generate config", sys.argv[1:], "HomeServer")
sys.stdout.write(
HomeServerConfig().generate_config(sys.argv[1], sys.argv[2])
)

View File

@ -24,44 +24,53 @@ from syutil.base64util import decode_base64
class KeyConfig(Config):
def __init__(self, args):
super(KeyConfig, self).__init__(args)
self.signing_key = self.read_signing_key(args.signing_key_path)
def read_config(self, config):
self.signing_key = self.read_signing_key(config["signing_key_path"])
self.old_signing_keys = self.read_old_signing_keys(
args.old_signing_key_path
config["old_signing_keys"]
)
self.key_refresh_interval = self.parse_duration(
config["key_refresh_interval"]
)
self.key_refresh_interval = args.key_refresh_interval
self.perspectives = self.read_perspectives(
args.perspectives_config_path
config["perspectives"]
)
@classmethod
def add_arguments(cls, parser):
super(KeyConfig, cls).add_arguments(parser)
key_group = parser.add_argument_group("keys")
key_group.add_argument("--signing-key-path",
help="The signing key to sign messages with")
key_group.add_argument("--old-signing-key-path",
help="The keys that the server used to sign"
" sign messages with but won't use"
" to sign new messages. E.g. it has"
" lost its private key")
key_group.add_argument("--key-refresh-interval",
default=24 * 60 * 60 * 1000, # 1 Day
help="How long a key response is valid for."
" Used to set the exipiry in /key/v2/."
" Controls how frequently servers will"
" query what keys are still valid")
key_group.add_argument("--perspectives-config-path",
help="The trusted servers to download signing"
" keys from")
def default_config(self, config_dir_path, server_name):
base_key_name = os.path.join(config_dir_path, server_name)
return """\
## Signing Keys ##
def read_perspectives(self, perspectives_config_path):
config = self.read_yaml_file(
perspectives_config_path, "perspectives_config_path"
)
# Path to the signing key to sign messages with
signing_key_path: "%(base_key_name)s.signing.key"
# The keys that the server used to sign messages with but won't use
# to sign new messages. E.g. it has lost its private key
old_signing_keys: {}
# "ed25519:auto":
# # Base64 encoded public key
# key: "The public part of your old signing key."
# # Millisecond POSIX timestamp when the key expired.
# expired_ts: 123456789123
# How long key response published by this server is valid for.
# Used to set the valid_until_ts in /key/v2 APIs.
# Determines how quickly servers will query to check which keys
# are still valid.
key_refresh_interval: "1d" # 1 Day.
# The trusted servers to download signing keys from.
perspectives:
servers:
"matrix.org":
verify_keys:
"ed25519:auto":
key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
""" % locals()
def read_perspectives(self, perspectives_config):
servers = {}
for server_name, server_config in config["servers"].items():
for server_name, server_config in perspectives_config["servers"].items():
for key_id, key_data in server_config["verify_keys"].items():
if is_signing_algorithm_supported(key_id):
key_base64 = key_data["key"]
@ -82,37 +91,31 @@ class KeyConfig(Config):
" Try running again with --generate-config"
)
def read_old_signing_keys(self, old_signing_key_path):
old_signing_keys = self.read_file(
old_signing_key_path, "old_signing_key"
)
try:
return syutil.crypto.signing_key.read_old_signing_keys(
old_signing_keys.splitlines(True)
)
except Exception:
raise ConfigError(
"Error reading old signing keys."
)
def read_old_signing_keys(self, old_signing_keys):
keys = {}
for key_id, key_data in old_signing_keys.items():
if is_signing_algorithm_supported(key_id):
key_base64 = key_data["key"]
key_bytes = decode_base64(key_base64)
verify_key = decode_verify_key_bytes(key_id, key_bytes)
verify_key.expired_ts = key_data["expired_ts"]
keys[key_id] = verify_key
else:
raise ConfigError(
"Unsupported signing algorithm for old key: %r" % (key_id,)
)
return keys
@classmethod
def generate_config(cls, args, config_dir_path):
super(KeyConfig, cls).generate_config(args, config_dir_path)
base_key_name = os.path.join(config_dir_path, args.server_name)
args.pid_file = os.path.abspath(args.pid_file)
if not args.signing_key_path:
args.signing_key_path = base_key_name + ".signing.key"
if not os.path.exists(args.signing_key_path):
with open(args.signing_key_path, "w") as signing_key_file:
def generate_keys(self, config):
signing_key_path = config["signing_key_path"]
if not os.path.exists(signing_key_path):
with open(signing_key_path, "w") as signing_key_file:
syutil.crypto.signing_key.write_signing_keys(
signing_key_file,
(syutil.crypto.signing_key.generate_signing_key("auto"),),
)
else:
signing_keys = cls.read_file(args.signing_key_path, "signing_key")
signing_keys = self.read_file(signing_key_path, "signing_key")
if len(signing_keys.split("\n")[0].split()) == 1:
# handle keys in the old format.
key = syutil.crypto.signing_key.decode_signing_key_base64(
@ -120,28 +123,8 @@ class KeyConfig(Config):
"auto",
signing_keys.split("\n")[0]
)
with open(args.signing_key_path, "w") as signing_key_file:
with open(signing_key_path, "w") as signing_key_file:
syutil.crypto.signing_key.write_signing_keys(
signing_key_file,
(key,),
)
if not args.old_signing_key_path:
args.old_signing_key_path = base_key_name + ".old.signing.keys"
if not os.path.exists(args.old_signing_key_path):
with open(args.old_signing_key_path, "w"):
pass
if not args.perspectives_config_path:
args.perspectives_config_path = base_key_name + ".perspectives"
if not os.path.exists(args.perspectives_config_path):
with open(args.perspectives_config_path, "w") as perspectives_file:
perspectives_file.write(
'servers:\n'
' matrix.org:\n'
' verify_keys:\n'
' "ed25519:auto":\n'
' key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"\n'
)

View File

@ -22,22 +22,41 @@ import yaml
class LoggingConfig(Config):
def __init__(self, args):
super(LoggingConfig, self).__init__(args)
self.verbosity = int(args.verbose) if args.verbose else None
self.log_config = self.abspath(args.log_config)
self.log_file = self.abspath(args.log_file)
@classmethod
def read_config(self, config):
self.verbosity = config.get("verbose", 0)
self.log_config = self.abspath(config.get("log_config"))
self.log_file = self.abspath(config.get("log_file"))
def default_config(self, config_dir_path, server_name):
log_file = self.abspath("homeserver.log")
return """
# Logging verbosity level.
verbose: 0
# File to write logging to
log_file: "%(log_file)s"
# A yaml python logging config file
#log_config: "your.log.config.yaml"
""" % locals()
def read_arguments(self, args):
if args.verbose is not None:
self.verbosity = args.verbose
if args.log_config is not None:
self.log_config = args.log_config
if args.log_file is not None:
self.log_file = args.log_file
def add_arguments(cls, parser):
super(LoggingConfig, cls).add_arguments(parser)
logging_group = parser.add_argument_group("logging")
logging_group.add_argument(
'-v', '--verbose', dest="verbose", action='count',
help="The verbosity level."
)
logging_group.add_argument(
'-f', '--log-file', dest="log_file", default="homeserver.log",
'-f', '--log-file', dest="log_file",
help="File to log to."
)
logging_group.add_argument(

View File

@ -17,20 +17,17 @@ from ._base import Config
class MetricsConfig(Config):
def __init__(self, args):
super(MetricsConfig, self).__init__(args)
self.enable_metrics = args.enable_metrics
self.metrics_port = args.metrics_port
def read_config(self, config):
self.enable_metrics = config["enable_metrics"]
self.metrics_port = config["metrics_port"]
@classmethod
def add_arguments(cls, parser):
super(MetricsConfig, cls).add_arguments(parser)
metrics_group = parser.add_argument_group("metrics")
metrics_group.add_argument(
'--enable-metrics', dest="enable_metrics", action="store_true",
help="Enable collection and rendering of performance metrics"
)
metrics_group.add_argument(
'--metrics-port', metavar="PORT", type=int,
help="Separate port to accept metrics requests on (on localhost)"
)
def default_config(self, config_dir_path, server_name):
return """\
## Metrics ##
# Enable collection and rendering of performance metrics
enable_metrics: False
# Separate port to accept metrics requests on (on localhost)
metrics_port: ~
"""

View File

@ -17,56 +17,42 @@ from ._base import Config
class RatelimitConfig(Config):
def __init__(self, args):
super(RatelimitConfig, self).__init__(args)
self.rc_messages_per_second = args.rc_messages_per_second
self.rc_message_burst_count = args.rc_message_burst_count
def read_config(self, config):
self.rc_messages_per_second = config["rc_messages_per_second"]
self.rc_message_burst_count = config["rc_message_burst_count"]
self.federation_rc_window_size = args.federation_rc_window_size
self.federation_rc_sleep_limit = args.federation_rc_sleep_limit
self.federation_rc_sleep_delay = args.federation_rc_sleep_delay
self.federation_rc_reject_limit = args.federation_rc_reject_limit
self.federation_rc_concurrent = args.federation_rc_concurrent
self.federation_rc_window_size = config["federation_rc_window_size"]
self.federation_rc_sleep_limit = config["federation_rc_sleep_limit"]
self.federation_rc_sleep_delay = config["federation_rc_sleep_delay"]
self.federation_rc_reject_limit = config["federation_rc_reject_limit"]
self.federation_rc_concurrent = config["federation_rc_concurrent"]
@classmethod
def add_arguments(cls, parser):
super(RatelimitConfig, cls).add_arguments(parser)
rc_group = parser.add_argument_group("ratelimiting")
rc_group.add_argument(
"--rc-messages-per-second", type=float, default=0.2,
help="number of messages a client can send per second"
)
rc_group.add_argument(
"--rc-message-burst-count", type=float, default=10,
help="number of message a client can send before being throttled"
)
def default_config(self, config_dir_path, server_name):
return """\
## Ratelimiting ##
rc_group.add_argument(
"--federation-rc-window-size", type=int, default=10000,
help="The federation window size in milliseconds",
)
# Number of messages a client can send per second
rc_messages_per_second: 0.2
rc_group.add_argument(
"--federation-rc-sleep-limit", type=int, default=10,
help="The number of federation requests from a single server"
" in a window before the server will delay processing the"
" request.",
)
# Number of messages a client can send before being throttled
rc_message_burst_count: 10.0
rc_group.add_argument(
"--federation-rc-sleep-delay", type=int, default=500,
help="The duration in milliseconds to delay processing events from"
" remote servers by if they go over the sleep limit.",
)
# The federation window size in milliseconds
federation_rc_window_size: 1000
rc_group.add_argument(
"--federation-rc-reject-limit", type=int, default=50,
help="The maximum number of concurrent federation requests allowed"
" from a single server",
)
# The number of federation requests from a single server in a window
# before the server will delay processing the request.
federation_rc_sleep_limit: 10
rc_group.add_argument(
"--federation-rc-concurrent", type=int, default=3,
help="The number of federation requests to concurrently process"
" from a single server",
)
# The duration in milliseconds to delay processing events from
# remote servers by if they go over the sleep limit.
federation_rc_sleep_delay: 500
# The maximum number of concurrent federation requests allowed
# from a single server
federation_rc_reject_limit: 50
# The number of federation requests to concurrently process from a
# single server
federation_rc_concurrent: 3
"""

View File

@ -22,40 +22,21 @@ import distutils.util
class RegistrationConfig(Config):
def __init__(self, args):
super(RegistrationConfig, self).__init__(args)
# `args.enable_registration` may either be a bool or a string depending
# on if the option was given a value (e.g. --enable-registration=true
# would set `args.enable_registration` to "true" not True.)
def read_config(self, config):
self.disable_registration = not bool(
distutils.util.strtobool(str(args.enable_registration))
distutils.util.strtobool(str(config["enable_registration"]))
)
self.registration_shared_secret = args.registration_shared_secret
self.registration_shared_secret = config.get("registration_shared_secret")
@classmethod
def add_arguments(cls, parser):
super(RegistrationConfig, cls).add_arguments(parser)
reg_group = parser.add_argument_group("registration")
def default_config(self, config_dir, server_name):
registration_shared_secret = random_string_with_symbols(50)
return """\
## Registration ##
reg_group.add_argument(
"--enable-registration",
const=True,
default=False,
nargs='?',
help="Enable registration for new users.",
)
reg_group.add_argument(
"--registration-shared-secret", type=str,
help="If set, allows registration by anyone who also has the shared"
" secret, even if registration is otherwise disabled.",
)
# Enable registration for new users.
enable_registration: True
@classmethod
def generate_config(cls, args, config_dir_path):
super(RegistrationConfig, cls).generate_config(args, config_dir_path)
if args.enable_registration is None:
args.enable_registration = False
if args.registration_shared_secret is None:
args.registration_shared_secret = random_string_with_symbols(50)
# If set, allows registration by anyone who also has the shared
# secret, even if registration is otherwise disabled.
registration_shared_secret: "%(registration_shared_secret)s"
""" % locals()

View File

@ -17,11 +17,10 @@ from ._base import Config
class ContentRepositoryConfig(Config):
def __init__(self, args):
super(ContentRepositoryConfig, self).__init__(args)
self.max_upload_size = self.parse_size(args.max_upload_size)
self.max_image_pixels = self.parse_size(args.max_image_pixels)
self.media_store_path = self.ensure_directory(args.media_store_path)
def read_config(self, config):
self.max_upload_size = self.parse_size(config["max_upload_size"])
self.max_image_pixels = self.parse_size(config["max_image_pixels"])
self.media_store_path = self.ensure_directory(config["media_store_path"])
def parse_size(self, string):
sizes = {"K": 1024, "M": 1024 * 1024}
@ -32,17 +31,15 @@ class ContentRepositoryConfig(Config):
size = sizes[suffix]
return int(string) * size
@classmethod
def add_arguments(cls, parser):
super(ContentRepositoryConfig, cls).add_arguments(parser)
db_group = parser.add_argument_group("content_repository")
db_group.add_argument(
"--max-upload-size", default="10M"
)
db_group.add_argument(
"--media-store-path", default=cls.default_path("media_store")
)
db_group.add_argument(
"--max-image-pixels", default="32M",
help="Maximum number of pixels that will be thumbnailed"
)
def default_config(self, config_dir_path, server_name):
media_store = self.default_path("media_store")
return """
# Directory where uploaded images and attachments are stored.
media_store_path: "%(media_store)s"
# The largest allowed upload size in bytes
max_upload_size: "10M"
# Maximum number of pixels that will be thumbnailed
max_image_pixels: "32M"
""" % locals()

View File

@ -17,64 +17,85 @@ from ._base import Config
class ServerConfig(Config):
def __init__(self, args):
super(ServerConfig, self).__init__(args)
self.server_name = args.server_name
self.bind_port = args.bind_port
self.bind_host = args.bind_host
self.unsecure_port = args.unsecure_port
self.daemonize = args.daemonize
self.pid_file = self.abspath(args.pid_file)
self.web_client = args.web_client
self.manhole = args.manhole
self.soft_file_limit = args.soft_file_limit
if not args.content_addr:
host = args.server_name
def read_config(self, config):
self.server_name = config["server_name"]
self.bind_port = config["bind_port"]
self.bind_host = config["bind_host"]
self.unsecure_port = config["unsecure_port"]
self.manhole = config["manhole"]
self.pid_file = self.abspath(config.get("pid_file"))
self.web_client = config["web_client"]
self.soft_file_limit = config["soft_file_limit"]
# Attempt to guess the content_addr for the v0 content repository
content_addr = config.get("content_addr")
if not content_addr:
host = self.server_name
if ':' not in host:
host = "%s:%d" % (host, args.unsecure_port)
host = "%s:%d" % (host, self.unsecure_port)
else:
host = host.split(':')[0]
host = "%s:%d" % (host, args.unsecure_port)
args.content_addr = "http://%s" % (host,)
host = "%s:%d" % (host, self.unsecure_port)
content_addr = "http://%s" % (host,)
self.content_addr = args.content_addr
self.content_addr = content_addr
@classmethod
def add_arguments(cls, parser):
super(ServerConfig, cls).add_arguments(parser)
def default_config(self, config_dir_path, server_name):
if ":" in server_name:
bind_port = int(server_name.split(":")[1])
unsecure_port = bind_port - 400
else:
bind_port = 8448
unsecure_port = 8008
pid_file = self.abspath("homeserver.pid")
return """\
## Server ##
# The domain name of the server, with optional explicit port.
# This is used by remote servers to connect to this server,
# e.g. matrix.org, localhost:8080, etc.
server_name: "%(server_name)s"
# The port to listen for HTTPS requests on.
# For when matrix traffic is sent directly to synapse.
bind_port: %(bind_port)s
# The port to listen for HTTP requests on.
# For when matrix traffic passes through a load balancer that unwraps TLS.
unsecure_port: %(unsecure_port)s
# Local interface to listen on.
# The empty string will cause synapse to listen on all interfaces.
bind_host: ""
# When running as a daemon, the file to store the pid in
pid_file: %(pid_file)s
# Whether to serve a web client from the HTTP/HTTPS root resource.
web_client: True
# Set the soft limit on the number of file descriptors synapse can use
# Zero is used to indicate synapse should set the soft limit to the
# hard limit.
soft_file_limit: 0
# Turn on the twisted telnet manhole service on localhost on the given
# port.
manhole: ~
""" % locals()
def read_arguments(self, args):
if args.manhole is not None:
self.manhole = args.manhole
self.daemonize = args.daemonize
def add_arguments(self, parser):
server_group = parser.add_argument_group("server")
server_group.add_argument(
"-H", "--server-name", default="localhost",
help="The domain name of the server, with optional explicit port. "
"This is used by remote servers to connect to this server, "
"e.g. matrix.org, localhost:8080, etc."
)
server_group.add_argument("-p", "--bind-port", metavar="PORT",
type=int, help="https port to listen on",
default=8448)
server_group.add_argument("--unsecure-port", metavar="PORT",
type=int, help="http port to listen on",
default=8008)
server_group.add_argument("--bind-host", default="",
help="Local interface to listen on")
server_group.add_argument("-D", "--daemonize", action='store_true',
help="Daemonize the home server")
server_group.add_argument('--pid-file', default="homeserver.pid",
help="When running as a daemon, the file to"
" store the pid in")
server_group.add_argument('--web_client', default=True, type=bool,
help="Whether or not to serve a web client")
server_group.add_argument("--manhole", metavar="PORT", dest="manhole",
type=int,
help="Turn on the twisted telnet manhole"
" service on the given port.")
server_group.add_argument("--content-addr", default=None,
help="The host and scheme to use for the "
"content repository")
server_group.add_argument("--soft-file-limit", type=int, default=0,
help="Set the soft limit on the number of "
"file descriptors synapse can use. "
"Zero is used to indicate synapse "
"should set the soft limit to the hard"
"limit.")

View File

@ -23,37 +23,44 @@ GENERATE_DH_PARAMS = False
class TlsConfig(Config):
def __init__(self, args):
super(TlsConfig, self).__init__(args)
def read_config(self, config):
self.tls_certificate = self.read_tls_certificate(
args.tls_certificate_path
config.get("tls_certificate_path")
)
self.no_tls = args.no_tls
self.no_tls = config.get("no_tls", False)
if self.no_tls:
self.tls_private_key = None
else:
self.tls_private_key = self.read_tls_private_key(
args.tls_private_key_path
config.get("tls_private_key_path")
)
self.tls_dh_params_path = self.check_file(
args.tls_dh_params_path, "tls_dh_params"
config.get("tls_dh_params_path"), "tls_dh_params"
)
@classmethod
def add_arguments(cls, parser):
super(TlsConfig, cls).add_arguments(parser)
tls_group = parser.add_argument_group("tls")
tls_group.add_argument("--tls-certificate-path",
help="PEM encoded X509 certificate for TLS")
tls_group.add_argument("--tls-private-key-path",
help="PEM encoded private key for TLS")
tls_group.add_argument("--tls-dh-params-path",
help="PEM dh parameters for ephemeral keys")
tls_group.add_argument("--no-tls", action='store_true',
help="Don't bind to the https port.")
def default_config(self, config_dir_path, server_name):
base_key_name = os.path.join(config_dir_path, server_name)
tls_certificate_path = base_key_name + ".tls.crt"
tls_private_key_path = base_key_name + ".tls.key"
tls_dh_params_path = base_key_name + ".tls.dh"
return """\
# PEM encoded X509 certificate for TLS
tls_certificate_path: "%(tls_certificate_path)s"
# PEM encoded private key for TLS
tls_private_key_path: "%(tls_private_key_path)s"
# PEM dh parameters for ephemeral keys
tls_dh_params_path: "%(tls_dh_params_path)s"
# Don't bind to the https port
no_tls: False
""" % locals()
def read_tls_certificate(self, cert_path):
cert_pem = self.read_file(cert_path, "tls_certificate")
@ -63,22 +70,13 @@ class TlsConfig(Config):
private_key_pem = self.read_file(private_key_path, "tls_private_key")
return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem)
@classmethod
def generate_config(cls, args, config_dir_path):
super(TlsConfig, cls).generate_config(args, config_dir_path)
base_key_name = os.path.join(config_dir_path, args.server_name)
def generate_keys(self, config):
tls_certificate_path = config["tls_certificate_path"]
tls_private_key_path = config["tls_private_key_path"]
tls_dh_params_path = config["tls_dh_params_path"]
if args.tls_certificate_path is None:
args.tls_certificate_path = base_key_name + ".tls.crt"
if args.tls_private_key_path is None:
args.tls_private_key_path = base_key_name + ".tls.key"
if args.tls_dh_params_path is None:
args.tls_dh_params_path = base_key_name + ".tls.dh"
if not os.path.exists(args.tls_private_key_path):
with open(args.tls_private_key_path, "w") as private_key_file:
if not os.path.exists(tls_private_key_path):
with open(tls_private_key_path, "w") as private_key_file:
tls_private_key = crypto.PKey()
tls_private_key.generate_key(crypto.TYPE_RSA, 2048)
private_key_pem = crypto.dump_privatekey(
@ -86,17 +84,17 @@ class TlsConfig(Config):
)
private_key_file.write(private_key_pem)
else:
with open(args.tls_private_key_path) as private_key_file:
with open(tls_private_key_path) as private_key_file:
private_key_pem = private_key_file.read()
tls_private_key = crypto.load_privatekey(
crypto.FILETYPE_PEM, private_key_pem
)
if not os.path.exists(args.tls_certificate_path):
with open(args.tls_certificate_path, "w") as certifcate_file:
if not os.path.exists(tls_certificate_path):
with open(tls_certificate_path, "w") as certifcate_file:
cert = crypto.X509()
subject = cert.get_subject()
subject.CN = args.server_name
subject.CN = config["server_name"]
cert.set_serial_number(1000)
cert.gmtime_adj_notBefore(0)
@ -110,16 +108,16 @@ class TlsConfig(Config):
certifcate_file.write(cert_pem)
if not os.path.exists(args.tls_dh_params_path):
if not os.path.exists(tls_dh_params_path):
if GENERATE_DH_PARAMS:
subprocess.check_call([
"openssl", "dhparam",
"-outform", "PEM",
"-out", args.tls_dh_params_path,
"-out", tls_dh_params_path,
"2048"
])
else:
with open(args.tls_dh_params_path, "w") as dh_params_file:
with open(tls_dh_params_path, "w") as dh_params_file:
dh_params_file.write(
"2048-bit DH parameters taken from rfc3526\n"
"-----BEGIN DH PARAMETERS-----\n"

View File

@ -17,28 +17,21 @@ from ._base import Config
class VoipConfig(Config):
def __init__(self, args):
super(VoipConfig, self).__init__(args)
self.turn_uris = args.turn_uris
self.turn_shared_secret = args.turn_shared_secret
self.turn_user_lifetime = args.turn_user_lifetime
def read_config(self, config):
self.turn_uris = config.get("turn_uris", [])
self.turn_shared_secret = config["turn_shared_secret"]
self.turn_user_lifetime = self.parse_duration(config["turn_user_lifetime"])
@classmethod
def add_arguments(cls, parser):
super(VoipConfig, cls).add_arguments(parser)
group = parser.add_argument_group("voip")
group.add_argument(
"--turn-uris", type=str, default=None, action='append',
help="The public URIs of the TURN server to give to clients"
)
group.add_argument(
"--turn-shared-secret", type=str, default=None,
help=(
"The shared secret used to compute passwords for the TURN"
" server"
)
)
group.add_argument(
"--turn-user-lifetime", type=int, default=(1000 * 60 * 60),
help="How long generated TURN credentials last, in ms"
)
def default_config(self, config_dir_path, server_name):
return """\
## Turn ##
# The public URIs of the TURN server to give to clients
turn_uris: []
# The shared secret used to compute passwords for the TURN server
turn_shared_secret: "YOUR_SHARED_SECRET"
# How long generated TURN credentials last
turn_user_lifetime: "1h"
"""

View File

@ -59,7 +59,6 @@ class BaseHomeServer(object):
'config',
'clock',
'http_client',
'db_name',
'db_pool',
'persistence_service',
'replication_layer',