Mirror of https://git.anonymousland.org/anonymousland/synapse-product.git (synced 2024-10-01)
Manually generate the default config YAML and remove most of the command-line arguments for synapse, anticipating that people will use the YAML instead. Simplify implementing config options by not requiring the config classes to call up to the superclass.
parent 109c8aafd2, commit d624e2a638
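The shape of the change is the same in every config class touched below: the old pattern of an __init__(self, args) that had to call super(...).__init__(args), plus an add_arguments classmethod full of argparse boilerplate, is replaced by a read_config(self, config) that pulls values out of the already-parsed YAML and a default_config(...) that returns the YAML fragment to put into a generated config file. A minimal sketch of the new shape, with made-up option names rather than anything from this commit:

from textwrap import dedent

class ExampleConfig(object):
    """Hypothetical config section written in the style this commit introduces."""

    def read_config(self, config):
        # Pull values out of the already-parsed YAML dict.
        self.widget_count = config["widget_count"]
        self.widget_secret = config.get("widget_secret")

    def default_config(self, config_dir_path, server_name):
        # Return a YAML fragment; the base class dedents and concatenates
        # every section's fragment to build the generated config file.
        return """\
        # Number of widgets to enable
        widget_count: 10

        # Optional shared secret for widgets
        widget_secret: ~
        """

cfg = ExampleConfig()
cfg.read_config({"widget_count": 3})
print(cfg.widget_count)   # 3; widget_secret falls back to None via .get()
print(dedent(cfg.default_config(".", "example.com")))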
@@ -16,30 +16,29 @@ if [ $# -eq 1 ]; then
     fi
 fi

+export PYTHONPATH=$(readlink -f $(pwd))
+
+
+echo $PYTHONPATH
+
 for port in 8080 8081 8082; do
     echo "Starting server on port $port... "

     https_port=$((port + 400))
+    mkdir -p demo/$port
+    pushd demo/$port
+
+    rm $DIR/etc/$port.config
+    python -m synapse.app.homeserver \
+        --generate-config "localhost:$https_port" \
+        --config-path "$DIR/etc/$port.config" \

     python -m synapse.app.homeserver \
-        --generate-config \
-        --config-path "demo/etc/$port.config" \
-        -p "$https_port" \
-        --unsecure-port "$port" \
-        -H "localhost:$https_port" \
-        -f "$DIR/$port.log" \
-        -d "$DIR/$port.db" \
-        -D --pid-file "$DIR/$port.pid" \
-        --manhole $((port + 1000)) \
-        --tls-dh-params-path "demo/demo.tls.dh" \
-        --media-store-path "demo/media_store.$port" \
-        $PARAMS $SYNAPSE_PARAMS \
-        --enable-registration
-
-    python -m synapse.app.homeserver \
-        --config-path "demo/etc/$port.config" \
+        --config-path "$DIR/etc/$port.config" \
+        -D \
         -vv \

+    popd
 done

 cd "$CWD"
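The demo script now mirrors the intended operator workflow: generate a config (plus signing key and self-signed TLS material) for a server name first, then start synapse against that config. Outside the demo, the equivalent commands would look roughly like this; the file name homeserver.yaml is a placeholder, not something fixed by the commit:

# Generate a config file, keys and self-signed certificates for the server name.
python -m synapse.app.homeserver --generate-config my.server.name -c homeserver.yaml

# Review/customise the generated file, then start the server with it.
python -m synapse.app.homeserver -c homeserver.yaml -D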
@@ -394,7 +394,6 @@ def setup(config_options):
         config.server_name,
         domain_with_port=domain_with_port,
         upload_dir=os.path.abspath("uploads"),
-        db_name=config.database_path,
         db_config=config.database_config,
         tls_context_factory=tls_context_factory,
         config=config,
@@ -407,9 +406,8 @@ def setup(config_options):
         redirect_root_to_web_client=True,
     )

-    db_name = hs.get_db_name()
-
-    logger.info("Preparing database: %s...", db_name)
+    logger.info("Preparing database: %r...", config.database_config)

     try:
         db_conn = database_engine.module.connect(
@@ -431,7 +429,7 @@ def setup(config_options):
         )
         sys.exit(1)

-    logger.info("Database prepared in %s.", db_name)
+    logger.info("Database prepared in %r.", config.database_config)

     if config.manhole:
         f = twisted.manhole.telnet.ShellFactory()
@@ -14,9 +14,10 @@
 # limitations under the License.

 import argparse
-import sys
 import os
 import yaml
+import sys
+from textwrap import dedent


 class ConfigError(Exception):
@@ -24,8 +25,6 @@ class ConfigError(Exception):


 class Config(object):
-    def __init__(self, args):
-        pass

     @staticmethod
     def parse_size(string):
@@ -37,6 +36,22 @@ class Config(object):
             size = sizes[suffix]
         return int(string) * size

+    @staticmethod
+    def parse_duration(string):
+        second = 1000
+        hour = 60 * 60 * second
+        day = 24 * hour
+        week = 7 * day
+        year = 365 * day
+
+        sizes = {"s": second, "h": hour, "d": day, "w": week, "y": year}
+        size = 1
+        suffix = string[-1]
+        if suffix in sizes:
+            string = string[:-1]
+            size = sizes[suffix]
+        return int(string) * size
+
     @staticmethod
     def abspath(file_path):
         return os.path.abspath(file_path) if file_path else file_path
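The new parse_duration helper mirrors parse_size: a bare number is taken as milliseconds, and an s/h/d/w/y suffix scales it. A quick standalone check of the arithmetic, reproducing the method above outside the class:

def parse_duration(string):
    # Same logic as Config.parse_duration above: milliseconds, optionally
    # scaled by a trailing unit suffix.
    second = 1000
    hour = 60 * 60 * second
    day = 24 * hour
    week = 7 * day
    year = 365 * day
    sizes = {"s": second, "h": hour, "d": day, "w": week, "y": year}
    size = 1
    suffix = string[-1]
    if suffix in sizes:
        string = string[:-1]
        size = sizes[suffix]
    return int(string) * size

assert parse_duration("500") == 500                     # plain milliseconds
assert parse_duration("1s") == 1000
assert parse_duration("1h") == 60 * 60 * 1000           # used for turn_user_lifetime
assert parse_duration("1d") == 24 * 60 * 60 * 1000      # used for key_refresh_interval
assert parse_duration("1y") == 365 * 24 * 60 * 60 * 1000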
@@ -77,17 +92,6 @@ class Config(object):
         with open(file_path) as file_stream:
             return file_stream.read()

-    @classmethod
-    def read_yaml_file(cls, file_path, config_name):
-        cls.check_file(file_path, config_name)
-        with open(file_path) as file_stream:
-            try:
-                return yaml.load(file_stream)
-            except:
-                raise ConfigError(
-                    "Error parsing yaml in file %r" % (file_path,)
-                )
-
     @staticmethod
     def default_path(name):
         return os.path.abspath(os.path.join(os.path.curdir, name))
@@ -97,16 +101,33 @@ class Config(object):
         with open(file_path) as file_stream:
             return yaml.load(file_stream)

-    @classmethod
-    def add_arguments(cls, parser):
-        pass
+    def invoke_all(self, name, *args, **kargs):
+        results = []
+        for cls in type(self).mro():
+            if name in cls.__dict__:
+                results.append(getattr(cls, name)(self, *args, **kargs))
+        return results

-    @classmethod
-    def generate_config(cls, args, config_dir_path):
-        pass
+    def generate_config(self, config_dir_path, server_name):
+        default_config = "# vim:ft=yaml\n"
+
+        default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
+            "default_config", config_dir_path, server_name
+        ))
+
+        config = yaml.load(default_config)
+
+        if not os.path.exists(config_dir_path):
+            os.makedirs(config_dir_path)
+
+        self.invoke_all("generate_keys", config)
+
+        return default_config

     @classmethod
     def load_config(cls, description, argv, generate_section=None):
+        result = cls()
+
         config_parser = argparse.ArgumentParser(add_help=False)
         config_parser.add_argument(
             "-c", "--config-path",
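invoke_all is the piece that lets the config classes stop calling into the superclass: HomeServerConfig multiply inherits every config section, and invoke_all walks the MRO calling the named method on each class that defines one, collecting the results. A self-contained sketch of the pattern with toy classes (not Synapse code):

from textwrap import dedent

class Config(object):
    def invoke_all(self, name, *args, **kargs):
        # Call `name` on every class in the MRO that defines it directly,
        # so each mixin contributes without ever calling super().
        results = []
        for cls in type(self).mro():
            if name in cls.__dict__:
                results.append(getattr(cls, name)(self, *args, **kargs))
        return results

    def generate_config(self, config_dir_path, server_name):
        return "\n\n".join(
            dedent(conf)
            for conf in self.invoke_all("default_config", config_dir_path, server_name)
        )

class FooConfig(Config):
    def default_config(self, config_dir_path, server_name):
        return """\
        foo_enabled: True
        """

class BarConfig(Config):
    def default_config(self, config_dir_path, server_name):
        return """\
        bar_port: 1234
        """

class ToyHomeServerConfig(FooConfig, BarConfig):
    pass

# Both fragments are emitted, even though neither class calls super().
print(ToyHomeServerConfig().generate_config(".", "example.com"))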
@@ -115,66 +136,56 @@ class Config(object):
         )
         config_parser.add_argument(
             "--generate-config",
-            action="store_true",
-            help="Generate config file"
+            metavar="SERVER_NAME",
+            help="Generate a config file for the server name"
         )
         config_args, remaining_args = config_parser.parse_known_args(argv)

-        if config_args.generate_config:
-            if not config_args.config_path:
-                config_parser.error(
-                    "Must specify where to generate the config file"
-                )
-            config_dir_path = os.path.dirname(config_args.config_path)
-            if os.path.exists(config_args.config_path):
-                defaults = cls.read_config_file(config_args.config_path)
-            else:
-                defaults = {}
-        else:
-            if config_args.config_path:
-                defaults = cls.read_config_file(config_args.config_path)
-            else:
-                defaults = {}
-
-        parser = argparse.ArgumentParser(
-            parents=[config_parser],
-            description=description,
-            formatter_class=argparse.RawDescriptionHelpFormatter,
-        )
-        cls.add_arguments(parser)
-        parser.set_defaults(**defaults)
-
-        args = parser.parse_args(remaining_args)
+        if not config_args.config_path:
+            config_parser.error(
+                "Must supply a config file.\nA config file can be automatically"
+                " generated using \"--generate-config SERVER_NAME"
+                " -c CONFIG-FILE\""
+            )

         if config_args.generate_config:
+            server_name = config_args.generate_config
+            config_path = config_args.config_path
+            if os.path.exists(config_path):
+                print "Config file %r already exists. Not overwriting" % (
+                    config_args.config_path
+                )
+                sys.exit(0)
             config_dir_path = os.path.dirname(config_args.config_path)
             config_dir_path = os.path.abspath(config_dir_path)
-            if not os.path.exists(config_dir_path):
-                os.makedirs(config_dir_path)
-            cls.generate_config(args, config_dir_path)
-            config = {}
-            for key, value in vars(args).items():
-                if (key not in set(["config_path", "generate_config"])
-                        and value is not None):
-                    config[key] = value
-            with open(config_args.config_path, "w") as config_file:
-                # TODO(mark/paul) We might want to output emacs-style mode
-                # markers as well as vim-style mode markers into the file,
-                # to further hint to people this is a YAML file.
-                config_file.write("# vim:ft=yaml\n")
-                yaml.dump(config, config_file, default_flow_style=False)
+            with open(config_path, "wb") as config_file:
+                config_file.write(
+                    result.generate_config(config_dir_path, server_name)
+                )
             print (
                 "A config file has been generated in %s for server name"
                 " '%s' with corresponding SSL keys and self-signed"
                 " certificates. Please review this file and customise it to"
                 " your needs."
-            ) % (
-                config_args.config_path, config['server_name']
-            )
+            ) % (config_path, server_name)
             print (
                 "If this server name is incorrect, you will need to regenerate"
                 " the SSL certificates"
             )
             sys.exit(0)

-        return cls(args)
+        config = cls.read_config_file(config_args.config_path)
+        result.invoke_all("read_config", config)
+
+        parser = argparse.ArgumentParser(
+            parents=[config_parser],
+            description=description,
+            formatter_class=argparse.RawDescriptionHelpFormatter,
+        )
+
+        result.invoke_all("add_arguments", parser)
+        args = parser.parse_args(remaining_args)
+
+        result.invoke_all("read_arguments", args)
+
+        return result
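When not generating, load_config now builds the config object in three passes: read_config with the parsed YAML, add_arguments so sections can register their remaining command-line flags, and read_arguments so those flags can override what the YAML said. A toy sketch of that ordering, again outside the Synapse tree:

import argparse
import yaml

class Config(object):
    def invoke_all(self, name, *args, **kargs):
        results = []
        for cls in type(self).mro():
            if name in cls.__dict__:
                results.append(getattr(cls, name)(self, *args, **kargs))
        return results

class LoggingLikeConfig(Config):
    def read_config(self, config):
        # Defaults come from the YAML config file.
        self.verbosity = config.get("verbose", 0)

    def add_arguments(self, parser):
        parser.add_argument("-v", "--verbose", action="count")

    def read_arguments(self, args):
        # Command-line flags, if given, override the YAML value.
        if args.verbose is not None:
            self.verbosity = args.verbose

def load_config(config_yaml, argv):
    result = LoggingLikeConfig()
    result.invoke_all("read_config", yaml.safe_load(config_yaml))
    parser = argparse.ArgumentParser()
    result.invoke_all("add_arguments", parser)
    args = parser.parse_args(argv)
    result.invoke_all("read_arguments", args)
    return result

print(load_config("verbose: 1", []).verbosity)        # 1, from the YAML
print(load_config("verbose: 1", ["-vv"]).verbosity)   # 2, overridden on the CLI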
@@ -17,15 +17,11 @@ from ._base import Config

 class AppServiceConfig(Config):

-    def __init__(self, args):
-        super(AppServiceConfig, self).__init__(args)
-        self.app_service_config_files = args.app_service_config_files
+    def read_config(self, config):
+        self.app_service_config_files = config.get("app_service_config_files", [])

-    @classmethod
-    def add_arguments(cls, parser):
-        super(AppServiceConfig, cls).add_arguments(parser)
-        group = parser.add_argument_group("appservice")
-        group.add_argument(
-            "--app-service-config-files", type=str, nargs='+',
-            help="A list of application service config files to use."
-        )
+    def default_config(cls, config_dir_path, server_name):
+        return """\
+        # A list of application service config file to use
+        app_service_config_files: []
+        """
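Because generate_config runs every fragment through textwrap.dedent before concatenating, the indentation inside these triple-quoted strings disappears in the generated file; the appservice section above would come out as plain top-level YAML along the lines of:

# A list of application service config file to use
app_service_config_files: []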
@@ -17,40 +17,34 @@ from ._base import Config

 class CaptchaConfig(Config):

-    def __init__(self, args):
-        super(CaptchaConfig, self).__init__(args)
-        self.recaptcha_private_key = args.recaptcha_private_key
-        self.recaptcha_public_key = args.recaptcha_public_key
-        self.enable_registration_captcha = args.enable_registration_captcha
+    def read_config(self, config):
+        self.recaptcha_private_key = config["recaptcha_private_key"]
+        self.recaptcha_public_key = config["recaptcha_public_key"]
+        self.enable_registration_captcha = config["enable_registration_captcha"]
         self.captcha_ip_origin_is_x_forwarded = (
-            args.captcha_ip_origin_is_x_forwarded
+            config["captcha_ip_origin_is_x_forwarded"]
         )
-        self.captcha_bypass_secret = args.captcha_bypass_secret
+        self.captcha_bypass_secret = config.get("captcha_bypass_secret")

-    @classmethod
-    def add_arguments(cls, parser):
-        super(CaptchaConfig, cls).add_arguments(parser)
-        group = parser.add_argument_group("recaptcha")
-        group.add_argument(
-            "--recaptcha-public-key", type=str, default="YOUR_PUBLIC_KEY",
-            help="This Home Server's ReCAPTCHA public key."
-        )
-        group.add_argument(
-            "--recaptcha-private-key", type=str, default="YOUR_PRIVATE_KEY",
-            help="This Home Server's ReCAPTCHA private key."
-        )
-        group.add_argument(
-            "--enable-registration-captcha", type=bool, default=False,
-            help="Enables ReCaptcha checks when registering, preventing signup"
-            + " unless a captcha is answered. Requires a valid ReCaptcha "
-            + "public/private key."
-        )
-        group.add_argument(
-            "--captcha_ip_origin_is_x_forwarded", type=bool, default=False,
-            help="When checking captchas, use the X-Forwarded-For (XFF) header"
-            + " as the client IP and not the actual client IP."
-        )
-        group.add_argument(
-            "--captcha_bypass_secret", type=str,
-            help="A secret key used to bypass the captcha test entirely."
-        )
+    def default_config(self, config_dir_path, server_name):
+        return """\
+        ## Captcha ##
+
+        # This Home Server's ReCAPTCHA public key.
+        recaptcha_private_key: "YOUR_PUBLIC_KEY"
+
+        # This Home Server's ReCAPTCHA private key.
+        recaptcha_public_key: "YOUR_PRIVATE_KEY"
+
+        # Enables ReCaptcha checks when registering, preventing signup
+        # unless a captcha is answered. Requires a valid ReCaptcha
+        # public/private key.
+        enable_registration_captcha: False
+
+        # When checking captchas, use the X-Forwarded-For (XFF) header
+        # as the client IP and not the actual client IP.
+        captcha_ip_origin_is_x_forwarded: False
+
+        # A secret key used to bypass the captcha test entirely.
+        captcha_bypass_secret: ~
+        """
@@ -14,28 +14,21 @@
 # limitations under the License.

 from ._base import Config
-import os
-import yaml


 class DatabaseConfig(Config):
-    def __init__(self, args):
-        super(DatabaseConfig, self).__init__(args)
-        if args.database_path == ":memory:":
-            self.database_path = ":memory:"
-        else:
-            self.database_path = self.abspath(args.database_path)
-        self.event_cache_size = self.parse_size(args.event_cache_size)

-        if args.database_config:
-            with open(args.database_config) as f:
-                self.database_config = yaml.safe_load(f)
-        else:
+    def read_config(self, config):
+        self.event_cache_size = self.parse_size(
+            config.get("event_cache_size", "10K")
+        )
+
+        self.database_config = config.get("database")
+
+        if self.database_config is None:
             self.database_config = {
                 "name": "sqlite3",
-                "args": {
-                    "database": self.database_path,
-                },
+                "args": {},
             }

         name = self.database_config.get("name", None)
@@ -50,24 +43,36 @@ class DatabaseConfig(Config):
         else:
             raise RuntimeError("Unsupported database type '%s'" % (name,))

-    @classmethod
-    def add_arguments(cls, parser):
-        super(DatabaseConfig, cls).add_arguments(parser)
+        self.set_databasepath(config.get("database_path"))
+
+    def default_config(self, config, config_dir_path):
+        database_path = self.abspath("homeserver.db")
+        return """\
+        # Database configuration
+        database:
+            # The database engine name
+            name: "sqlite3"
+            # Arguments to pass to the engine
+            args:
+                # Path to the database
+                database: "%(database_path)s"
+        # Number of events to cache in memory.
+        event_cache_size: "10K"
+        """ % locals()
+
+    def read_arguments(self, args):
+        self.set_databasepath(args.database_path)
+
+    def set_databasepath(self, database_path):
+        if database_path != ":memory:":
+            database_path = self.abspath(database_path)
+        if self.database_config.get("name", None) == "sqlite3":
+            if database_path is not None:
+                self.database_config["database"] = database_path
+
+    def add_arguments(self, parser):
         db_group = parser.add_argument_group("database")
         db_group.add_argument(
-            "-d", "--database-path", default="homeserver.db",
-            metavar="SQLITE_DATABASE_PATH", help="The database name."
+            "-d", "--database-path", metavar="SQLITE_DATABASE_PATH",
+            help="The path to a sqlite database to use."
         )
-        db_group.add_argument(
-            "--event-cache-size", default="100K",
-            help="Number of events to cache in memory."
-        )
-        db_group.add_argument(
-            "--database-config", default=None,
-            help="Location of the database configuration file."
-        )
-
-    @classmethod
-    def generate_config(cls, args, config_dir_path):
-        super(DatabaseConfig, cls).generate_config(args, config_dir_path)
-        args.database_path = os.path.abspath(args.database_path)
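The database section is the one place where a command-line flag still wins: read_config takes the database block from the YAML (falling back to sqlite3 with empty args), and read_arguments then lets -d/--database-path rewrite the sqlite path via set_databasepath. After dedenting, the generated block should look roughly like this, with the path below standing in for the %(database_path)s substitution:

# Database configuration
database:
    # The database engine name
    name: "sqlite3"
    # Arguments to pass to the engine
    args:
        # Path to the database
        database: "/path/to/homeserver.db"
# Number of events to cache in memory.
event_cache_size: "10K"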
@@ -36,4 +36,6 @@ class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,

 if __name__ == '__main__':
     import sys
-    HomeServerConfig.load_config("Generate config", sys.argv[1:], "HomeServer")
+    sys.stdout.write(
+        HomeServerConfig().generate_config(sys.argv[1], sys.argv[2])
+    )
@@ -24,44 +24,53 @@ from syutil.base64util import decode_base64

 class KeyConfig(Config):

-    def __init__(self, args):
-        super(KeyConfig, self).__init__(args)
-        self.signing_key = self.read_signing_key(args.signing_key_path)
+    def read_config(self, config):
+        self.signing_key = self.read_signing_key(config["signing_key_path"])
         self.old_signing_keys = self.read_old_signing_keys(
-            args.old_signing_key_path
+            config["old_signing_keys"]
+        )
+        self.key_refresh_interval = self.parse_duration(
+            config["key_refresh_interval"]
         )
-        self.key_refresh_interval = args.key_refresh_interval
         self.perspectives = self.read_perspectives(
-            args.perspectives_config_path
+            config["perspectives"]
         )

-    @classmethod
-    def add_arguments(cls, parser):
-        super(KeyConfig, cls).add_arguments(parser)
-        key_group = parser.add_argument_group("keys")
-        key_group.add_argument("--signing-key-path",
-                               help="The signing key to sign messages with")
-        key_group.add_argument("--old-signing-key-path",
-                               help="The keys that the server used to sign"
-                                    " sign messages with but won't use"
-                                    " to sign new messages. E.g. it has"
-                                    " lost its private key")
-        key_group.add_argument("--key-refresh-interval",
-                               default=24 * 60 * 60 * 1000, # 1 Day
-                               help="How long a key response is valid for."
-                                    " Used to set the exipiry in /key/v2/."
-                                    " Controls how frequently servers will"
-                                    " query what keys are still valid")
-        key_group.add_argument("--perspectives-config-path",
-                               help="The trusted servers to download signing"
-                                    " keys from")
-
-    def read_perspectives(self, perspectives_config_path):
-        config = self.read_yaml_file(
-            perspectives_config_path, "perspectives_config_path"
-        )
+    def default_config(self, config_dir_path, server_name):
+        base_key_name = os.path.join(config_dir_path, server_name)
+        return """\
+        ## Signing Keys ##
+
+        # Path to the signing key to sign messages with
+        signing_key_path: "%(base_key_name)s.signing.key"
+
+        # The keys that the server used to sign messages with but won't use
+        # to sign new messages. E.g. it has lost its private key
+        old_signing_keys: {}
+        #  "ed25519:auto":
+        #    # Base64 encoded public key
+        #    key: "The public part of your old signing key."
+        #    # Millisecond POSIX timestamp when the key expired.
+        #    expired_ts: 123456789123
+
+        # How long key response published by this server is valid for.
+        # Used to set the valid_until_ts in /key/v2 APIs.
+        # Determines how quickly servers will query to check which keys
+        # are still valid.
+        key_refresh_interval: "1d" # 1 Day.
+
+        # The trusted servers to download signing keys from.
+        perspectives:
+            servers:
+                "matrix.org":
+                    verify_keys:
+                        "ed25519:auto":
+                            key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
+        """ % locals()
+
+    def read_perspectives(self, perspectives_config):
         servers = {}
-        for server_name, server_config in config["servers"].items():
+        for server_name, server_config in perspectives_config["servers"].items():
             for key_id, key_data in server_config["verify_keys"].items():
                 if is_signing_algorithm_supported(key_id):
                     key_base64 = key_data["key"]
@@ -82,37 +91,31 @@ class KeyConfig(Config):
                         " Try running again with --generate-config"
                     )

-    def read_old_signing_keys(self, old_signing_key_path):
-        old_signing_keys = self.read_file(
-            old_signing_key_path, "old_signing_key"
-        )
-        try:
-            return syutil.crypto.signing_key.read_old_signing_keys(
-                old_signing_keys.splitlines(True)
-            )
-        except Exception:
-            raise ConfigError(
-                "Error reading old signing keys."
-            )
+    def read_old_signing_keys(self, old_signing_keys):
+        keys = {}
+        for key_id, key_data in old_signing_keys.items():
+            if is_signing_algorithm_supported(key_id):
+                key_base64 = key_data["key"]
+                key_bytes = decode_base64(key_base64)
+                verify_key = decode_verify_key_bytes(key_id, key_bytes)
+                verify_key.expired_ts = key_data["expired_ts"]
+                keys[key_id] = verify_key
+            else:
+                raise ConfigError(
+                    "Unsupported signing algorithm for old key: %r" % (key_id,)
+                )
+        return keys

-    @classmethod
-    def generate_config(cls, args, config_dir_path):
-        super(KeyConfig, cls).generate_config(args, config_dir_path)
-        base_key_name = os.path.join(config_dir_path, args.server_name)
-
-        args.pid_file = os.path.abspath(args.pid_file)
-
-        if not args.signing_key_path:
-            args.signing_key_path = base_key_name + ".signing.key"
-
-        if not os.path.exists(args.signing_key_path):
-            with open(args.signing_key_path, "w") as signing_key_file:
+    def generate_keys(self, config):
+        signing_key_path = config["signing_key_path"]
+        if not os.path.exists(signing_key_path):
+            with open(signing_key_path, "w") as signing_key_file:
                 syutil.crypto.signing_key.write_signing_keys(
                     signing_key_file,
                     (syutil.crypto.signing_key.generate_signing_key("auto"),),
                 )
         else:
-            signing_keys = cls.read_file(args.signing_key_path, "signing_key")
+            signing_keys = self.read_file(signing_key_path, "signing_key")
             if len(signing_keys.split("\n")[0].split()) == 1:
                 # handle keys in the old format.
                 key = syutil.crypto.signing_key.decode_signing_key_base64(
@@ -120,28 +123,8 @@ class KeyConfig(Config):
                     "auto",
                     signing_keys.split("\n")[0]
                 )
-                with open(args.signing_key_path, "w") as signing_key_file:
+                with open(signing_key_path, "w") as signing_key_file:
                     syutil.crypto.signing_key.write_signing_keys(
                         signing_key_file,
                         (key,),
                     )
-
-        if not args.old_signing_key_path:
-            args.old_signing_key_path = base_key_name + ".old.signing.keys"
-
-        if not os.path.exists(args.old_signing_key_path):
-            with open(args.old_signing_key_path, "w"):
-                pass
-
-        if not args.perspectives_config_path:
-            args.perspectives_config_path = base_key_name + ".perspectives"
-
-        if not os.path.exists(args.perspectives_config_path):
-            with open(args.perspectives_config_path, "w") as perspectives_file:
-                perspectives_file.write(
-                    'servers:\n'
-                    '  matrix.org:\n'
-                    '    verify_keys:\n'
-                    '      "ed25519:auto":\n'
-                    '        key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"\n'
-                )
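The old per-file key handling (a .old.signing.keys file and a .perspectives file next to the signing key) is folded into the YAML itself. Once a key is actually retired, an old_signing_keys entry would follow the commented template above, something like the following (the key id and values here are illustrative, not taken from the commit):

old_signing_keys:
    "ed25519:a_rkCR":
        # Base64 encoded public key
        key: "fBl0UCB1tCTxk4hZoCRrHTN9wNplB9IjlWyf77cJPbE"
        # Millisecond POSIX timestamp when the key expired.
        expired_ts: 1423425600000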
@@ -22,22 +22,41 @@ import yaml


 class LoggingConfig(Config):
-    def __init__(self, args):
-        super(LoggingConfig, self).__init__(args)
-        self.verbosity = int(args.verbose) if args.verbose else None
-        self.log_config = self.abspath(args.log_config)
-        self.log_file = self.abspath(args.log_file)

-    @classmethod
+    def read_config(self, config):
+        self.verbosity = config.get("verbose", 0)
+        self.log_config = self.abspath(config.get("log_config"))
+        self.log_file = self.abspath(config.get("log_file"))
+
+    def default_config(self, config_dir_path, server_name):
+        log_file = self.abspath("homeserver.log")
+        return """
+        # Logging verbosity level.
+        verbose: 0
+
+        # File to write logging to
+        log_file: "%(log_file)s"
+
+        # A yaml python logging config file
+        #log_config: "your.log.config.yaml"
+        """ % locals()
+
+    def read_arguments(self, args):
+        if args.verbose is not None:
+            self.verbosity = args.verbose
+        if args.log_config is not None:
+            self.log_config = args.log_config
+        if args.log_file is not None:
+            self.log_file = args.log_file
+
     def add_arguments(cls, parser):
-        super(LoggingConfig, cls).add_arguments(parser)
         logging_group = parser.add_argument_group("logging")
         logging_group.add_argument(
             '-v', '--verbose', dest="verbose", action='count',
             help="The verbosity level."
         )
         logging_group.add_argument(
-            '-f', '--log-file', dest="log_file", default="homeserver.log",
+            '-f', '--log-file', dest="log_file",
             help="File to log to."
         )
         logging_group.add_argument(
@@ -17,20 +17,17 @@ from ._base import Config


 class MetricsConfig(Config):
-    def __init__(self, args):
-        super(MetricsConfig, self).__init__(args)
-        self.enable_metrics = args.enable_metrics
-        self.metrics_port = args.metrics_port
+    def read_config(self, config):
+        self.enable_metrics = config["enable_metrics"]
+        self.metrics_port = config["metrics_port"]

-    @classmethod
-    def add_arguments(cls, parser):
-        super(MetricsConfig, cls).add_arguments(parser)
-        metrics_group = parser.add_argument_group("metrics")
-        metrics_group.add_argument(
-            '--enable-metrics', dest="enable_metrics", action="store_true",
-            help="Enable collection and rendering of performance metrics"
-        )
-        metrics_group.add_argument(
-            '--metrics-port', metavar="PORT", type=int,
-            help="Separate port to accept metrics requests on (on localhost)"
-        )
+    def default_config(self, config_dir_path, server_name):
+        return """\
+        ## Metrics ###
+
+        # Enable collection and rendering of performance metrics
+        enable_metrics: False
+
+        # Separate port to accept metrics requests on (on localhost)
+        metrics_port: ~
+        """
@@ -17,56 +17,42 @@ from ._base import Config

 class RatelimitConfig(Config):

-    def __init__(self, args):
-        super(RatelimitConfig, self).__init__(args)
-        self.rc_messages_per_second = args.rc_messages_per_second
-        self.rc_message_burst_count = args.rc_message_burst_count
+    def read_config(self, config):
+        self.rc_messages_per_second = config["rc_messages_per_second"]
+        self.rc_message_burst_count = config["rc_message_burst_count"]

-        self.federation_rc_window_size = args.federation_rc_window_size
-        self.federation_rc_sleep_limit = args.federation_rc_sleep_limit
-        self.federation_rc_sleep_delay = args.federation_rc_sleep_delay
-        self.federation_rc_reject_limit = args.federation_rc_reject_limit
-        self.federation_rc_concurrent = args.federation_rc_concurrent
+        self.federation_rc_window_size = config["federation_rc_window_size"]
+        self.federation_rc_sleep_limit = config["federation_rc_sleep_limit"]
+        self.federation_rc_sleep_delay = config["federation_rc_sleep_delay"]
+        self.federation_rc_reject_limit = config["federation_rc_reject_limit"]
+        self.federation_rc_concurrent = config["federation_rc_concurrent"]

-    @classmethod
-    def add_arguments(cls, parser):
-        super(RatelimitConfig, cls).add_arguments(parser)
-        rc_group = parser.add_argument_group("ratelimiting")
-        rc_group.add_argument(
-            "--rc-messages-per-second", type=float, default=0.2,
-            help="number of messages a client can send per second"
-        )
-        rc_group.add_argument(
-            "--rc-message-burst-count", type=float, default=10,
-            help="number of message a client can send before being throttled"
-        )
-
-        rc_group.add_argument(
-            "--federation-rc-window-size", type=int, default=10000,
-            help="The federation window size in milliseconds",
-        )
-
-        rc_group.add_argument(
-            "--federation-rc-sleep-limit", type=int, default=10,
-            help="The number of federation requests from a single server"
-            " in a window before the server will delay processing the"
-            " request.",
-        )
-
-        rc_group.add_argument(
-            "--federation-rc-sleep-delay", type=int, default=500,
-            help="The duration in milliseconds to delay processing events from"
-            " remote servers by if they go over the sleep limit.",
-        )
-
-        rc_group.add_argument(
-            "--federation-rc-reject-limit", type=int, default=50,
-            help="The maximum number of concurrent federation requests allowed"
-            " from a single server",
-        )
-
-        rc_group.add_argument(
-            "--federation-rc-concurrent", type=int, default=3,
-            help="The number of federation requests to concurrently process"
-            " from a single server",
-        )
+    def default_config(self, config_dir_path, server_name):
+        return """\
+        ## Ratelimiting ##
+
+        # Number of messages a client can send per second
+        rc_messages_per_second: 0.2
+
+        # Number of message a client can send before being throttled
+        rc_message_burst_count: 10.0
+
+        # The federation window size in milliseconds
+        federation_rc_window_size: 1000
+
+        # The number of federation requests from a single server in a window
+        # before the server will delay processing the request.
+        federation_rc_sleep_limit: 10
+
+        # The duration in milliseconds to delay processing events from
+        # remote servers by if they go over the sleep limit.
+        federation_rc_sleep_delay: 500
+
+        # The maximum number of concurrent federation requests allowed
+        # from a single server
+        federation_rc_reject_limit: 50
+
+        # The number of federation requests to concurrently process from a
+        # single server
+        federation_rc_concurrent: 3
+        """
@@ -22,40 +22,21 @@ import distutils.util

 class RegistrationConfig(Config):

-    def __init__(self, args):
-        super(RegistrationConfig, self).__init__(args)
-
-        # `args.enable_registration` may either be a bool or a string depending
-        # on if the option was given a value (e.g. --enable-registration=true
-        # would set `args.enable_registration` to "true" not True.)
+    def read_config(self, config):
         self.disable_registration = not bool(
-            distutils.util.strtobool(str(args.enable_registration))
+            distutils.util.strtobool(str(config["enable_registration"]))
         )
-        self.registration_shared_secret = args.registration_shared_secret
+        self.registration_shared_secret = config.get("registration_shared_secret")

-    @classmethod
-    def add_arguments(cls, parser):
-        super(RegistrationConfig, cls).add_arguments(parser)
-        reg_group = parser.add_argument_group("registration")
-
-        reg_group.add_argument(
-            "--enable-registration",
-            const=True,
-            default=False,
-            nargs='?',
-            help="Enable registration for new users.",
-        )
-        reg_group.add_argument(
-            "--registration-shared-secret", type=str,
-            help="If set, allows registration by anyone who also has the shared"
-            " secret, even if registration is otherwise disabled.",
-        )
-
-    @classmethod
-    def generate_config(cls, args, config_dir_path):
-        super(RegistrationConfig, cls).generate_config(args, config_dir_path)
-        if args.enable_registration is None:
-            args.enable_registration = False
-
-        if args.registration_shared_secret is None:
-            args.registration_shared_secret = random_string_with_symbols(50)
+    def default_config(self, config_dir, server_name):
+        registration_shared_secret = random_string_with_symbols(50)
+        return """\
+        ## Registration ##
+
+        # Enable registration for new users.
+        enable_registration: True
+
+        # If set, allows registration by anyone who also has the shared
+        # secret, even if registration is otherwise disabled.
+        registration_shared_secret: "%(registration_shared_secret)s"
+        """ % locals()
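enable_registration is parsed with distutils.util.strtobool so that both YAML booleans and string spellings behave the same way, and disable_registration is then just the negation. For reference, this is standard-library behaviour of the era, not something added by the commit:

import distutils.util

# strtobool accepts the usual spellings and returns 0 or 1.
for value in (True, False, "True", "false", "yes", "no", "1", "0"):
    parsed = bool(distutils.util.strtobool(str(value)))
    print("%r -> disable_registration=%r" % (value, not parsed))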
@@ -17,11 +17,10 @@ from ._base import Config


 class ContentRepositoryConfig(Config):
-    def __init__(self, args):
-        super(ContentRepositoryConfig, self).__init__(args)
-        self.max_upload_size = self.parse_size(args.max_upload_size)
-        self.max_image_pixels = self.parse_size(args.max_image_pixels)
-        self.media_store_path = self.ensure_directory(args.media_store_path)
+    def read_config(self, config):
+        self.max_upload_size = self.parse_size(config["max_upload_size"])
+        self.max_image_pixels = self.parse_size(config["max_image_pixels"])
+        self.media_store_path = self.ensure_directory(config["media_store_path"])

     def parse_size(self, string):
         sizes = {"K": 1024, "M": 1024 * 1024}
@@ -32,17 +31,15 @@ class ContentRepositoryConfig(Config):
             size = sizes[suffix]
         return int(string) * size

-    @classmethod
-    def add_arguments(cls, parser):
-        super(ContentRepositoryConfig, cls).add_arguments(parser)
-        db_group = parser.add_argument_group("content_repository")
-        db_group.add_argument(
-            "--max-upload-size", default="10M"
-        )
-        db_group.add_argument(
-            "--media-store-path", default=cls.default_path("media_store")
-        )
-        db_group.add_argument(
-            "--max-image-pixels", default="32M",
-            help="Maximum number of pixels that will be thumbnailed"
-        )
+    def default_config(self, config_dir_path, server_name):
+        media_store = self.default_path("media_store")
+        return """
+        # Directory where uploaded images and attachments are stored.
+        media_store_path: "%(media_store)s"
+
+        # The largest allowed upload size in bytes
+        max_upload_size: "10M"
+
+        # Maximum number of pixels that will be thumbnailed
+        max_image_pixels: "32M"
+        """ % locals()
@@ -17,64 +17,85 @@ from ._base import Config


 class ServerConfig(Config):
-    def __init__(self, args):
-        super(ServerConfig, self).__init__(args)
-        self.server_name = args.server_name
-        self.bind_port = args.bind_port
-        self.bind_host = args.bind_host
-        self.unsecure_port = args.unsecure_port
-        self.daemonize = args.daemonize
-        self.pid_file = self.abspath(args.pid_file)
-        self.web_client = args.web_client
-        self.manhole = args.manhole
-        self.soft_file_limit = args.soft_file_limit
-
-        if not args.content_addr:
-            host = args.server_name
+    def read_config(self, config):
+        self.server_name = config["server_name"]
+        self.bind_port = config["bind_port"]
+        self.bind_host = config["bind_host"]
+        self.unsecure_port = config["unsecure_port"]
+        self.manhole = config["manhole"]
+        self.pid_file = self.abspath(config.get("pid_file"))
+        self.web_client = config["web_client"]
+        self.soft_file_limit = config["soft_file_limit"]
+
+        # Attempt to guess the content_addr for the v0 content repostitory
+        content_addr = config.get("content_addr")
+        if not content_addr:
+            host = self.server_name
             if ':' not in host:
-                host = "%s:%d" % (host, args.unsecure_port)
+                host = "%s:%d" % (host, self.unsecure_port)
             else:
                 host = host.split(':')[0]
-                host = "%s:%d" % (host, args.unsecure_port)
-            args.content_addr = "http://%s" % (host,)
+                host = "%s:%d" % (host, self.unsecure_port)
+            content_addr = "http://%s" % (host,)

-        self.content_addr = args.content_addr
+        self.content_addr = content_addr

-    @classmethod
-    def add_arguments(cls, parser):
-        super(ServerConfig, cls).add_arguments(parser)
+    def default_config(self, config_dir_path, server_name):
+        if ":" in server_name:
+            bind_port = int(server_name.split(":")[1])
+            unsecure_port = bind_port - 400
+        else:
+            bind_port = 8448
+            unsecure_port = 8008
+
+        pid_file = self.abspath("homeserver.pid")
+        return """\
+        ## Server ##
+
+        # The domain name of the server, with optional explicit port.
+        # This is used by remote servers to connect to this server,
+        # e.g. matrix.org, localhost:8080, etc.
+        server_name: "%(server_name)s"
+
+        # The port to listen for HTTPS requests on.
+        # For when matrix traffic is sent directly to synapse.
+        bind_port: %(bind_port)s
+
+        # The port to listen for HTTP requests on.
+        # For when matrix traffic passes through loadbalancer that unwraps TLS.
+        unsecure_port: %(unsecure_port)s
+
+        # Local interface to listen on.
+        # The empty string will cause synapse to listen on all interfaces.
+        bind_host: ""
+
+        # When running as a daemon, the file to store the pid in
+        pid_file: %(pid_file)s
+
+        # Whether to serve a web client from the HTTP/HTTPS root resource.
+        web_client: True
+
+        # Set the soft limit on the number of file descriptors synapse can use
+        # Zero is used to indicate synapse should set the soft limit to the
+        # hard limit.
+        soft_file_limit: 0
+
+        # Turn on the twisted telnet manhole service on localhost on the given
+        # port.
+        manhole: ~
+        """ % locals()
+
+    def read_arguments(self, args):
+        if args.manhole is not None:
+            self.manhole = args.manhole
+        self.daemonize = args.daemonize
+
+    def add_arguments(self, parser):
         server_group = parser.add_argument_group("server")
-        server_group.add_argument(
-            "-H", "--server-name", default="localhost",
-            help="The domain name of the server, with optional explicit port. "
-                 "This is used by remote servers to connect to this server, "
-                 "e.g. matrix.org, localhost:8080, etc."
-        )
-        server_group.add_argument("-p", "--bind-port", metavar="PORT",
-                                  type=int, help="https port to listen on",
-                                  default=8448)
-        server_group.add_argument("--unsecure-port", metavar="PORT",
-                                  type=int, help="http port to listen on",
-                                  default=8008)
-        server_group.add_argument("--bind-host", default="",
-                                  help="Local interface to listen on")
         server_group.add_argument("-D", "--daemonize", action='store_true',
                                   help="Daemonize the home server")
-        server_group.add_argument('--pid-file', default="homeserver.pid",
-                                  help="When running as a daemon, the file to"
-                                       " store the pid in")
-        server_group.add_argument('--web_client', default=True, type=bool,
-                                  help="Whether or not to serve a web client")
         server_group.add_argument("--manhole", metavar="PORT", dest="manhole",
                                   type=int,
                                   help="Turn on the twisted telnet manhole"
                                        " service on the given port.")
-        server_group.add_argument("--content-addr", default=None,
-                                  help="The host and scheme to use for the "
-                                       "content repository")
-        server_group.add_argument("--soft-file-limit", type=int, default=0,
-                                  help="Set the soft limit on the number of "
-                                       "file descriptors synapse can use. "
-                                       "Zero is used to indicate synapse "
-                                       "should set the soft limit to the hard"
-                                       "limit.")
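default_config derives the two listener ports from the server name handed to --generate-config: an explicit port becomes bind_port, with unsecure_port 400 below it (matching the demo script's https_port=$((port + 400))), otherwise the matrix defaults 8448/8008 are used. A quick check of that rule, reimplemented standalone:

def ports_for(server_name):
    # Same derivation as ServerConfig.default_config above.
    if ":" in server_name:
        bind_port = int(server_name.split(":")[1])
        unsecure_port = bind_port - 400
    else:
        bind_port = 8448
        unsecure_port = 8008
    return bind_port, unsecure_port

assert ports_for("localhost:8480") == (8480, 8080)      # demo/start.sh port 8080
assert ports_for("matrix.example.com") == (8448, 8008)  # matrix defaults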
@@ -23,37 +23,44 @@ GENERATE_DH_PARAMS = False


 class TlsConfig(Config):
-    def __init__(self, args):
-        super(TlsConfig, self).__init__(args)
+    def read_config(self, config):
         self.tls_certificate = self.read_tls_certificate(
-            args.tls_certificate_path
+            config.get("tls_certificate_path")
         )

-        self.no_tls = args.no_tls
+        self.no_tls = config.get("no_tls", False)

         if self.no_tls:
             self.tls_private_key = None
         else:
             self.tls_private_key = self.read_tls_private_key(
-                args.tls_private_key_path
+                config.get("tls_private_key_path")
             )

         self.tls_dh_params_path = self.check_file(
-            args.tls_dh_params_path, "tls_dh_params"
+            config.get("tls_dh_params_path"), "tls_dh_params"
         )

-    @classmethod
-    def add_arguments(cls, parser):
-        super(TlsConfig, cls).add_arguments(parser)
-        tls_group = parser.add_argument_group("tls")
-        tls_group.add_argument("--tls-certificate-path",
-                               help="PEM encoded X509 certificate for TLS")
-        tls_group.add_argument("--tls-private-key-path",
-                               help="PEM encoded private key for TLS")
-        tls_group.add_argument("--tls-dh-params-path",
-                               help="PEM dh parameters for ephemeral keys")
-        tls_group.add_argument("--no-tls", action='store_true',
-                               help="Don't bind to the https port.")
+    def default_config(self, config_dir_path, server_name):
+        base_key_name = os.path.join(config_dir_path, server_name)
+
+        tls_certificate_path = base_key_name + ".tls.crt"
+        tls_private_key_path = base_key_name + ".tls.key"
+        tls_dh_params_path = base_key_name + ".tls.dh"
+
+        return """\
+        # PEM encoded X509 certificate for TLS
+        tls_certificate_path: "%(tls_certificate_path)s"
+
+        # PEM encoded private key for TLS
+        tls_private_key_path: "%(tls_private_key_path)s"
+
+        # PEM dh parameters for ephemeral keys
+        tls_dh_params_path: "%(tls_dh_params_path)s"
+
+        # Don't bind to the https port
+        no_tls: False
+        """ % locals()

     def read_tls_certificate(self, cert_path):
         cert_pem = self.read_file(cert_path, "tls_certificate")
@@ -63,22 +70,13 @@ class TlsConfig(Config):
         private_key_pem = self.read_file(private_key_path, "tls_private_key")
         return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem)

-    @classmethod
-    def generate_config(cls, args, config_dir_path):
-        super(TlsConfig, cls).generate_config(args, config_dir_path)
-        base_key_name = os.path.join(config_dir_path, args.server_name)
-
-        if args.tls_certificate_path is None:
-            args.tls_certificate_path = base_key_name + ".tls.crt"
-
-        if args.tls_private_key_path is None:
-            args.tls_private_key_path = base_key_name + ".tls.key"
-
-        if args.tls_dh_params_path is None:
-            args.tls_dh_params_path = base_key_name + ".tls.dh"
-
-        if not os.path.exists(args.tls_private_key_path):
-            with open(args.tls_private_key_path, "w") as private_key_file:
+    def generate_keys(self, config):
+        tls_certificate_path = config["tls_certificate_path"]
+        tls_private_key_path = config["tls_private_key_path"]
+        tls_dh_params_path = config["tls_dh_params_path"]
+
+        if not os.path.exists(tls_private_key_path):
+            with open(tls_private_key_path, "w") as private_key_file:
                 tls_private_key = crypto.PKey()
                 tls_private_key.generate_key(crypto.TYPE_RSA, 2048)
                 private_key_pem = crypto.dump_privatekey(
@@ -86,17 +84,17 @@ class TlsConfig(Config):
                 )
                 private_key_file.write(private_key_pem)
         else:
-            with open(args.tls_private_key_path) as private_key_file:
+            with open(tls_private_key_path) as private_key_file:
                 private_key_pem = private_key_file.read()
                 tls_private_key = crypto.load_privatekey(
                     crypto.FILETYPE_PEM, private_key_pem
                 )

-        if not os.path.exists(args.tls_certificate_path):
-            with open(args.tls_certificate_path, "w") as certifcate_file:
+        if not os.path.exists(tls_certificate_path):
+            with open(tls_certificate_path, "w") as certifcate_file:
                 cert = crypto.X509()
                 subject = cert.get_subject()
-                subject.CN = args.server_name
+                subject.CN = config["server_name"]

                 cert.set_serial_number(1000)
                 cert.gmtime_adj_notBefore(0)
@@ -110,16 +108,16 @@ class TlsConfig(Config):

                 certifcate_file.write(cert_pem)

-        if not os.path.exists(args.tls_dh_params_path):
+        if not os.path.exists(tls_dh_params_path):
             if GENERATE_DH_PARAMS:
                 subprocess.check_call([
                     "openssl", "dhparam",
                     "-outform", "PEM",
-                    "-out", args.tls_dh_params_path,
+                    "-out", tls_dh_params_path,
                     "2048"
                 ])
             else:
-                with open(args.tls_dh_params_path, "w") as dh_params_file:
+                with open(tls_dh_params_path, "w") as dh_params_file:
                     dh_params_file.write(
                         "2048-bit DH parameters taken from rfc3526\n"
                         "-----BEGIN DH PARAMETERS-----\n"
@@ -17,28 +17,21 @@ from ._base import Config

 class VoipConfig(Config):

-    def __init__(self, args):
-        super(VoipConfig, self).__init__(args)
-        self.turn_uris = args.turn_uris
-        self.turn_shared_secret = args.turn_shared_secret
-        self.turn_user_lifetime = args.turn_user_lifetime
+    def read_config(self, config):
+        self.turn_uris = config.get("turn_uris", [])
+        self.turn_shared_secret = config["turn_shared_secret"]
+        self.turn_user_lifetime = self.parse_duration(config["turn_user_lifetime"])

-    @classmethod
-    def add_arguments(cls, parser):
-        super(VoipConfig, cls).add_arguments(parser)
-        group = parser.add_argument_group("voip")
-        group.add_argument(
-            "--turn-uris", type=str, default=None, action='append',
-            help="The public URIs of the TURN server to give to clients"
-        )
-        group.add_argument(
-            "--turn-shared-secret", type=str, default=None,
-            help=(
-                "The shared secret used to compute passwords for the TURN"
-                " server"
-            )
-        )
-        group.add_argument(
-            "--turn-user-lifetime", type=int, default=(1000 * 60 * 60),
-            help="How long generated TURN credentials last, in ms"
-        )
+    def default_config(self, config_dir_path, server_name):
+        return """\
+        ## Turn ##
+
+        # The public URIs of the TURN server to give to clients
+        turn_uris: []
+
+        # The shared secret used to compute passwords for the TURN server
+        turn_shared_secret: "YOUR_SHARED_SECRET"
+
+        # How long generated TURN credentials last
+        turn_user_lifetime: "1h"
+        """
@@ -59,7 +59,6 @@ class BaseHomeServer(object):
         'config',
         'clock',
         'http_client',
-        'db_name',
         'db_pool',
         'persistence_service',
         'replication_layer',