#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
from synapse.rest import ClientRestResource

sys.dont_write_bytecode = True
from synapse.python_dependencies import (
    check_requirements, DEPENDENCY_LINKS, MissingRequirementError
)

if __name__ == '__main__':
    try:
        check_requirements()
    except MissingRequirementError as e:
        message = "\n".join([
            "Missing Requirement: %s" % (e.message,),
            "To install run:",
            "    pip install --upgrade --force \"%s\"" % (e.dependency,),
            "",
        ])
        sys.stderr.writelines(message)
        sys.exit(1)

from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
from synapse.storage import are_all_users_on_domain
from synapse.storage.prepare_database import UpgradeDatabaseException

from synapse.server import HomeServer

from twisted.conch.manhole import ColoredManhole
from twisted.conch.insults import insults
from twisted.conch import manhole_ssh
from twisted.cred import checkers, portal

from twisted.internet import reactor, task, defer
from twisted.application import service
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.static import File
from twisted.web.server import Site, GzipEncoderFactory, Request
from synapse.http.server import RootRedirect
from synapse.rest.media.v0.content_repository import ContentRepoResource
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
from synapse.rest.key.v1.server_key_resource import LocalKey
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.api.urls import (
    FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
    SERVER_KEY_PREFIX, MEDIA_PREFIX, STATIC_PREFIX,
    SERVER_KEY_V2_PREFIX,
)
from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
from synapse.util.logcontext import LoggingContext
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
from synapse.federation.transport.server import TransportLayerServer

from synapse import events

from daemonize import Daemonize

import synapse

import contextlib
import logging
import os
import re
import resource
import subprocess
import time


logger = logging.getLogger("synapse.app.homeserver")

ACCESS_TOKEN_RE = re.compile(r'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$')
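# Illustrative example (not part of the original source): applying
#   ACCESS_TOKEN_RE.sub(r'\1<redacted>\3', uri)
# to "/_matrix/client/r0/sync?access_token=secret&since=s1" yields
# "/_matrix/client/r0/sync?access_token=<redacted>&since=s1". The
# "(_|%5[Ff])" alternative also catches a URL-encoded underscore,
# i.e. "access%5Ftoken=".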


def gz_wrap(r):
    return EncodingResourceWrapper(r, [GzipEncoderFactory()])


def build_resource_for_web_client(hs):
    webclient_path = hs.get_config().web_client_location
    if not webclient_path:
        try:
            import syweb
        except ImportError:
            quit_with_error(
                "Could not find a webclient.\n\n"
                "Please either install the matrix-angular-sdk or configure\n"
                "the location of the source to serve via the configuration\n"
                "option `web_client_location`\n\n"
                "To install the `matrix-angular-sdk` via pip, run:\n\n"
                "    pip install '%(dep)s'\n"
                "\n"
                "You can also disable hosting of the webclient via the\n"
                "configuration option `web_client`\n"
                % {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
            )
        syweb_path = os.path.dirname(syweb.__file__)
        webclient_path = os.path.join(syweb_path, "webclient")
    # GZip is disabled here due to
    # https://twistedmatrix.com/trac/ticket/7678
    # (It can stay enabled for the API resources: they call
    # write() with the whole body and then finish() straight
    # after, and so do not trigger the bug.)
    # GzipFile was removed in commit 184ba09
    # return GzipFile(webclient_path)  # TODO configurable?
    return File(webclient_path)  # TODO configurable?


class SynapseHomeServer(HomeServer):
    def _listener_http(self, config, listener_config):
        port = listener_config["port"]
        bind_address = listener_config.get("bind_address", "")
        tls = listener_config.get("tls", False)
        site_tag = listener_config.get("tag", port)

        if tls and config.no_tls:
            return

        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                if name == "client":
                    client_resource = ClientRestResource(self)
                    if res["compress"]:
                        client_resource = gz_wrap(client_resource)

                    resources.update({
                        "/_matrix/client/api/v1": client_resource,
                        "/_matrix/client/r0": client_resource,
                        "/_matrix/client/unstable": client_resource,
                        "/_matrix/client/v2_alpha": client_resource,
                        "/_matrix/client/versions": client_resource,
                    })

                if name == "federation":
                    resources.update({
                        FEDERATION_PREFIX: TransportLayerServer(self),
                    })

                if name in ["static", "client"]:
                    resources.update({
                        STATIC_PREFIX: File(
                            os.path.join(os.path.dirname(synapse.__file__), "static")
                        ),
                    })

                if name in ["media", "federation", "client"]:
                    resources.update({
                        MEDIA_PREFIX: MediaRepositoryResource(self),
                        CONTENT_REPO_PREFIX: ContentRepoResource(
                            self, self.config.uploads_path, self.auth, self.content_addr
                        ),
                    })

                if name in ["keys", "federation"]:
                    resources.update({
                        SERVER_KEY_PREFIX: LocalKey(self),
                        SERVER_KEY_V2_PREFIX: KeyApiV2Resource(self),
                    })

                if name == "webclient":
                    resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self)

                if name == "metrics" and self.get_config().enable_metrics:
                    resources[METRICS_PREFIX] = MetricsResource(self)

        root_resource = create_resource_tree(resources)
        if tls:
            reactor.listenSSL(
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                ),
                self.tls_server_context_factory,
                interface=bind_address
            )
        else:
            reactor.listenTCP(
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                ),
                interface=bind_address
            )
        logger.info("Synapse now listening on port %d", port)

    def start_listening(self):
        config = self.get_config()

        for listener in config.listeners:
            if listener["type"] == "http":
                self._listener_http(config, listener)
            elif listener["type"] == "manhole":
                checker = checkers.InMemoryUsernamePasswordDatabaseDontUse(
                    matrix="rabbithole"
                )

                rlm = manhole_ssh.TerminalRealm()
                rlm.chainedProtocolFactory = lambda: insults.ServerProtocol(
                    ColoredManhole,
                    {
                        "__name__": "__console__",
                        "hs": self,
                    }
                )

                f = manhole_ssh.ConchFactory(portal.Portal(rlm, [checker]))

                reactor.listenTCP(
                    listener["port"],
                    f,
                    interface=listener.get("bind_address", '127.0.0.1')
                )
            else:
                logger.warn("Unrecognized listener type: %s", listener["type"])

    def run_startup_checks(self, db_conn, database_engine):
        all_users_native = are_all_users_on_domain(
            db_conn.cursor(), database_engine, self.hostname
        )
        if not all_users_native:
            quit_with_error(
                "Found users in database not native to %s!\n"
                "You cannot change a synapse server_name after it has been configured"
                % (self.hostname,)
            )

        try:
            database_engine.check_database(db_conn.cursor())
        except IncorrectDatabaseSetup as e:
            quit_with_error(e.message)

    def get_db_conn(self):
        # Any param beginning with cp_ is a parameter for adbapi, and should
        # not be passed to the database engine.
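        # For example (hypothetical config), an "args" dict of
        #   {"database": "homeserver.db", "cp_min": 5, "cp_max": 10}
        # yields db_params of {"database": "homeserver.db"}; the cp_*
        # connection-pool settings are consumed by twisted's adbapi layer,
        # not by the DB-API driver itself.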
        db_params = {
            k: v for k, v in self.db_config.get("args", {}).items()
            if not k.startswith("cp_")
        }
        db_conn = self.database_engine.module.connect(**db_params)

        self.database_engine.on_new_connection(db_conn)
        return db_conn


def quit_with_error(error_string):
    message_lines = error_string.split("\n")
    line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
    sys.stderr.write("*" * line_length + '\n')
    for line in message_lines:
        sys.stderr.write(" %s\n" % (line.rstrip(),))
    sys.stderr.write("*" * line_length + '\n')
    sys.exit(1)


def get_version_string():
    try:
        null = open(os.devnull, 'w')
        cwd = os.path.dirname(os.path.abspath(__file__))
        try:
            git_branch = subprocess.check_output(
                ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
                stderr=null,
                cwd=cwd,
            ).strip()
            git_branch = "b=" + git_branch
        except subprocess.CalledProcessError:
            git_branch = ""

        try:
            git_tag = subprocess.check_output(
                ['git', 'describe', '--exact-match'],
                stderr=null,
                cwd=cwd,
            ).strip()
            git_tag = "t=" + git_tag
        except subprocess.CalledProcessError:
            git_tag = ""

        try:
            git_commit = subprocess.check_output(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stderr=null,
                cwd=cwd,
            ).strip()
        except subprocess.CalledProcessError:
            git_commit = ""

        try:
            dirty_string = "-this_is_a_dirty_checkout"
            is_dirty = subprocess.check_output(
                ['git', 'describe', '--dirty=' + dirty_string],
                stderr=null,
                cwd=cwd,
            ).strip().endswith(dirty_string)

            git_dirty = "dirty" if is_dirty else ""
        except subprocess.CalledProcessError:
            git_dirty = ""

        if git_branch or git_tag or git_commit or git_dirty:
            git_version = ",".join(
                s for s in
                (git_branch, git_tag, git_commit, git_dirty,)
                if s
            )

            return (
                "Synapse/%s (%s)" % (
                    synapse.__version__, git_version,
                )
            ).encode("ascii")
    except Exception as e:
        logger.info("Failed to check for git repository: %s", e)

    return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")


def change_resource_limit(soft_file_no):
    try:
        soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)

        if not soft_file_no:
            soft_file_no = hard

        resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard))
        logger.info("Set file limit to: %d", soft_file_no)

        resource.setrlimit(
            resource.RLIMIT_CORE, (resource.RLIM_INFINITY, resource.RLIM_INFINITY)
        )
    except (ValueError, resource.error) as e:
        logger.warn("Failed to set file or core limit: %s", e)
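# Note: a falsy soft_file_no (0 or None) raises the soft RLIMIT_NOFILE all
# the way to the current hard limit, and RLIMIT_CORE is made unlimited so
# that crashes can produce core dumps.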


def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now that we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        db_conn = hs.get_db_conn()
        database_engine.prepare_database(db_conn)
        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", config.database_config['name'])

    hs.setup()
    hs.start_listening()

    def start():
        hs.get_pusherpool().start()
        hs.get_state_handler().start_caching()
        hs.get_datastore().start_profiling()
        hs.get_datastore().start_doing_background_updates()
        hs.get_replication_layer().start_get_pdu_cache()

    reactor.callWhenRunning(start)

    return hs


class SynapseService(service.Service):
    """A twisted Service class that will start synapse. Used to run synapse
    via twistd and a .tac.
    """
    def __init__(self, config):
        self.config = config

    def startService(self):
        hs = setup(self.config)
        change_resource_limit(hs.config.soft_file_limit)

    def stopService(self):
        return self._port.stopListening()


class SynapseRequest(Request):
    def __init__(self, site, *args, **kw):
        Request.__init__(self, *args, **kw)
        self.site = site
        self.authenticated_entity = None
        self.start_time = 0

    def __repr__(self):
        # We overwrite this so that we don't log ``access_token``
        return '<%s at 0x%x method=%s uri=%s clientproto=%s site=%s>' % (
            self.__class__.__name__,
            id(self),
            self.method,
            self.get_redacted_uri(),
            self.clientproto,
            self.site.site_tag,
        )

    def get_redacted_uri(self):
        return ACCESS_TOKEN_RE.sub(
            r'\1<redacted>\3',
            self.uri
        )

    def get_user_agent(self):
        return self.requestHeaders.getRawHeaders("User-Agent", [None])[-1]

    def started_processing(self):
        self.site.access_logger.info(
            "%s - %s - Received request: %s %s",
            self.getClientIP(),
            self.site.site_tag,
            self.method,
            self.get_redacted_uri()
        )
        self.start_time = int(time.time() * 1000)

    def finished_processing(self):
        try:
            context = LoggingContext.current_context()
            ru_utime, ru_stime = context.get_resource_usage()
            db_txn_count = context.db_txn_count
            db_txn_duration = context.db_txn_duration
        except:
            ru_utime, ru_stime = (0, 0)
            db_txn_count, db_txn_duration = (0, 0)
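
        # Fields in the access-log line below, reading off the format
        # string: total wall-clock time in ms, then (CPU user ms, CPU
        # system ms), then (DB transaction ms / transaction count), then
        # bytes sent, status code, the request line and the user agent.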
        self.site.access_logger.info(
            "%s - %s - {%s}"
            " Processed request: %dms (%dms, %dms) (%dms/%d)"
            " %sB %s \"%s %s %s\" \"%s\"",
            self.getClientIP(),
            self.site.site_tag,
            self.authenticated_entity,
            int(time.time() * 1000) - self.start_time,
            int(ru_utime * 1000),
            int(ru_stime * 1000),
            int(db_txn_duration * 1000),
            int(db_txn_count),
            self.sentLength,
            self.code,
            self.method,
            self.get_redacted_uri(),
            self.clientproto,
            self.get_user_agent(),
        )

    @contextlib.contextmanager
    def processing(self):
        self.started_processing()
        yield
        self.finished_processing()


class XForwardedForRequest(SynapseRequest):
    """
    Add a layer on top of another request that only uses the value of an
    X-Forwarded-For header as the result of C{getClientIP}.
    """
    def __init__(self, *args, **kw):
        SynapseRequest.__init__(self, *args, **kw)

    def getClientIP(self):
        """
        @return: The client address (the first address) in the value of the
            I{X-Forwarded-For header}. If the header is not present, return
            C{b"-"}.
        """
        return self.requestHeaders.getRawHeaders(
            b"x-forwarded-for", [b"-"])[0].split(b",")[0].strip()


class SynapseRequestFactory(object):
    def __init__(self, site, x_forwarded_for):
        self.site = site
        self.x_forwarded_for = x_forwarded_for

    def __call__(self, *args, **kwargs):
        if self.x_forwarded_for:
            return XForwardedForRequest(self.site, *args, **kwargs)
        else:
            return SynapseRequest(self.site, *args, **kwargs)


class SynapseSite(Site):
    """
    Subclass of a twisted http Site that does access logging with python's
    standard logging
    """
    def __init__(self, logger_name, site_tag, config, resource, *args, **kwargs):
        Site.__init__(self, resource, *args, **kwargs)

        self.site_tag = site_tag

        proxied = config.get("x_forwarded", False)
        self.requestFactory = SynapseRequestFactory(self, proxied)
        self.access_logger = logging.getLogger(logger_name)

    def log(self, request):
        # Override twisted's default access logging with a no-op:
        # SynapseRequest logs via self.access_logger in
        # started_processing/finished_processing instead.
        pass


def create_resource_tree(desired_tree, redirect_root_to_web_client=True):
    """Create the resource tree for this Home Server.

    This is unduly complicated because Twisted does not support putting
    child resources more than 1 level deep at a time.

    Args:
        desired_tree (dict): A mapping from desired paths to the Resources
            that should serve them.
        redirect_root_to_web_client (bool): True to redirect '/' to the
            location of the web client. This does nothing if the web client
            is not in the desired tree.
    """
    if redirect_root_to_web_client and WEB_CLIENT_PREFIX in desired_tree:
        root_resource = RootRedirect(WEB_CLIENT_PREFIX)
    else:
        root_resource = Resource()

    # ideally we'd just use getChild and putChild but getChild doesn't work
    # unless you give it a Request object IN ADDITION to the name :/ So
    # instead, we'll store a copy of this mapping so we can actually add
    # extra resources to existing nodes. See _resource_id for the key.
    resource_mappings = {}
    for full_path, res in desired_tree.items():
        logger.info("Attaching %s to path %s", res, full_path)
        last_resource = root_resource
        for path_seg in full_path.split('/')[1:-1]:
            if path_seg not in last_resource.listNames():
                # resource doesn't exist, so make a "dummy resource"
                child_resource = Resource()
                last_resource.putChild(path_seg, child_resource)
                res_id = _resource_id(last_resource, path_seg)
                resource_mappings[res_id] = child_resource
                last_resource = child_resource
            else:
                # we have an existing Resource, use that instead.
                res_id = _resource_id(last_resource, path_seg)
                last_resource = resource_mappings[res_id]

        # ===========================
        # now attach the actual desired resource
        last_path_seg = full_path.split('/')[-1]

        # if there is already a resource here, thieve its children and
        # replace it
        res_id = _resource_id(last_resource, last_path_seg)
        if res_id in resource_mappings:
            # there is a dummy resource at this path already, which needs
            # to be replaced with the desired resource.
            existing_dummy_resource = resource_mappings[res_id]
            for child_name in existing_dummy_resource.listNames():
                child_res_id = _resource_id(
                    existing_dummy_resource, child_name
                )
                child_resource = resource_mappings[child_res_id]
                # steal the children
                res.putChild(child_name, child_resource)

        # finally, insert the desired resource in the right place
        last_resource.putChild(last_path_seg, res)
        res_id = _resource_id(last_resource, last_path_seg)
        resource_mappings[res_id] = res

    return root_resource


def _resource_id(resource, path_seg):
    """Construct an arbitrary resource ID so you can retrieve the mapping
    later.

    If you want to represent resource A putChild resource B with path C,
    the mapping should look like _resource_id(A,C) = B.

    Args:
        resource (Resource): The *parent* Resource.
        path_seg (str): The name of the child Resource to be attached.
    Returns:
        str: A unique string which can be a key to the child Resource.
    """
    return "%s-%s" % (resource, path_seg)


def run(hs):
    PROFILE_SYNAPSE = False
    if PROFILE_SYNAPSE:
        def profile(func):
            from cProfile import Profile
            from threading import current_thread

            def profiled(*args, **kargs):
                profile = Profile()
                profile.enable()
                func(*args, **kargs)
                profile.disable()
                ident = current_thread().ident
                profile.dump_stats("/tmp/%s.%s.%i.pstat" % (
                    hs.hostname, func.__name__, ident
                ))

            return profiled

        from twisted.python.threadpool import ThreadPool
        ThreadPool._worker = profile(ThreadPool._worker)
        reactor.run = profile(reactor.run)

    start_time = hs.get_clock().time()

    @defer.inlineCallbacks
    def phone_stats_home():
        logger.info("Gathering stats for reporting")
        now = int(hs.get_clock().time())
        uptime = int(now - start_time)
        if uptime < 0:
            uptime = 0

        stats = {}
        stats["homeserver"] = hs.config.server_name
        stats["timestamp"] = now
        stats["uptime_seconds"] = uptime
        stats["total_users"] = yield hs.get_datastore().count_all_users()

        all_rooms = yield hs.get_datastore().get_rooms(False)
        stats["total_room_count"] = len(all_rooms)

        stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
        daily_messages = yield hs.get_datastore().count_daily_messages()
        if daily_messages is not None:
            stats["daily_messages"] = daily_messages

        logger.info("Reporting stats to matrix.org: %s", stats)
        try:
            yield hs.get_simple_http_client().put_json(
                "https://matrix.org/report-usage-stats/push",
                stats
            )
        except Exception as e:
            logger.warn("Error reporting stats: %s", e)

    if hs.config.report_stats:
        phone_home_task = task.LoopingCall(phone_stats_home)
        logger.info("Scheduling stats reporting for 24 hour intervals")
        phone_home_task.start(60 * 60 * 24, now=False)

    def in_thread():
        with LoggingContext("run"):
            change_resource_limit(hs.config.soft_file_limit)
            reactor.run()

    if hs.config.daemonize:

        if hs.config.print_pidfile:
            print hs.config.pid_file

        daemon = Daemonize(
            app="synapse-homeserver",
            pid=hs.config.pid_file,
            action=lambda: in_thread(),
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )

        daemon.start()
    else:
        in_thread()


def main():
    with LoggingContext("main"):
        # check base requirements
        check_requirements()
        hs = setup(sys.argv[1:])
        run(hs)


if __name__ == '__main__':
    main()