2014-08-12 22:14:34 -04:00
|
|
|
#!/usr/bin/env python
|
2014-08-12 10:10:52 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-06 08:21:39 -05:00
|
|
|
# Copyright 2014, 2015 OpenMarket Ltd
|
2014-08-12 10:10:52 -04:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2015-02-17 05:54:06 -05:00
|
|
|
import sys
|
|
|
|
sys.dont_write_bytecode = True  # don't litter .pyc files next to the source
|
2015-04-29 09:52:42 -04:00
|
|
|
from synapse.python_dependencies import check_requirements
|
|
|
|
|
2015-04-29 09:53:23 -04:00
|
|
|
if __name__ == '__main__':
    # Fail fast on missing dependencies before the heavyweight synapse
    # imports below are attempted (they may require these packages).
    check_requirements()
|
2015-02-17 05:54:06 -05:00
|
|
|
|
2015-04-29 06:42:28 -04:00
|
|
|
from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
|
2015-03-04 07:04:19 -05:00
|
|
|
from synapse.storage import (
|
2015-04-28 08:44:23 -04:00
|
|
|
are_all_users_on_domain, UpgradeDatabaseException,
|
2015-03-04 07:04:19 -05:00
|
|
|
)
|
2014-08-12 10:10:52 -04:00
|
|
|
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
2015-01-08 12:07:28 -05:00
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
from twisted.internet import reactor
|
2015-01-07 08:46:37 -05:00
|
|
|
from twisted.application import service
|
2014-08-12 10:10:52 -04:00
|
|
|
from twisted.enterprise import adbapi
|
2015-05-14 11:39:19 -04:00
|
|
|
from twisted.web.resource import Resource, EncodingResourceWrapper
|
2014-08-14 04:52:20 -04:00
|
|
|
from twisted.web.static import File
|
2015-05-14 11:39:19 -04:00
|
|
|
from twisted.web.server import Site, GzipEncoderFactory
|
2015-04-30 11:17:27 -04:00
|
|
|
from twisted.web.http import proxiedLogFormatter, combinedLogFormatter
|
2014-09-03 06:10:29 -04:00
|
|
|
from synapse.http.server import JsonResource, RootRedirect
|
2015-01-22 11:10:07 -05:00
|
|
|
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
|
|
|
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
2015-04-14 08:28:11 -04:00
|
|
|
from synapse.rest.key.v1.server_key_resource import LocalKey
|
2015-04-14 11:04:52 -04:00
|
|
|
from synapse.rest.key.v2 import KeyApiV2Resource
|
2014-11-20 12:41:56 -05:00
|
|
|
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
|
2014-08-18 10:01:08 -04:00
|
|
|
from synapse.api.urls import (
|
2014-09-23 13:40:59 -04:00
|
|
|
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
2015-04-14 11:04:52 -04:00
|
|
|
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX,
|
|
|
|
SERVER_KEY_V2_PREFIX,
|
2014-08-18 10:01:08 -04:00
|
|
|
)
|
2014-08-31 11:06:39 -04:00
|
|
|
from synapse.config.homeserver import HomeServerConfig
|
2014-09-01 11:30:43 -04:00
|
|
|
from synapse.crypto import context_factory
|
2014-10-29 21:21:33 -04:00
|
|
|
from synapse.util.logcontext import LoggingContext
|
2015-01-23 05:37:38 -05:00
|
|
|
from synapse.rest.client.v1 import ClientV1RestResource
|
2015-01-23 13:54:51 -05:00
|
|
|
from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
|
2015-03-12 11:33:53 -04:00
|
|
|
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
2014-08-12 10:10:52 -04:00
|
|
|
|
|
|
|
from daemonize import Daemonize
|
2014-08-26 08:43:55 -04:00
|
|
|
import twisted.manhole.telnet
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2015-01-07 12:25:28 -05:00
|
|
|
import synapse
|
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
import logging
|
2014-08-14 09:07:14 -04:00
|
|
|
import os
|
2014-08-24 06:56:55 -04:00
|
|
|
import re
|
2015-02-19 06:50:49 -05:00
|
|
|
import resource
|
2015-02-18 11:21:35 -05:00
|
|
|
import subprocess
|
2014-08-12 10:10:52 -04:00
|
|
|
|
|
|
|
|
2015-04-07 07:04:02 -04:00
|
|
|
# Module-level logger for startup/runtime messages from this entry point.
logger = logging.getLogger("synapse.app.homeserver")
|
2014-08-12 10:10:52 -04:00
|
|
|
|
|
|
|
|
2015-05-14 11:39:19 -04:00
|
|
|
class GzipFile(File):
    """A static File resource whose children are served gzip-compressed
    when the client advertises support for it.
    """

    def getChild(self, path, request):
        # Delegate the lookup to File, then wrap the result so responses
        # can be content-encoded with gzip.
        wrapped = File.getChild(self, path, request)
        return EncodingResourceWrapper(wrapped, [GzipEncoderFactory()])
|
|
|
|
|
|
|
|
|
|
|
|
def gz_wrap(r):
    """Return resource *r* wrapped for gzip content-encoding negotiation."""
    encoders = [GzipEncoderFactory()]
    return EncodingResourceWrapper(r, encoders)
|
|
|
|
|
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
class SynapseHomeServer(HomeServer):
|
|
|
|
|
|
|
|
def build_http_client(self):
|
2014-11-20 08:53:34 -05:00
|
|
|
return MatrixFederationHttpClient(self)
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2014-08-14 04:52:20 -04:00
|
|
|
def build_resource_for_client(self):
|
2015-05-19 07:49:38 -04:00
|
|
|
return ClientV1RestResource(self)
|
2014-08-14 04:52:20 -04:00
|
|
|
|
2015-01-23 13:54:51 -05:00
|
|
|
def build_resource_for_client_v2_alpha(self):
|
2015-05-19 07:49:38 -04:00
|
|
|
return ClientV2AlphaRestResource(self)
|
2014-08-14 04:52:20 -04:00
|
|
|
|
|
|
|
def build_resource_for_federation(self):
|
2015-02-09 08:46:22 -05:00
|
|
|
return JsonResource(self)
|
2014-08-14 04:52:20 -04:00
|
|
|
|
|
|
|
def build_resource_for_web_client(self):
|
2015-03-17 07:45:37 -04:00
|
|
|
import syweb
|
2014-11-04 11:19:03 -05:00
|
|
|
syweb_path = os.path.dirname(syweb.__file__)
|
|
|
|
webclient_path = os.path.join(syweb_path, "webclient")
|
2015-05-19 07:49:38 -04:00
|
|
|
return File(webclient_path) # TODO configurable?
|
2014-08-14 04:52:20 -04:00
|
|
|
|
2015-02-23 10:35:09 -05:00
|
|
|
def build_resource_for_static_content(self):
|
2015-05-14 11:39:19 -04:00
|
|
|
# This is old and should go away: not going to bother adding gzip
|
2015-02-23 10:35:09 -05:00
|
|
|
return File("static")
|
|
|
|
|
2014-08-18 10:01:08 -04:00
|
|
|
def build_resource_for_content_repo(self):
|
2014-09-03 06:57:23 -04:00
|
|
|
return ContentRepoResource(
|
|
|
|
self, self.upload_dir, self.auth, self.content_addr
|
|
|
|
)
|
2014-08-18 10:01:08 -04:00
|
|
|
|
2014-12-02 14:51:47 -05:00
|
|
|
def build_resource_for_media_repository(self):
|
|
|
|
return MediaRepositoryResource(self)
|
|
|
|
|
2014-09-23 13:40:59 -04:00
|
|
|
def build_resource_for_server_key(self):
|
|
|
|
return LocalKey(self)
|
|
|
|
|
2015-04-14 11:04:52 -04:00
|
|
|
def build_resource_for_server_key_v2(self):
|
|
|
|
return KeyApiV2Resource(self)
|
|
|
|
|
2015-03-12 11:33:53 -04:00
|
|
|
def build_resource_for_metrics(self):
|
|
|
|
if self.get_config().enable_metrics:
|
|
|
|
return MetricsResource(self)
|
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
def build_db_pool(self):
|
2015-03-25 13:15:20 -04:00
|
|
|
name = self.db_config["name"]
|
2015-03-20 06:55:55 -04:00
|
|
|
|
2015-03-25 13:15:20 -04:00
|
|
|
return adbapi.ConnectionPool(
|
|
|
|
name,
|
|
|
|
**self.db_config.get("args", {})
|
|
|
|
)
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2015-02-24 09:23:50 -05:00
|
|
|
    def create_resource_tree(self, redirect_root_to_web_client):
        """Create the resource tree for this Home Server.

        This in unduly complicated because Twisted does not support putting
        child resources more than 1 level deep at a time.

        Args:
            redirect_root_to_web_client (bool): True to redirect '/' to the
                location of the web client. This does nothing if the web
                client is not enabled in the config.

        Returns:
            Resource: the root of the populated resource tree
                (also stored on self.root_resource).
        """
        config = self.get_config()
        web_client = config.web_client

        # list containing (path_str, Resource) e.g:
        # [ ("/aaa/bbb/cc", Resource1), ("/aaa/dummy", Resource2) ]
        desired_tree = [
            (CLIENT_PREFIX, self.get_resource_for_client()),
            (CLIENT_V2_ALPHA_PREFIX, self.get_resource_for_client_v2_alpha()),
            (FEDERATION_PREFIX, self.get_resource_for_federation()),
            (CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
            (SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
            (SERVER_KEY_V2_PREFIX, self.get_resource_for_server_key_v2()),
            (MEDIA_PREFIX, self.get_resource_for_media_repository()),
            (STATIC_PREFIX, self.get_resource_for_static_content()),
        ]

        if web_client:
            logger.info("Adding the web client.")
            desired_tree.append((WEB_CLIENT_PREFIX,
                                 self.get_resource_for_web_client()))

        if web_client and redirect_root_to_web_client:
            self.root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        else:
            self.root_resource = Resource()

        # Metrics are only attached to the main tree when no dedicated
        # metrics port is configured (otherwise start_listening serves them
        # on their own loopback-only port).
        metrics_resource = self.get_resource_for_metrics()
        if config.metrics_port is None and metrics_resource is not None:
            desired_tree.append((METRICS_PREFIX, metrics_resource))

        # ideally we'd just use getChild and putChild but getChild doesn't work
        # unless you give it a Request object IN ADDITION to the name :/ So
        # instead, we'll store a copy of this mapping so we can actually add
        # extra resources to existing nodes. See self._resource_id for the key.
        resource_mappings = {}
        for full_path, res in desired_tree:
            logger.info("Attaching %s to path %s", res, full_path)
            last_resource = self.root_resource
            # Walk/create the intermediate path segments (all but the last).
            for path_seg in full_path.split('/')[1:-1]:
                if path_seg not in last_resource.listNames():
                    # resource doesn't exist, so make a "dummy resource"
                    child_resource = Resource()
                    last_resource.putChild(path_seg, child_resource)
                    res_id = self._resource_id(last_resource, path_seg)
                    resource_mappings[res_id] = child_resource
                    last_resource = child_resource
                else:
                    # we have an existing Resource, use that instead.
                    res_id = self._resource_id(last_resource, path_seg)
                    last_resource = resource_mappings[res_id]

            # ===========================
            # now attach the actual desired resource
            last_path_seg = full_path.split('/')[-1]

            # if there is already a resource here, thieve its children and
            # replace it
            res_id = self._resource_id(last_resource, last_path_seg)
            if res_id in resource_mappings:
                # there is a dummy resource at this path already, which needs
                # to be replaced with the desired resource.
                existing_dummy_resource = resource_mappings[res_id]
                for child_name in existing_dummy_resource.listNames():
                    child_res_id = self._resource_id(existing_dummy_resource,
                                                     child_name)
                    child_resource = resource_mappings[child_res_id]
                    # steal the children
                    res.putChild(child_name, child_resource)

            # finally, insert the desired resource in the right place
            last_resource.putChild(last_path_seg, res)
            res_id = self._resource_id(last_resource, last_path_seg)
            resource_mappings[res_id] = res

        return self.root_resource
|
|
|
|
|
|
|
|
def _resource_id(self, resource, path_seg):
|
|
|
|
"""Construct an arbitrary resource ID so you can retrieve the mapping
|
|
|
|
later.
|
|
|
|
|
|
|
|
If you want to represent resource A putChild resource B with path C,
|
|
|
|
the mapping should looks like _resource_id(A,C) = B.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
resource (Resource): The *parent* Resource
|
|
|
|
path_seg (str): The name of the child Resource to be attached.
|
|
|
|
Returns:
|
|
|
|
str: A unique string which can be a key to the child Resource.
|
|
|
|
"""
|
|
|
|
return "%s-%s" % (resource, path_seg)
|
|
|
|
|
2015-03-12 11:51:33 -04:00
|
|
|
    def start_listening(self):
        """Bind this homeserver's configured network listeners.

        Starts the TLS listener (unless no_tls), the plain-HTTP listener
        (unsecure_port) and, when configured, a loopback-only metrics
        listener.
        """
        config = self.get_config()

        # Main TLS listener, unless TLS has been disabled outright.
        if not config.no_tls and config.bind_port is not None:
            reactor.listenSSL(
                config.bind_port,
                SynapseSite(
                    "synapse.access.https",
                    config,
                    self.root_resource,
                ),
                self.tls_context_factory,
                interface=config.bind_host
            )
            logger.info("Synapse now listening on port %d", config.bind_port)

        # Plain HTTP listener (e.g. for use behind a TLS-terminating proxy).
        if config.unsecure_port is not None:
            reactor.listenTCP(
                config.unsecure_port,
                SynapseSite(
                    "synapse.access.http",
                    config,
                    self.root_resource,
                ),
                interface=config.bind_host
            )
            logger.info("Synapse now listening on port %d", config.unsecure_port)

        # Metrics get their own port, bound to 127.0.0.1 only so they are
        # never exposed publicly.
        metrics_resource = self.get_resource_for_metrics()
        if metrics_resource and config.metrics_port is not None:
            reactor.listenTCP(
                config.metrics_port,
                SynapseSite(
                    "synapse.access.metrics",
                    config,
                    metrics_resource,
                ),
                interface="127.0.0.1",
            )
            logger.info("Metrics now running on 127.0.0.1 port %d", config.metrics_port)
|
|
|
|
|
2015-04-28 08:39:42 -04:00
|
|
|
def run_startup_checks(self, db_conn, database_engine):
|
2015-04-27 06:46:00 -04:00
|
|
|
all_users_native = are_all_users_on_domain(
|
2015-04-28 08:39:42 -04:00
|
|
|
db_conn.cursor(), database_engine, self.hostname
|
2015-04-24 13:11:21 -04:00
|
|
|
)
|
|
|
|
if not all_users_native:
|
2015-04-29 07:12:18 -04:00
|
|
|
quit_with_error(
|
2015-04-24 13:11:21 -04:00
|
|
|
"Found users in database not native to %s!\n"
|
2015-04-29 07:12:18 -04:00
|
|
|
"You cannot changed a synapse server_name after it's been configured"
|
|
|
|
% (self.hostname,)
|
2015-04-24 13:11:21 -04:00
|
|
|
)
|
|
|
|
|
2015-04-29 06:42:28 -04:00
|
|
|
try:
|
|
|
|
database_engine.check_database(db_conn.cursor())
|
|
|
|
except IncorrectDatabaseSetup as e:
|
2015-04-29 07:12:18 -04:00
|
|
|
quit_with_error(e.message)
|
|
|
|
|
|
|
|
|
|
|
|
def quit_with_error(error_string):
    """Print *error_string* to stderr inside a banner of asterisks, then
    terminate the process with exit status 1.
    """
    message_lines = error_string.split("\n")
    # Banner is two characters wider than the longest message line.
    line_length = max(len(line) for line in message_lines) + 2
    banner = "*" * line_length + '\n'
    sys.stderr.write(banner)
    for line in message_lines:
        stripped = line.strip()
        if stripped:
            sys.stderr.write(" %s\n" % (stripped,))
    sys.stderr.write(banner)
    sys.exit(1)
|
2015-04-29 06:42:28 -04:00
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2015-02-18 11:21:35 -05:00
|
|
|
def get_version_string():
    """Return the server version as an ascii byte string.

    The result is "Synapse/<version>", followed by git metadata of the form
    "(b=<branch>,t=<tag>,<commit>,dirty)" when running from a git checkout.
    Any failure to interrogate git is logged and the plain version string is
    returned instead.
    """
    try:
        null = open(os.devnull, 'w')
        cwd = os.path.dirname(os.path.abspath(__file__))
        try:
            git_branch = subprocess.check_output(
                ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
                stderr=null,
                cwd=cwd,
            ).strip()
            git_branch = "b=" + git_branch
        except subprocess.CalledProcessError:
            git_branch = ""

        try:
            git_tag = subprocess.check_output(
                ['git', 'describe', '--exact-match'],
                stderr=null,
                cwd=cwd,
            ).strip()
            git_tag = "t=" + git_tag
        except subprocess.CalledProcessError:
            git_tag = ""

        try:
            git_commit = subprocess.check_output(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stderr=null,
                cwd=cwd,
            ).strip()
        except subprocess.CalledProcessError:
            git_commit = ""

        try:
            dirty_string = "-this_is_a_dirty_checkout"
            is_dirty = subprocess.check_output(
                ['git', 'describe', '--dirty=' + dirty_string],
                stderr=null,
                cwd=cwd,
            ).strip().endswith(dirty_string)

            git_dirty = "dirty" if is_dirty else ""
        except subprocess.CalledProcessError:
            git_dirty = ""

        if git_branch or git_tag or git_commit or git_dirty:
            git_version = ",".join(
                s for s in
                (git_branch, git_tag, git_commit, git_dirty,)
                if s
            )

            # BUGFIX: this return used to sit outside the `if`, so when no
            # git info was found it raised a NameError on git_version which
            # was then swallowed by the except below, logging a spurious
            # "Failed to check for git repository" warning. Returning here,
            # inside the `if`, lets a non-git checkout fall straight through
            # to the plain version string.
            return (
                "Synapse/%s (%s)" % (
                    synapse.__version__, git_version,
                )
            ).encode("ascii")
    except Exception as e:
        logger.warn("Failed to check for git repository: %s", e)

    return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
|
|
|
|
|
|
|
|
|
2015-02-19 06:50:49 -05:00
|
|
|
def change_resource_limit(soft_file_no):
    """Raise the RLIMIT_NOFILE soft limit to *soft_file_no*.

    A falsy soft_file_no means "raise it all the way to the hard limit".
    Failures are logged rather than raised.
    """
    try:
        current_soft, current_hard = resource.getrlimit(resource.RLIMIT_NOFILE)

        # Zero/None means: as high as the hard limit allows.
        target = soft_file_no if soft_file_no else current_hard

        resource.setrlimit(resource.RLIMIT_NOFILE, (target, current_hard))
        logger.info("Set file limit to: %d", target)
    except (ValueError, resource.error) as e:
        logger.warn("Failed to set file limit: %s", e)
|
|
|
|
|
|
|
|
|
2015-03-10 05:58:33 -04:00
|
|
|
def setup(config_options):
    """Parse the config, prepare the database, and build and start a
    SynapseHomeServer.

    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer: the started homeserver instance.
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    # Append the bind port unless server_name already carries an explicit
    # port suffix.
    if re.search(":[0-9]+$", config.server_name):
        domain_with_port = config.server_name
    else:
        domain_with_port = "%s:%s" % (config.server_name, config.bind_port)

    tls_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    # Have the pool run the engine's per-connection setup hook.
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        domain_with_port=domain_with_port,
        upload_dir=os.path.abspath("uploads"),
        db_config=config.database_config,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    hs.create_resource_tree(
        redirect_root_to_web_client=True,
    )

    logger.info("Preparing database: %r...", config.database_config)

    try:
        # Open a direct (non-pooled) connection for schema preparation;
        # the cp_* keys are connection-pool options, not DB-API kwargs.
        db_conn = database_engine.module.connect(
            **{
                k: v for k, v in config.database_config.get("args", {}).items()
                if not k.startswith("cp_")
            }
        )

        database_engine.prepare_database(db_conn)
        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %r.", config.database_config)

    # Optional telnet manhole for interactive debugging, loopback only.
    if config.manhole:
        f = twisted.manhole.telnet.ShellFactory()
        f.username = "matrix"
        f.password = "rabbithole"
        f.namespace['hs'] = hs
        reactor.listenTCP(config.manhole, f, interface='127.0.0.1')

    hs.start_listening()

    # Kick off the background machinery.
    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    return hs
|
|
|
|
|
2014-11-20 12:26:36 -05:00
|
|
|
|
2015-01-07 08:46:37 -05:00
|
|
|
class SynapseService(service.Service):
    """A twisted Service class that will start synapse. Used to run synapse
    via twistd and a .tac.
    """
    def __init__(self, config):
        # Parsed HomeServerConfig used to build the homeserver on start.
        self.config = config

    def startService(self):
        # Build and start the homeserver, then raise the file-descriptor
        # limit to the configured value.
        hs = setup(self.config)
        change_resource_limit(hs.config.soft_file_limit)

    def stopService(self):
        # NOTE(review): self._port is never assigned anywhere in this class;
        # presumably it is set externally (e.g. by the .tac file) -- confirm
        # before relying on stopService.
        return self._port.stopListening()
|
|
|
|
|
|
|
|
|
2015-04-30 11:17:27 -04:00
|
|
|
class SynapseSite(Site):
    """
    Subclass of a twisted http Site that does access logging with python's
    standard logging
    """
    def __init__(self, logger_name, config, resource, *args, **kwargs):
        Site.__init__(self, resource, *args, **kwargs)
        # Behind a reverse proxy the peer address is the proxy's, so use the
        # X-Forwarded-For aware formatter in that case.
        use_forwarded = config.captcha_ip_origin_is_x_forwarded
        self._log_formatter = (
            proxiedLogFormatter if use_forwarded else combinedLogFormatter
        )
        self.access_logger = logging.getLogger(logger_name)

    def log(self, request):
        # Emit one combined-log-format line per completed request.
        self.access_logger.info(
            self._log_formatter(self._logDateTime, request)
        )
|
|
|
|
|
|
|
|
|
2015-03-10 05:58:33 -04:00
|
|
|
def run(hs):
    """Start the reactor for *hs*, daemonizing if so configured.

    Blocks until the reactor (and hence the homeserver) stops.
    """
    # Flip to True locally to profile the reactor and threadpool workers;
    # stats are dumped to /tmp/<host>.<func>.<thread>.pstat on completion.
    PROFILE_SYNAPSE = False
    if PROFILE_SYNAPSE:
        def profile(func):
            from cProfile import Profile
            from threading import current_thread

            def profiled(*args, **kargs):
                profile = Profile()
                profile.enable()
                func(*args, **kargs)
                profile.disable()
                ident = current_thread().ident
                profile.dump_stats("/tmp/%s.%s.%i.pstat" % (
                    hs.hostname, func.__name__, ident
                ))

            return profiled

        from twisted.python.threadpool import ThreadPool
        ThreadPool._worker = profile(ThreadPool._worker)
        reactor.run = profile(reactor.run)

    def in_thread():
        # Run the reactor in a fresh logging context, and re-apply the
        # file-descriptor limit (daemonizing can reset resource limits).
        with LoggingContext("run"):
            change_resource_limit(hs.config.soft_file_limit)
            reactor.run()

    if hs.config.daemonize:

        print hs.config.pid_file

        daemon = Daemonize(
            app="synapse-homeserver",
            pid=hs.config.pid_file,
            action=lambda: in_thread(),
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )

        daemon.start()
    else:
        in_thread()
|
2014-10-29 21:21:33 -04:00
|
|
|
|
2014-11-20 12:26:36 -05:00
|
|
|
|
2014-11-18 10:57:00 -05:00
|
|
|
def main():
    """Command-line entry point: check dependencies, then build and run
    the homeserver.
    """
    with LoggingContext("main"):
        # check base requirements
        check_requirements()
        homeserver = setup(sys.argv[1:])
        run(homeserver)
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2014-11-20 12:26:36 -05:00
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
# Script entry point when executed directly (rather than via twistd/.tac).
if __name__ == '__main__':
    main()
|