2014-08-12 22:14:34 -04:00
|
|
|
#!/usr/bin/env python
|
2014-08-12 10:10:52 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-06 08:21:39 -05:00
|
|
|
# Copyright 2014, 2015 OpenMarket Ltd
|
2014-08-12 10:10:52 -04:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2015-02-17 05:54:06 -05:00
|
|
|
import sys
|
|
|
|
# Avoid littering .pyc files next to the sources when run as a script.
sys.dont_write_bytecode = True
|
|
|
|
|
2015-03-04 07:04:19 -05:00
|
|
|
from synapse.storage import (
|
|
|
|
prepare_database, prepare_sqlite3_database, UpgradeDatabaseException,
|
|
|
|
)
|
2014-08-12 10:10:52 -04:00
|
|
|
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
2015-01-08 12:07:28 -05:00
|
|
|
from synapse.python_dependencies import check_requirements
|
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
from twisted.internet import reactor
|
|
|
|
from twisted.enterprise import adbapi
|
2014-08-14 04:52:20 -04:00
|
|
|
from twisted.web.resource import Resource
|
|
|
|
from twisted.web.static import File
|
|
|
|
from twisted.web.server import Site
|
2014-09-03 06:10:29 -04:00
|
|
|
from synapse.http.server import JsonResource, RootRedirect
|
2015-01-27 09:01:51 -05:00
|
|
|
from synapse.rest.appservice.v1 import AppServiceRestResource
|
2015-01-22 11:10:07 -05:00
|
|
|
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
|
|
|
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
2014-09-23 13:40:59 -04:00
|
|
|
from synapse.http.server_key_resource import LocalKey
|
2014-11-20 12:41:56 -05:00
|
|
|
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
|
2014-08-18 10:01:08 -04:00
|
|
|
from synapse.api.urls import (
|
2014-09-23 13:40:59 -04:00
|
|
|
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
2015-02-23 10:35:09 -05:00
|
|
|
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, APP_SERVICE_PREFIX,
|
|
|
|
STATIC_PREFIX
|
2014-08-18 10:01:08 -04:00
|
|
|
)
|
2014-08-31 11:06:39 -04:00
|
|
|
from synapse.config.homeserver import HomeServerConfig
|
2014-09-01 11:30:43 -04:00
|
|
|
from synapse.crypto import context_factory
|
2014-10-29 21:21:33 -04:00
|
|
|
from synapse.util.logcontext import LoggingContext
|
2015-01-23 05:37:38 -05:00
|
|
|
from synapse.rest.client.v1 import ClientV1RestResource
|
2015-01-23 13:54:51 -05:00
|
|
|
from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
|
2014-08-12 10:10:52 -04:00
|
|
|
|
|
|
|
from daemonize import Daemonize
|
2014-08-26 08:43:55 -04:00
|
|
|
import twisted.manhole.telnet
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2015-01-07 12:25:28 -05:00
|
|
|
import synapse
|
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
import logging
|
2014-08-14 09:07:14 -04:00
|
|
|
import os
|
2014-08-24 06:56:55 -04:00
|
|
|
import re
|
2015-02-19 06:50:49 -05:00
|
|
|
import resource
|
2015-02-18 11:21:35 -05:00
|
|
|
import subprocess
|
2014-09-10 11:23:58 -04:00
|
|
|
import sqlite3
|
2014-11-04 11:19:03 -05:00
|
|
|
import syweb
|
2014-08-12 10:10:52 -04:00
|
|
|
|
|
|
|
# Module-level logger, named after this module per the stdlib convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class SynapseHomeServer(HomeServer):
    """Concrete HomeServer wired up for a sqlite-backed synapse process.

    The ``build_*`` methods below appear to be lazily-invoked factories:
    ``create_resource_tree`` calls ``self.get_resource_for_client()`` etc.,
    which presumably the HomeServer base class (not visible here) resolves to
    the matching ``build_resource_for_client()`` — confirm against
    synapse.server.HomeServer.
    """

    def build_http_client(self):
        # HTTP client used for server-to-server (federation) requests.
        return MatrixFederationHttpClient(self)

    def build_resource_for_client(self):
        # Client-server REST API, v1.
        return ClientV1RestResource(self)

    def build_resource_for_client_v2_alpha(self):
        # Client-server REST API, v2 alpha.
        return ClientV2AlphaRestResource(self)

    def build_resource_for_federation(self):
        # Server-to-server (federation) API.
        return JsonResource(self)

    def build_resource_for_app_services(self):
        # Application services API, v1.
        return AppServiceRestResource(self)

    def build_resource_for_web_client(self):
        # Serve the bundled "syweb" web client as static files, located
        # relative to wherever the syweb package is installed.
        syweb_path = os.path.dirname(syweb.__file__)
        webclient_path = os.path.join(syweb_path, "webclient")
        return File(webclient_path)  # TODO configurable?

    def build_resource_for_static_content(self):
        # Static content served from "static" relative to the process's
        # working directory — NOTE(review): cwd-dependent, confirm intended.
        return File("static")

    def build_resource_for_content_repo(self):
        # v0 content (upload/download) repository.
        return ContentRepoResource(
            self, self.upload_dir, self.auth, self.content_addr
        )

    def build_resource_for_media_repository(self):
        # v1 media repository.
        return MediaRepositoryResource(self)

    def build_resource_for_server_key(self):
        # Serves this server's local signing key.
        return LocalKey(self)

    def build_db_pool(self):
        # Single-connection sqlite pool (cp_min == cp_max == 1).
        return adbapi.ConnectionPool(
            "sqlite3", self.get_db_name(),
            check_same_thread=False,
            cp_min=1,
            cp_max=1,
            cp_openfun=prepare_database,  # Prepare the database for each conn
                                          # so that :memory: sqlite works
        )

    def create_resource_tree(self, web_client, redirect_root_to_web_client):
        """Create the resource tree for this Home Server.

        This in unduly complicated because Twisted does not support putting
        child resources more than 1 level deep at a time.

        Args:
            web_client (bool): True to enable the web client.
            redirect_root_to_web_client (bool): True to redirect '/' to the
            location of the web client. This does nothing if web_client is not
            True.
        """
        # list containing (path_str, Resource) e.g:
        # [ ("/aaa/bbb/cc", Resource1), ("/aaa/dummy", Resource2) ]
        desired_tree = [
            (CLIENT_PREFIX, self.get_resource_for_client()),
            (CLIENT_V2_ALPHA_PREFIX, self.get_resource_for_client_v2_alpha()),
            (FEDERATION_PREFIX, self.get_resource_for_federation()),
            (CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
            (SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
            (MEDIA_PREFIX, self.get_resource_for_media_repository()),
            (APP_SERVICE_PREFIX, self.get_resource_for_app_services()),
            (STATIC_PREFIX, self.get_resource_for_static_content()),
        ]

        if web_client:
            logger.info("Adding the web client.")
            desired_tree.append((WEB_CLIENT_PREFIX,
                                self.get_resource_for_web_client()))

        # The root either redirects to the web client or is a plain
        # placeholder Resource onto which everything else is grafted.
        if web_client and redirect_root_to_web_client:
            self.root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        else:
            self.root_resource = Resource()

        # ideally we'd just use getChild and putChild but getChild doesn't work
        # unless you give it a Request object IN ADDITION to the name :/ So
        # instead, we'll store a copy of this mapping so we can actually add
        # extra resources to existing nodes. See self._resource_id for the key.
        resource_mappings = {}
        for full_path, res in desired_tree:
            logger.info("Attaching %s to path %s", res, full_path)
            last_resource = self.root_resource
            # Walk/create each intermediate path segment (all but the last).
            for path_seg in full_path.split('/')[1:-1]:
                if path_seg not in last_resource.listNames():
                    # resource doesn't exist, so make a "dummy resource"
                    child_resource = Resource()
                    last_resource.putChild(path_seg, child_resource)
                    res_id = self._resource_id(last_resource, path_seg)
                    resource_mappings[res_id] = child_resource
                    last_resource = child_resource
                else:
                    # we have an existing Resource, use that instead.
                    res_id = self._resource_id(last_resource, path_seg)
                    last_resource = resource_mappings[res_id]

            # ===========================
            # now attach the actual desired resource
            last_path_seg = full_path.split('/')[-1]

            # if there is already a resource here, thieve its children and
            # replace it
            res_id = self._resource_id(last_resource, last_path_seg)
            if res_id in resource_mappings:
                # there is a dummy resource at this path already, which needs
                # to be replaced with the desired resource.
                existing_dummy_resource = resource_mappings[res_id]
                for child_name in existing_dummy_resource.listNames():
                    child_res_id = self._resource_id(existing_dummy_resource,
                                                     child_name)
                    child_resource = resource_mappings[child_res_id]
                    # steal the children
                    res.putChild(child_name, child_resource)

            # finally, insert the desired resource in the right place
            last_resource.putChild(last_path_seg, res)
            res_id = self._resource_id(last_resource, last_path_seg)
            resource_mappings[res_id] = res

        return self.root_resource

    def _resource_id(self, resource, path_seg):
        """Construct an arbitrary resource ID so you can retrieve the mapping
        later.

        If you want to represent resource A putChild resource B with path C,
        the mapping should looks like _resource_id(A,C) = B.

        Args:
            resource (Resource): The *parent* Resource
            path_seg (str): The name of the child Resource to be attached.
        Returns:
            str: A unique string which can be a key to the child Resource.
        """
        return "%s-%s" % (resource, path_seg)

    def start_listening(self, secure_port, unsecure_port):
        """Bind the resource tree to the given ports.

        Args:
            secure_port (int|None): TLS port, or None to skip TLS.
            unsecure_port (int|None): plain-TCP port, or None to skip it.
        """
        if secure_port is not None:
            reactor.listenSSL(
                secure_port, Site(self.root_resource), self.tls_context_factory
            )
            logger.info("Synapse now listening on port %d", secure_port)
        if unsecure_port is not None:
            reactor.listenTCP(
                unsecure_port, Site(self.root_resource)
            )
            logger.info("Synapse now listening on port %d", unsecure_port)
|
|
|
|
|
2014-09-01 17:38:52 -04:00
|
|
|
def start_listening(self, secure_port, unsecure_port):
|
|
|
|
if secure_port is not None:
|
|
|
|
reactor.listenSSL(
|
|
|
|
secure_port, Site(self.root_resource), self.tls_context_factory
|
|
|
|
)
|
|
|
|
logger.info("Synapse now listening on port %d", secure_port)
|
|
|
|
if unsecure_port is not None:
|
|
|
|
reactor.listenTCP(
|
|
|
|
unsecure_port, Site(self.root_resource)
|
|
|
|
)
|
|
|
|
logger.info("Synapse now listening on port %d", unsecure_port)
|
2014-08-14 04:52:20 -04:00
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2015-02-18 11:21:35 -05:00
|
|
|
def get_version_string():
    """Calculate the server's version string, including git metadata.

    Shells out to git to discover the branch, tag, short commit hash and
    dirty state of the checkout this file lives in.  Any piece of that
    information which is unavailable (not a git checkout, git not installed,
    no exact tag, ...) is simply omitted.

    Returns:
        str: e.g. "Synapse/0.7.1 (b=develop,1234abc)" when git metadata is
        available, otherwise just "Synapse/0.7.1"; ascii-encoded.
    """
    cwd = os.path.dirname(os.path.abspath(__file__))
    null = open(os.devnull, 'w')
    try:
        def _git(args):
            # Best-effort git invocation: returns stripped stdout, or None on
            # any failure.  OSError is caught as well so that a machine
            # without git installed does not crash on startup (previously
            # only CalledProcessError was handled).
            try:
                return subprocess.check_output(
                    ['git'] + args,
                    stderr=null,
                    cwd=cwd,
                ).strip()
            except (subprocess.CalledProcessError, OSError):
                return None

        branch = _git(['rev-parse', '--abbrev-ref', 'HEAD'])
        git_branch = ("b=" + branch) if branch else ""

        tag = _git(['describe', '--exact-match'])
        git_tag = ("t=" + tag) if tag else ""

        git_commit = _git(['rev-parse', '--short', 'HEAD']) or ""

        dirty_string = "-this_is_a_dirty_checkout"
        described = _git(['describe', '--dirty=' + dirty_string])
        is_dirty = described is not None and described.endswith(dirty_string)
        git_dirty = "dirty" if is_dirty else ""
    finally:
        # The original implementation leaked this file handle.
        null.close()

    if git_branch or git_tag or git_commit or git_dirty:
        git_version = ",".join(
            s for s in
            (git_branch, git_tag, git_commit, git_dirty,)
            if s
        )

        return (
            "Synapse/%s (%s)" % (
                synapse.__version__, git_version,
            )
        ).encode("ascii")

    return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
|
|
|
|
|
|
|
|
|
2015-02-19 06:50:49 -05:00
|
|
|
def change_resource_limit(soft_file_no):
    """Adjust the soft limit on open file descriptors (RLIMIT_NOFILE).

    Args:
        soft_file_no (int): the desired soft limit.  A falsy value means
            "raise the soft limit all the way up to the current hard limit".

    Failure to change the limit is logged as a warning, never raised.
    """
    try:
        limits = resource.getrlimit(resource.RLIMIT_NOFILE)
        hard_limit = limits[1]

        # Fall back to the hard limit when no explicit value was configured.
        target = soft_file_no if soft_file_no else hard_limit

        resource.setrlimit(resource.RLIMIT_NOFILE, (target, hard_limit))

        logger.info("Set file limit to: %d", target)
    except (ValueError, resource.error) as e:
        logger.warn("Failed to set file limit: %s", e)
|
|
|
|
|
|
|
|
|
2014-11-18 10:57:00 -05:00
|
|
|
def setup():
    """Parse the config, build the homeserver, prepare the database, start
    listening, and finally hand over to run() (optionally daemonized).
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        sys.argv[1:],
        generate_section="Homeserver"
    )

    config.setup_logging()

    check_requirements()

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    # If the configured server_name already carries an explicit port, use it
    # verbatim; otherwise append the configured bind port.
    if re.search(":[0-9]+$", config.server_name):
        domain_with_port = config.server_name
    else:
        domain_with_port = "%s:%s" % (config.server_name, config.bind_port)

    tls_context_factory = context_factory.ServerContextFactory(config)

    hs = SynapseHomeServer(
        config.server_name,
        domain_with_port=domain_with_port,
        upload_dir=os.path.abspath("uploads"),  # cwd-relative upload dir
        db_name=config.database_path,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
    )

    hs.create_resource_tree(
        web_client=config.webclient,
        redirect_root_to_web_client=True,
    )

    db_name = hs.get_db_name()

    logger.info("Preparing database: %s...", db_name)

    # Apply schema creation/migrations up front, before the connection pool
    # starts serving queries.
    try:
        with sqlite3.connect(db_name) as db_conn:
            prepare_sqlite3_database(db_conn)
            prepare_database(db_conn)
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", db_name)

    # Optional telnet manhole for interactive debugging; bound to loopback
    # only.  NOTE(review): credentials are hard-coded.
    if config.manhole:
        f = twisted.manhole.telnet.ShellFactory()
        f.username = "matrix"
        f.password = "rabbithole"
        f.namespace['hs'] = hs
        reactor.listenTCP(config.manhole, f, interface='127.0.0.1')

    # Suppress the TLS listener entirely when no_tls is set.
    bind_port = config.bind_port
    if config.no_tls:
        bind_port = None

    hs.start_listening(bind_port, config.unsecure_port)

    # Kick off the background machinery before the reactor starts.
    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    if config.daemonize:
        print config.pid_file

        daemon = Daemonize(
            app="synapse-homeserver",
            pid=config.pid_file,
            action=lambda: run(config),
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )

        daemon.start()
    else:
        run(config)
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2014-11-20 12:26:36 -05:00
|
|
|
|
2015-02-19 06:50:49 -05:00
|
|
|
def run(config):
    """Raise the fd limit and enter the reactor loop (blocks until shutdown)."""
    with LoggingContext("run"):
        # Must happen in the (possibly daemonized) final process, after
        # Daemonize has forked, so the limit applies to the server itself.
        change_resource_limit(config.soft_file_limit)

        reactor.run()
|
2014-10-29 21:21:33 -04:00
|
|
|
|
2014-11-20 12:26:36 -05:00
|
|
|
|
2014-11-18 10:57:00 -05:00
|
|
|
def main():
    """Script entry point: verify dependencies, then set up and start."""
    with LoggingContext("main"):
        # Fail fast with a clear error if python dependencies are missing.
        check_requirements()
        setup()
|
2014-08-12 10:10:52 -04:00
|
|
|
|
2014-11-20 12:26:36 -05:00
|
|
|
|
2014-08-12 10:10:52 -04:00
|
|
|
# Standard script guard: only start the server when executed directly.
if __name__ == '__main__':
    main()
|