Merge branch 'develop' into server2server_tls

Mark Haines committed on 2014-08-31 16:08:20 +01:00
commit 3eb45eba0e
43 changed files with 1796 additions and 790 deletions


@ -29,6 +29,7 @@ from synapse.http.client import TwistedHttpClient
from synapse.api.urls import (
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX
)
from synapse.config.homeserver import HomeServerConfig
from daemonize import Daemonize
import twisted.manhole.telnet
@ -211,32 +212,7 @@ class SynapseHomeServer(HomeServer):
logger.info("Synapse now listening on port %d", port)
def setup_logging(verbosity=0, filename=None, config_path=None):
""" Sets up logging with verbosity levels.
Args:
verbosity: The verbosity level.
filename: Log to the given file rather than to the console.
config_path: Path to a python logging config file.
"""
if config_path is None:
log_format = (
'%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s'
)
level = logging.INFO
if verbosity:
level = logging.DEBUG
# FIXME: we need a logging.WARN for a -q quiet option
logging.basicConfig(level=level, filename=filename, format=log_format)
else:
logging.config.fileConfig(config_path)
observer = PythonLoggingObserver()
observer.start()
def run():
@ -244,78 +220,49 @@ def run():
def setup():
parser = argparse.ArgumentParser()
parser.add_argument("-p", "--port", dest="port", type=int, default=8080,
help="The port to listen on.")
parser.add_argument("-d", "--database", dest="db", default="homeserver.db",
help="The database name.")
parser.add_argument("-H", "--host", dest="host", default="localhost",
help="The hostname of the server.")
parser.add_argument('-v', '--verbose', dest="verbose", action='count',
help="The verbosity level.")
parser.add_argument('-f', '--log-file', dest="log_file", default=None,
help="File to log to.")
parser.add_argument('--log-config', dest="log_config", default=None,
help="Python logging config")
parser.add_argument('-D', '--daemonize', action='store_true',
default=False, help="Daemonize the home server")
parser.add_argument('--pid-file', dest="pid", help="When running as a "
"daemon, the file to store the pid in",
default="hs.pid")
parser.add_argument("-W", "--webclient", dest="webclient", default=True,
action="store_false", help="Don't host a web client.")
parser.add_argument("--manhole", dest="manhole", type=int, default=None,
help="Turn on the twisted telnet manhole service.")
args = parser.parse_args()
config = HomeServerConfig.load_config("Synapse Homeserver", sys.argv[1:])
verbosity = int(args.verbose) if args.verbose else None
# Because if/when we daemonize we change to root dir.
db_name = os.path.abspath(args.db)
log_file = args.log_file
if log_file:
log_file = os.path.abspath(log_file)
setup_logging(
config.setup_logging(
verbosity=verbosity,
filename=log_file,
config_path=args.log_config,
)
logger.info("Server hostname: %s", args.host)
logger.info("Server hostname: %s", config.server_name)
if re.search(":[0-9]+$", args.host):
domain_with_port = args.host
if re.search(":[0-9]+$", config.server_name):
domain_with_port = config.server_name
else:
domain_with_port = "%s:%s" % (args.host, args.port)
domain_with_port = "%s:%s" % (args.server_name, config.bind_port)
hs = SynapseHomeServer(
args.host,
config.server_name,
domain_with_port=domain_with_port,
upload_dir=os.path.abspath("uploads"),
db_name=db_name,
db_name=config.database_path,
)
hs.register_servlets()
hs.create_resource_tree(
web_client=args.webclient,
redirect_root_to_web_client=True)
hs.start_listening(args.port)
web_client=config.webclient,
redirect_root_to_web_client=True,
)
hs.start_listening(config.bind_port)
hs.get_db_pool()
if args.manhole:
if config.manhole:
f = twisted.manhole.telnet.ShellFactory()
f.username = "matrix"
f.password = "rabbithole"
f.namespace['hs'] = hs
reactor.listenTCP(args.manhole, f, interface='127.0.0.1')
reactor.listenTCP(config.manhole, f, interface='127.0.0.1')
if args.daemonize:
if config.daemonize:
daemon = Daemonize(
app="synapse-homeserver",
pid=args.pid,
pid=config.pid_file,
action=run,
auto_close_fds=False,
verbose=True,


@ -0,0 +1,14 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

synapse/config/_base.py (new file, 99 lines)

@ -0,0 +1,99 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ConfigParser as configparser
import argparse
import sys
import os
class Config(object):
def __init__(self, args):
pass
@staticmethod
def read_file(file_path):
with open(file_path) as file_stream:
return file_stream.read()
@staticmethod
def read_config_file(file_path):
config = configparser.SafeConfigParser()
config.read([file_path])
config_dict = {}
for section in config.sections():
config_dict.update(config.items(section))
return config_dict
@classmethod
def add_arguments(cls, parser):
pass
@classmethod
def generate_config(cls, args, config_dir_path):
pass
@classmethod
def load_config(cls, description, argv, generate_section=None):
config_parser = argparse.ArgumentParser(add_help=False)
config_parser.add_argument(
"-c", "--config-path",
metavar="CONFIG_FILE",
help="Specify config file"
)
config_args, remaining_args = config_parser.parse_known_args(argv)
if generate_section:
if not config_args.config_path:
config_parser.error(
"Must specify where to generate the config file"
)
config_dir_path = os.path.dirname(config_args.config_path)
defaults = cls.read_config_file(config_args.config_path) if os.path.exists(config_args.config_path) else {}
else:
if config_args.config_path:
defaults = cls.read_config_file(config_args.config_path)
else:
defaults = {}
parser = argparse.ArgumentParser(
parents=[config_parser],
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.set_defaults(**defaults)
cls.add_arguments(parser)
args = parser.parse_args(remaining_args)
if generate_section:
config_dir_path = os.path.dirname(config_args.config_path)
config_dir_path = os.path.abspath(config_dir_path)
cls.generate_config(args, config_dir_path)
config = configparser.SafeConfigParser()
config.add_section(generate_section)
for key, value in vars(args).items():
if key != "config_path" and value is not None:
config.set(generate_section, key, str(value))
with open(config_args.config_path, "w") as config_file:
config.write(config_file)
return cls(args)
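
A minimal sketch of how a Config subclass is meant to plug into this load/generate flow (ExampleConfig and its --example-port option are hypothetical, purely for illustration):

from synapse.config._base import Config

class ExampleConfig(Config):
    def __init__(self, args):
        super(ExampleConfig, self).__init__(args)
        self.example_port = args.example_port  # hypothetical setting

    @classmethod
    def add_arguments(cls, parser):
        super(ExampleConfig, cls).add_arguments(parser)
        group = parser.add_argument_group("example")
        group.add_argument("--example-port", type=int, default=8080,
                           help="A hypothetical option.")

# Reads defaults from -c CONFIG_FILE (all sections merged), then overlays
# any command-line flags:
#   config = ExampleConfig.load_config("Example", sys.argv[1:])
# Passing generate_section="Example" instead writes the merged options back
# out to the config file as an [Example] section.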


@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
import os
class DatabaseConfig(Config):
def __init__(self, args):
super(DatabaseConfig, self).__init__(args)
self.database_path = os.path.abspath(args.database_path)
@classmethod
def add_arguments(cls, parser):
super(DatabaseConfig, cls).add_arguments(parser)
db_group = parser.add_argument_group("database")
db_group.add_argument(
"-d", "--database", dest="database_path", default="homeserver.db",
help="The database name."
)
@classmethod
def generate_config(cls, args, config_dir_path):
super(DatabaseConfig, cls).generate_config(args, config_dir_path)
args.database_path = os.path.abspath(args.database_path)


@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .tls import TlsConfig
from .server import ServerConfig
from .logger import LoggingConfig
from .database import DatabaseConfig
class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig):
pass
if __name__ == '__main__':
import sys
HomeServerConfig.load_config("Generate config", sys.argv[1:], "HomeServer")
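
Assuming the classes above are importable, generating and then loading a homeserver config looks roughly like this (server name, port and file names are illustrative; generation also writes the signing key and self-signed TLS material, and shells out to openssl dhparam, next to the config file):

from synapse.config.homeserver import HomeServerConfig

# One-off generation, equivalent to running this module as a script:
HomeServerConfig.load_config(
    "Generate config",
    ["-c", "homeserver.config",
     "--server-name", "example.com",
     "--bind-port", "8448"],
    "HomeServer",
)

# Later runs just load the generated file:
config = HomeServerConfig.load_config(
    "Synapse Homeserver", ["-c", "homeserver.config"]
)
# config.server_name, config.bind_port, config.database_path etc. are now
# populated from the [HomeServer] section.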

synapse/config/logger.py (new file, 67 lines)

@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
from twisted.python.log import PythonLoggingObserver
import logging
import logging.config
import os
class LoggingConfig(Config):
def __init__(self, args):
super(LoggingConfig, self).__init__(args)
self.verbosity = int(args.verbose) if args.verbose else None
self.log_config = os.path.abspath(args.log_config) if args.log_config else None
self.log_file = os.path.abspath(args.log_file) if args.log_file else None
@classmethod
def add_arguments(cls, parser):
super(LoggingConfig, cls).add_arguments(parser)
logging_group = parser.add_argument_group("logging")
logging_group.add_argument(
'-v', '--verbose', dest="verbose", action='count',
help="The verbosity level."
)
logging_group.add_argument(
'-f', '--log-file', dest="log_file", default=None,
help="File to log to."
)
logging_group.add_argument(
'--log-config', dest="log_config", default=None,
help="Python logging config file"
)
def setup_logging(self):
log_format = (
'%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s'
)
if self.log_config is None:
level = logging.INFO
if self.verbosity:
level = logging.DEBUG
# FIXME: we need a logging.WARN for a -q quiet option
logging.basicConfig(
level=level,
filename=self.log_file,
format=log_format
)
else:
logging.config.fileConfig(self.log_config)
observer = PythonLoggingObserver()
observer.start()

synapse/config/server.py (new file, 75 lines)

@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nacl.signing
import socket
import os
from ._base import Config
from syutil.base64util import encode_base64, decode_base64
class ServerConfig(Config):
def __init__(self, args):
super(ServerConfig, self).__init__(args)
self.server_name = args.server_name
self.signing_key = self.read_signing_key(args.signing_key_path)
self.bind_port = args.bind_port
self.bind_host = args.bind_host
self.daemonize = args.daemonize
self.pid_file = os.path.abspath(args.pid_file)
self.webclient = args.webclient
self.manhole = args.manhole
@classmethod
def add_arguments(cls, parser):
super(ServerConfig, cls).add_arguments(parser)
server_group = parser.add_argument_group("server")
server_group.add_argument("-H", "--server-name", default="localhost",
help="The name of the server")
server_group.add_argument("--signing-key-path",
help="The signing key to sign messages with")
server_group.add_argument("-p", "--bind-port", type=int,
help="TCP port to listen on")
server_group.add_argument("--bind-host", default="",
help="Local interface to listen on")
server_group.add_argument("-D", "--daemonize", action='store_true',
help="Daemonize the home server")
server_group.add_argument('--pid-file', default = "hs.pid",
help="When running as a daemon, the file to"
" store the pid in")
server_group.add_argument("-W", "--no-webclient", dest="webclient",
default=True, action="store_false",
help="Don't host a web client.")
server_group.add_argument("--manhole", dest="manhole", type=int,
help="Turn on the twisted telnet manhole"
" service on the given port.")
def read_signing_key(self, signing_key_path):
signing_key_base64 = self.read_file(signing_key_path)
signing_key_bytes = decode_base64(signing_key_base64)
return nacl.signing.SigningKey(signing_key_bytes)
@classmethod
def generate_config(cls, args, config_dir_path):
super(ServerConfig, cls).generate_config(args, config_dir_path)
base_key_name = os.path.join(config_dir_path, args.server_name)
args.pid_file = os.path.abspath(args.pid_file)
if not args.signing_key_path:
args.signing_key_path = base_key_name + ".signing.key"
if not os.path.exists(args.signing_key_path):
with open(args.signing_key_path, "w") as signing_key_file:
key = nacl.signing.SigningKey.generate()
signing_key_file.write(encode_base64(key.encode()))
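
For reference, the key written above is a raw Ed25519 seed, base64-encoded; a small sketch of loading and using it directly with PyNaCl (the file name is illustrative, and this is not how Synapse itself signs federation traffic):

import nacl.signing
from syutil.base64util import decode_base64, encode_base64

# Load the key the same way read_signing_key() above does.
with open("example.com.signing.key") as f:  # illustrative path
    signing_key = nacl.signing.SigningKey(decode_base64(f.read()))

signed = signing_key.sign(b"some payload")
print(encode_base64(signed.signature))

# The matching public key verifies it (raises BadSignatureError otherwise):
signing_key.verify_key.verify(b"some payload", signed.signature)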

synapse/config/tls.py (new file, 106 lines)

@ -0,0 +1,106 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
from OpenSSL import crypto
import subprocess
import os
class TlsConfig(Config):
def __init__(self, args):
super(TlsConfig, self).__init__(args)
self.tls_certificate = self.read_tls_certificate(
args.tls_certificate_path
)
self.tls_private_key = self.read_tls_private_key(
args.tls_private_key_path
)
self.tls_dh_params_path = args.tls_dh_params_path
@classmethod
def add_arguments(cls, parser):
super(TlsConfig, cls).add_arguments(parser)
tls_group = parser.add_argument_group("tls")
tls_group.add_argument("--tls-certificate-path",
help="PEM encoded X509 certificate for TLS")
tls_group.add_argument("--tls-private-key-path",
help="PEM encoded private key for TLS")
tls_group.add_argument("--tls-dh-params-path",
help="PEM dh parameters for ephemeral keys")
def read_tls_certificate(self, cert_path):
cert_pem = self.read_file(cert_path)
return crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem)
def read_tls_private_key(self, private_key_path):
private_key_pem = self.read_file(private_key_path)
return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem)
@classmethod
def generate_config(cls, args, config_dir_path):
super(TlsConfig, cls).generate_config(args, config_dir_path)
base_key_name = os.path.join(config_dir_path, args.server_name)
if args.tls_certificate_path is None:
args.tls_certificate_path = base_key_name + ".tls.crt"
if args.tls_private_key_path is None:
args.tls_private_key_path = base_key_name + ".tls.key"
if args.tls_dh_params_path is None:
args.tls_dh_params_path = base_key_name + ".tls.dh"
if not os.path.exists(args.tls_private_key_path):
with open(args.tls_private_key_path, "w") as private_key_file:
tls_private_key = crypto.PKey()
tls_private_key.generate_key(crypto.TYPE_RSA, 2048)
private_key_pem = crypto.dump_privatekey(
crypto.FILETYPE_PEM, tls_private_key
)
private_key_file.write(private_key_pem)
else:
with open(args.tls_private_key_path) as private_key_file:
private_key_pem = private_key_file.read()
tls_private_key = crypto.load_privatekey(
crypto.FILETYPE_PEM, private_key_pem
)
if not os.path.exists(args.tls_certificate_path):
with open(args.tls_certificate_path, "w") as certificate_file:
cert = crypto.X509()
subject = cert.get_subject()
subject.CN = args.server_name
cert.set_serial_number(1000)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(tls_private_key)
cert.sign(tls_private_key, 'sha256')
cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
certificate_file.write(cert_pem)
if not os.path.exists(args.tls_dh_params_path):
subprocess.check_call([
"openssl", "dhparam",
"-outform", "PEM",
"-out", args.tls_dh_params_path,
"2048"
])
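
A quick way to sanity-check the self-signed certificate this generates, using the same pyOpenSSL API (the file name follows the default naming above for a hypothetical server name):

from OpenSSL import crypto

with open("example.com.tls.crt") as f:  # illustrative path
    cert = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())

print(cert.get_subject().CN)                    # the server_name it was generated for
print(cert.get_notAfter())                      # roughly ten years out, per the code above
print(cert.get_issuer() == cert.get_subject())  # issuer == subject, i.e. self-signed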


@ -1,160 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ConfigParser as configparser
import argparse
import socket
import sys
import os
from OpenSSL import crypto
import nacl.signing
from syutil.base64util import encode_base64
import subprocess
def load_config(description, argv):
config_parser = argparse.ArgumentParser(add_help=False)
config_parser.add_argument("-c", "--config-path", metavar="CONFIG_FILE",
help="Specify config file")
config_args, remaining_args = config_parser.parse_known_args(argv)
if config_args.config_path:
config = configparser.SafeConfigParser()
config.read([config_args.config_path])
defaults = dict(config.items("KeyServer"))
else:
defaults = {}
parser = argparse.ArgumentParser(
parents=[config_parser],
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.set_defaults(**defaults)
parser.add_argument("--server-name", default=socket.getfqdn(),
help="The name of the server")
parser.add_argument("--signing-key-path",
help="The signing key to sign responses with")
parser.add_argument("--tls-certificate-path",
help="PEM encoded X509 certificate for TLS")
parser.add_argument("--tls-private-key-path",
help="PEM encoded private key for TLS")
parser.add_argument("--tls-dh-params-path",
help="PEM encoded dh parameters for ephemeral keys")
parser.add_argument("--bind-port", type=int,
help="TCP port to listen on")
parser.add_argument("--bind-host", default="",
help="Local interface to listen on")
args = parser.parse_args(remaining_args)
server_config = vars(args)
del server_config["config_path"]
return server_config
def generate_config(argv):
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config-path", help="Specify config file",
metavar="CONFIG_FILE", required=True)
parser.add_argument("--server-name", default=socket.getfqdn(),
help="The name of the server")
parser.add_argument("--signing-key-path",
help="The signing key to sign responses with")
parser.add_argument("--tls-certificate-path",
help="PEM encoded X509 certificate for TLS")
parser.add_argument("--tls-private-key-path",
help="PEM encoded private key for TLS")
parser.add_argument("--tls-dh-params-path",
help="PEM encoded dh parameters for ephemeral keys")
parser.add_argument("--bind-port", type=int, required=True,
help="TCP port to listen on")
parser.add_argument("--bind-host", default="",
help="Local interface to listen on")
args = parser.parse_args(argv)
dir_name = os.path.dirname(args.config_path)
base_key_name = os.path.join(dir_name, args.server_name)
if args.signing_key_path is None:
args.signing_key_path = base_key_name + ".signing.key"
if args.tls_certificate_path is None:
args.tls_certificate_path = base_key_name + ".tls.crt"
if args.tls_private_key_path is None:
args.tls_private_key_path = base_key_name + ".tls.key"
if args.tls_dh_params_path is None:
args.tls_dh_params_path = base_key_name + ".tls.dh"
if not os.path.exists(args.signing_key_path):
with open(args.signing_key_path, "w") as signing_key_file:
key = nacl.signing.SigningKey.generate()
signing_key_file.write(encode_base64(key.encode()))
if not os.path.exists(args.tls_private_key_path):
with open(args.tls_private_key_path, "w") as private_key_file:
tls_private_key = crypto.PKey()
tls_private_key.generate_key(crypto.TYPE_RSA, 2048)
private_key_pem = crypto.dump_privatekey(
crypto.FILETYPE_PEM, tls_private_key
)
private_key_file.write(private_key_pem)
else:
with open(args.tls_private_key_path) as private_key_file:
private_key_pem = private_key_file.read()
tls_private_key = crypto.load_privatekey(
crypto.FILETYPE_PEM, private_key_pem
)
if not os.path.exists(args.tls_certificate_path):
with open(args.tls_certificate_path, "w") as certifcate_file:
cert = crypto.X509()
subject = cert.get_subject()
subject.CN = args.server_name
cert.set_serial_number(1000)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(tls_private_key)
cert.sign(tls_private_key, 'sha256')
cert_pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
certifcate_file.write(cert_pem)
if not os.path.exists(args.tls_dh_params_path):
subprocess.check_call([
"openssl", "dhparam",
"-outform", "PEM",
"-out", args.tls_dh_params_path,
"2048"
])
config = configparser.SafeConfigParser()
config.add_section("KeyServer")
for key, value in vars(args).items():
if key != "config_path":
config.set("KeyServer", key, str(value))
with open(args.config_path, "w") as config_file:
config.write(config_file)
if __name__ == "__main__":
generate_config(sys.argv[1:])


@ -541,7 +541,10 @@ class _TransactionQueue(object):
)
def eb(failure):
deferred.errback(failure)
if not deferred.called:
deferred.errback(failure)
else:
logger.exception("Failed to send edu", failure)
self._attempt_new_transaction(destination).addErrback(eb)
return deferred
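
The "if not deferred.called" guard above exists because a Twisted Deferred may only be fired once; a standalone illustration (plain Twisted, nothing Synapse-specific):

from twisted.internet import defer

d = defer.Deferred()
d.addErrback(lambda failure: None)  # swallow the first failure

d.errback(RuntimeError("first failure"))   # fine: fires the errback chain

try:
    d.errback(RuntimeError("second failure"))
except defer.AlreadyCalledError:
    # This is what the new code avoids: it checks deferred.called first and
    # merely logs the failure if the Deferred has already fired.
    pass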


@ -16,6 +16,7 @@
from twisted.internet import defer
from synapse.api.events import SynapseEvent
from synapse.util.logutils import log_function
from ._base import BaseHandler
@ -44,6 +45,7 @@ class EventStreamHandler(BaseHandler):
self.notifier = hs.get_notifier()
@defer.inlineCallbacks
@log_function
def get_stream(self, auth_user_id, pagin_config, timeout=0):
auth_user = self.hs.parse_userid(auth_user_id)
@ -90,13 +92,15 @@ class EventStreamHandler(BaseHandler):
# 10 seconds of grace to allow the client to reconnect again
# before we think they're gone
def _later():
logger.debug("_later stopped_user_eventstream %s", auth_user)
self.distributor.fire(
"stopped_user_eventstream", auth_user
)
del self._stop_timer_per_user[auth_user]
logger.debug("Scheduling _later: for %s", auth_user)
self._stop_timer_per_user[auth_user] = (
self.clock.call_later(5, _later)
self.clock.call_later(30, _later)
)


@ -146,7 +146,7 @@ class FederationHandler(BaseHandler):
# Huh, let's try and get the current state
try:
yield self.replication_layer.get_state_for_context(
origin, event.room_id
event.origin, event.room_id
)
hosts = yield self.store.get_joined_hosts_for_room(


@ -277,10 +277,13 @@ class MessageHandler(BaseRoomHandler):
end_token=now_token.events_key,
)
start_token = now_token.copy_and_replace("events_key", token[0])
end_token = now_token.copy_and_replace("events_key", token[1])
d["messages"] = {
"chunk": [m.get_dict() for m in messages],
"start": token[0],
"end": token[1],
"start": start_token.to_string(),
"end": end_token.to_string(),
}
current_state = yield self.store.get_current_state(


@ -18,6 +18,8 @@ from twisted.internet import defer
from synapse.api.errors import SynapseError, AuthError
from synapse.api.constants import PresenceState
from synapse.util.logutils import log_function
from ._base import BaseHandler
import logging
@ -142,7 +144,7 @@ class PresenceHandler(BaseHandler):
@defer.inlineCallbacks
def is_presence_visible(self, observer_user, observed_user):
defer.returnValue(True)
return
# return
# FIXME (erikj): This code path absolutely kills the database.
assert(observed_user.is_mine)
@ -188,8 +190,9 @@ class PresenceHandler(BaseHandler):
defer.returnValue(state)
@defer.inlineCallbacks
@log_function
def set_state(self, target_user, auth_user, state):
return
# return
# TODO (erikj): Turn this back on. Why did we end up sending EDUs
# everywhere?
@ -245,33 +248,42 @@ class PresenceHandler(BaseHandler):
self.push_presence(user, statuscache=statuscache)
@log_function
def started_user_eventstream(self, user):
# TODO(paul): Use "last online" state
self.set_state(user, user, {"state": PresenceState.ONLINE})
@log_function
def stopped_user_eventstream(self, user):
# TODO(paul): Save current state as "last online" state
self.set_state(user, user, {"state": PresenceState.OFFLINE})
@defer.inlineCallbacks
def user_joined_room(self, user, room_id):
localusers = set()
remotedomains = set()
rm_handler = self.homeserver.get_handlers().room_member_handler
yield rm_handler.fetch_room_distributions_into(room_id,
localusers=localusers, remotedomains=remotedomains,
ignore_user=user)
if user.is_mine:
yield self._send_presence_to_distribution(srcuser=user,
localusers=localusers, remotedomains=remotedomains,
self.push_update_to_local_and_remote(
observed_user=user,
room_ids=[room_id],
statuscache=self._get_or_offline_usercache(user),
)
for srcuser in localusers:
yield self._send_presence(srcuser=srcuser, destuser=user,
statuscache=self._get_or_offline_usercache(srcuser),
else:
self.push_update_to_clients(
observed_user=user,
room_ids=[room_id],
statuscache=self._get_or_offline_usercache(user),
)
# We also want to tell them about current presence of people.
rm_handler = self.homeserver.get_handlers().room_member_handler
curr_users = yield rm_handler.get_room_members(room_id)
for local_user in [c for c in curr_users if c.is_mine]:
self.push_update_to_local_and_remote(
observed_user=local_user,
users_to_push=[user],
statuscache=self._get_or_offline_usercache(local_user),
)
@defer.inlineCallbacks
@ -382,11 +394,13 @@ class PresenceHandler(BaseHandler):
defer.returnValue(presence)
@defer.inlineCallbacks
@log_function
def start_polling_presence(self, user, target_user=None, state=None):
logger.debug("Start polling for presence from %s", user)
if target_user:
target_users = set([target_user])
room_ids = []
else:
presence = yield self.store.get_presence_list(
user.localpart, accepted=True
@ -400,23 +414,37 @@ class PresenceHandler(BaseHandler):
rm_handler = self.homeserver.get_handlers().room_member_handler
room_ids = yield rm_handler.get_rooms_for_user(user)
for room_id in room_ids:
for member in (yield rm_handler.get_room_members(room_id)):
target_users.add(member)
if state is None:
state = yield self.store.get_presence_state(user.localpart)
else:
# statuscache = self._get_or_make_usercache(user)
# self._user_cachemap_latest_serial += 1
# statuscache.update(state, self._user_cachemap_latest_serial)
pass
localusers, remoteusers = partitionbool(
target_users,
lambda u: u.is_mine
yield self.push_update_to_local_and_remote(
observed_user=user,
users_to_push=target_users,
room_ids=room_ids,
statuscache=self._get_or_make_usercache(user),
)
for target_user in localusers:
self._start_polling_local(user, target_user)
for target_user in target_users:
if target_user.is_mine:
self._start_polling_local(user, target_user)
# We want to tell the person that just came online
# presence state of people they are interested in?
self.push_update_to_clients(
observed_user=target_user,
users_to_push=[user],
statuscache=self._get_or_offline_usercache(target_user),
)
deferreds = []
remoteusers_by_domain = partition(remoteusers, lambda u: u.domain)
remote_users = [u for u in target_users if not u.is_mine]
remoteusers_by_domain = partition(remote_users, lambda u: u.domain)
# Only poll for people in our get_presence_list
for domain in remoteusers_by_domain:
remoteusers = remoteusers_by_domain[domain]
@ -438,25 +466,26 @@ class PresenceHandler(BaseHandler):
self._local_pushmap[target_localpart].add(user)
self.push_update_to_clients(
observer_user=user,
observed_user=target_user,
statuscache=self._get_or_offline_usercache(target_user),
)
def _start_polling_remote(self, user, domain, remoteusers):
to_poll = set()
for u in remoteusers:
if u not in self._remote_recvmap:
self._remote_recvmap[u] = set()
to_poll.add(u)
self._remote_recvmap[u].add(user)
if not to_poll:
return defer.succeed(None)
return self.federation.send_edu(
destination=domain,
edu_type="m.presence",
content={"poll": [u.to_string() for u in remoteusers]}
content={"poll": [u.to_string() for u in to_poll]}
)
@log_function
def stop_polling_presence(self, user, target_user=None):
logger.debug("Stop polling for presence from %s", user)
@ -496,20 +525,28 @@ class PresenceHandler(BaseHandler):
if not self._local_pushmap[localpart]:
del self._local_pushmap[localpart]
@log_function
def _stop_polling_remote(self, user, domain, remoteusers):
to_unpoll = set()
for u in remoteusers:
self._remote_recvmap[u].remove(user)
if not self._remote_recvmap[u]:
del self._remote_recvmap[u]
to_unpoll.add(u)
if not to_unpoll:
return defer.succeed(None)
return self.federation.send_edu(
destination=domain,
edu_type="m.presence",
content={"unpoll": [u.to_string() for u in remoteusers]}
content={"unpoll": [u.to_string() for u in to_unpoll]}
)
@defer.inlineCallbacks
@log_function
def push_presence(self, user, statuscache):
assert(user.is_mine)
@ -525,53 +562,17 @@ class PresenceHandler(BaseHandler):
rm_handler = self.homeserver.get_handlers().room_member_handler
room_ids = yield rm_handler.get_rooms_for_user(user)
for room_id in room_ids:
yield rm_handler.fetch_room_distributions_into(
room_id, localusers=localusers, remotedomains=remotedomains,
ignore_user=user,
)
if not localusers and not remotedomains:
if not localusers and not room_ids:
defer.returnValue(None)
yield self._send_presence_to_distribution(user,
localusers=localusers, remotedomains=remotedomains,
statuscache=statuscache
yield self.push_update_to_local_and_remote(
observed_user=user,
users_to_push=localusers,
remote_domains=remotedomains,
room_ids=room_ids,
statuscache=statuscache,
)
def _send_presence(self, srcuser, destuser, statuscache):
if destuser.is_mine:
self.push_update_to_clients(
observer_user=destuser,
observed_user=srcuser,
statuscache=statuscache)
return defer.succeed(None)
else:
return self._push_presence_remote(srcuser, destuser.domain,
state=statuscache.get_state()
)
@defer.inlineCallbacks
def _send_presence_to_distribution(self, srcuser, localusers=set(),
remotedomains=set(), statuscache=None):
for u in localusers:
logger.debug(" | push to local user %s", u)
self.push_update_to_clients(
observer_user=u,
observed_user=srcuser,
statuscache=statuscache,
)
deferreds = []
for domain in remotedomains:
logger.debug(" | push to remote domain %s", domain)
deferreds.append(self._push_presence_remote(srcuser, domain,
state=statuscache.get_state())
)
yield defer.DeferredList(deferreds)
@defer.inlineCallbacks
def _push_presence_remote(self, user, destination, state=None):
if state is None:
@ -587,12 +588,17 @@ class PresenceHandler(BaseHandler):
self.clock.time_msec() - state.pop("mtime")
)
user_state = {
"user_id": user.to_string(),
}
user_state.update(**state)
yield self.federation.send_edu(
destination=destination,
edu_type="m.presence",
content={
"push": [
dict(user_id=user.to_string(), **state),
user_state,
],
}
)
@ -611,12 +617,7 @@ class PresenceHandler(BaseHandler):
rm_handler = self.homeserver.get_handlers().room_member_handler
room_ids = yield rm_handler.get_rooms_for_user(user)
for room_id in room_ids:
yield rm_handler.fetch_room_distributions_into(
room_id, localusers=observers, ignore_user=user
)
if not observers:
if not observers and not room_ids:
break
state = dict(push)
@ -632,12 +633,12 @@ class PresenceHandler(BaseHandler):
self._user_cachemap_latest_serial += 1
statuscache.update(state, serial=self._user_cachemap_latest_serial)
for observer_user in observers:
self.push_update_to_clients(
observer_user=observer_user,
observed_user=user,
statuscache=statuscache,
)
self.push_update_to_clients(
observed_user=user,
users_to_push=observers,
room_ids=room_ids,
statuscache=statuscache,
)
if state["state"] == PresenceState.OFFLINE:
del self._user_cachemap[user]
@ -671,12 +672,53 @@ class PresenceHandler(BaseHandler):
yield defer.DeferredList(deferreds)
def push_update_to_clients(self, observer_user, observed_user,
statuscache):
statuscache.make_event(user=observed_user, clock=self.clock)
@defer.inlineCallbacks
def push_update_to_local_and_remote(self, observed_user,
users_to_push=[], room_ids=[],
remote_domains=[],
statuscache=None):
localusers, remoteusers = partitionbool(
users_to_push,
lambda u: u.is_mine
)
localusers = set(localusers)
self.push_update_to_clients(
observed_user=observed_user,
users_to_push=localusers,
room_ids=room_ids,
statuscache=statuscache,
)
remote_domains = set(remote_domains)
remote_domains |= set([r.domain for r in remoteusers])
for room_id in room_ids:
remote_domains.update(
(yield self.store.get_joined_hosts_for_room(room_id))
)
remote_domains.discard(self.hs.hostname)
deferreds = []
for domain in remote_domains:
logger.debug(" | push to remote domain %s", domain)
deferreds.append(
self._push_presence_remote(
observed_user, domain, state=statuscache.get_state()
)
)
yield defer.DeferredList(deferreds)
defer.returnValue((localusers, remote_domains))
def push_update_to_clients(self, observed_user, users_to_push=[],
room_ids=[], statuscache=None):
self.notifier.on_new_user_event(
[observer_user],
users_to_push,
room_ids,
)


@ -119,6 +119,7 @@ class Notifier(object):
)
@defer.inlineCallbacks
@log_function
def on_new_user_event(self, users=[], rooms=[]):
""" Used to inform listeners that something has happend
presence/user event wise.


@ -205,8 +205,11 @@ class StreamStore(SQLBaseStore):
with_feedback=False):
# TODO (erikj): Handle compressed feedback
from_comp = '<' if direction =='b' else '>'
to_comp = '>' if direction =='b' else '<'
# Tokens really represent positions between elements, but we use
# the convention of pointing to the event before the gap. Hence
# we have a bit of asymmetry when it comes to equalities.
from_comp = '<=' if direction =='b' else '>'
to_comp = '>' if direction =='b' else '<='
order = "DESC" if direction == 'b' else "ASC"
args = [room_id]
@ -294,7 +297,7 @@ class StreamStore(SQLBaseStore):
logger.debug("get_room_events_max_id: %s", res)
if not res or not res[0] or not res[0]["m"]:
return "s1"
return "s0"
key = res[0]["m"]
return "s%d" % (key,)


@ -81,4 +81,4 @@ class PaginationConfig(object):
return (
"<PaginationConfig from_tok=%s, to_tok=%s, "
"direction=%s, limit=%s>"
) % (self.from_tok, self.to_tok, self.direction, self.limit)
) % (self.from_token, self.to_token, self.direction, self.limit)


@ -18,6 +18,8 @@ from inspect import getcallargs
from functools import wraps
import logging
import inspect
import traceback
def log_function(f):
@ -65,4 +67,55 @@ def log_function(f):
return f(*args, **kwargs)
wrapped.__name__ = func_name
return wrapped
def trace_function(f):
func_name = f.__name__
linenum = f.func_code.co_firstlineno
pathname = f.func_code.co_filename
def wrapped(*args, **kwargs):
name = f.__module__
logger = logging.getLogger(name)
level = logging.DEBUG
s = inspect.currentframe().f_back
to_print = [
"\t%s:%s %s. Args: args=%s, kwargs=%s" % (
pathname, linenum, func_name, args, kwargs
)
]
while s:
if True or s.f_globals["__name__"].startswith("synapse"):
filename, lineno, function, _, _ = inspect.getframeinfo(s)
args_string = inspect.formatargvalues(*inspect.getargvalues(s))
to_print.append(
"\t%s:%d %s. Args: %s" % (
filename, lineno, function, args_string
)
)
s = s.f_back
msg = "\nTraceback for %s:\n" % (func_name,) + "\n".join(to_print)
record = logging.LogRecord(
name=name,
level=level,
pathname=pathname,
lineno=lineno,
msg=msg,
args=None,
exc_info=None
)
logger.handle(record)
return f(*args, **kwargs)
wrapped.__name__ = func_name
return wrapped
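
trace_function is applied as a decorator, like log_function; a hypothetical usage (module, function and argument names are made up):

import logging

logging.basicConfig(level=logging.DEBUG)

@trace_function
def resolve_state(room_id, depth=0):
    return room_id

# Each call logs a DEBUG record containing the caller's full stack
# (file, line, function and argument values for every frame) before
# running the wrapped function as normal.
resolve_state("!room:example.com", depth=2)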