From 82be4457deeab9cc4e21ffebfec78bb2bac2e5b1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 7 Jan 2015 13:46:37 +0000 Subject: [PATCH 001/112] Add twisted Service interface --- synapse/app/homeserver.py | 21 ++++++++++++++++++--- synapse/storage/_base.py | 1 + 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5fec8da7c..e99153988 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -19,6 +19,7 @@ from synapse.storage import prepare_database, UpgradeDatabaseException from synapse.server import HomeServer from twisted.internet import reactor +from twisted.application import service from twisted.enterprise import adbapi from twisted.web.resource import Resource from twisted.web.static import File @@ -189,10 +190,10 @@ class SynapseHomeServer(HomeServer): logger.info("Synapse now listening on port %d", unsecure_port) -def setup(): +def setup(config_options, should_run=True): config = HomeServerConfig.load_config( "Synapse Homeserver", - sys.argv[1:], + config_options, generate_section="Homeserver" ) @@ -254,6 +255,9 @@ def setup(): bind_port = None hs.start_listening(bind_port, config.unsecure_port) + if not should_run: + return + if config.daemonize: print config.pid_file daemon = Daemonize( @@ -270,6 +274,17 @@ def setup(): reactor.run() +class SynapseService(service.Service): + def __init__(self, config): + self.config = config + + def startService(self): + setup(self.config, should_run=False) + + def stopService(self): + return self._port.stopListening() + + def run(): with LoggingContext("run"): reactor.run() @@ -277,7 +292,7 @@ def run(): def main(): with LoggingContext("main"): - setup() + setup(sys.argv[1:]) if __name__ == '__main__': diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index f660fc6ea..ce63f1200 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -95,6 +95,7 @@ class SQLBaseStore(object): current_context.copy_to(context) start = time.time() * 1000 txn_id = SQLBaseStore._TXN_ID + SQLBaseStore._TXN_ID += 1 # We don't really need these to be unique, so lets stop it from # growing really large. From a901ed16b5805adf04b5b8b1b99c14720e5abb3d Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 5 Mar 2015 19:10:57 +0000 Subject: [PATCH 002/112] Move federation API responding code out of weird mix of lambdas into Servlet-style methods on instances --- synapse/federation/transport/server.py | 282 +++++++++++-------------- 1 file changed, 121 insertions(+), 161 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index ece6dbcf6..eb3e30a18 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -122,14 +122,9 @@ class TransportLayerServer(object): Args: handler (TransportReceivedHandler) """ - self.received_handler = handler - - # This is when someone is trying to send us a bunch of data. 
- self.server.register_path( - "PUT", - re.compile("^" + PREFIX + "/send/([^/]*)/$"), - self._with_authentication(self._on_send_request) - ) + FederationSendServlet( + handler, self._with_authentication, self.server_name + ).register(self.server) @log_function def register_request_handler(self, handler): @@ -138,136 +133,48 @@ class TransportLayerServer(object): Args: handler (TransportRequestHandler) """ - self.request_handler = handler + for servletclass in ( + FederationPullServlet, + FederationEventServlet, + FederationStateServlet, + FederationBackfillServlet, + FederationQueryServlet, + FederationMakeJoinServlet, + FederationEventServlet, + FederationSendJoinServlet, + FederationInviteServlet, + FederationQueryAuthServlet, + FederationGetMissingEventsServlet, + ): + servletclass(handler, self._with_authentication).register(self.server) - # This is for when someone asks us for everything since version X - self.server.register_path( - "GET", - re.compile("^" + PREFIX + "/pull/$"), - self._with_authentication( - lambda origin, content, query: - handler.on_pull_request(query["origin"][0], query["v"]) - ) - ) - # This is when someone asks for a data item for a given server - # data_id pair. - self.server.register_path( - "GET", - re.compile("^" + PREFIX + "/event/([^/]*)/$"), - self._with_authentication( - lambda origin, content, query, event_id: - handler.on_pdu_request(origin, event_id) - ) - ) +class BaseFederationServlet(object): + def __init__(self, handler, wrapper): + self.handler = handler + self.wrapper = wrapper - # This is when someone asks for all data for a given context. - self.server.register_path( - "GET", - re.compile("^" + PREFIX + "/state/([^/]*)/$"), - self._with_authentication( - lambda origin, content, query, context: - handler.on_context_state_request( - origin, - context, - query.get("event_id", [None])[0], - ) - ) - ) + def register(self, server): + pattern = re.compile("^" + PREFIX + self.PATH) - self.server.register_path( - "GET", - re.compile("^" + PREFIX + "/backfill/([^/]*)/$"), - self._with_authentication( - lambda origin, content, query, context: - self._on_backfill_request( - origin, context, query["v"], query["limit"] - ) - ) - ) + for method in ("GET", "PUT", "POST"): + code = getattr(self, "on_%s" % (method), None) + if code is None: + continue - # This is when we receive a server-server Query - self.server.register_path( - "GET", - re.compile("^" + PREFIX + "/query/([^/]*)$"), - self._with_authentication( - lambda origin, content, query, query_type: - handler.on_query_request( - query_type, - {k: v[0].decode("utf-8") for k, v in query.items()} - ) - ) - ) + server.register_path(method, pattern, self.wrapper(code)) - self.server.register_path( - "GET", - re.compile("^" + PREFIX + "/make_join/([^/]*)/([^/]*)$"), - self._with_authentication( - lambda origin, content, query, context, user_id: - self._on_make_join_request( - origin, content, query, context, user_id - ) - ) - ) - self.server.register_path( - "GET", - re.compile("^" + PREFIX + "/event_auth/([^/]*)/([^/]*)$"), - self._with_authentication( - lambda origin, content, query, context, event_id: - handler.on_event_auth( - origin, context, event_id, - ) - ) - ) +class FederationSendServlet(BaseFederationServlet): + PATH = "/send/([^/]*)/$" - self.server.register_path( - "PUT", - re.compile("^" + PREFIX + "/send_join/([^/]*)/([^/]*)$"), - self._with_authentication( - lambda origin, content, query, context, event_id: - self._on_send_join_request( - origin, content, query, - ) - ) - ) - - 
self.server.register_path( - "PUT", - re.compile("^" + PREFIX + "/invite/([^/]*)/([^/]*)$"), - self._with_authentication( - lambda origin, content, query, context, event_id: - self._on_invite_request( - origin, content, query, - ) - ) - ) - - self.server.register_path( - "POST", - re.compile("^" + PREFIX + "/query_auth/([^/]*)/([^/]*)$"), - self._with_authentication( - lambda origin, content, query, context, event_id: - self._on_query_auth_request( - origin, content, event_id, - ) - ) - ) - - self.server.register_path( - "POST", - re.compile("^" + PREFIX + "/get_missing_events/([^/]*)/?$"), - self._with_authentication( - lambda origin, content, query, room_id: - self._get_missing_events( - origin, content, room_id, - ) - ) - ) + def __init__(self, handler, wrapper, server_name): + super(FederationSendServlet, self).__init__(handler, wrapper) + self.server_name = server_name + # This is when someone is trying to send us a bunch of data. @defer.inlineCallbacks - @log_function - def _on_send_request(self, origin, content, query, transaction_id): + def on_PUT(self, origin, content, query, transaction_id): """ Called on PUT /send// Args: @@ -305,8 +212,7 @@ class TransportLayerServer(object): return try: - handler = self.received_handler - code, response = yield handler.on_incoming_transaction( + code, response = yield self.handler.on_incoming_transaction( transaction_data ) except: @@ -315,65 +221,119 @@ class TransportLayerServer(object): defer.returnValue((code, response)) - @log_function - def _on_backfill_request(self, origin, context, v_list, limits): + +class FederationPullServlet(BaseFederationServlet): + PATH = "/pull/$" + + # This is for when someone asks us for everything since version X + def on_GET(self, origin, content, query): + return self.handler.on_pull_request(query["origin"][0], query["v"]) + + +class FederationEventServlet(BaseFederationServlet): + PATH = "/event/([^/]*)/$" + + # This is when someone asks for a data item for a given server data_id pair. + def on_GET(self, origin, content, query, event_id): + return self.handler.on_pdu_request(origin, event_id) + + +class FederationStateServlet(BaseFederationServlet): + PATH = "/state/([^/]*)/$" + + # This is when someone asks for all data for a given context. 
+ def on_GET(self, origin, content, query, context): + return self.handler.on_context_state_request(origin, context, + query.get("event_id", [None])[0], + ) + + +class FederationBackfillServlet(BaseFederationServlet): + PATH = "/backfill/([^/]*)/$" + + def on_GET(self, origin, content, query, context): + versions = query["v"] + limits = query["limit"] + if not limits: - return defer.succeed( - (400, {"error": "Did not include limit param"}) - ) + return defer.succeed((400, {"error": "Did not include limit param"})) limit = int(limits[-1]) - versions = v_list + return self.handler.on_backfill_request(origin, context, versions, limit) - return self.request_handler.on_backfill_request( - origin, context, versions, limit + +class FederationQueryServlet(BaseFederationServlet): + PATH = "/query/([^/]*)$" + + # This is when we receive a server-server Query + def on_GET(self, origin, content, query, query_type): + return self.handler.on_query_request(query_type, + {k: v[0].decode("utf-8") for k, v in query.items()} ) + +class FederationMakeJoinServlet(BaseFederationServlet): + PATH = "/make_join/([^/]*)/([^/]*)$" + @defer.inlineCallbacks - @log_function - def _on_make_join_request(self, origin, content, query, context, user_id): - content = yield self.request_handler.on_make_join_request( - context, user_id, - ) + def on_GET(self, origin, content, query, context, user_id): + content = yield self.handler.on_make_join_request(context, user_id) defer.returnValue((200, content)) - @defer.inlineCallbacks - @log_function - def _on_send_join_request(self, origin, content, query): - content = yield self.request_handler.on_send_join_request( - origin, content, - ) +class FederationEventAuthServlet(BaseFederationServlet): + PATH = "/event_auth/([^/]*)/([^/]*)$" + + def on_GET(self, origin, content, query, context, event_id): + return self.handler.on_event_auth(origin, context, event_id) + + +class FederationSendJoinServlet(BaseFederationServlet): + PATH = "/send_join/([^/]*)/([^/]*)$" + + @defer.inlineCallbacks + def on_PUT(self, origin, content, query, context, event_id): + # TODO(paul): assert that context/event_id parsed from path actually + # match those given in content + content = yield self.handler.on_send_join_request(origin, content) defer.returnValue((200, content)) - @defer.inlineCallbacks - @log_function - def _on_invite_request(self, origin, content, query): - content = yield self.request_handler.on_invite_request( - origin, content, - ) +class FederationInviteServlet(BaseFederationServlet): + PATH = "/invite/([^/]*)/([^/]*)$" + + @defer.inlineCallbacks + def on_PUT(self, origin, content, query, context, event_id): + # TODO(paul): assert that context/event_id parsed from path actually + # match those given in content + content = yield self.handler.on_invite_request(origin, content) defer.returnValue((200, content)) + +class FederationQueryAuthServlet(BaseFederationServlet): + PATH = "/query_auth/([^/]*)/([^/]*)$" + @defer.inlineCallbacks - @log_function - def _on_query_auth_request(self, origin, content, event_id): - new_content = yield self.request_handler.on_query_auth_request( + def on_POST(self, origin, content, query, context, event_id): + new_content = yield self.handler.on_query_auth_request( origin, content, event_id ) defer.returnValue((200, new_content)) + +class FederationGetMissingEventsServlet(BaseFederationServlet): + PATH = "/get_missing_events/([^/]*)/?$" + @defer.inlineCallbacks - @log_function - def _get_missing_events(self, origin, content, room_id): + def on_POST(self, 
origin, content, query, room_id): limit = int(content.get("limit", 10)) min_depth = int(content.get("min_depth", 0)) earliest_events = content.get("earliest_events", []) latest_events = content.get("latest_events", []) - content = yield self.request_handler.on_get_missing_events( + content = yield self.handler.on_get_missing_events( origin, room_id=room_id, earliest_events=earliest_events, From ba8ac996f951c872c8815f09a4ffd3a508da6863 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 5 Mar 2015 19:43:17 +0000 Subject: [PATCH 003/112] Remove the dead 'rate_limit_origin' method from TransportLayerServer --- synapse/federation/transport/server.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index eb3e30a18..dc9f1e082 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -109,12 +109,6 @@ class TransportLayerServer(object): defer.returnValue(response) return new_handler - def rate_limit_origin(self, handler): - def new_handler(origin, *args, **kwargs): - response = yield handler(origin, *args, **kwargs) - defer.returnValue(response) - return new_handler() - @log_function def register_received_handler(self, handler): """ Register a handler that will be fired when we receive data. From 7644cb79b222207ef739a9ca29699f32aa3cee0b Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 5 Mar 2015 20:33:16 +0000 Subject: [PATCH 004/112] Slightly neater(?) arrangement of authentication wrapper for HTTP servlet methods --- synapse/federation/transport/server.py | 62 +++++++++++++++----------- 1 file changed, 37 insertions(+), 25 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index dc9f1e082..39b18ae30 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -19,6 +19,7 @@ from synapse.api.urls import FEDERATION_PREFIX as PREFIX from synapse.api.errors import Codes, SynapseError from synapse.util.logutils import log_function +import functools import logging import simplejson as json import re @@ -30,8 +31,9 @@ logger = logging.getLogger(__name__) class TransportLayerServer(object): """Handles incoming federation HTTP requests""" + # A method just so we can pass 'self' as the authenticator to the Servlets @defer.inlineCallbacks - def _authenticate_request(self, request): + def authenticate_request(self, request): json_request = { "method": request.method, "uri": request.uri, @@ -93,22 +95,6 @@ class TransportLayerServer(object): defer.returnValue((origin, content)) - def _with_authentication(self, handler): - @defer.inlineCallbacks - def new_handler(request, *args, **kwargs): - try: - (origin, content) = yield self._authenticate_request(request) - with self.ratelimiter.ratelimit(origin) as d: - yield d - response = yield handler( - origin, content, request.args, *args, **kwargs - ) - except: - logger.exception("_authenticate_request failed") - raise - defer.returnValue(response) - return new_handler - @log_function def register_received_handler(self, handler): """ Register a handler that will be fired when we receive data. 
@@ -116,8 +102,10 @@ class TransportLayerServer(object): Args: handler (TransportReceivedHandler) """ - FederationSendServlet( - handler, self._with_authentication, self.server_name + FederationSendServlet(handler, + authenticator=self, + ratelimiter=self.ratelimiter, + server_name=self.server_name, ).register(self.server) @log_function @@ -140,13 +128,37 @@ class TransportLayerServer(object): FederationQueryAuthServlet, FederationGetMissingEventsServlet, ): - servletclass(handler, self._with_authentication).register(self.server) + servletclass(handler, + authenticator=self, + ratelimiter=self.ratelimiter, + ).register(self.server) class BaseFederationServlet(object): - def __init__(self, handler, wrapper): + def __init__(self, handler, authenticator, ratelimiter): self.handler = handler - self.wrapper = wrapper + self.authenticator = authenticator + self.ratelimiter = ratelimiter + + def _wrap(self, code): + authenticator = self.authenticator + ratelimiter = self.ratelimiter + + @defer.inlineCallbacks + @functools.wraps(code) + def new_code(request, *args, **kwargs): + try: + (origin, content) = yield authenticator.authenticate_request(request) + with ratelimiter.ratelimit(origin) as d: + yield d + response = yield code( + origin, content, request.args, *args, **kwargs + ) + except: + logger.exception("authenticate_request failed") + raise + defer.returnValue(response) + return new_code def register(self, server): pattern = re.compile("^" + PREFIX + self.PATH) @@ -156,14 +168,14 @@ class BaseFederationServlet(object): if code is None: continue - server.register_path(method, pattern, self.wrapper(code)) + server.register_path(method, pattern, self._wrap(code)) class FederationSendServlet(BaseFederationServlet): PATH = "/send/([^/]*)/$" - def __init__(self, handler, wrapper, server_name): - super(FederationSendServlet, self).__init__(handler, wrapper) + def __init__(self, handler, server_name, **kwargs): + super(FederationSendServlet, self).__init__(handler, **kwargs) self.server_name = server_name # This is when someone is trying to send us a bunch of data. 
From 5eab2549ab13c14535de266cc153dc6d5b479590 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 5 Mar 2015 20:36:05 +0000 Subject: [PATCH 005/112] Append a $ on PATH at registration time, meaning each PATH attribute doesn't need it --- synapse/federation/transport/server.py | 27 +++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 39b18ae30..8f985f8fe 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -161,7 +161,7 @@ class BaseFederationServlet(object): return new_code def register(self, server): - pattern = re.compile("^" + PREFIX + self.PATH) + pattern = re.compile("^" + PREFIX + self.PATH + "$") for method in ("GET", "PUT", "POST"): code = getattr(self, "on_%s" % (method), None) @@ -172,7 +172,7 @@ class BaseFederationServlet(object): class FederationSendServlet(BaseFederationServlet): - PATH = "/send/([^/]*)/$" + PATH = "/send/([^/]*)/" def __init__(self, handler, server_name, **kwargs): super(FederationSendServlet, self).__init__(handler, **kwargs) @@ -229,7 +229,7 @@ class FederationSendServlet(BaseFederationServlet): class FederationPullServlet(BaseFederationServlet): - PATH = "/pull/$" + PATH = "/pull/" # This is for when someone asks us for everything since version X def on_GET(self, origin, content, query): @@ -237,7 +237,7 @@ class FederationPullServlet(BaseFederationServlet): class FederationEventServlet(BaseFederationServlet): - PATH = "/event/([^/]*)/$" + PATH = "/event/([^/]*)/" # This is when someone asks for a data item for a given server data_id pair. def on_GET(self, origin, content, query, event_id): @@ -245,7 +245,7 @@ class FederationEventServlet(BaseFederationServlet): class FederationStateServlet(BaseFederationServlet): - PATH = "/state/([^/]*)/$" + PATH = "/state/([^/]*)/" # This is when someone asks for all data for a given context. 
def on_GET(self, origin, content, query, context): @@ -255,7 +255,7 @@ class FederationStateServlet(BaseFederationServlet): class FederationBackfillServlet(BaseFederationServlet): - PATH = "/backfill/([^/]*)/$" + PATH = "/backfill/([^/]*)/" def on_GET(self, origin, content, query, context): versions = query["v"] @@ -270,7 +270,7 @@ class FederationBackfillServlet(BaseFederationServlet): class FederationQueryServlet(BaseFederationServlet): - PATH = "/query/([^/]*)$" + PATH = "/query/([^/]*)" # This is when we receive a server-server Query def on_GET(self, origin, content, query, query_type): @@ -280,7 +280,7 @@ class FederationQueryServlet(BaseFederationServlet): class FederationMakeJoinServlet(BaseFederationServlet): - PATH = "/make_join/([^/]*)/([^/]*)$" + PATH = "/make_join/([^/]*)/([^/]*)" @defer.inlineCallbacks def on_GET(self, origin, content, query, context, user_id): @@ -289,14 +289,14 @@ class FederationMakeJoinServlet(BaseFederationServlet): class FederationEventAuthServlet(BaseFederationServlet): - PATH = "/event_auth/([^/]*)/([^/]*)$" + PATH = "/event_auth/([^/]*)/([^/]*)" def on_GET(self, origin, content, query, context, event_id): return self.handler.on_event_auth(origin, context, event_id) class FederationSendJoinServlet(BaseFederationServlet): - PATH = "/send_join/([^/]*)/([^/]*)$" + PATH = "/send_join/([^/]*)/([^/]*)" @defer.inlineCallbacks def on_PUT(self, origin, content, query, context, event_id): @@ -307,7 +307,7 @@ class FederationSendJoinServlet(BaseFederationServlet): class FederationInviteServlet(BaseFederationServlet): - PATH = "/invite/([^/]*)/([^/]*)$" + PATH = "/invite/([^/]*)/([^/]*)" @defer.inlineCallbacks def on_PUT(self, origin, content, query, context, event_id): @@ -318,7 +318,7 @@ class FederationInviteServlet(BaseFederationServlet): class FederationQueryAuthServlet(BaseFederationServlet): - PATH = "/query_auth/([^/]*)/([^/]*)$" + PATH = "/query_auth/([^/]*)/([^/]*)" @defer.inlineCallbacks def on_POST(self, origin, content, query, context, event_id): @@ -330,7 +330,8 @@ class FederationQueryAuthServlet(BaseFederationServlet): class FederationGetMissingEventsServlet(BaseFederationServlet): - PATH = "/get_missing_events/([^/]*)/?$" + # TODO(paul): Why does this path alone end with "/?" optional? + PATH = "/get_missing_events/([^/]*)/?" 
@defer.inlineCallbacks def on_POST(self, origin, content, query, room_id): From d79d91a4a7bdd42bc6c4d0324623e11c8bd3c5ef Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 5 Mar 2015 20:53:33 +0000 Subject: [PATCH 006/112] Appease pep8 --- synapse/federation/transport/server.py | 46 +++++++++++++++----------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 8f985f8fe..6c624977d 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -102,7 +102,8 @@ class TransportLayerServer(object): Args: handler (TransportReceivedHandler) """ - FederationSendServlet(handler, + FederationSendServlet( + handler, authenticator=self, ratelimiter=self.ratelimiter, server_name=self.server_name, @@ -115,20 +116,9 @@ class TransportLayerServer(object): Args: handler (TransportRequestHandler) """ - for servletclass in ( - FederationPullServlet, - FederationEventServlet, - FederationStateServlet, - FederationBackfillServlet, - FederationQueryServlet, - FederationMakeJoinServlet, - FederationEventServlet, - FederationSendJoinServlet, - FederationInviteServlet, - FederationQueryAuthServlet, - FederationGetMissingEventsServlet, - ): - servletclass(handler, + for servletclass in SERVLET_CLASSES: + servletclass( + handler, authenticator=self, ratelimiter=self.ratelimiter, ).register(self.server) @@ -138,11 +128,11 @@ class BaseFederationServlet(object): def __init__(self, handler, authenticator, ratelimiter): self.handler = handler self.authenticator = authenticator - self.ratelimiter = ratelimiter + self.ratelimiter = ratelimiter def _wrap(self, code): authenticator = self.authenticator - ratelimiter = self.ratelimiter + ratelimiter = self.ratelimiter @defer.inlineCallbacks @functools.wraps(code) @@ -249,7 +239,9 @@ class FederationStateServlet(BaseFederationServlet): # This is when someone asks for all data for a given context. def on_GET(self, origin, content, query, context): - return self.handler.on_context_state_request(origin, context, + return self.handler.on_context_state_request( + origin, + context, query.get("event_id", [None])[0], ) @@ -274,7 +266,8 @@ class FederationQueryServlet(BaseFederationServlet): # This is when we receive a server-server Query def on_GET(self, origin, content, query, query_type): - return self.handler.on_query_request(query_type, + return self.handler.on_query_request( + query_type, {k: v[0].decode("utf-8") for k, v in query.items()} ) @@ -350,3 +343,18 @@ class FederationGetMissingEventsServlet(BaseFederationServlet): ) defer.returnValue((200, content)) + + +SERVLET_CLASSES = ( + FederationPullServlet, + FederationEventServlet, + FederationStateServlet, + FederationBackfillServlet, + FederationQueryServlet, + FederationMakeJoinServlet, + FederationEventServlet, + FederationSendJoinServlet, + FederationInviteServlet, + FederationQueryAuthServlet, + FederationGetMissingEventsServlet, +) From 8e28db5cc9fa55ec9ceb641ac91dcd58c2c633d5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 9 Mar 2015 16:43:09 +0000 Subject: [PATCH 007/112] Change room handlers get_rooms_for_user to get_joined_rooms_for_user. This uses the a storage api that is cached. 
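Note: the point of the rename is that every handler now funnels through one storage accessor that can be memoized per user. The sketch below is a simplified stand-in for that pattern, not the actual `cached` decorator in synapse.storage._base; the memoizer body, the placeholder query result and the room ID are illustrative assumptions only.

from twisted.internet import defer


def cached(func):
    # Tiny per-argument memoizer for a Deferred-returning storage method;
    # a stand-in for the real caching helper used by the store.
    results = {}

    @defer.inlineCallbacks
    def wrapper(self, user_id):
        if user_id not in results:
            results[user_id] = yield func(self, user_id)
        defer.returnValue(results[user_id])

    wrapper.invalidate = results.pop  # evict when membership changes
    return wrapper


class RoomMemberStore(object):
    @cached
    def get_rooms_for_user(self, user_id):
        # The real store runs the membership query here; presence, sync and
        # profile handlers now all share this single cached result.
        return defer.succeed(["!abc:example.org"])  # placeholder result
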
--- synapse/handlers/events.py | 2 +- synapse/handlers/presence.py | 6 +++--- synapse/handlers/profile.py | 3 +-- synapse/handlers/room.py | 6 +++--- synapse/handlers/sync.py | 6 ++++-- 5 files changed, 12 insertions(+), 11 deletions(-) diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index d3297b729..f9f855213 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -71,7 +71,7 @@ class EventStreamHandler(BaseHandler): self._streams_per_user[auth_user] += 1 rm_handler = self.hs.get_handlers().room_member_handler - room_ids = yield rm_handler.get_rooms_for_user(auth_user) + room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user) if timeout: # If they've set a timeout set a minimum limit. diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 8ef248ecf..28e922f79 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -452,7 +452,7 @@ class PresenceHandler(BaseHandler): # Also include people in all my rooms rm_handler = self.homeserver.get_handlers().room_member_handler - room_ids = yield rm_handler.get_rooms_for_user(user) + room_ids = yield rm_handler.get_joined_rooms_for_user(user) if state is None: state = yield self.store.get_presence_state(user.localpart) @@ -596,7 +596,7 @@ class PresenceHandler(BaseHandler): localusers.add(user) rm_handler = self.homeserver.get_handlers().room_member_handler - room_ids = yield rm_handler.get_rooms_for_user(user) + room_ids = yield rm_handler.get_joined_rooms_for_user(user) if not localusers and not room_ids: defer.returnValue(None) @@ -663,7 +663,7 @@ class PresenceHandler(BaseHandler): ) rm_handler = self.homeserver.get_handlers().room_member_handler - room_ids = yield rm_handler.get_rooms_for_user(user) + room_ids = yield rm_handler.get_joined_rooms_for_user(user) if room_ids: logger.debug(" | %d interested room IDs %r", len(room_ids), room_ids) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 2ddf9d537..ee2732b84 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -197,9 +197,8 @@ class ProfileHandler(BaseHandler): self.ratelimit(user.to_string()) - joins = yield self.store.get_rooms_for_user_where_membership_is( + joins = yield self.store.get_rooms_for_user( user.to_string(), - [Membership.JOIN], ) for j in joins: diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 80f7ee3f1..823affc38 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -507,7 +507,7 @@ class RoomMemberHandler(BaseHandler): defer.returnValue((is_remote_invite_join, room_host)) @defer.inlineCallbacks - def get_rooms_for_user(self, user, membership_list=[Membership.JOIN]): + def get_joined_rooms_for_user(self, user): """Returns a list of roomids that the user has any of the given membership states in.""" @@ -517,8 +517,8 @@ class RoomMemberHandler(BaseHandler): if app_service: rooms = yield self.store.get_app_service_rooms(app_service) else: - rooms = yield self.store.get_rooms_for_user_where_membership_is( - user_id=user.to_string(), membership_list=membership_list + rooms = yield self.store.get_rooms_for_user( + user.to_string(), ) # For some reason the list of events contains duplicates diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 7883bbd83..35a62fda4 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -96,7 +96,9 @@ class SyncHandler(BaseHandler): return self.current_sync_for_user(sync_config, since_token) rm_handler = 
self.hs.get_handlers().room_member_handler - room_ids = yield rm_handler.get_rooms_for_user(sync_config.user) + room_ids = yield rm_handler.get_joined_rooms_for_user( + sync_config.user + ) result = yield self.notifier.wait_for_events( sync_config.user, room_ids, sync_config.filter, timeout, current_sync_callback @@ -227,7 +229,7 @@ class SyncHandler(BaseHandler): logger.debug("Typing %r", typing_by_room) rm_handler = self.hs.get_handlers().room_member_handler - room_ids = yield rm_handler.get_rooms_for_user(sync_config.user) + room_ids = yield rm_handler.get_joined_rooms_for_user(sync_config.user) # TODO (mjark): Does public mean "published"? published_rooms = yield self.store.get_rooms(is_public=True) From 6d74e4662102d306c036f800cbd3a3b58477f2b8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 9 Mar 2015 17:01:11 +0000 Subject: [PATCH 008/112] Fix tests --- tests/handlers/test_presence.py | 4 ++-- tests/handlers/test_presencelike.py | 8 ++++---- tests/rest/client/v1/test_presence.py | 8 ++++---- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 6ffc3c99c..04eba4289 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -100,7 +100,7 @@ class PresenceTestCase(unittest.TestCase): self.room_members = [] room_member_handler = handlers.room_member_handler = Mock(spec=[ - "get_rooms_for_user", + "get_joined_rooms_for_user", "get_room_members", "fetch_room_distributions_into", ]) @@ -111,7 +111,7 @@ class PresenceTestCase(unittest.TestCase): return defer.succeed([self.room_id]) else: return defer.succeed([]) - room_member_handler.get_rooms_for_user = get_rooms_for_user + room_member_handler.get_joined_rooms_for_user = get_rooms_for_user def get_room_members(room_id): if room_id == self.room_id: diff --git a/tests/handlers/test_presencelike.py b/tests/handlers/test_presencelike.py index 18cac9a84..977e832da 100644 --- a/tests/handlers/test_presencelike.py +++ b/tests/handlers/test_presencelike.py @@ -64,7 +64,7 @@ class PresenceProfilelikeDataTestCase(unittest.TestCase): "set_presence_state", "is_presence_visible", "set_profile_displayname", - "get_rooms_for_user_where_membership_is", + "get_rooms_for_user", ]), handlers=None, resource_for_federation=Mock(), @@ -124,9 +124,9 @@ class PresenceProfilelikeDataTestCase(unittest.TestCase): self.mock_update_client) hs.handlers.room_member_handler = Mock(spec=[ - "get_rooms_for_user", + "get_joined_rooms_for_user", ]) - hs.handlers.room_member_handler.get_rooms_for_user = ( + hs.handlers.room_member_handler.get_joined_rooms_for_user = ( lambda u: defer.succeed([])) # Some local users to test with @@ -138,7 +138,7 @@ class PresenceProfilelikeDataTestCase(unittest.TestCase): self.u_potato = UserID.from_string("@potato:remote") self.mock_get_joined = ( - self.datastore.get_rooms_for_user_where_membership_is + self.datastore.get_rooms_for_user ) @defer.inlineCallbacks diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py index 5f2ef64ef..b9c03383a 100644 --- a/tests/rest/client/v1/test_presence.py +++ b/tests/rest/client/v1/test_presence.py @@ -79,13 +79,13 @@ class PresenceStateTestCase(unittest.TestCase): room_member_handler = hs.handlers.room_member_handler = Mock( spec=[ - "get_rooms_for_user", + "get_joined_rooms_for_user", ] ) def get_rooms_for_user(user): return defer.succeed([]) - room_member_handler.get_rooms_for_user = get_rooms_for_user + room_member_handler.get_joined_rooms_for_user = 
get_rooms_for_user presence.register_servlets(hs, self.mock_resource) @@ -166,7 +166,7 @@ class PresenceListTestCase(unittest.TestCase): hs.handlers.room_member_handler = Mock( spec=[ - "get_rooms_for_user", + "get_joined_rooms_for_user", ] ) @@ -291,7 +291,7 @@ class PresenceEventStreamTestCase(unittest.TestCase): return ["a-room"] else: return [] - hs.handlers.room_member_handler.get_rooms_for_user = get_rooms_for_user + hs.handlers.room_member_handler.get_joined_rooms_for_user = get_rooms_for_user self.mock_datastore = hs.get_datastore() self.mock_datastore.get_app_service_by_token = Mock(return_value=None) From b8a6692657d350389f1c3dc8bd903110c2ae1d72 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 10 Mar 2015 09:39:42 +0000 Subject: [PATCH 009/112] Add documentation. When starting via twistd respect soft_file_limit config option. --- synapse/app/homeserver.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index dff08c8bc..26ef02e3d 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -297,6 +297,15 @@ def change_resource_limit(soft_file_no): def setup(config_options, should_run=True): + """ + Args: + config_options_options: The options passed to Synapse. Usually + `sys.argv[1:]`. + should_run (bool): Whether to start the reactor. + + Returns: + HomeServer + """ config = HomeServerConfig.load_config( "Synapse Homeserver", config_options, @@ -372,7 +381,7 @@ def setup(config_options, should_run=True): hs.get_replication_layer().start_get_pdu_cache() if not should_run: - return + return hs if config.daemonize: print config.pid_file @@ -390,13 +399,19 @@ def setup(config_options, should_run=True): else: run(config) + return hs + class SynapseService(service.Service): + """A twisted Service class that will start synapse. Used to run synapse + via twistd and a .tac. + """ def __init__(self, config): self.config = config def startService(self): - setup(self.config, should_run=False) + hs = setup(self.config, should_run=False) + change_resource_limit(hs.config.soft_file_limit) def stopService(self): return self._port.stopListening() From c37eceeb9ef2514237468a7f5e3e6e07d293de16 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 10 Mar 2015 09:58:33 +0000 Subject: [PATCH 010/112] Split out the 'run' from 'setup' --- synapse/app/homeserver.py | 53 ++++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 26 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 26ef02e3d..e84b9ef5b 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -296,7 +296,7 @@ def change_resource_limit(soft_file_no): logger.warn("Failed to set file limit: %s", e) -def setup(config_options, should_run=True): +def setup(config_options): """ Args: config_options_options: The options passed to Synapse. 
Usually @@ -380,25 +380,6 @@ def setup(config_options, should_run=True): hs.get_datastore().start_profiling() hs.get_replication_layer().start_get_pdu_cache() - if not should_run: - return hs - - if config.daemonize: - print config.pid_file - - daemon = Daemonize( - app="synapse-homeserver", - pid=config.pid_file, - action=lambda: run(config), - auto_close_fds=False, - verbose=True, - logger=logger, - ) - - daemon.start() - else: - run(config) - return hs @@ -410,24 +391,44 @@ class SynapseService(service.Service): self.config = config def startService(self): - hs = setup(self.config, should_run=False) + hs = setup(self.config) change_resource_limit(hs.config.soft_file_limit) def stopService(self): return self._port.stopListening() -def run(config): - with LoggingContext("run"): - change_resource_limit(config.soft_file_limit) +def run(hs): - reactor.run() + def in_thread(): + with LoggingContext("run"): + change_resource_limit(hs.config.soft_file_limit) + + reactor.run() + + if hs.config.daemonize: + + print hs.config.pid_file + + daemon = Daemonize( + app="synapse-homeserver", + pid=hs.config.pid_file, + action=lambda: in_thread(), + auto_close_fds=False, + verbose=True, + logger=logger, + ) + + daemon.start() + else: + in_thread(hs.config) def main(): with LoggingContext("main"): check_requirements() - setup(sys.argv[1:]) + hs = setup(sys.argv[1:]) + run(hs) if __name__ == '__main__': From 98b867f7b7f6b82eafa8d7694ed4703b7ce47b19 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 10 Mar 2015 10:15:58 +0000 Subject: [PATCH 011/112] Fix bug in logging. --- synapse/federation/transaction_queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 741a4e7a1..e6eb85501 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -144,7 +144,7 @@ class TransactionQueue(object): deferred.errback(failure) def log_failure(failure): - logger.warn("Failed to send pdu", failure.value) + logger.warn("Failed to send pdu: %s", failure.value) deferred.addErrback(log_failure) From 045afd6b6154ffb76664a16bdcb4d6bca791573d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 10 Mar 2015 10:19:03 +0000 Subject: [PATCH 012/112] in_thread takes no arguments --- synapse/app/homeserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index e84b9ef5b..3649406ef 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -421,7 +421,7 @@ def run(hs): daemon.start() else: - in_thread(hs.config) + in_thread() def main(): From abaf47bbb6b9be0b493d07ca8a9efe4cdf0aee01 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 10 Mar 2015 10:28:24 +0000 Subject: [PATCH 013/112] Fix bug in logging. 
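Note: the bug fixed here is the standard stdlib-logging pitfall. Positional arguments are %-interpolated into the message, so a value passed without a matching placeholder never appears and the logging machinery reports a string-formatting error instead. A standalone illustration follows; the logger name, destination and error value are made up for the example.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("synapse.federation")  # example name only

err = RuntimeError("connection refused")

# Broken: no placeholder, so `err` is never rendered and the logging module
# reports "not all arguments converted during string formatting" instead.
logger.warn("Failed to send pdu", err)

# Fixed, as in this patch: give the framework placeholders to interpolate.
logger.warn("Failed to send pdu to %s: %s", "remote.example.org", err)
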
--- synapse/federation/transaction_queue.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index e6eb85501..9dc7849b1 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -115,8 +115,8 @@ class TransactionQueue(object): if not deferred.called: deferred.errback(failure) - def log_failure(failure): - logger.warn("Failed to send pdu", failure.value) + def log_failure(f): + logger.warn("Failed to send pdu to %s: %s", destination, f.value) deferred.addErrback(log_failure) @@ -143,8 +143,8 @@ class TransactionQueue(object): if not deferred.called: deferred.errback(failure) - def log_failure(failure): - logger.warn("Failed to send pdu: %s", failure.value) + def log_failure(f): + logger.warn("Failed to send edu to %s: %s", destination, f.value) deferred.addErrback(log_failure) @@ -174,7 +174,7 @@ class TransactionQueue(object): deferred.errback(f) def log_failure(f): - logger.warn("Failed to send pdu", f.value) + logger.warn("Failed to send failure to %s: %s", destination, f.value) deferred.addErrback(log_failure) From 8916acbc1367ca54a9e3fa83f3965ccd9008b245 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 10 Mar 2015 11:21:37 +0000 Subject: [PATCH 014/112] These aren't defined for redacted events so don't crash --- synapse/storage/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index a3ff99569..14a788c19 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -484,9 +484,11 @@ class DataStore(RoomMemberStore, RoomStore, for e in events: if e.type == 'm.room.name': - name = e.content['name'] + if 'name' in e.content: + name = e.content['name'] elif e.type == 'm.room.aliases': - aliases.extend(e.content['aliases']) + if 'aliases' in e.content: + aliases.extend(e.content['aliases']) defer.returnValue((name, aliases)) From 04f8478aaa02d11ce079d1eaeabcfba88b1fcee7 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 10 Mar 2015 17:26:25 +0000 Subject: [PATCH 015/112] Add the master push rule for the break-my-push button. Allow server default rules to be disabled by default. 
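Note: after this change a rule's effective on/off state has a defined precedence: an explicit per-user override from the enable table wins, then the rule's own 'enabled' field (which the new .m.rule.master default ships as False), and otherwise the rule is on. A condensed sketch of that precedence, pulled out of the pusher loop for clarity; the helper name is mine, not part of the patch.

def effective_enabled(rule, enabled_map):
    # Explicit per-user enable/disable always wins.
    if rule['rule_id'] in enabled_map:
        return enabled_map[rule['rule_id']]
    # Server-default rules may ship disabled (the new master rule does);
    # anything else defaults to enabled.
    return rule.get('enabled', True)


# With no per-user override the master rule stays off, so push is unaffected:
master = {'rule_id': 'global/override/.m.rule.master', 'enabled': False}
assert effective_enabled(master, {}) is False
# Enabling it makes its empty condition list match every event with
# dont_notify, i.e. the "break-my-push button" from the commit message.
assert effective_enabled(master, {master['rule_id']: True}) is True
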
--- synapse/push/__init__.py | 6 ++- synapse/push/baserules.py | 58 ++++++++++++++++++++++++----- synapse/rest/client/v1/push_rule.py | 5 ++- synapse/storage/push_rule.py | 27 +++----------- 4 files changed, 63 insertions(+), 33 deletions(-) diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index 3da0ce870..c98aca60b 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -105,7 +105,11 @@ class Pusher(object): room_member_count += 1 for r in rules: - if r['rule_id'] in enabled_map and not enabled_map[r['rule_id']]: + if r['rule_id'] in enabled_map: + r['enabled'] = enabled_map[r['rule_id']] + elif 'enabled' not in r: + r['enabled'] = True + if not r['enabled']: continue matches = True diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 6e333a3d2..bf6ee6710 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -6,36 +6,63 @@ def list_with_base_rules(rawrules, user_name): # shove the server default rules for each kind onto the end of each current_prio_class = PRIORITY_CLASS_INVERSE_MAP.keys()[-1] + + ruleslist.extend(make_base_prepend_rules( + user_name, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] + )) + for r in rawrules: if r['priority_class'] < current_prio_class: while r['priority_class'] < current_prio_class: - ruleslist.extend(make_base_rules( + ruleslist.extend(make_base_append_rules( user_name, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) current_prio_class -= 1 + if current_prio_class > 0: + ruleslist.extend(make_base_prepend_rules( + user_name, + PRIORITY_CLASS_INVERSE_MAP[current_prio_class] + )) ruleslist.append(r) while current_prio_class > 0: - ruleslist.extend(make_base_rules( + ruleslist.extend(make_base_append_rules( user_name, PRIORITY_CLASS_INVERSE_MAP[current_prio_class] )) current_prio_class -= 1 + if current_prio_class > 0: + ruleslist.extend(make_base_prepend_rules( + user_name, + PRIORITY_CLASS_INVERSE_MAP[current_prio_class] + )) return ruleslist -def make_base_rules(user, kind): +def make_base_append_rules(user, kind): rules = [] if kind == 'override': - rules = make_base_override_rules() + rules = make_base_append_override_rules() elif kind == 'underride': - rules = make_base_underride_rules(user) + rules = make_base_append_underride_rules(user) elif kind == 'content': - rules = make_base_content_rules(user) + rules = make_base_append_content_rules(user) + + for r in rules: + r['priority_class'] = PRIORITY_CLASS_MAP[kind] + r['default'] = True # Deprecated, left for backwards compat + + return rules + +def make_base_prepend_rules(user, kind): + rules = [] + + if kind == 'override': + rules = make_base_prepend_override_rules() for r in rules: r['priority_class'] = PRIORITY_CLASS_MAP[kind] @@ -44,7 +71,7 @@ def make_base_rules(user, kind): return rules -def make_base_content_rules(user): +def make_base_append_content_rules(user): return [ { 'rule_id': 'global/content/.m.rule.contains_user_name', @@ -68,7 +95,20 @@ def make_base_content_rules(user): ] -def make_base_override_rules(): +def make_base_prepend_override_rules(): + return [ + { + 'rule_id': 'global/override/.m.rule.master', + 'enabled': False, + 'conditions': [], + 'actions': [ + "dont_notify" + ] + } + ] + + +def make_base_append_override_rules(): return [ { 'rule_id': 'global/override/.m.rule.call', @@ -142,7 +182,7 @@ def make_base_override_rules(): ] -def make_base_underride_rules(user): +def make_base_append_underride_rules(user): return [ { 'rule_id': 'global/underride/.m.rule.invite_for_me', diff --git 
a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index fef0eb657..d4e7ab220 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -156,9 +156,12 @@ class PushRuleRestServlet(ClientV1RestServlet): template_rule = _rule_to_template(r) if template_rule: - template_rule['enabled'] = True if r['rule_id'] in enabled_map: template_rule['enabled'] = enabled_map[r['rule_id']] + elif 'enabled' in r: + template_rule['enabled'] = r['enabled'] + else: + template_rule['enabled'] = True rulearray.append(template_rule) path = request.postpath[1:] diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index bbf322cc8..1628489fa 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -56,17 +56,6 @@ class PushRuleStore(SQLBaseStore): {r['rule_id']: False if r['enabled'] == 0 else True for r in results} ) - @defer.inlineCallbacks - def get_push_rule_enabled_by_user_rule_id(self, user_name, rule_id): - results = yield self._simple_select_list( - PushRuleEnableTable.table_name, - {'user_name': user_name, 'rule_id': rule_id}, - ['enabled'] - ) - if not results: - defer.returnValue(True) - defer.returnValue(results[0]) - @defer.inlineCallbacks def add_push_rule(self, before, after, **kwargs): vals = copy.copy(kwargs) @@ -217,17 +206,11 @@ class PushRuleStore(SQLBaseStore): @defer.inlineCallbacks def set_push_rule_enabled(self, user_name, rule_id, enabled): - if enabled: - yield self._simple_delete_one( - PushRuleEnableTable.table_name, - {'user_name': user_name, 'rule_id': rule_id} - ) - else: - yield self._simple_upsert( - PushRuleEnableTable.table_name, - {'user_name': user_name, 'rule_id': rule_id}, - {'enabled': False} - ) + yield self._simple_upsert( + PushRuleEnableTable.table_name, + {'user_name': user_name, 'rule_id': rule_id}, + {'enabled': enabled} + ) class RuleNotFoundException(Exception): From 83b1e7fb3cd411a4e86fe8926e71c28495c7fa60 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 11 Mar 2015 10:01:17 +0000 Subject: [PATCH 016/112] PEP8 blank lines --- synapse/push/baserules.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index bf6ee6710..0443ee377 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -58,6 +58,7 @@ def make_base_append_rules(user, kind): return rules + def make_base_prepend_rules(user, kind): rules = [] From 465354ffdea1585bcab32eae819a7d9683746d8e Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 11 Mar 2015 11:24:50 +0000 Subject: [PATCH 017/112] 'false' is not False --- synapse/push/baserules.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 0443ee377..60fd35fbf 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -127,7 +127,7 @@ def make_base_append_override_rules(): 'value': 'ring' }, { 'set_tweak': 'highlight', - 'value': 'false' + 'value': False } ] }, @@ -176,7 +176,7 @@ def make_base_append_override_rules(): 'value': 'default' }, { 'set_tweak': 'highlight', - 'value': 'false' + 'value': False } ] } @@ -211,7 +211,7 @@ def make_base_append_underride_rules(user): 'value': 'default' }, { 'set_tweak': 'highlight', - 'value': 'false' + 'value': False } ] }, @@ -227,7 +227,7 @@ def make_base_append_underride_rules(user): 'actions': [ 'notify', { 'set_tweak': 'highlight', - 'value': 'false' + 'value': False } ] }, @@ -243,7 +243,7 @@ def 
make_base_append_underride_rules(user): 'actions': [ 'notify', { 'set_tweak': 'highlight', - 'value': 'false' + 'value': False } ] } From cdb2e045eed71b58c3e14471d9fb8c6ff1cf490d Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 11 Mar 2015 14:22:35 +0000 Subject: [PATCH 018/112] Again, underscore, not hyphen --- synapse/push/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index c98aca60b..d69c92911 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -32,7 +32,7 @@ class Pusher(object): INITIAL_BACKOFF = 1000 MAX_BACKOFF = 60 * 60 * 1000 GIVE_UP_AFTER = 24 * 60 * 60 * 1000 - DEFAULT_ACTIONS = ['dont-notify'] + DEFAULT_ACTIONS = ['dont_notify'] INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") From 099e4b88d8053fbe5a661706a92a50e2c25f4dd6 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 11 Mar 2015 17:08:57 +0000 Subject: [PATCH 019/112] Add a description to storage layer's _execute_and_decode() --- synapse/storage/__init__.py | 4 ++-- synapse/storage/_base.py | 3 ++- synapse/storage/appservice.py | 2 +- synapse/storage/feedback.py | 2 +- synapse/storage/keys.py | 4 +++- 5 files changed, 9 insertions(+), 6 deletions(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 14a788c19..e3bf014d4 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -450,7 +450,7 @@ class DataStore(RoomMemberStore, RoomStore, else: args = (room_id, ) - results = yield self._execute_and_decode(sql, *args) + results = yield self._execute_and_decode("get_current_state", sql, *args) events = yield self._parse_events(results) defer.returnValue(events) @@ -475,7 +475,7 @@ class DataStore(RoomMemberStore, RoomStore, sql += " OR s.type = 'm.room.aliases')" args = (room_id,) - results = yield self._execute_and_decode(sql, *args) + results = yield self._execute_and_decode("get_current_state", sql, *args) events = yield self._parse_events(results) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 3725c9795..a4e1d66e2 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -279,7 +279,8 @@ class SQLBaseStore(object): return self.runInteraction("_execute", interaction) - def _execute_and_decode(self, query, *args): + def _execute_and_decode(self, desc, query, *args): + # TODO: for now ignore desc return self._execute(self.cursor_to_dict, query, *args) # "Simple" SQL API methods that operate on a single table with no JOINs, diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index e30265750..850676ce6 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -296,7 +296,7 @@ class ApplicationServiceStore(SQLBaseStore): # } # ] services = {} - results = yield self._execute_and_decode(sql) + results = yield self._execute_and_decode("_populate_cache", sql) for res in results: as_token = res["token"] if as_token not in services: diff --git a/synapse/storage/feedback.py b/synapse/storage/feedback.py index fcf011b23..8eab769b7 100644 --- a/synapse/storage/feedback.py +++ b/synapse/storage/feedback.py @@ -37,7 +37,7 @@ class FeedbackStore(SQLBaseStore): "WHERE feedback.target_event_id = ? 
" ) - rows = yield self._execute_and_decode(sql, event_id) + rows = yield self._execute_and_decode("get_feedback_for_event", sql, event_id) defer.returnValue( [ diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index 1f244019f..153d5b4dd 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -85,7 +85,9 @@ class KeyStore(SQLBaseStore): " AND key_id in (" + ",".join("?" for key_id in key_ids) + ")" ) - rows = yield self._execute_and_decode(sql, server_name, *key_ids) + rows = yield self._execute_and_decode("get_server_verify_keys", sql, + server_name, *key_ids + ) keys = [] for row in rows: From 59a5f012ccaef5c255014efdbbb800ecfb73a2b7 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 11 Mar 2015 17:19:17 +0000 Subject: [PATCH 020/112] Also give _execute() a description --- synapse/storage/__init__.py | 3 +-- synapse/storage/_base.py | 7 +++---- synapse/storage/push_rule.py | 2 +- synapse/storage/pusher.py | 5 +++-- synapse/storage/registration.py | 3 +-- synapse/storage/room.py | 2 +- 6 files changed, 10 insertions(+), 12 deletions(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index e3bf014d4..4b16f445d 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -495,8 +495,7 @@ class DataStore(RoomMemberStore, RoomStore, @defer.inlineCallbacks def _get_min_token(self): row = yield self._execute( - None, - "SELECT MIN(stream_ordering) FROM events" + "_get_min_token", None, "SELECT MIN(stream_ordering) FROM events" ) self.min_token = row[0][0] if row and row[0] and row[0][0] else -1 diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index a4e1d66e2..3ea738276 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -259,7 +259,7 @@ class SQLBaseStore(object): ) return results - def _execute(self, decoder, query, *args): + def _execute(self, desc, decoder, query, *args): """Runs a single query for a result set. Args: @@ -277,11 +277,10 @@ class SQLBaseStore(object): else: return cursor.fetchall() - return self.runInteraction("_execute", interaction) + return self.runInteraction(desc, interaction) def _execute_and_decode(self, desc, query, *args): - # TODO: for now ignore desc - return self._execute(self.cursor_to_dict, query, *args) + return self._execute(desc, self.cursor_to_dict, query, *args) # "Simple" SQL API methods that operate on a single table with no JOINs, # no complex WHERE clauses, just a dict of values for columns. diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 1628489fa..d769db2c7 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -34,7 +34,7 @@ class PushRuleStore(SQLBaseStore): "WHERE user_name = ? 
" "ORDER BY priority_class DESC, priority DESC" ) - rows = yield self._execute(None, sql, user_name) + rows = yield self._execute("get_push_rules_for_user", None, sql, user_name) dicts = [] for r in rows: diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index 6622b4d18..587dada68 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -37,7 +37,8 @@ class PusherStore(SQLBaseStore): ) rows = yield self._execute( - None, sql, app_id_and_pushkey[0], app_id_and_pushkey[1] + "get_pushers_by_app_id_and_pushkey", None, sql, + app_id_and_pushkey[0], app_id_and_pushkey[1] ) ret = [ @@ -70,7 +71,7 @@ class PusherStore(SQLBaseStore): "FROM pushers" ) - rows = yield self._execute(None, sql) + rows = yield self._execute("get_all_pushers", None, sql) ret = [ { diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 029b07cc6..adc8fc079 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -88,8 +88,7 @@ class RegistrationStore(SQLBaseStore): query = ("SELECT users.name, users.password_hash FROM users" " WHERE users.name = ?") return self._execute( - self.cursor_to_dict, - query, user_id + "get_user_by_id", self.cursor_to_dict, query, user_id ) def get_user_by_token(self, token): diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 750b17a45..549c9af39 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -68,7 +68,7 @@ class RoomStore(SQLBaseStore): """ query = RoomsTable.select_statement("room_id=?") return self._execute( - RoomsTable.decode_single_result, query, room_id, + "get_room", RoomsTable.decode_single_result, query, room_id, ) @defer.inlineCallbacks From a7a28f85ae33462f00cde79c0ce6da64a4b0f281 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 11 Mar 2015 17:32:43 +0000 Subject: [PATCH 021/112] Appease pep8 --- synapse/storage/keys.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py index 153d5b4dd..09d1e6365 100644 --- a/synapse/storage/keys.py +++ b/synapse/storage/keys.py @@ -85,8 +85,8 @@ class KeyStore(SQLBaseStore): " AND key_id in (" + ",".join("?" for key_id in key_ids) + ")" ) - rows = yield self._execute_and_decode("get_server_verify_keys", sql, - server_name, *key_ids + rows = yield self._execute_and_decode( + "get_server_verify_keys", sql, server_name, *key_ids ) keys = [] From c92fdf88a34c22b3147f0a674633cea33ada4b99 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 11 Mar 2015 22:17:31 +0000 Subject: [PATCH 022/112] Log the matching push rule. 
--- synapse/push/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index d69c92911..5056374eb 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -128,13 +128,15 @@ class Pusher(object): # ignore rules with no actions (we have an explict 'dont_notify') if len(actions) == 0: logger.warn( - "Ignoring rule id %s with no actions for user %s" % - (r['rule_id'], r['user_name']) + "Ignoring rule id %s with no actions for user %s", + r['rule_id'], self.user_name ) continue if matches: + logger.info("%s matches for user %s, event %s", r['rule_id'], self.user_name, ev['event_id']) defer.returnValue(actions) + logger.info("No rules match for user %s, event %s", self.user_name, ev['event_id']) defer.returnValue(Pusher.DEFAULT_ACTIONS) @staticmethod From ad5701f50f1893ceb04160d36facf9e10bd52a0d Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 14:23:26 +0000 Subject: [PATCH 023/112] Expose 'config' as a real HomeServer dependency key --- synapse/server.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/server.py b/synapse/server.py index cb8610a1b..e3eefda4f 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -56,6 +56,7 @@ class BaseHomeServer(object): """ DEPENDENCIES = [ + 'config', 'clock', 'http_client', 'db_name', From 211c14c3910ca0cd86a55ac27ef575127e73e721 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 14:23:50 +0000 Subject: [PATCH 024/112] No need to explicitly pass 'web_client' in to create_resource_tree as it can be found via config --- synapse/app/homeserver.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3649406ef..5df4ba23a 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -110,7 +110,7 @@ class SynapseHomeServer(HomeServer): # so that :memory: sqlite works ) - def create_resource_tree(self, web_client, redirect_root_to_web_client): + def create_resource_tree(self, redirect_root_to_web_client): """Create the resource tree for this Home Server. This in unduly complicated because Twisted does not support putting @@ -122,6 +122,8 @@ class SynapseHomeServer(HomeServer): location of the web client. This does nothing if web_client is not True. 
""" + web_client = self.get_config().webclient + # list containing (path_str, Resource) e.g: # [ ("/aaa/bbb/cc", Resource1), ("/aaa/dummy", Resource2) ] desired_tree = [ @@ -340,7 +342,6 @@ def setup(config_options): ) hs.create_resource_tree( - web_client=config.webclient, redirect_root_to_web_client=True, ) From 5b6e11d560e274e8690417b21300ca1a79a24d92 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 14:30:14 +0000 Subject: [PATCH 025/112] Commandline option to enable metrics system --- synapse/config/homeserver.py | 4 +++- synapse/config/metrics.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 synapse/config/metrics.py diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index c024535f5..241afdf87 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -23,11 +23,13 @@ from .captcha import CaptchaConfig from .email import EmailConfig from .voip import VoipConfig from .registration import RegistrationConfig +from .metrics import MetricsConfig class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, RatelimitConfig, ContentRepositoryConfig, CaptchaConfig, - EmailConfig, VoipConfig, RegistrationConfig,): + EmailConfig, VoipConfig, RegistrationConfig, + MetricsConfig,): pass diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py new file mode 100644 index 000000000..90aba10bb --- /dev/null +++ b/synapse/config/metrics.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ._base import Config + + +class MetricsConfig(Config): + def __init__(self, args): + super(MetricsConfig, self).__init__(args) + self.enable_metrics = args.enable_metrics + + @classmethod + def add_arguments(cls, parser): + super(MetricsConfig, cls).add_arguments(parser) + metrics_group = parser.add_argument_group("metrics") + metrics_group.add_argument( + '--enable-metrics', dest="enable_metrics", action="store_true", + help="Enable collection and rendering of performance metrics" + ) From e07fc62833dbbfc9dda7c344f194fc070cfc587f Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 15:49:14 +0000 Subject: [PATCH 026/112] A trivial 'hello world'-style resource on /_synapse/metrics, with optional commandline flag --- synapse/app/homeserver.py | 7 +++++++ synapse/metrics/resource.py | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100644 synapse/metrics/resource.py diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5df4ba23a..9747f7a01 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -147,6 +147,13 @@ class SynapseHomeServer(HomeServer): else: self.root_resource = Resource() + if self.get_config().enable_metrics: + from synapse.metrics.resource import ( + MetricsResource, METRICS_PREFIX + ) + + desired_tree.append((METRICS_PREFIX, MetricsResource(self))) + # ideally we'd just use getChild and putChild but getChild doesn't work # unless you give it a Request object IN ADDITION to the name :/ So # instead, we'll store a copy of this mapping so we can actually add diff --git a/synapse/metrics/resource.py b/synapse/metrics/resource.py new file mode 100644 index 000000000..b48048462 --- /dev/null +++ b/synapse/metrics/resource.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.web.resource import Resource +from twisted.web.server import NOT_DONE_YET + + +METRICS_PREFIX = "/_synapse/metrics" + + +class MetricsResource(Resource): + isLeaf = True + + def __init__(self, hs): + Resource.__init__(self) # Resource is old-style, so no super() + + self.hs = hs + + def render_GET(self, request): + response = "Hello, world!" 
+ + request.setHeader("Content-Type", "text/plain") + request.setHeader("Content-Length", str(len(response))) + + return response From e7420a3bef308e12d2b202c7a2c256d15eee0983 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 16:58:26 +0000 Subject: [PATCH 027/112] Initial tiny attempt at (vectorable) counter metrics --- synapse/metrics/metric.py | 54 +++++++++++++++++++++++++++++++ tests/metrics/__init__.py | 0 tests/metrics/test_metric.py | 61 ++++++++++++++++++++++++++++++++++++ 3 files changed, 115 insertions(+) create mode 100644 synapse/metrics/metric.py create mode 100644 tests/metrics/__init__.py create mode 100644 tests/metrics/test_metric.py diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py new file mode 100644 index 000000000..f5a98763c --- /dev/null +++ b/synapse/metrics/metric.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class CounterMetric(object): + + def __init__(self, name, keys=[]): + self.name = name + self.keys = keys # OK not to clone as we never write it + + self.counts = {} + + # Scalar metrics are never empty + if not len(keys): + self.counts[()] = 0 + + def inc(self, *values): + if len(values) != len(self.keys): + raise ValueError("Expected as many values to inc() as keys (%d)" % + (len(self.keys)) + ) + + # TODO: should assert that the tag values are all strings + + if values not in self.counts: + self.counts[values] = 1 + else: + self.counts[values] += 1 + + def fetch(self): + return dict(self.counts) + + def _render_key(self, values): + # TODO: some kind of value escape + return ",".join(["%s=%s" % kv for kv in zip(self.keys, values)]) + + def render(self): + if not len(self.keys): + return ["%s %d" % (self.name, self.counts[()])] + + return ["%s{%s} %d" % (self.name, self._render_key(k), self.counts[k]) + for k in sorted(self.counts.keys())] diff --git a/tests/metrics/__init__.py b/tests/metrics/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py new file mode 100644 index 000000000..a4fd52a9d --- /dev/null +++ b/tests/metrics/test_metric.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from tests import unittest + +from synapse.metrics.metric import CounterMetric + + +class CounterMetricTestCase(unittest.TestCase): + + def test_scalar(self): + counter = CounterMetric("scalar") + + self.assertEquals(counter.render(), [ + "scalar 0", + ]) + + counter.inc() + + self.assertEquals(counter.render(), [ + "scalar 1", + ]) + + counter.inc() + counter.inc() + + self.assertEquals(counter.render(), [ + "scalar 3" + ]) + + def test_vector(self): + counter = CounterMetric("vector", keys=["method"]) + + # Empty counter doesn't yet know what values it has + self.assertEquals(counter.render(), []) + + counter.inc("GET") + + self.assertEquals(counter.render(), [ + "vector{method=GET} 1", + ]) + + counter.inc("GET") + counter.inc("PUT") + + self.assertEquals(counter.render(), [ + "vector{method=GET} 2", + "vector{method=PUT} 1", + ]) From 120b6892840bae0e791348da4b1b35761e841b55 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 17:20:14 +0000 Subject: [PATCH 028/112] Delete pointless (and unreachable) __init__ method from FederationClient --- synapse/federation/federation_client.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index f131941f4..2284fc1d9 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -37,8 +37,6 @@ logger = logging.getLogger(__name__) class FederationClient(FederationBase): - def __init__(self): - self._get_pdu_cache = None def start_get_pdu_cache(self): self._get_pdu_cache = ExpiringCache( From 74bc42cfddb9bbaf364794857ab902b046c893dc Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 18:03:39 +0000 Subject: [PATCH 029/112] An initial implementation of a 'metrics' instance, similar to a 'logger' for keeping counter stats on method calls --- synapse/metrics/__init__.py | 69 +++++++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 synapse/metrics/__init__.py diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py new file mode 100644 index 000000000..125845eb3 --- /dev/null +++ b/synapse/metrics/__init__.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .metric import CounterMetric + + +# We'll keep all the available metrics in a single toplevel dict, one shared +# for the entire process. We don't currently support per-HomeServer instances +# of metrics, because in practice any one python VM will host only one +# HomeServer anyway. 
This makes a lot of implementation neater +all_metrics = {} + + +class Metrics(object): + """ A single Metrics object gives a (mutable) slice view of the all_metrics + dict, allowing callers to easily register new metrics that are namespaced + nicely.""" + + def __init__(self, name): + self.name_prefix = name + + def _register(self, metric): + all_metrics[metric.name] = metric + + def register_counter(self, name, *args, **kwargs): + full_name = "%s.%s" % (self.name_prefix, name) + + metric = CounterMetric(full_name, *args, **kwargs) + + self._register(metric) + + return metric + + def counted(self, func): + """ A method decorator that registers a counter, to count invocations + of this method. """ + counter = self.register_counter(func.__name__) + + def wrapped(*args, **kwargs): + counter.inc() + return func(*args, **kwargs) + return wrapped + + +def get_metrics_for(name): + """ Returns a Metrics instance for conveniently creating metrics + namespaced with the given name prefix. """ + return Metrics(name) + + +def render_all(): + strs = [] + + for name in sorted(all_metrics.keys()): + strs += all_metrics[name].render() + + return "\n".join(strs) From a594087f06795ae19f7eaf22df314be83dd7286f Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 18:04:01 +0000 Subject: [PATCH 030/112] Have the MetricsResource actually render metric counters --- synapse/metrics/resource.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/metrics/resource.py b/synapse/metrics/resource.py index b48048462..2b9a1ee0a 100644 --- a/synapse/metrics/resource.py +++ b/synapse/metrics/resource.py @@ -16,6 +16,8 @@ from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET +import synapse.metrics + METRICS_PREFIX = "/_synapse/metrics" @@ -29,7 +31,7 @@ class MetricsResource(Resource): self.hs = hs def render_GET(self, request): - response = "Hello, world!" + response = synapse.metrics.render_all() request.setHeader("Content-Type", "text/plain") request.setHeader("Content-Length", str(len(response))) From 9470412316dee5c782b0815383fff1ba10002f15 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 18:10:44 +0000 Subject: [PATCH 031/112] Initial attempt at sprinkling some @metrics.counted decorations around the federation code --- synapse/federation/federation_client.py | 14 ++++++++++++++ synapse/federation/federation_server.py | 14 ++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 2284fc1d9..ef177b79c 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -25,6 +25,7 @@ from synapse.api.errors import ( from synapse.util.expiringcache import ExpiringCache from synapse.util.logutils import log_function from synapse.events import FrozenEvent +import synapse.metrics from synapse.util.retryutils import get_retry_limiter, NotRetryingDestination @@ -35,6 +36,8 @@ import random logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for(__name__) + class FederationClient(FederationBase): @@ -50,6 +53,7 @@ class FederationClient(FederationBase): self._get_pdu_cache.start() @log_function + @metrics.counted def send_pdu(self, pdu, destinations): """Informs the replication layer about a new PDU generated within the home server that should be transmitted to others. 
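A rough end-to-end sketch of how the metrics module introduced above is meant to be driven. The "synapse.demo" prefix, the decorated function and the counts are made up for illustration; the rendered lines follow the format the unit tests exercise:

    import synapse.metrics

    metrics = synapse.metrics.get_metrics_for("synapse.demo")

    # a vector counter, keyed by HTTP method
    requests = metrics.register_counter("requests", keys=["method"])
    requests.inc("GET")
    requests.inc("GET")
    requests.inc("PUT")

    # a scalar counter registered implicitly, named after the function
    @metrics.counted
    def do_work():
        pass

    do_work()

    print(synapse.metrics.render_all())
    # synapse.demo.do_work 1
    # synapse.demo.requests{method=GET} 2
    # synapse.demo.requests{method=PUT} 1
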
@@ -77,6 +81,7 @@ class FederationClient(FederationBase): ) @log_function + @metrics.counted def send_edu(self, destination, edu_type, content): edu = Edu( origin=self.server_name, @@ -90,11 +95,13 @@ class FederationClient(FederationBase): return defer.succeed(None) @log_function + @metrics.counted def send_failure(self, failure, destination): self._transaction_queue.enqueue_failure(failure, destination) return defer.succeed(None) @log_function + @metrics.counted def make_query(self, destination, query_type, args, retry_on_dns_fail=True): """Sends a federation Query to a remote homeserver of the given type @@ -156,6 +163,7 @@ class FederationClient(FederationBase): @defer.inlineCallbacks @log_function + @metrics.counted def get_pdu(self, destinations, event_id, outlier=False): """Requests the PDU with given origin and ID from the remote home servers. @@ -245,6 +253,7 @@ class FederationClient(FederationBase): @defer.inlineCallbacks @log_function + @metrics.counted def get_state_for_room(self, destination, room_id, event_id): """Requests all of the `current` state PDUs for a given room from a remote home server. @@ -285,6 +294,7 @@ class FederationClient(FederationBase): @defer.inlineCallbacks @log_function + @metrics.counted def get_event_auth(self, destination, room_id, event_id): res = yield self.transport_layer.get_event_auth( destination, room_id, event_id, @@ -304,6 +314,7 @@ class FederationClient(FederationBase): defer.returnValue(signed_auth) @defer.inlineCallbacks + @metrics.counted def make_join(self, destinations, room_id, user_id): for destination in destinations: try: @@ -330,6 +341,7 @@ class FederationClient(FederationBase): raise RuntimeError("Failed to send to any server.") @defer.inlineCallbacks + @metrics.counted def send_join(self, destinations, pdu): for destination in destinations: try: @@ -379,6 +391,7 @@ class FederationClient(FederationBase): raise RuntimeError("Failed to send to any server.") @defer.inlineCallbacks + @metrics.counted def send_invite(self, destination, room_id, event_id, pdu): time_now = self._clock.time_msec() code, content = yield self.transport_layer.send_invite( @@ -402,6 +415,7 @@ class FederationClient(FederationBase): defer.returnValue(pdu) @defer.inlineCallbacks + @metrics.counted def query_auth(self, destination, room_id, event_id, local_auth): """ Params: diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 9c7dcdba9..3216fca95 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -22,6 +22,7 @@ from .units import Transaction, Edu from synapse.util.logutils import log_function from synapse.util.logcontext import PreserveLoggingContext from synapse.events import FrozenEvent +import synapse.metrics from synapse.api.errors import FederationError, SynapseError @@ -32,6 +33,8 @@ import logging logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for(__name__) + class FederationServer(FederationBase): def set_handler(self, handler): @@ -72,6 +75,7 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function + @metrics.counted def on_backfill_request(self, origin, room_id, versions, limit): pdus = yield self.handler.on_backfill_request( origin, room_id, versions, limit @@ -81,6 +85,7 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function + @metrics.counted def on_incoming_transaction(self, transaction_data): transaction = Transaction(**transaction_data) @@ -160,6 +165,7 @@ class 
FederationServer(FederationBase): @defer.inlineCallbacks @log_function + @metrics.counted def on_context_state_request(self, origin, room_id, event_id): if event_id: pdus = yield self.handler.get_state_for_pdu( @@ -187,6 +193,7 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function + @metrics.counted def on_pdu_request(self, origin, event_id): pdu = yield self._get_persisted_pdu(origin, event_id) @@ -199,10 +206,12 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function + @metrics.counted def on_pull_request(self, origin, versions): raise NotImplementedError("Pull transactions not implemented") @defer.inlineCallbacks + @metrics.counted def on_query_request(self, query_type, args): if query_type in self.query_handlers: response = yield self.query_handlers[query_type](args) @@ -213,12 +222,14 @@ class FederationServer(FederationBase): ) @defer.inlineCallbacks + @metrics.counted def on_make_join_request(self, room_id, user_id): pdu = yield self.handler.on_make_join_request(room_id, user_id) time_now = self._clock.time_msec() defer.returnValue({"event": pdu.get_pdu_json(time_now)}) @defer.inlineCallbacks + @metrics.counted def on_invite_request(self, origin, content): pdu = self.event_from_pdu_json(content) ret_pdu = yield self.handler.on_invite_request(origin, pdu) @@ -226,6 +237,7 @@ class FederationServer(FederationBase): defer.returnValue((200, {"event": ret_pdu.get_pdu_json(time_now)})) @defer.inlineCallbacks + @metrics.counted def on_send_join_request(self, origin, content): logger.debug("on_send_join_request: content: %s", content) pdu = self.event_from_pdu_json(content) @@ -240,6 +252,7 @@ class FederationServer(FederationBase): })) @defer.inlineCallbacks + @metrics.counted def on_event_auth(self, origin, room_id, event_id): time_now = self._clock.time_msec() auth_pdus = yield self.handler.on_event_auth(event_id) @@ -248,6 +261,7 @@ class FederationServer(FederationBase): })) @defer.inlineCallbacks + @metrics.counted def on_query_auth_request(self, origin, content, event_id): """ Content is a dict with keys:: From c53ec53d80a02a1feea5efab18f0f84a0bdf4be8 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 19:05:20 +0000 Subject: [PATCH 032/112] Pull out all uses of the underlying HTTP user agent .request() method into a single wrapper function, to make adding metrics easier --- synapse/http/client.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index b53a07aa2..d61e53bd9 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -45,12 +45,17 @@ class SimpleHttpClient(object): self.agent = Agent(reactor) self.version_string = hs.version_string + def request(self, method, *args, **kwargs): + # A small wrapper around self.agent.request() so we can easily attach + # counters to it + return self.agent.request(method, *args, **kwargs) + @defer.inlineCallbacks def post_urlencoded_get_json(self, uri, args={}): logger.debug("post_urlencoded_get_json args: %s", args) query_bytes = urllib.urlencode(args, True) - response = yield self.agent.request( + response = yield self.request( "POST", uri.encode("ascii"), headers=Headers({ @@ -70,7 +75,7 @@ class SimpleHttpClient(object): logger.info("HTTP POST %s -> %s", json_str, uri) - response = yield self.agent.request( + response = yield self.request( "POST", uri.encode("ascii"), headers=Headers({ @@ -104,7 +109,7 @@ class SimpleHttpClient(object): query_bytes = urllib.urlencode(args, 
True) uri = "%s?%s" % (uri, query_bytes) - response = yield self.agent.request( + response = yield self.request( "GET", uri.encode("ascii"), headers=Headers({ @@ -145,7 +150,7 @@ class SimpleHttpClient(object): json_str = encode_canonical_json(json_body) - response = yield self.agent.request( + response = yield self.request( "PUT", uri.encode("ascii"), headers=Headers({ @@ -176,7 +181,7 @@ class CaptchaServerHttpClient(SimpleHttpClient): def post_urlencoded_get_raw(self, url, args={}): query_bytes = urllib.urlencode(args, True) - response = yield self.agent.request( + response = yield self.request( "POST", url.encode("ascii"), bodyProducer=FileBodyProducer(StringIO(query_bytes)), From 7d72e44eb9660eaeab017897853cf3ec18fd06c6 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 24 Feb 2015 19:51:21 +0000 Subject: [PATCH 033/112] Add vector counters to HTTP clients and servers; count the requests by method and responses by method and response code --- synapse/http/client.py | 23 ++++++++++++++++++++- synapse/http/matrixfederationclient.py | 28 +++++++++++++++++++++++--- synapse/http/server.py | 14 +++++++++++++ 3 files changed, 61 insertions(+), 4 deletions(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index d61e53bd9..e40e82e80 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -15,6 +15,7 @@ from synapse.api.errors import CodeMessageException from syutil.jsonutil import encode_canonical_json +import synapse.metrics from twisted.internet import defer, reactor from twisted.web.client import ( @@ -31,6 +32,15 @@ import urllib logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for(__name__) + +outgoing_requests_counter = metrics.register_counter("outgoing_requests", + keys=["method"], +) +incoming_responses_counter = metrics.register_counter("incoming_responses", + keys=["method","code"], +) + class SimpleHttpClient(object): """ @@ -48,7 +58,18 @@ class SimpleHttpClient(object): def request(self, method, *args, **kwargs): # A small wrapper around self.agent.request() so we can easily attach # counters to it - return self.agent.request(method, *args, **kwargs) + outgoing_requests_counter.inc(method) + d = self.agent.request(method, *args, **kwargs) + + def _cb(response): + incoming_responses_counter.inc(method, response.code) + return response + def _eb(failure): + incoming_responses_counter.inc(method, "ERR") + return failure + d.addCallbacks(_cb, _eb) + + return d @defer.inlineCallbacks def post_urlencoded_get_json(self, uri, args={}): diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 7db001cc6..009152769 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -23,6 +23,7 @@ from twisted.web._newclient import ResponseDone from synapse.http.endpoint import matrix_federation_endpoint from synapse.util.async import sleep from synapse.util.logcontext import PreserveLoggingContext +import synapse.metrics from syutil.jsonutil import encode_canonical_json @@ -40,6 +41,15 @@ import urlparse logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for(__name__) + +outgoing_requests_counter = metrics.register_counter("outgoing_requests", + keys=["method"], +) +incoming_responses_counter = metrics.register_counter("incoming_responses", + keys=["method","code"], +) + class MatrixFederationHttpAgent(_AgentBase): @@ -49,6 +59,8 @@ class MatrixFederationHttpAgent(_AgentBase): def request(self, destination, endpoint, method, 
path, params, query, headers, body_producer): + outgoing_requests_counter.inc(method) + host = b"" port = 0 fragment = b"" @@ -59,9 +71,19 @@ class MatrixFederationHttpAgent(_AgentBase): # Set the connection pool key to be the destination. key = destination - return self._requestWithEndpoint(key, endpoint, method, parsed_URI, - headers, body_producer, - parsed_URI.originForm) + d = self._requestWithEndpoint(key, endpoint, method, parsed_URI, + headers, body_producer, + parsed_URI.originForm) + + def _cb(response): + incoming_responses_counter.inc(method, response.code) + return response + def _eb(failure): + incoming_responses_counter.inc(method, "ERR") + return failure + d.addCallbacks(_cb, _eb) + + return d class MatrixFederationHttpClient(object): diff --git a/synapse/http/server.py b/synapse/http/server.py index 767c3ef79..ac893bb40 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -18,6 +18,7 @@ from synapse.api.errors import ( cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError ) from synapse.util.logcontext import LoggingContext +import synapse.metrics from syutil.jsonutil import ( encode_canonical_json, encode_pretty_printed_json @@ -34,6 +35,15 @@ import urllib logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for(__name__) + +incoming_requests_counter = metrics.register_counter("incoming_requests", + keys=["method"], +) +outgoing_responses_counter = metrics.register_counter("outgoing_responses", + keys=["method","code"], +) + class HttpServer(object): """ Interface for registering callbacks on a HTTP server @@ -112,6 +122,8 @@ class JsonResource(HttpServer, resource.Resource): This checks if anyone has registered a callback for that method and path. """ + incoming_requests_counter.inc(request.method) + code = None start = self.clock.time_msec() try: @@ -190,6 +202,8 @@ class JsonResource(HttpServer, resource.Resource): request) return + outgoing_responses_counter.inc(request.method, str(code)) + # TODO: Only enable CORS for the requests that need it. respond_with_json( request, code, response_json_object, From ce8b5769f7e08515edf8988281d17df7b0ddfdaa Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 15:47:23 +0000 Subject: [PATCH 034/112] Create the concept of a cachecounter metric; generating two counters specific to caches --- synapse/metrics/__init__.py | 11 ++++++++- synapse/metrics/metric.py | 43 +++++++++++++++++++++++++++++++----- tests/metrics/test_metric.py | 27 +++++++++++++++++++++- 3 files changed, 73 insertions(+), 8 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 125845eb3..d5c30bbe4 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .metric import CounterMetric +from .metric import CounterMetric, CacheCounterMetric # We'll keep all the available metrics in a single toplevel dict, one shared @@ -43,6 +43,15 @@ class Metrics(object): return metric + def register_cachecounter(self, name, *args, **kwargs): + full_name = "%s.%s" % (self.name_prefix, name) + + metric = CacheCounterMetric(full_name, *args, **kwargs) + + self._register(metric) + + return metric + def counted(self, func): """ A method decorator that registers a counter, to count invocations of this method. 
""" diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index f5a98763c..00b149f6f 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -14,16 +14,28 @@ # limitations under the License. -class CounterMetric(object): +class BaseMetric(object): def __init__(self, name, keys=[]): self.name = name self.keys = keys # OK not to clone as we never write it + def _render_key(self, values): + # TODO: some kind of value escape + return ",".join(["%s=%s" % kv for kv in zip(self.keys, values)]) + + +class CounterMetric(BaseMetric): + """The simplest kind of metric; one that stores a monotonically-increasing + integer that counts events.""" + + def __init__(self, *args, **kwargs): + super(CounterMetric, self).__init__(*args, **kwargs) + self.counts = {} # Scalar metrics are never empty - if not len(keys): + if not len(self.keys): self.counts[()] = 0 def inc(self, *values): @@ -42,13 +54,32 @@ class CounterMetric(object): def fetch(self): return dict(self.counts) - def _render_key(self, values): - # TODO: some kind of value escape - return ",".join(["%s=%s" % kv for kv in zip(self.keys, values)]) - def render(self): if not len(self.keys): return ["%s %d" % (self.name, self.counts[()])] return ["%s{%s} %d" % (self.name, self._render_key(k), self.counts[k]) for k in sorted(self.counts.keys())] + + +class CacheCounterMetric(object): + """A combination of two CounterMetrics, one to count cache hits and one to + count misses. + + This metric generates standard metric name pairs, so that monitoring rules + can easily be applied to measure hit ratio.""" + + def __init__(self, name, keys=[]): + self.name = name + + self.hits = CounterMetric(name + ":hits", keys=keys) + self.misses = CounterMetric(name + ":misses", keys=keys) + + def inc_hits(self, *values): + self.hits.inc(*values) + + def inc_misses(self, *values): + self.misses.inc(*values) + + def render(self): + return self.hits.render() + self.misses.render() diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index a4fd52a9d..93e8e27e4 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -15,7 +15,7 @@ from tests import unittest -from synapse.metrics.metric import CounterMetric +from synapse.metrics.metric import CounterMetric, CacheCounterMetric class CounterMetricTestCase(unittest.TestCase): @@ -59,3 +59,28 @@ class CounterMetricTestCase(unittest.TestCase): "vector{method=GET} 2", "vector{method=PUT} 1", ]) + + +class CacheCounterMetricTestCase(unittest.TestCase): + + def test_cachecounter(self): + counter = CacheCounterMetric("cache") + + self.assertEquals(counter.render(), [ + "cache:hits 0", + "cache:misses 0", + ]) + + counter.inc_misses() + + self.assertEquals(counter.render(), [ + "cache:hits 0", + "cache:misses 1", + ]) + + counter.inc_hits() + + self.assertEquals(counter.render(), [ + "cache:hits 1", + "cache:misses 1", + ]) From b0cdf097f4393fc288d6d31bb9b37bf921a35a1c Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 16:04:46 +0000 Subject: [PATCH 035/112] Sprinkle some CacheCounterMetrics around the synapse.storage layer --- synapse/storage/_base.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 3ea738276..804655e34 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -20,6 +20,7 @@ from synapse.events.utils import prune_event from synapse.util.logutils import log_function from synapse.util.logcontext import 
PreserveLoggingContext, LoggingContext from synapse.util.lrucache import LruCache +import synapse.metrics from twisted.internet import defer @@ -31,6 +32,8 @@ import time logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for("synapse.storage") + sql_logger = logging.getLogger("synapse.storage.SQL") transaction_logger = logging.getLogger("synapse.storage.txn") @@ -56,6 +59,8 @@ def cached(max_entries=1000): def wrap(orig): cache = OrderedDict() + counter = metrics.register_cachecounter(orig.__name__) + def prefill(key, value): while len(cache) > max_entries: cache.popitem(last=False) @@ -65,8 +70,10 @@ def cached(max_entries=1000): @defer.inlineCallbacks def wrapped(self, key): if key in cache: + counter.inc_hits() defer.returnValue(cache[key]) + counter.inc_misses() ret = yield orig(self, key) prefill(key, ret) defer.returnValue(ret) @@ -176,6 +183,9 @@ class SQLBaseStore(object): self._get_event_counters = PerformanceCounters() self._get_event_cache = LruCache(hs.config.event_cache_size) + self._get_event_cache_counter = metrics.register_cachecounter( + "get_event" + ) def start_profiling(self): self._previous_loop_ts = self._clock.time_msec() @@ -644,8 +654,12 @@ class SQLBaseStore(object): try: # Separate cache entries for each way to invoke _get_event_txn - return cache[(check_redacted, get_prev_content, allow_rejected)] + ret = cache[(check_redacted, get_prev_content, allow_rejected)] + + self._get_event_cache_counter.inc_hits() + return ret except KeyError: + self._get_event_cache_counter.inc_misses() pass finally: start_time = update_counter("event_cache", start_time) From d8caa5454d781a76a65fa4ce75336541b973f624 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 16:46:44 +0000 Subject: [PATCH 036/112] Initial attempt at a scalar callback-based metric to give instantaneous snapshot gauges --- synapse/metrics/__init__.py | 11 ++++++++++- synapse/metrics/metric.py | 14 ++++++++++++++ tests/metrics/test_metric.py | 22 +++++++++++++++++++++- 3 files changed, 45 insertions(+), 2 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index d5c30bbe4..d7584fc0b 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .metric import CounterMetric, CacheCounterMetric +from .metric import CounterMetric, CallbackMetric, CacheCounterMetric # We'll keep all the available metrics in a single toplevel dict, one shared @@ -43,6 +43,15 @@ class Metrics(object): return metric + def register_callback(self, name, callback, *args, **kwargs): + full_name = "%s.%s" % (self.name_prefix, name) + + metric = CallbackMetric(full_name, *args, callback=callback, **kwargs) + + self._register(metric) + + return metric + def register_cachecounter(self, name, *args, **kwargs): full_name = "%s.%s" % (self.name_prefix, name) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 00b149f6f..8a497fc15 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -62,6 +62,20 @@ class CounterMetric(BaseMetric): for k in sorted(self.counts.keys())] +class CallbackMetric(BaseMetric): + """A metric that returns the numeric value returned by a callback whenever + it is rendered. 
Typically this is used to implement gauges that yield the + size or other state of some in-memory object by actively querying it.""" + + def __init__(self, name, callback, keys=[]): + super(CallbackMetric, self).__init__(name, keys=keys) + + self.callback = callback + + def render(self): + # TODO(paul): work out something we can do with keys and vectors + return ["%s %d" % (self.name, self.callback())] + class CacheCounterMetric(object): """A combination of two CounterMetrics, one to count cache hits and one to count misses. diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index 93e8e27e4..b7bb375ce 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -15,7 +15,9 @@ from tests import unittest -from synapse.metrics.metric import CounterMetric, CacheCounterMetric +from synapse.metrics.metric import ( + CounterMetric, CallbackMetric, CacheCounterMetric +) class CounterMetricTestCase(unittest.TestCase): @@ -61,6 +63,24 @@ class CounterMetricTestCase(unittest.TestCase): ]) +class CallbackMetricTestCase(unittest.TestCase): + + def test_callback(self): + d = dict() + + metric = CallbackMetric("size", lambda: len(d)) + + self.assertEquals(metric.render(), [ + "size 0", + ]) + + d["key"] = "value" + + self.assertEquals(metric.render(), [ + "size 1", + ]) + + class CacheCounterMetricTestCase(unittest.TestCase): def test_cachecounter(self): From 59c448f074439f1c6f5f51765e24ecfcff8e4101 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 16:50:23 +0000 Subject: [PATCH 037/112] Add a scalar gauge metric on the size of the presence user cachemap --- synapse/handlers/presence.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 28e922f79..698946a48 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -21,6 +21,7 @@ from synapse.api.constants import PresenceState from synapse.util.logutils import log_function from synapse.util.logcontext import PreserveLoggingContext from synapse.types import UserID +import synapse.metrics from ._base import BaseHandler @@ -29,6 +30,8 @@ import logging logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for(__name__) + # TODO(paul): Maybe there's one of these I can steal from somewhere def partition(l, func): @@ -133,6 +136,10 @@ class PresenceHandler(BaseHandler): self._user_cachemap = {} self._user_cachemap_latest_serial = 0 + metrics.register_callback("user_cachemap:size", + lambda: len(self._user_cachemap) + ) + def _get_or_make_usercache(self, user): """If the cache entry doesn't exist, initialise a new one.""" if user not in self._user_cachemap: From e02cc249da6ff71dcc1e5560232b302246a11c9d Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 17:13:09 +0000 Subject: [PATCH 038/112] Ensure that exceptions while rendering individual metrics don't stop others from being rendered anyway - especially useful for CallbackMetric --- synapse/metrics/__init__.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index d7584fc0b..d967b04ee 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -13,9 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging + from .metric import CounterMetric, CallbackMetric, CacheCounterMetric +logger = logging.getLogger(__name__) + + # We'll keep all the available metrics in a single toplevel dict, one shared # for the entire process. We don't currently support per-HomeServer instances # of metrics, because in practice any one python VM will host only one @@ -82,6 +87,10 @@ def render_all(): strs = [] for name in sorted(all_metrics.keys()): - strs += all_metrics[name].render() + try: + strs += all_metrics[name].render() + except Exception as e: + strs += ["# FAILED to render %s" % name] + logger.exception("Failed to render %s metric", name) return "\n".join(strs) From 8664599af77ba0ed6268b3112174dc8e0c91101b Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 17:34:23 +0000 Subject: [PATCH 039/112] Rename CacheCounterMetric to just CacheMetric; add a CallbackMetric component to give the size of the cache --- synapse/metrics/__init__.py | 6 +++--- synapse/metrics/metric.py | 13 +++++++++---- synapse/storage/_base.py | 6 +++--- tests/metrics/test_metric.py | 24 +++++++++++++++--------- 4 files changed, 30 insertions(+), 19 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index d967b04ee..442fd70cd 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -15,7 +15,7 @@ import logging -from .metric import CounterMetric, CallbackMetric, CacheCounterMetric +from .metric import CounterMetric, CallbackMetric, CacheMetric logger = logging.getLogger(__name__) @@ -57,10 +57,10 @@ class Metrics(object): return metric - def register_cachecounter(self, name, *args, **kwargs): + def register_cache(self, name, *args, **kwargs): full_name = "%s.%s" % (self.name_prefix, name) - metric = CacheCounterMetric(full_name, *args, **kwargs) + metric = CacheMetric(full_name, *args, **kwargs) self._register(metric) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 8a497fc15..7e47f8615 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -76,19 +76,24 @@ class CallbackMetric(BaseMetric): # TODO(paul): work out something we can do with keys and vectors return ["%s %d" % (self.name, self.callback())] -class CacheCounterMetric(object): +class CacheMetric(object): """A combination of two CounterMetrics, one to count cache hits and one to - count misses. + count misses, and a callback metric to yield the current size. 
This metric generates standard metric name pairs, so that monitoring rules can easily be applied to measure hit ratio.""" - def __init__(self, name, keys=[]): + def __init__(self, name, size_callback, keys=[]): self.name = name self.hits = CounterMetric(name + ":hits", keys=keys) self.misses = CounterMetric(name + ":misses", keys=keys) + self.size = CallbackMetric(name + ":size", + callback=size_callback, + keys=keys, + ) + def inc_hits(self, *values): self.hits.inc(*values) @@ -96,4 +101,4 @@ class CacheCounterMetric(object): self.misses.inc(*values) def render(self): - return self.hits.render() + self.misses.render() + return self.hits.render() + self.misses.render() + self.size.render() diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 804655e34..d3c2bc7bf 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -59,7 +59,7 @@ def cached(max_entries=1000): def wrap(orig): cache = OrderedDict() - counter = metrics.register_cachecounter(orig.__name__) + counter = metrics.register_cache(orig.__name__, lambda: len(cache)) def prefill(key, value): while len(cache) > max_entries: @@ -183,8 +183,8 @@ class SQLBaseStore(object): self._get_event_counters = PerformanceCounters() self._get_event_cache = LruCache(hs.config.event_cache_size) - self._get_event_cache_counter = metrics.register_cachecounter( - "get_event" + self._get_event_cache_counter = metrics.register_cache("get_event", + size_callback=lambda: len(self._get_event_cache), ) def start_profiling(self): diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index b7bb375ce..32fd178ed 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -16,7 +16,7 @@ from tests import unittest from synapse.metrics.metric import ( - CounterMetric, CallbackMetric, CacheCounterMetric + CounterMetric, CallbackMetric, CacheMetric ) @@ -81,26 +81,32 @@ class CallbackMetricTestCase(unittest.TestCase): ]) -class CacheCounterMetricTestCase(unittest.TestCase): +class CacheMetricTestCase(unittest.TestCase): - def test_cachecounter(self): - counter = CacheCounterMetric("cache") + def test_cache(self): + d = dict() - self.assertEquals(counter.render(), [ + metric = CacheMetric("cache", lambda: len(d)) + + self.assertEquals(metric.render(), [ "cache:hits 0", "cache:misses 0", + "cache:size 0", ]) - counter.inc_misses() + metric.inc_misses() + d["key"] = "value" - self.assertEquals(counter.render(), [ + self.assertEquals(metric.render(), [ "cache:hits 0", "cache:misses 1", + "cache:size 1", ]) - counter.inc_hits() + metric.inc_hits() - self.assertEquals(counter.render(), [ + self.assertEquals(metric.render(), [ "cache:hits 1", "cache:misses 1", + "cache:size 1", ]) From 849300bc736b82a6bcefdae110f5bf9111141afb Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 17:53:51 +0000 Subject: [PATCH 040/112] Neater introspection methods on BaseMetric so that subclasses don't need to touch self.keys directly --- synapse/metrics/metric.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 7e47f8615..e8c15a60e 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -20,6 +20,12 @@ class BaseMetric(object): self.name = name self.keys = keys # OK not to clone as we never write it + def dimension(self): + return len(self.keys) + + def is_scalar(self): + return not len(self.keys) + def _render_key(self, values): # TODO: some kind of value escape return 
",".join(["%s=%s" % kv for kv in zip(self.keys, values)]) @@ -35,13 +41,13 @@ class CounterMetric(BaseMetric): self.counts = {} # Scalar metrics are never empty - if not len(self.keys): + if self.is_scalar(): self.counts[()] = 0 def inc(self, *values): - if len(values) != len(self.keys): + if len(values) != self.dimension(): raise ValueError("Expected as many values to inc() as keys (%d)" % - (len(self.keys)) + (self.dimension()) ) # TODO: should assert that the tag values are all strings @@ -55,7 +61,7 @@ class CounterMetric(BaseMetric): return dict(self.counts) def render(self): - if not len(self.keys): + if self.is_scalar(): return ["%s %d" % (self.name, self.counts[()])] return ["%s{%s} %d" % (self.name, self._render_key(k), self.counts[k]) @@ -76,6 +82,7 @@ class CallbackMetric(BaseMetric): # TODO(paul): work out something we can do with keys and vectors return ["%s %d" % (self.name, self.callback())] + class CacheMetric(object): """A combination of two CounterMetrics, one to count cache hits and one to count misses, and a callback metric to yield the current size. From 23ab0c68c28e60e0f8774ee4099b2abe876374d0 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 17:58:10 +0000 Subject: [PATCH 041/112] Implement vector CallbackMetrics --- synapse/metrics/metric.py | 8 ++++++-- tests/metrics/test_metric.py | 18 +++++++++++++++++- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index e8c15a60e..4df5ebfda 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -79,9 +79,13 @@ class CallbackMetric(BaseMetric): self.callback = callback def render(self): - # TODO(paul): work out something we can do with keys and vectors - return ["%s %d" % (self.name, self.callback())] + value = self.callback() + if self.is_scalar(): + return ["%s %d" % (self.name, value)] + + return ["%s{%s} %d" % (self.name, self._render_key(k), value[k]) + for k in sorted(value.keys())] class CacheMetric(object): """A combination of two CounterMetrics, one to count cache hits and one to diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index 32fd178ed..b7facb858 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -65,7 +65,7 @@ class CounterMetricTestCase(unittest.TestCase): class CallbackMetricTestCase(unittest.TestCase): - def test_callback(self): + def test_scalar(self): d = dict() metric = CallbackMetric("size", lambda: len(d)) @@ -80,6 +80,22 @@ class CallbackMetricTestCase(unittest.TestCase): "size 1", ]) + def test_vector(self): + vals = dict() + + metric = CallbackMetric("values", lambda: vals, keys=["type"]) + + self.assertEquals(metric.render(), []) + + # Keys have to be tuples, even if they're 1-element + vals[("foo",)] = 1 + vals[("bar",)] = 2 + + self.assertEquals(metric.render(), [ + "values{type=bar} 2", + "values{type=foo} 1", + ]) + class CacheMetricTestCase(unittest.TestCase): From e9c4b0d1789fca9cdefb53c75defa19a8ea62473 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 18:15:07 +0000 Subject: [PATCH 042/112] Ensure that /_synapse/metrics response is UTF-8 encoded --- synapse/metrics/resource.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/metrics/resource.py b/synapse/metrics/resource.py index 2b9a1ee0a..ff7baab01 100644 --- a/synapse/metrics/resource.py +++ b/synapse/metrics/resource.py @@ -36,4 +36,5 @@ class MetricsResource(Resource): request.setHeader("Content-Type", "text/plain") 
request.setHeader("Content-Length", str(len(response))) - return response + # Encode as UTF-8 (default) + return response.encode() From 094803cf82cc748dd32fe4b03c6db016aeb90075 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 18:15:34 +0000 Subject: [PATCH 043/112] Put vector gauges on transaction queue pending PDU and EDU dicts --- synapse/federation/transaction_queue.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 9dc7849b1..b9d3f8932 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -25,12 +25,15 @@ from synapse.util.logcontext import PreserveLoggingContext from synapse.util.retryutils import ( get_retry_limiter, NotRetryingDestination, ) +import synapse.metrics import logging logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for(__name__) + class TransactionQueue(object): """This class makes sure we only have one transaction in flight at @@ -56,9 +59,9 @@ class TransactionQueue(object): # Is a mapping from destination -> list of # tuple(pending pdus, deferred, order) - self.pending_pdus_by_dest = {} + self.pending_pdus_by_dest = pdus = {} # destination -> list of tuple(edu, deferred) - self.pending_edus_by_dest = {} + self.pending_edus_by_dest = edus = {} # destination -> list of tuple(failure, deferred) self.pending_failures_by_dest = {} @@ -66,6 +69,15 @@ class TransactionQueue(object): # HACK to get unique tx id self._next_txn_id = int(self._clock.time_msec()) + metrics.register_callback("pending_pdus", + lambda: {(dest,): len(pdus[dest]) for dest in pdus.keys()}, + keys=["dest"], + ) + metrics.register_callback("pending_edus", + lambda: {(dest,): len(edus[dest]) for dest in edus.keys()}, + keys=["dest"], + ) + def can_send_to(self, destination): """Can we send messages to the given server? From e1a7e3564fa1e20b52c7294a8b0f28e5ecbaf672 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 18:16:54 +0000 Subject: [PATCH 044/112] Delete a couple of TODO markers of monitoring stats now done --- synapse/storage/_base.py | 1 - synapse/util/lrucache.py | 1 - 2 files changed, 2 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index d3c2bc7bf..d6cf88c35 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -40,7 +40,6 @@ transaction_logger = logging.getLogger("synapse.storage.txn") # TODO(paul): # * more generic key management -# * export monitoring stats # * consider other eviction strategies - LRU? def cached(max_entries=1000): """ A method decorator that applies a memoizing cache around the function. diff --git a/synapse/util/lrucache.py b/synapse/util/lrucache.py index f115f50e5..65d579290 100644 --- a/synapse/util/lrucache.py +++ b/synapse/util/lrucache.py @@ -16,7 +16,6 @@ class LruCache(object): """Least-recently-used cache.""" - # TODO(mjark) Add hit/miss counters # TODO(mjark) Add mutex for linked list for thread safety. 
def __init__(self, max_size): cache = {} From 72625f2f4d633e9fe59e61bb371a118927e5c66c Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 19:22:14 +0000 Subject: [PATCH 045/112] Initial hack at a TimerMetric; for storing counts + duration accumulators --- synapse/metrics/metric.py | 48 ++++++++++++++++++++++++++++++++++++ tests/metrics/test_metric.py | 36 ++++++++++++++++++++++++++- 2 files changed, 83 insertions(+), 1 deletion(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 4df5ebfda..717588194 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -14,6 +14,15 @@ # limitations under the License. +from itertools import chain + + +# TODO(paul): I can't believe Python doesn't have one of these +def map_concat(func, items): + # flatten a list-of-lists + return list(chain.from_iterable(map(func, items))) + + class BaseMetric(object): def __init__(self, name, keys=[]): @@ -87,6 +96,45 @@ class CallbackMetric(BaseMetric): return ["%s{%s} %d" % (self.name, self._render_key(k), value[k]) for k in sorted(value.keys())] + +class TimerMetric(CounterMetric): + """A combination of an event counter and a time accumulator, which counts + both the number of events and how long each one takes. + + TODO(paul): Try to export some heatmap-style stats? + """ + + def __init__(self, *args, **kwargs): + super(TimerMetric, self).__init__(*args, **kwargs) + + self.times = {} + + # Scalar metrics are never empty + if self.is_scalar(): + self.times[()] = 0 + + def inc_time(self, msec, *values): + self.inc(*values) + + if values not in self.times: + self.times[values] = msec + else: + self.times[values] += msec + + def render(self): + if self.is_scalar(): + return ["%s:count %d" % (self.name, self.counts[()]), + "%s:msec %d" % (self.name, self.times[()])] + + def render_item(k): + keystr = self._render_key(k) + + return ["%s{%s}:count %d" % (self.name, keystr, self.counts[k]), + "%s{%s}:msec %d" % (self.name, keystr, self.times[k])] + + return map_concat(render_item, sorted(self.counts.keys())) + + class CacheMetric(object): """A combination of two CounterMetrics, one to count cache hits and one to count misses, and a callback metric to yield the current size. 
diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index b7facb858..b25520821 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -16,7 +16,7 @@ from tests import unittest from synapse.metrics.metric import ( - CounterMetric, CallbackMetric, CacheMetric + CounterMetric, CallbackMetric, TimerMetric, CacheMetric ) @@ -97,6 +97,40 @@ class CallbackMetricTestCase(unittest.TestCase): ]) +class TimerMetricTestCase(unittest.TestCase): + + def test_scalar(self): + metric = TimerMetric("thing") + + self.assertEquals(metric.render(), [ + "thing:count 0", + "thing:msec 0", + ]) + + metric.inc_time(500) + + self.assertEquals(metric.render(), [ + "thing:count 1", + "thing:msec 500", + ]) + + def test_vector(self): + metric = TimerMetric("queries", keys=["verb"]) + + self.assertEquals(metric.render(), []) + + metric.inc_time(300, "SELECT") + metric.inc_time(200, "SELECT") + metric.inc_time(800, "INSERT") + + self.assertEquals(metric.render(), [ + "queries{verb=INSERT}:count 1", + "queries{verb=INSERT}:msec 800", + "queries{verb=SELECT}:count 2", + "queries{verb=SELECT}:msec 500", + ]) + + class CacheMetricTestCase(unittest.TestCase): def test_cache(self): From a99d6edc05c60cdb473adbe5fc783532e0ff9ea3 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 19:28:17 +0000 Subject: [PATCH 046/112] Neater implementation of metric render methods by pulling out 'render' as a base method that calls self.render_item --- synapse/metrics/metric.py | 33 +++++++++++++++------------------ 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 717588194..4a6ab9cd7 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -36,8 +36,15 @@ class BaseMetric(object): return not len(self.keys) def _render_key(self, values): + if self.is_scalar(): + return "" # TODO: some kind of value escape - return ",".join(["%s=%s" % kv for kv in zip(self.keys, values)]) + return "{%s}" % ( + ",".join(["%s=%s" % kv for kv in zip(self.keys, values)]) + ) + + def render(self): + return map_concat(self.render_item, sorted(self.counts.keys())) class CounterMetric(BaseMetric): @@ -69,12 +76,8 @@ class CounterMetric(BaseMetric): def fetch(self): return dict(self.counts) - def render(self): - if self.is_scalar(): - return ["%s %d" % (self.name, self.counts[()])] - - return ["%s{%s} %d" % (self.name, self._render_key(k), self.counts[k]) - for k in sorted(self.counts.keys())] + def render_item(self, k): + return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])] class CallbackMetric(BaseMetric): @@ -93,7 +96,7 @@ class CallbackMetric(BaseMetric): if self.is_scalar(): return ["%s %d" % (self.name, value)] - return ["%s{%s} %d" % (self.name, self._render_key(k), value[k]) + return ["%s%s %d" % (self.name, self._render_key(k), value[k]) for k in sorted(value.keys())] @@ -121,18 +124,12 @@ class TimerMetric(CounterMetric): else: self.times[values] += msec - def render(self): - if self.is_scalar(): - return ["%s:count %d" % (self.name, self.counts[()]), - "%s:msec %d" % (self.name, self.times[()])] + def render_item(self, k): + keystr = self._render_key(k) - def render_item(k): - keystr = self._render_key(k) + return ["%s%s:count %d" % (self.name, keystr, self.counts[k]), + "%s%s:msec %d" % (self.name, keystr, self.times[k])] - return ["%s{%s}:count %d" % (self.name, keystr, self.counts[k]), - "%s{%s}:msec %d" % (self.name, keystr, self.times[k])] - - return map_concat(render_item, 
sorted(self.counts.keys())) class CacheMetric(object): From f52acf3b129864f400876f9316e1324e62e75eb0 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 19:43:46 +0000 Subject: [PATCH 047/112] Neater register_* methods on overall Metrics container --- synapse/metrics/__init__.py | 34 ++++++++++++---------------------- 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 442fd70cd..e1818ce39 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -15,7 +15,7 @@ import logging -from .metric import CounterMetric, CallbackMetric, CacheMetric +from .metric import CounterMetric, CallbackMetric, TimerMetric, CacheMetric logger = logging.getLogger(__name__) @@ -36,35 +36,25 @@ class Metrics(object): def __init__(self, name): self.name_prefix = name - def _register(self, metric): - all_metrics[metric.name] = metric - - def register_counter(self, name, *args, **kwargs): + def _register(self, metric_class, name, *args, **kwargs): full_name = "%s.%s" % (self.name_prefix, name) - metric = CounterMetric(full_name, *args, **kwargs) - - self._register(metric) + metric = metric_class(full_name, *args, **kwargs) + all_metrics[full_name] = metric return metric - def register_callback(self, name, callback, *args, **kwargs): - full_name = "%s.%s" % (self.name_prefix, name) + def register_counter(self, *args, **kwargs): + return self._register(CounterMetric, *args, **kwargs) - metric = CallbackMetric(full_name, *args, callback=callback, **kwargs) + def register_callback(self, *args, **kwargs): + return self._register(CallbackMetric, *args, **kwargs) - self._register(metric) + def register_timer(self, *args, **kwargs): + return self._register(TimerMetric, *args, **kwargs) - return metric - - def register_cache(self, name, *args, **kwargs): - full_name = "%s.%s" % (self.name_prefix, name) - - metric = CacheMetric(full_name, *args, **kwargs) - - self._register(metric) - - return metric + def register_cache(self, *args, **kwargs): + return self._register(CacheMetric, *args, **kwargs) def counted(self, func): """ A method decorator that registers a counter, to count invocations From 25187ab67426e206de30ebf5c8b5f34c995d3faf Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 4 Mar 2015 19:45:16 +0000 Subject: [PATCH 048/112] Collect per-SQL-verb timer stats on query execution time --- synapse/storage/_base.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index d6cf88c35..92e90ac57 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -32,12 +32,15 @@ import time logger = logging.getLogger(__name__) -metrics = synapse.metrics.get_metrics_for("synapse.storage") - sql_logger = logging.getLogger("synapse.storage.SQL") transaction_logger = logging.getLogger("synapse.storage.txn") +metrics = synapse.metrics.get_metrics_for("synapse.storage") + +sql_query_timer = metrics.register_timer("queries", keys=["verb"]) + + # TODO(paul): # * more generic key management # * consider other eviction strategies - LRU? @@ -89,7 +92,8 @@ def cached(max_entries=1000): class LoggingTransaction(object): """An object that almost-transparently proxies for the 'txn' object - passed to the constructor. Adds logging to the .execute() method.""" + passed to the constructor. 
Adds logging and metrics to the .execute() + method.""" __slots__ = ["txn", "name"] def __init__(self, txn, name): @@ -105,6 +109,7 @@ class LoggingTransaction(object): def execute(self, sql, *args, **kwargs): # TODO(paul): Maybe use 'info' and 'debug' for values? sql_logger.debug("[SQL] {%s} %s", self.name, sql) + try: if args and args[0]: values = args[0] @@ -126,8 +131,9 @@ class LoggingTransaction(object): logger.exception("[SQL FAIL] {%s}", self.name) raise finally: - end = time.time() * 1000 - sql_logger.debug("[SQL time] {%s} %f", self.name, end - start) + msecs = (time.time() * 1000) - start + sql_logger.debug("[SQL time] {%s} %f", self.name, msecs) + sql_query_timer.inc_time(msecs, sql.split()[0]) class PerformanceCounters(object): From 6d146e15df645a7958c59e4d1e4f48e4822be9f9 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 5 Mar 2015 15:12:39 +0000 Subject: [PATCH 049/112] Put some gauge metrics on the number of notifier listeners, and notified-on objects (users, rooms, appservices) --- synapse/notifier.py | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/synapse/notifier.py b/synapse/notifier.py index df13e8ddb..1f7cad624 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -19,12 +19,25 @@ from synapse.util.logutils import log_function from synapse.util.logcontext import PreserveLoggingContext from synapse.util.async import run_on_reactor from synapse.types import StreamToken +import synapse.metrics import logging logger = logging.getLogger(__name__) +metrics = synapse.metrics.get_metrics_for(__name__) + + +# TODO(paul): Should be shared somewhere +def count(func, l): + """Return the number of items in l for which func returns true.""" + n = 0 + for x in l: + if func(x): + n += 1 + return n + class _NotificationListener(object): """ This represents a single client connection to the events stream. @@ -95,6 +108,32 @@ class Notifier(object): "user_joined_room", self._user_joined_room ) + # This is not a very cheap test to perform, but it's only executed + # when rendering the metrics page, which is likely once per minute at + # most when scraping it. 
+ def count_listeners(): + all_listeners = set() + + for x in self.room_to_listeners.values(): + all_listeners |= x + for x in self.user_to_listeners.values(): + all_listeners |= x + for x in self.appservice_to_listeners.values(): + all_listeners |= x + + return len(all_listeners) + metrics.register_callback("all_listeners", count_listeners) + + metrics.register_callback("rooms", + lambda: count(bool, self.room_to_listeners.values()) + ) + metrics.register_callback("users", + lambda: count(bool, self.user_to_listeners.values()) + ) + metrics.register_callback("appservices", + lambda: count(bool, self.appservice_to_listeners.values()) + ) + @log_function @defer.inlineCallbacks def on_new_room_event(self, event, extra_users=[]): From fa319a57860af460add565d7801711ec53ab6799 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 5 Mar 2015 15:58:03 +0000 Subject: [PATCH 050/112] Add TimerMetrics to shadow the PerformanceCounters in synapse.storage; with the view to eventually replacing them entirely --- synapse/storage/_base.py | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 92e90ac57..d8c5a60c7 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -39,6 +39,8 @@ transaction_logger = logging.getLogger("synapse.storage.txn") metrics = synapse.metrics.get_metrics_for("synapse.storage") sql_query_timer = metrics.register_timer("queries", keys=["verb"]) +sql_txn_timer = metrics.register_timer("transactions", keys=["desc"]) +sql_getevents_timer = metrics.register_timer("get_events", keys=["desc"]) # TODO(paul): @@ -184,11 +186,16 @@ class SQLBaseStore(object): self._previous_txn_total_time = 0 self._current_txn_total_time = 0 self._previous_loop_ts = 0 + + # TODO(paul): These can eventually be removed once the metrics code + # is running in mainline, and we have some nice monitoring frontends + # to watch it self._txn_perf_counters = PerformanceCounters() self._get_event_counters = PerformanceCounters() self._get_event_cache = LruCache(hs.config.event_cache_size) - self._get_event_cache_counter = metrics.register_cache("get_event", + self._get_event_cache_counter = metrics.register_cache( + "get_event_cache", size_callback=lambda: len(self._get_event_cache), ) @@ -254,6 +261,8 @@ class SQLBaseStore(object): self._current_txn_total_time += end - start self._txn_perf_counters.update(desc, start, end) + sql_txn_timer.inc_time(self._current_txn_total_time, desc) + with PreserveLoggingContext(): result = yield self._db_pool.runInteraction( inner_func, *args, **kwargs @@ -653,7 +662,11 @@ class SQLBaseStore(object): get_prev_content=False, allow_rejected=False): start_time = time.time() * 1000 - update_counter = self._get_event_counters.update + + def update_counter(desc, last_time): + curr_time = self._get_event_counters.update(desc, last_time) + sql_getevents_timer.inc_time(curr_time - last_time, desc) + return curr_time cache = self._get_event_cache.setdefault(event_id, {}) @@ -704,7 +717,11 @@ class SQLBaseStore(object): check_redacted=True, get_prev_content=False): start_time = time.time() * 1000 - update_counter = self._get_event_counters.update + + def update_counter(desc, last_time): + curr_time = self._get_event_counters.update(desc, last_time) + sql_getevents_timer.inc_time(curr_time - last_time, desc) + return curr_time d = json.loads(js) start_time = update_counter("decode_json", start_time) From 399689dcc79e73de1ec6bae8aa18bd83f5618f38 Mon Sep 17 00:00:00 2001 
From: "Paul \"LeoNerd\" Evans" Date: Thu, 5 Mar 2015 16:15:21 +0000 Subject: [PATCH 051/112] Provide some process resource usage metrics --- synapse/metrics/__init__.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index e1818ce39..c00f088ff 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -13,7 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Because otherwise 'resource' collides with synapse.metrics.resource +from __future__ import absolute_import + import logging +from resource import getrusage, getpagesize, RUSAGE_SELF from .metric import CounterMetric, CallbackMetric, TimerMetric, CacheMetric @@ -76,6 +80,9 @@ def get_metrics_for(name): def render_all(): strs = [] + # TODO(paul): Internal hack + update_resource_metrics() + for name in sorted(all_metrics.keys()): try: strs += all_metrics[name].render() @@ -84,3 +91,23 @@ def render_all(): logger.exception("Failed to render %s metric", name) return "\n".join(strs) + + +# Now register some standard process-wide state metrics, to give indications of +# process resource usage + +rusage = None +PAGE_SIZE = getpagesize() + +def update_resource_metrics(): + global rusage + rusage = getrusage(RUSAGE_SELF) + +resource_metrics = get_metrics_for("process.resource") + +# msecs +resource_metrics.register_callback("utime", lambda: rusage.ru_utime * 1000) +resource_metrics.register_callback("stime", lambda: rusage.ru_stime * 1000) + +# pages +resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * PAGE_SIZE) From f9478e475bf645038b4f1f163240d7fd0ec02af0 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Fri, 6 Mar 2015 15:28:06 +0000 Subject: [PATCH 052/112] Rename Metrics' "keys" to "labels" --- synapse/federation/transaction_queue.py | 4 ++-- synapse/http/client.py | 4 ++-- synapse/http/matrixfederationclient.py | 4 ++-- synapse/http/server.py | 4 ++-- synapse/metrics/metric.py | 24 ++++++++++++------------ synapse/storage/_base.py | 6 +++--- tests/metrics/test_metric.py | 6 +++--- 7 files changed, 26 insertions(+), 26 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index b9d3f8932..ae62c69fc 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -71,11 +71,11 @@ class TransactionQueue(object): metrics.register_callback("pending_pdus", lambda: {(dest,): len(pdus[dest]) for dest in pdus.keys()}, - keys=["dest"], + labels=["dest"], ) metrics.register_callback("pending_edus", lambda: {(dest,): len(edus[dest]) for dest in edus.keys()}, - keys=["dest"], + labels=["dest"], ) def can_send_to(self, destination): diff --git a/synapse/http/client.py b/synapse/http/client.py index e40e82e80..ad2c9c05e 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -35,10 +35,10 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) outgoing_requests_counter = metrics.register_counter("outgoing_requests", - keys=["method"], + labels=["method"], ) incoming_responses_counter = metrics.register_counter("incoming_responses", - keys=["method","code"], + labels=["method","code"], ) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 009152769..6b6d79a04 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -44,10 +44,10 
@@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) outgoing_requests_counter = metrics.register_counter("outgoing_requests", - keys=["method"], + labels=["method"], ) incoming_responses_counter = metrics.register_counter("incoming_responses", - keys=["method","code"], + labels=["method","code"], ) diff --git a/synapse/http/server.py b/synapse/http/server.py index ac893bb40..35bd3a00b 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -38,10 +38,10 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) incoming_requests_counter = metrics.register_counter("incoming_requests", - keys=["method"], + labels=["method"], ) outgoing_responses_counter = metrics.register_counter("outgoing_responses", - keys=["method","code"], + labels=["method","code"], ) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 4a6ab9cd7..8ba13075f 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -25,22 +25,22 @@ def map_concat(func, items): class BaseMetric(object): - def __init__(self, name, keys=[]): + def __init__(self, name, labels=[]): self.name = name - self.keys = keys # OK not to clone as we never write it + self.labels = labels # OK not to clone as we never write it def dimension(self): - return len(self.keys) + return len(self.labels) def is_scalar(self): - return not len(self.keys) + return not len(self.labels) def _render_key(self, values): if self.is_scalar(): return "" # TODO: some kind of value escape return "{%s}" % ( - ",".join(["%s=%s" % kv for kv in zip(self.keys, values)]) + ",".join(["%s=%s" % kv for kv in zip(self.labels, values)]) ) def render(self): @@ -62,7 +62,7 @@ class CounterMetric(BaseMetric): def inc(self, *values): if len(values) != self.dimension(): - raise ValueError("Expected as many values to inc() as keys (%d)" % + raise ValueError("Expected as many values to inc() as labels (%d)" % (self.dimension()) ) @@ -85,8 +85,8 @@ class CallbackMetric(BaseMetric): it is rendered. 
Typically this is used to implement gauges that yield the size or other state of some in-memory object by actively querying it.""" - def __init__(self, name, callback, keys=[]): - super(CallbackMetric, self).__init__(name, keys=keys) + def __init__(self, name, callback, labels=[]): + super(CallbackMetric, self).__init__(name, labels=labels) self.callback = callback @@ -139,15 +139,15 @@ class CacheMetric(object): This metric generates standard metric name pairs, so that monitoring rules can easily be applied to measure hit ratio.""" - def __init__(self, name, size_callback, keys=[]): + def __init__(self, name, size_callback, labels=[]): self.name = name - self.hits = CounterMetric(name + ":hits", keys=keys) - self.misses = CounterMetric(name + ":misses", keys=keys) + self.hits = CounterMetric(name + ":hits", labels=labels) + self.misses = CounterMetric(name + ":misses", labels=labels) self.size = CallbackMetric(name + ":size", callback=size_callback, - keys=keys, + labels=labels, ) def inc_hits(self, *values): diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index d8c5a60c7..a38b60358 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -38,9 +38,9 @@ transaction_logger = logging.getLogger("synapse.storage.txn") metrics = synapse.metrics.get_metrics_for("synapse.storage") -sql_query_timer = metrics.register_timer("queries", keys=["verb"]) -sql_txn_timer = metrics.register_timer("transactions", keys=["desc"]) -sql_getevents_timer = metrics.register_timer("get_events", keys=["desc"]) +sql_query_timer = metrics.register_timer("queries", labels=["verb"]) +sql_txn_timer = metrics.register_timer("transactions", labels=["desc"]) +sql_getevents_timer = metrics.register_timer("get_events", labels=["desc"]) # TODO(paul): diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index b25520821..fefe1a586 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -43,7 +43,7 @@ class CounterMetricTestCase(unittest.TestCase): ]) def test_vector(self): - counter = CounterMetric("vector", keys=["method"]) + counter = CounterMetric("vector", labels=["method"]) # Empty counter doesn't yet know what values it has self.assertEquals(counter.render(), []) @@ -83,7 +83,7 @@ class CallbackMetricTestCase(unittest.TestCase): def test_vector(self): vals = dict() - metric = CallbackMetric("values", lambda: vals, keys=["type"]) + metric = CallbackMetric("values", lambda: vals, labels=["type"]) self.assertEquals(metric.render(), []) @@ -115,7 +115,7 @@ class TimerMetricTestCase(unittest.TestCase): ]) def test_vector(self): - metric = TimerMetric("queries", keys=["verb"]) + metric = TimerMetric("queries", labels=["verb"]) self.assertEquals(metric.render(), []) From b3a0179d64c2c3b4f57688bdcceb818d0124c858 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Fri, 6 Mar 2015 15:35:23 +0000 Subject: [PATCH 053/112] Bugfix to rendering output of vectored TimerMetrics --- synapse/metrics/metric.py | 5 ++--- tests/metrics/test_metric.py | 8 ++++---- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 8ba13075f..17cd67389 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -127,9 +127,8 @@ class TimerMetric(CounterMetric): def render_item(self, k): keystr = self._render_key(k) - return ["%s%s:count %d" % (self.name, keystr, self.counts[k]), - "%s%s:msec %d" % (self.name, keystr, self.times[k])] - + return ["%s:count%s %d" % (self.name, keystr, 
self.counts[k]), + "%s:msec%s %d" % (self.name, keystr, self.times[k])] class CacheMetric(object): diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index fefe1a586..75b6cbc92 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -124,10 +124,10 @@ class TimerMetricTestCase(unittest.TestCase): metric.inc_time(800, "INSERT") self.assertEquals(metric.render(), [ - "queries{verb=INSERT}:count 1", - "queries{verb=INSERT}:msec 800", - "queries{verb=SELECT}:count 2", - "queries{verb=SELECT}:msec 500", + "queries:count{verb=INSERT} 1", + "queries:msec{verb=INSERT} 800", + "queries:count{verb=SELECT} 2", + "queries:msec{verb=SELECT} 500", ]) From 0b96bb793e7e5d3935804b8f0ccaf415006388a9 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Fri, 6 Mar 2015 15:39:14 +0000 Subject: [PATCH 054/112] Have all @metrics.counted use a single metric name vectored on the method name, rather than a brand new scalar counter per counted method --- synapse/metrics/__init__.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index c00f088ff..443d67f41 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -63,10 +63,17 @@ class Metrics(object): def counted(self, func): """ A method decorator that registers a counter, to count invocations of this method. """ - counter = self.register_counter(func.__name__) + if not hasattr(self, "method_counter"): + self.method_counter = self.register_counter( + "calls", + labels=["method"] + ) + + counter = self.method_counter + name = func.__name__ def wrapped(*args, **kwargs): - counter.inc() + counter.inc(name) return func(*args, **kwargs) return wrapped From b0cf86731957876ca877c35bf30c6f695f1a544c Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Fri, 6 Mar 2015 16:18:21 +0000 Subject: [PATCH 055/112] Use _ instead of . 
as a metric namespacing separator, for Prometheus --- synapse/federation/transaction_queue.py | 4 ++-- synapse/handlers/presence.py | 2 +- synapse/http/client.py | 4 ++-- synapse/http/matrixfederationclient.py | 4 ++-- synapse/http/server.py | 4 ++-- synapse/metrics/__init__.py | 14 +++++++++++--- synapse/notifier.py | 2 +- synapse/storage/_base.py | 18 +++++++++++++----- 8 files changed, 34 insertions(+), 18 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index ae62c69fc..ca5bcf21c 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -69,11 +69,11 @@ class TransactionQueue(object): # HACK to get unique tx id self._next_txn_id = int(self._clock.time_msec()) - metrics.register_callback("pending_pdus", + metrics.register_callback("pendingPdus", lambda: {(dest,): len(pdus[dest]) for dest in pdus.keys()}, labels=["dest"], ) - metrics.register_callback("pending_edus", + metrics.register_callback("pendingEdus", lambda: {(dest,): len(edus[dest]) for dest in edus.keys()}, labels=["dest"], ) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 698946a48..c6d6aef53 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -136,7 +136,7 @@ class PresenceHandler(BaseHandler): self._user_cachemap = {} self._user_cachemap_latest_serial = 0 - metrics.register_callback("user_cachemap:size", + metrics.register_callback("userCachemap:size", lambda: len(self._user_cachemap) ) diff --git a/synapse/http/client.py b/synapse/http/client.py index ad2c9c05e..01737a718 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -34,10 +34,10 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) -outgoing_requests_counter = metrics.register_counter("outgoing_requests", +outgoing_requests_counter = metrics.register_counter("requests", labels=["method"], ) -incoming_responses_counter = metrics.register_counter("incoming_responses", +incoming_responses_counter = metrics.register_counter("responses", labels=["method","code"], ) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 6b6d79a04..11883d385 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -43,10 +43,10 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) -outgoing_requests_counter = metrics.register_counter("outgoing_requests", +outgoing_requests_counter = metrics.register_counter("requests", labels=["method"], ) -incoming_responses_counter = metrics.register_counter("incoming_responses", +incoming_responses_counter = metrics.register_counter("responses", labels=["method","code"], ) diff --git a/synapse/http/server.py b/synapse/http/server.py index 35bd3a00b..23708c08c 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -37,10 +37,10 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) -incoming_requests_counter = metrics.register_counter("incoming_requests", +incoming_requests_counter = metrics.register_counter("requests", labels=["method"], ) -outgoing_responses_counter = metrics.register_counter("outgoing_responses", +outgoing_responses_counter = metrics.register_counter("responses", labels=["method","code"], ) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 443d67f41..47e475acd 100644 --- a/synapse/metrics/__init__.py +++ 
b/synapse/metrics/__init__.py @@ -41,7 +41,12 @@ class Metrics(object): self.name_prefix = name def _register(self, metric_class, name, *args, **kwargs): - full_name = "%s.%s" % (self.name_prefix, name) + if "_" in name: + raise ValueError("Metric names %s is invalid as it cannot contain an underscore" + % (name) + ) + + full_name = "%s_%s" % (self.name_prefix, name) metric = metric_class(full_name, *args, **kwargs) @@ -78,10 +83,13 @@ class Metrics(object): return wrapped -def get_metrics_for(name): +def get_metrics_for(pkg_name): """ Returns a Metrics instance for conveniently creating metrics namespaced with the given name prefix. """ - return Metrics(name) + + # Convert a "package.name" to "package_name" because Prometheus doesn't + # let us use . in metric names + return Metrics(pkg_name.replace(".", "_")) def render_all(): diff --git a/synapse/notifier.py b/synapse/notifier.py index 1f7cad624..75e8152d0 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -122,7 +122,7 @@ class Notifier(object): all_listeners |= x return len(all_listeners) - metrics.register_callback("all_listeners", count_listeners) + metrics.register_callback("listeners", count_listeners) metrics.register_callback("rooms", lambda: count(bool, self.room_to_listeners.values()) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index a38b60358..35d118c58 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -40,7 +40,14 @@ metrics = synapse.metrics.get_metrics_for("synapse.storage") sql_query_timer = metrics.register_timer("queries", labels=["verb"]) sql_txn_timer = metrics.register_timer("transactions", labels=["desc"]) -sql_getevents_timer = metrics.register_timer("get_events", labels=["desc"]) +sql_getevents_timer = metrics.register_timer("getEvents", labels=["desc"]) + +caches_by_name = {} +cache_counter = metrics.register_cache( + "cache", + lambda: {(name,): len(caches_by_name[name]) for name in caches_by_name.keys()}, + labels=["name"], +) # TODO(paul): @@ -62,8 +69,9 @@ def cached(max_entries=1000): """ def wrap(orig): cache = OrderedDict() + name = orig.__name__ - counter = metrics.register_cache(orig.__name__, lambda: len(cache)) + caches_by_name[name] = cache def prefill(key, value): while len(cache) > max_entries: @@ -74,10 +82,10 @@ def cached(max_entries=1000): @defer.inlineCallbacks def wrapped(self, key): if key in cache: - counter.inc_hits() + cache_counter.inc_hits(name) defer.returnValue(cache[key]) - counter.inc_misses() + cache_counter.inc_misses(name) ret = yield orig(self, key) prefill(key, ret) defer.returnValue(ret) @@ -195,7 +203,7 @@ class SQLBaseStore(object): self._get_event_cache = LruCache(hs.config.event_cache_size) self._get_event_cache_counter = metrics.register_cache( - "get_event_cache", + "getEventCache", size_callback=lambda: len(self._get_event_cache), ) From 22b37b75dbfa69fd90705d7aa3e5650eebb89b5f Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Fri, 6 Mar 2015 17:08:25 +0000 Subject: [PATCH 056/112] Kill unused CounterMetric.fetch() method --- synapse/metrics/metric.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 17cd67389..93508eeac 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -73,9 +73,6 @@ class CounterMetric(BaseMetric): else: self.counts[values] += 1 - def fetch(self): - return dict(self.counts) - def render_item(self, k): return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])] From 
0e847540c3aa1c471a00b3200f7f18e48004b48d Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Fri, 6 Mar 2015 18:40:20 +0000 Subject: [PATCH 057/112] Prometheus needs "escaped" label values --- synapse/metrics/metric.py | 8 ++++-- tests/metrics/test_metric.py | 54 ++++++++++++++++++------------------ 2 files changed, 33 insertions(+), 29 deletions(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 93508eeac..922cb5a6f 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -35,12 +35,16 @@ class BaseMetric(object): def is_scalar(self): return not len(self.labels) + def _render_labelvalue(self, value): + # TODO: some kind of value escape + return '"%s"' % (value) + def _render_key(self, values): if self.is_scalar(): return "" - # TODO: some kind of value escape return "{%s}" % ( - ",".join(["%s=%s" % kv for kv in zip(self.labels, values)]) + ",".join(["%s=%s" % (k, self._render_labelvalue(v)) + for k, v in zip(self.labels, values)]) ) def render(self): diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index 75b6cbc92..1919630fe 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -26,20 +26,20 @@ class CounterMetricTestCase(unittest.TestCase): counter = CounterMetric("scalar") self.assertEquals(counter.render(), [ - "scalar 0", + 'scalar 0', ]) counter.inc() self.assertEquals(counter.render(), [ - "scalar 1", + 'scalar 1', ]) counter.inc() counter.inc() self.assertEquals(counter.render(), [ - "scalar 3" + 'scalar 3' ]) def test_vector(self): @@ -51,15 +51,15 @@ class CounterMetricTestCase(unittest.TestCase): counter.inc("GET") self.assertEquals(counter.render(), [ - "vector{method=GET} 1", + 'vector{method="GET"} 1', ]) counter.inc("GET") counter.inc("PUT") self.assertEquals(counter.render(), [ - "vector{method=GET} 2", - "vector{method=PUT} 1", + 'vector{method="GET"} 2', + 'vector{method="PUT"} 1', ]) @@ -71,13 +71,13 @@ class CallbackMetricTestCase(unittest.TestCase): metric = CallbackMetric("size", lambda: len(d)) self.assertEquals(metric.render(), [ - "size 0", + 'size 0', ]) d["key"] = "value" self.assertEquals(metric.render(), [ - "size 1", + 'size 1', ]) def test_vector(self): @@ -92,8 +92,8 @@ class CallbackMetricTestCase(unittest.TestCase): vals[("bar",)] = 2 self.assertEquals(metric.render(), [ - "values{type=bar} 2", - "values{type=foo} 1", + 'values{type="bar"} 2', + 'values{type="foo"} 1', ]) @@ -103,15 +103,15 @@ class TimerMetricTestCase(unittest.TestCase): metric = TimerMetric("thing") self.assertEquals(metric.render(), [ - "thing:count 0", - "thing:msec 0", + 'thing:count 0', + 'thing:msec 0', ]) metric.inc_time(500) self.assertEquals(metric.render(), [ - "thing:count 1", - "thing:msec 500", + 'thing:count 1', + 'thing:msec 500', ]) def test_vector(self): @@ -124,10 +124,10 @@ class TimerMetricTestCase(unittest.TestCase): metric.inc_time(800, "INSERT") self.assertEquals(metric.render(), [ - "queries:count{verb=INSERT} 1", - "queries:msec{verb=INSERT} 800", - "queries:count{verb=SELECT} 2", - "queries:msec{verb=SELECT} 500", + 'queries:count{verb="INSERT"} 1', + 'queries:msec{verb="INSERT"} 800', + 'queries:count{verb="SELECT"} 2', + 'queries:msec{verb="SELECT"} 500', ]) @@ -139,24 +139,24 @@ class CacheMetricTestCase(unittest.TestCase): metric = CacheMetric("cache", lambda: len(d)) self.assertEquals(metric.render(), [ - "cache:hits 0", - "cache:misses 0", - "cache:size 0", + 'cache:hits 0', + 'cache:misses 0', + 'cache:size 0', ]) metric.inc_misses() d["key"] = "value" 
self.assertEquals(metric.render(), [ - "cache:hits 0", - "cache:misses 1", - "cache:size 1", + 'cache:hits 0', + 'cache:misses 1', + 'cache:size 1', ]) metric.inc_hits() self.assertEquals(metric.render(), [ - "cache:hits 1", - "cache:misses 1", - "cache:size 1", + 'cache:hits 1', + 'cache:misses 1', + 'cache:size 1', ]) From 4d661ec0f3ccced9cb6a0b1441bfb845f70f1270 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Fri, 6 Mar 2015 19:08:47 +0000 Subject: [PATCH 058/112] Remember to emit final linefeed from /metrics page, or Prometheus gets upset --- synapse/metrics/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 47e475acd..1acaa3fd0 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -105,6 +105,8 @@ def render_all(): strs += ["# FAILED to render %s" % name] logger.exception("Failed to render %s metric", name) + strs.append("") # to generate a final CRLF + return "\n".join(strs) From 1748605c5d69cb93cbe6bb4d93060124cdc9282f Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Mon, 9 Mar 2015 18:34:20 +0000 Subject: [PATCH 059/112] Count incoming HTTP requests per servlet that responds --- synapse/federation/transport/server.py | 4 ++++ synapse/http/server.py | 18 +++++++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 6c624977d..7838a8136 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -148,6 +148,10 @@ class BaseFederationServlet(object): logger.exception("authenticate_request failed") raise defer.returnValue(response) + + # Extra logic that functools.wraps() doesn't finish + new_code.__self__ = code.__self__ + return new_code def register(self, server): diff --git a/synapse/http/server.py b/synapse/http/server.py index 23708c08c..a0d190ff7 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -38,7 +38,7 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) incoming_requests_counter = metrics.register_counter("requests", - labels=["method"], + labels=["method", "servlet"], ) outgoing_responses_counter = metrics.register_counter("responses", labels=["method","code"], @@ -122,8 +122,6 @@ class JsonResource(HttpServer, resource.Resource): This checks if anyone has registered a callback for that method and path. """ - incoming_requests_counter.inc(request.method) - code = None start = self.clock.time_msec() try: @@ -143,6 +141,15 @@ class JsonResource(HttpServer, resource.Resource): # returned response. We pass both the request and any # matched groups from the regex to the callback. 
+ callback = path_entry.callback + + servlet_instance = getattr(callback, "__self__", None) + if servlet_instance is not None: + servlet_classname = servlet_instance.__class__.__name__ + else: + servlet_classname = "%r" % callback + incoming_requests_counter.inc(request.method, servlet_classname) + args = [ urllib.unquote(u).decode("UTF-8") for u in m.groups() ] @@ -152,10 +159,7 @@ class JsonResource(HttpServer, resource.Resource): request.method, request.path ) - code, response = yield path_entry.callback( - request, - *args - ) + code, response = yield callback(request, *args) self._send_response(request, code, response) return From cbc0406be844894cac08c457544f1eb0c28435bb Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Mon, 9 Mar 2015 20:35:33 +0000 Subject: [PATCH 060/112] Export CacheMetric as hits+total, rather than hits+misses, as it's easier to derive hit ratio from that --- synapse/metrics/metric.py | 11 ++++++----- tests/metrics/test_metric.py | 6 +++--- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 922cb5a6f..6b7d3358b 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -134,7 +134,7 @@ class TimerMetric(CounterMetric): class CacheMetric(object): """A combination of two CounterMetrics, one to count cache hits and one to - count misses, and a callback metric to yield the current size. + count a total, and a callback metric to yield the current size. This metric generates standard metric name pairs, so that monitoring rules can easily be applied to measure hit ratio.""" @@ -142,8 +142,8 @@ class CacheMetric(object): def __init__(self, name, size_callback, labels=[]): self.name = name - self.hits = CounterMetric(name + ":hits", labels=labels) - self.misses = CounterMetric(name + ":misses", labels=labels) + self.hits = CounterMetric(name + ":hits", labels=labels) + self.total = CounterMetric(name + ":total", labels=labels) self.size = CallbackMetric(name + ":size", callback=size_callback, @@ -152,9 +152,10 @@ class CacheMetric(object): def inc_hits(self, *values): self.hits.inc(*values) + self.total.inc(*values) def inc_misses(self, *values): - self.misses.inc(*values) + self.total.inc(*values) def render(self): - return self.hits.render() + self.misses.render() + self.size.render() + return self.hits.render() + self.total.render() + self.size.render() diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index 1919630fe..193908b44 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -140,7 +140,7 @@ class CacheMetricTestCase(unittest.TestCase): self.assertEquals(metric.render(), [ 'cache:hits 0', - 'cache:misses 0', + 'cache:total 0', 'cache:size 0', ]) @@ -149,7 +149,7 @@ class CacheMetricTestCase(unittest.TestCase): self.assertEquals(metric.render(), [ 'cache:hits 0', - 'cache:misses 1', + 'cache:total 1', 'cache:size 1', ]) @@ -157,6 +157,6 @@ class CacheMetricTestCase(unittest.TestCase): self.assertEquals(metric.render(), [ 'cache:hits 1', - 'cache:misses 1', + 'cache:total 2', 'cache:size 1', ]) From 642f725fd74df0a921912c5284a93a81dc0d448d Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Mon, 9 Mar 2015 20:39:17 +0000 Subject: [PATCH 061/112] Pretend the 'getEvent' cache is just another cache in the set of all the others for metric --- synapse/storage/_base.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 
35d118c58..2708d3c5b 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -202,10 +202,9 @@ class SQLBaseStore(object): self._get_event_counters = PerformanceCounters() self._get_event_cache = LruCache(hs.config.event_cache_size) - self._get_event_cache_counter = metrics.register_cache( - "getEventCache", - size_callback=lambda: len(self._get_event_cache), - ) + + # Pretend the getEventCache is just another named cache + caches_by_name["*getEvent*"] = self._get_event_cache def start_profiling(self): self._previous_loop_ts = self._clock.time_msec() @@ -682,10 +681,10 @@ class SQLBaseStore(object): # Separate cache entries for each way to invoke _get_event_txn ret = cache[(check_redacted, get_prev_content, allow_rejected)] - self._get_event_cache_counter.inc_hits() + cache_counter.inc_hits("*getEvent*") return ret except KeyError: - self._get_event_cache_counter.inc_misses() + cache_counter.inc_misses("*getEvent*") pass finally: start_time = update_counter("event_cache", start_time) From f1fbe3e09f5573ac7ea9159635b02cc579e19720 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 10 Mar 2015 15:21:03 +0000 Subject: [PATCH 062/112] Rename TimerMetric to DistributionMetric; as it could count more than just time --- synapse/metrics/__init__.py | 8 +++++--- synapse/metrics/metric.py | 24 +++++++++++++----------- synapse/storage/_base.py | 14 +++++++------- tests/metrics/test_metric.py | 24 ++++++++++++------------ 4 files changed, 37 insertions(+), 33 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 1acaa3fd0..c161c17e9 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -19,7 +19,9 @@ from __future__ import absolute_import import logging from resource import getrusage, getpagesize, RUSAGE_SELF -from .metric import CounterMetric, CallbackMetric, TimerMetric, CacheMetric +from .metric import ( + CounterMetric, CallbackMetric, DistributionMetric, CacheMetric +) logger = logging.getLogger(__name__) @@ -59,8 +61,8 @@ class Metrics(object): def register_callback(self, *args, **kwargs): return self._register(CallbackMetric, *args, **kwargs) - def register_timer(self, *args, **kwargs): - return self._register(TimerMetric, *args, **kwargs) + def register_distribution(self, *args, **kwargs): + return self._register(DistributionMetric, *args, **kwargs) def register_cache(self, *args, **kwargs): return self._register(CacheMetric, *args, **kwargs) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 6b7d3358b..45d2752a2 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -101,35 +101,37 @@ class CallbackMetric(BaseMetric): for k in sorted(value.keys())] -class TimerMetric(CounterMetric): - """A combination of an event counter and a time accumulator, which counts - both the number of events and how long each one takes. +class DistributionMetric(CounterMetric): + """A combination of an event counter and an accumulator, which counts + both the number of events and accumulates the total value. Typically this + could be used to keep track of method-running times, or other distributions + of values that occur in discrete occurances. TODO(paul): Try to export some heatmap-style stats? 
""" def __init__(self, *args, **kwargs): - super(TimerMetric, self).__init__(*args, **kwargs) + super(DistributionMetric, self).__init__(*args, **kwargs) - self.times = {} + self.totals = {} # Scalar metrics are never empty if self.is_scalar(): - self.times[()] = 0 + self.totals[()] = 0 - def inc_time(self, msec, *values): + def inc_by(self, inc, *values): self.inc(*values) - if values not in self.times: - self.times[values] = msec + if values not in self.totals: + self.totals[values] = inc else: - self.times[values] += msec + self.totals[values] += inc def render_item(self, k): keystr = self._render_key(k) return ["%s:count%s %d" % (self.name, keystr, self.counts[k]), - "%s:msec%s %d" % (self.name, keystr, self.times[k])] + "%s:total%s %d" % (self.name, keystr, self.totals[k])] class CacheMetric(object): diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 2708d3c5b..104e8e3cf 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -38,9 +38,9 @@ transaction_logger = logging.getLogger("synapse.storage.txn") metrics = synapse.metrics.get_metrics_for("synapse.storage") -sql_query_timer = metrics.register_timer("queries", labels=["verb"]) -sql_txn_timer = metrics.register_timer("transactions", labels=["desc"]) -sql_getevents_timer = metrics.register_timer("getEvents", labels=["desc"]) +sql_query_timer = metrics.register_distribution("queries", labels=["verb"]) +sql_txn_timer = metrics.register_distribution("transactions", labels=["desc"]) +sql_getevents_timer = metrics.register_distribution("getEvents", labels=["desc"]) caches_by_name = {} cache_counter = metrics.register_cache( @@ -143,7 +143,7 @@ class LoggingTransaction(object): finally: msecs = (time.time() * 1000) - start sql_logger.debug("[SQL time] {%s} %f", self.name, msecs) - sql_query_timer.inc_time(msecs, sql.split()[0]) + sql_query_timer.inc_by(msecs, sql.split()[0]) class PerformanceCounters(object): @@ -268,7 +268,7 @@ class SQLBaseStore(object): self._current_txn_total_time += end - start self._txn_perf_counters.update(desc, start, end) - sql_txn_timer.inc_time(self._current_txn_total_time, desc) + sql_txn_timer.inc_by(self._current_txn_total_time, desc) with PreserveLoggingContext(): result = yield self._db_pool.runInteraction( @@ -672,7 +672,7 @@ class SQLBaseStore(object): def update_counter(desc, last_time): curr_time = self._get_event_counters.update(desc, last_time) - sql_getevents_timer.inc_time(curr_time - last_time, desc) + sql_getevents_timer.inc_by(curr_time - last_time, desc) return curr_time cache = self._get_event_cache.setdefault(event_id, {}) @@ -727,7 +727,7 @@ class SQLBaseStore(object): def update_counter(desc, last_time): curr_time = self._get_event_counters.update(desc, last_time) - sql_getevents_timer.inc_time(curr_time - last_time, desc) + sql_getevents_timer.inc_by(curr_time - last_time, desc) return curr_time d = json.loads(js) diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index 193908b44..1ca3e45a2 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -16,7 +16,7 @@ from tests import unittest from synapse.metrics.metric import ( - CounterMetric, CallbackMetric, TimerMetric, CacheMetric + CounterMetric, CallbackMetric, DistributionMetric, CacheMetric ) @@ -97,37 +97,37 @@ class CallbackMetricTestCase(unittest.TestCase): ]) -class TimerMetricTestCase(unittest.TestCase): +class DistributionMetricTestCase(unittest.TestCase): def test_scalar(self): - metric = TimerMetric("thing") + metric = DistributionMetric("thing") 
self.assertEquals(metric.render(), [ 'thing:count 0', - 'thing:msec 0', + 'thing:total 0', ]) - metric.inc_time(500) + metric.inc_by(500) self.assertEquals(metric.render(), [ 'thing:count 1', - 'thing:msec 500', + 'thing:total 500', ]) def test_vector(self): - metric = TimerMetric("queries", labels=["verb"]) + metric = DistributionMetric("queries", labels=["verb"]) self.assertEquals(metric.render(), []) - metric.inc_time(300, "SELECT") - metric.inc_time(200, "SELECT") - metric.inc_time(800, "INSERT") + metric.inc_by(300, "SELECT") + metric.inc_by(200, "SELECT") + metric.inc_by(800, "INSERT") self.assertEquals(metric.render(), [ 'queries:count{verb="INSERT"} 1', - 'queries:msec{verb="INSERT"} 800', + 'queries:total{verb="INSERT"} 800', 'queries:count{verb="SELECT"} 2', - 'queries:msec{verb="SELECT"} 500', + 'queries:total{verb="SELECT"} 500', ]) From 493e3fa0ca81b6070648e0a2c00c6c229cec92fe Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 10 Mar 2015 15:23:33 +0000 Subject: [PATCH 063/112] Don't forbid '_' in metric basenames any more, to allow things like foo_time --- synapse/metrics/__init__.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index c161c17e9..f85c6418e 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -43,11 +43,6 @@ class Metrics(object): self.name_prefix = name def _register(self, metric_class, name, *args, **kwargs): - if "_" in name: - raise ValueError("Metric names %s is invalid as it cannot contain an underscore" - % (name) - ) - full_name = "%s_%s" % (self.name_prefix, name) metric = metric_class(full_name, *args, **kwargs) From 63cb7ece62962a0004bc45cf866b87e107bc0bc9 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 10 Mar 2015 15:24:02 +0000 Subject: [PATCH 064/112] Rename the timer metrics exported by synapse.storage to append _time, so the meaning of ':total' is clearer --- synapse/storage/_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 104e8e3cf..f742fe15d 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -38,9 +38,9 @@ transaction_logger = logging.getLogger("synapse.storage.txn") metrics = synapse.metrics.get_metrics_for("synapse.storage") -sql_query_timer = metrics.register_distribution("queries", labels=["verb"]) -sql_txn_timer = metrics.register_distribution("transactions", labels=["desc"]) -sql_getevents_timer = metrics.register_distribution("getEvents", labels=["desc"]) +sql_query_timer = metrics.register_distribution("query_time", labels=["verb"]) +sql_txn_timer = metrics.register_distribution("transaction_time", labels=["desc"]) +sql_getevents_timer = metrics.register_distribution("getEvents_time", labels=["desc"]) caches_by_name = {} cache_counter = metrics.register_cache( From c1cdd7954d7cc411a5ec926148b9060e59b8a7bd Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 10 Mar 2015 15:54:16 +0000 Subject: [PATCH 065/112] Add an .inc_by() method to CounterMetric; implement DistributionMetric a neater way --- synapse/metrics/metric.py | 37 ++++++++++++++---------------------- tests/metrics/test_metric.py | 5 ++--- 2 files changed, 16 insertions(+), 26 deletions(-) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 45d2752a2..12460c99c 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -64,7 +64,7 @@ class CounterMetric(BaseMetric): if self.is_scalar(): 
self.counts[()] = 0 - def inc(self, *values): + def inc_by(self, incr, *values): if len(values) != self.dimension(): raise ValueError("Expected as many values to inc() as labels (%d)" % (self.dimension()) @@ -73,9 +73,12 @@ class CounterMetric(BaseMetric): # TODO: should assert that the tag values are all strings if values not in self.counts: - self.counts[values] = 1 + self.counts[values] = incr else: - self.counts[values] += 1 + self.counts[values] += incr + + def inc(self, *values): + self.inc_by(1, *values) def render_item(self, k): return ["%s%s %d" % (self.name, self._render_key(k), self.counts[k])] @@ -101,7 +104,7 @@ class CallbackMetric(BaseMetric): for k in sorted(value.keys())] -class DistributionMetric(CounterMetric): +class DistributionMetric(object): """A combination of an event counter and an accumulator, which counts both the number of events and accumulates the total value. Typically this could be used to keep track of method-running times, or other distributions @@ -110,28 +113,16 @@ class DistributionMetric(CounterMetric): TODO(paul): Try to export some heatmap-style stats? """ - def __init__(self, *args, **kwargs): - super(DistributionMetric, self).__init__(*args, **kwargs) - - self.totals = {} - - # Scalar metrics are never empty - if self.is_scalar(): - self.totals[()] = 0 + def __init__(self, name, *args, **kwargs): + self.counts = CounterMetric(name + ":count", **kwargs) + self.totals = CounterMetric(name + ":total", **kwargs) def inc_by(self, inc, *values): - self.inc(*values) + self.counts.inc(*values) + self.totals.inc_by(inc, *values) - if values not in self.totals: - self.totals[values] = inc - else: - self.totals[values] += inc - - def render_item(self, k): - keystr = self._render_key(k) - - return ["%s:count%s %d" % (self.name, keystr, self.counts[k]), - "%s:total%s %d" % (self.name, keystr, self.totals[k])] + def render(self): + return self.counts.render() + self.totals.render() class CacheMetric(object): diff --git a/tests/metrics/test_metric.py b/tests/metrics/test_metric.py index 1ca3e45a2..600901429 100644 --- a/tests/metrics/test_metric.py +++ b/tests/metrics/test_metric.py @@ -35,8 +35,7 @@ class CounterMetricTestCase(unittest.TestCase): 'scalar 1', ]) - counter.inc() - counter.inc() + counter.inc_by(2) self.assertEquals(counter.render(), [ 'scalar 3' @@ -125,8 +124,8 @@ class DistributionMetricTestCase(unittest.TestCase): self.assertEquals(metric.render(), [ 'queries:count{verb="INSERT"} 1', - 'queries:total{verb="INSERT"} 800', 'queries:count{verb="SELECT"} 2', + 'queries:total{verb="INSERT"} 800', 'queries:total{verb="SELECT"} 500', ]) From 2e4f0b2bd736fd70040d936145948b65b4e00b12 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 10 Mar 2015 15:29:22 +0000 Subject: [PATCH 066/112] Replace the @metrics.counted annotations in federation with specifically-written counters and distributions --- synapse/federation/federation_client.py | 27 ++++++++++++++----------- synapse/federation/federation_server.py | 26 +++++++++++++----------- synapse/metrics/__init__.py | 17 ---------------- 3 files changed, 29 insertions(+), 41 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index ef177b79c..6811a0e3d 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -36,7 +36,15 @@ import random logger = logging.getLogger(__name__) -metrics = synapse.metrics.get_metrics_for(__name__) + +# synapse.federation.federation_client is a silly name +metrics = 
synapse.metrics.get_metrics_for("synapse.federation.client") + +sent_pdus_destination_dist = metrics.register_distribution("sent_pdu_destinations") + +sent_edus_counter = metrics.register_counter("sent_edus") + +sent_queries_counter = metrics.register_counter("sent_queries", labels=["type"]) class FederationClient(FederationBase): @@ -53,7 +61,6 @@ class FederationClient(FederationBase): self._get_pdu_cache.start() @log_function - @metrics.counted def send_pdu(self, pdu, destinations): """Informs the replication layer about a new PDU generated within the home server that should be transmitted to others. @@ -70,6 +77,8 @@ class FederationClient(FederationBase): order = self._order self._order += 1 + sent_pdus_destination_dist.inc_by(len(destinations)) + logger.debug("[%s] transaction_layer.enqueue_pdu... ", pdu.event_id) # TODO, add errback, etc. @@ -81,7 +90,6 @@ class FederationClient(FederationBase): ) @log_function - @metrics.counted def send_edu(self, destination, edu_type, content): edu = Edu( origin=self.server_name, @@ -90,18 +98,18 @@ class FederationClient(FederationBase): content=content, ) + sent_edus_counter.inc() + # TODO, add errback, etc. self._transaction_queue.enqueue_edu(edu) return defer.succeed(None) @log_function - @metrics.counted def send_failure(self, failure, destination): self._transaction_queue.enqueue_failure(failure, destination) return defer.succeed(None) @log_function - @metrics.counted def make_query(self, destination, query_type, args, retry_on_dns_fail=True): """Sends a federation Query to a remote homeserver of the given type @@ -118,6 +126,8 @@ class FederationClient(FederationBase): a Deferred which will eventually yield a JSON object from the response """ + sent_queries_counter.inc(query_type) + return self.transport_layer.make_query( destination, query_type, args, retry_on_dns_fail=retry_on_dns_fail ) @@ -163,7 +173,6 @@ class FederationClient(FederationBase): @defer.inlineCallbacks @log_function - @metrics.counted def get_pdu(self, destinations, event_id, outlier=False): """Requests the PDU with given origin and ID from the remote home servers. @@ -253,7 +262,6 @@ class FederationClient(FederationBase): @defer.inlineCallbacks @log_function - @metrics.counted def get_state_for_room(self, destination, room_id, event_id): """Requests all of the `current` state PDUs for a given room from a remote home server. 
@@ -294,7 +302,6 @@ class FederationClient(FederationBase): @defer.inlineCallbacks @log_function - @metrics.counted def get_event_auth(self, destination, room_id, event_id): res = yield self.transport_layer.get_event_auth( destination, room_id, event_id, @@ -314,7 +321,6 @@ class FederationClient(FederationBase): defer.returnValue(signed_auth) @defer.inlineCallbacks - @metrics.counted def make_join(self, destinations, room_id, user_id): for destination in destinations: try: @@ -341,7 +347,6 @@ class FederationClient(FederationBase): raise RuntimeError("Failed to send to any server.") @defer.inlineCallbacks - @metrics.counted def send_join(self, destinations, pdu): for destination in destinations: try: @@ -391,7 +396,6 @@ class FederationClient(FederationBase): raise RuntimeError("Failed to send to any server.") @defer.inlineCallbacks - @metrics.counted def send_invite(self, destination, room_id, event_id, pdu): time_now = self._clock.time_msec() code, content = yield self.transport_layer.send_invite( @@ -415,7 +419,6 @@ class FederationClient(FederationBase): defer.returnValue(pdu) @defer.inlineCallbacks - @metrics.counted def query_auth(self, destination, room_id, event_id, local_auth): """ Params: diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 3216fca95..25c0014f9 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -33,7 +33,14 @@ import logging logger = logging.getLogger(__name__) -metrics = synapse.metrics.get_metrics_for(__name__) +# synapse.federation.federation_server is a silly name +metrics = synapse.metrics.get_metrics_for("synapse.federation.server") + +received_pdus_counter = metrics.register_counter("received_pdus") + +received_edus_counter = metrics.register_counter("received_edus") + +received_queries_counter = metrics.register_counter("received_queries", labels=["type"]) class FederationServer(FederationBase): @@ -75,7 +82,6 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function - @metrics.counted def on_backfill_request(self, origin, room_id, versions, limit): pdus = yield self.handler.on_backfill_request( origin, room_id, versions, limit @@ -85,10 +91,11 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function - @metrics.counted def on_incoming_transaction(self, transaction_data): transaction = Transaction(**transaction_data) + received_pdus_counter.inc_by(len(transaction.pdus)) + for p in transaction.pdus: if "unsigned" in p: unsigned = p["unsigned"] @@ -158,6 +165,8 @@ class FederationServer(FederationBase): defer.returnValue((200, response)) def received_edu(self, origin, edu_type, content): + received_edus_counter.inc() + if edu_type in self.edu_handlers: self.edu_handlers[edu_type](origin, content) else: @@ -165,7 +174,6 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function - @metrics.counted def on_context_state_request(self, origin, room_id, event_id): if event_id: pdus = yield self.handler.get_state_for_pdu( @@ -193,7 +201,6 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function - @metrics.counted def on_pdu_request(self, origin, event_id): pdu = yield self._get_persisted_pdu(origin, event_id) @@ -206,13 +213,13 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function - @metrics.counted def on_pull_request(self, origin, versions): raise NotImplementedError("Pull transactions not implemented") @defer.inlineCallbacks - @metrics.counted def 
on_query_request(self, query_type, args): + received_queries_counter.inc(query_type) + if query_type in self.query_handlers: response = yield self.query_handlers[query_type](args) defer.returnValue((200, response)) @@ -222,14 +229,12 @@ class FederationServer(FederationBase): ) @defer.inlineCallbacks - @metrics.counted def on_make_join_request(self, room_id, user_id): pdu = yield self.handler.on_make_join_request(room_id, user_id) time_now = self._clock.time_msec() defer.returnValue({"event": pdu.get_pdu_json(time_now)}) @defer.inlineCallbacks - @metrics.counted def on_invite_request(self, origin, content): pdu = self.event_from_pdu_json(content) ret_pdu = yield self.handler.on_invite_request(origin, pdu) @@ -237,7 +242,6 @@ class FederationServer(FederationBase): defer.returnValue((200, {"event": ret_pdu.get_pdu_json(time_now)})) @defer.inlineCallbacks - @metrics.counted def on_send_join_request(self, origin, content): logger.debug("on_send_join_request: content: %s", content) pdu = self.event_from_pdu_json(content) @@ -252,7 +256,6 @@ class FederationServer(FederationBase): })) @defer.inlineCallbacks - @metrics.counted def on_event_auth(self, origin, room_id, event_id): time_now = self._clock.time_msec() auth_pdus = yield self.handler.on_event_auth(event_id) @@ -261,7 +264,6 @@ class FederationServer(FederationBase): })) @defer.inlineCallbacks - @metrics.counted def on_query_auth_request(self, origin, content, event_id): """ Content is a dict with keys:: diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index f85c6418e..94164974f 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -62,23 +62,6 @@ class Metrics(object): def register_cache(self, *args, **kwargs): return self._register(CacheMetric, *args, **kwargs) - def counted(self, func): - """ A method decorator that registers a counter, to count invocations - of this method. 
""" - if not hasattr(self, "method_counter"): - self.method_counter = self.register_counter( - "calls", - labels=["method"] - ) - - counter = self.method_counter - name = func.__name__ - - def wrapped(*args, **kwargs): - counter.inc(name) - return func(*args, **kwargs) - return wrapped - def get_metrics_for(pkg_name): """ Returns a Metrics instance for conveniently creating metrics From 89ac1fa8ba55c6cbb4f1888e0542d106209d9c2a Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 10 Mar 2015 18:06:24 +0000 Subject: [PATCH 067/112] Add a counter to track total number of events served by the notifier --- synapse/notifier.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/notifier.py b/synapse/notifier.py index 75e8152d0..88873d453 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -28,6 +28,8 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) +notified_events_counter = metrics.register_counter("notified_events") + # TODO(paul): Should be shared somewhere def count(func, l): @@ -72,6 +74,7 @@ class _NotificationListener(object): try: self.deferred.callback(result) + notified_events_counter.inc_by(len(events)) except defer.AlreadyCalledError: pass From c782e893ec5ceaf7e8136f45c9e6cfa8b11ec653 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 10 Mar 2015 18:24:52 +0000 Subject: [PATCH 068/112] Neater metrics from TransactionQueue --- synapse/federation/transaction_queue.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index ca5bcf21c..99e386fa5 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -57,27 +57,29 @@ class TransactionQueue(object): # done self.pending_transactions = {} + metrics.register_callback("pending_destinations", + lambda: len(self.pending_transactions), + ) + # Is a mapping from destination -> list of # tuple(pending pdus, deferred, order) self.pending_pdus_by_dest = pdus = {} # destination -> list of tuple(edu, deferred) self.pending_edus_by_dest = edus = {} + metrics.register_callback("pending_pdus", + lambda: sum(map(len, pdus.values())), + ) + metrics.register_callback("pending_edus", + lambda: sum(map(len, edus.values())), + ) + # destination -> list of tuple(failure, deferred) self.pending_failures_by_dest = {} # HACK to get unique tx id self._next_txn_id = int(self._clock.time_msec()) - metrics.register_callback("pendingPdus", - lambda: {(dest,): len(pdus[dest]) for dest in pdus.keys()}, - labels=["dest"], - ) - metrics.register_callback("pendingEdus", - lambda: {(dest,): len(edus[dest]) for dest in edus.keys()}, - labels=["dest"], - ) - def can_send_to(self, destination): """Can we send messages to the given server? 
From e75fa8bbbf1215a89e0239f96789317b96383734 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 11 Mar 2015 18:57:35 +0000 Subject: [PATCH 069/112] Bugfix to sql_txn_timer increment - add only the per-TXN duration, not the total time ever spent since boot --- synapse/storage/_base.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index f742fe15d..40f2fc6d7 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -260,15 +260,13 @@ class SQLBaseStore(object): raise finally: end = time.time() * 1000 - transaction_logger.debug( - "[TXN END] {%s} %f", - name, end - start - ) + duration = end - start - self._current_txn_total_time += end - start + transaction_logger.debug("[TXN END] {%s} %f", name, duration) + + self._current_txn_total_time += duration self._txn_perf_counters.update(desc, start, end) - - sql_txn_timer.inc_by(self._current_txn_total_time, desc) + sql_txn_timer.inc_by(duration, desc) with PreserveLoggingContext(): result = yield self._db_pool.runInteraction( From e0214a263b610f21d59d16cf84b5a3eee8485b28 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 12 Mar 2015 15:33:53 +0000 Subject: [PATCH 070/112] Build MetricsResource as a specific HomeServer dependency --- synapse/app/homeserver.py | 16 ++++++++++------ synapse/server.py | 1 + 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 9747f7a01..3801302c6 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -47,6 +47,7 @@ from synapse.crypto import context_factory from synapse.util.logcontext import LoggingContext from synapse.rest.client.v1 import ClientV1RestResource from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource +from synapse.metrics.resource import MetricsResource, METRICS_PREFIX from daemonize import Daemonize import twisted.manhole.telnet @@ -100,6 +101,12 @@ class SynapseHomeServer(HomeServer): def build_resource_for_server_key(self): return LocalKey(self) + def build_resource_for_metrics(self): + if self.get_config().enable_metrics: + return MetricsResource(self) + else: + return None + def build_db_pool(self): return adbapi.ConnectionPool( "sqlite3", self.get_db_name(), @@ -147,12 +154,9 @@ class SynapseHomeServer(HomeServer): else: self.root_resource = Resource() - if self.get_config().enable_metrics: - from synapse.metrics.resource import ( - MetricsResource, METRICS_PREFIX - ) - - desired_tree.append((METRICS_PREFIX, MetricsResource(self))) + metrics_resource = self.get_resource_for_metrics() + if metrics_resource is not None: + desired_tree.append((METRICS_PREFIX, metrics_resource)) # ideally we'd just use getChild and putChild but getChild doesn't work # unless you give it a Request object IN ADDITION to the name :/ So diff --git a/synapse/server.py b/synapse/server.py index e3eefda4f..c7772244b 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -80,6 +80,7 @@ class BaseHomeServer(object): 'resource_for_server_key', 'resource_for_media_repository', 'resource_for_app_services', + 'resource_for_metrics', 'event_sources', 'ratelimiter', 'keyring', From a2cdd11d4a05f76b0be96d7e2de9294a9ff2cd57 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 12 Mar 2015 15:51:33 +0000 Subject: [PATCH 071/112] Fold the slightly-odd bind_port/secure_port/etc.. 
logic into SynapseHomeServer.start_listening() --- synapse/app/homeserver.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3801302c6..2ba701f53 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -219,17 +219,20 @@ class SynapseHomeServer(HomeServer): """ return "%s-%s" % (resource, path_seg) - def start_listening(self, secure_port, unsecure_port): - if secure_port is not None: + def start_listening(self): + config = self.get_config() + + if not config.no_tls and config.bind_port is not None: reactor.listenSSL( - secure_port, Site(self.root_resource), self.tls_context_factory + config.bind_port, Site(self.root_resource), self.tls_context_factory ) - logger.info("Synapse now listening on port %d", secure_port) - if unsecure_port is not None: + logger.info("Synapse now listening on port %d", config.bind_port) + + if config.unsecure_port is not None: reactor.listenTCP( - unsecure_port, Site(self.root_resource) + config.unsecure_port, Site(self.root_resource) ) - logger.info("Synapse now listening on port %d", unsecure_port) + logger.info("Synapse now listening on port %d", config.unsecure_port) def get_version_string(): @@ -381,11 +384,7 @@ def setup(config_options): f.namespace['hs'] = hs reactor.listenTCP(config.manhole, f, interface='127.0.0.1') - bind_port = config.bind_port - if config.no_tls: - bind_port = None - - hs.start_listening(bind_port, config.unsecure_port) + hs.start_listening() hs.get_pusherpool().start() hs.get_state_handler().start_caching() From b98b4c135d9738d5cf701712fc244209651cddf7 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 12 Mar 2015 16:05:46 +0000 Subject: [PATCH 072/112] Option to serve metrics from their own localhost-only TCP port instead of muxed on the main listener --- synapse/app/homeserver.py | 12 ++++++++++-- synapse/config/metrics.py | 5 +++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 2ba701f53..aa7c722ef 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -129,7 +129,8 @@ class SynapseHomeServer(HomeServer): location of the web client. This does nothing if web_client is not True. 
""" - web_client = self.get_config().webclient + config = self.get_config() + web_client = config.webclient # list containing (path_str, Resource) e.g: # [ ("/aaa/bbb/cc", Resource1), ("/aaa/dummy", Resource2) ] @@ -155,7 +156,7 @@ class SynapseHomeServer(HomeServer): self.root_resource = Resource() metrics_resource = self.get_resource_for_metrics() - if metrics_resource is not None: + if config.metrics_port is None and metrics_resource is not None: desired_tree.append((METRICS_PREFIX, metrics_resource)) # ideally we'd just use getChild and putChild but getChild doesn't work @@ -234,6 +235,13 @@ class SynapseHomeServer(HomeServer): ) logger.info("Synapse now listening on port %d", config.unsecure_port) + metrics_resource = self.get_resource_for_metrics() + if metrics_resource and config.metrics_port is not None: + reactor.listenTCP( + config.metrics_port, Site(metrics_resource), interface="127.0.0.1", + ) + logger.info("Metrics now running on 127.0.0.1 port %d", config.metrics_port) + def get_version_string(): try: diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py index 90aba10bb..901a429c7 100644 --- a/synapse/config/metrics.py +++ b/synapse/config/metrics.py @@ -20,6 +20,7 @@ class MetricsConfig(Config): def __init__(self, args): super(MetricsConfig, self).__init__(args) self.enable_metrics = args.enable_metrics + self.metrics_port = args.metrics_port @classmethod def add_arguments(cls, parser): @@ -29,3 +30,7 @@ class MetricsConfig(Config): '--enable-metrics', dest="enable_metrics", action="store_true", help="Enable collection and rendering of performance metrics" ) + metrics_group.add_argument( + '--metrics-port', metavar="PORT", type=int, + help="Separate port to accept metrics requests on (on localhost)" + ) From 128cf2daf76e5b05a4e577b60ea406fdbb6986bf Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 12 Mar 2015 16:24:38 +0000 Subject: [PATCH 073/112] Appease pep8 --- synapse/federation/transaction_queue.py | 9 ++++++--- synapse/handlers/presence.py | 5 +++-- synapse/http/client.py | 10 +++++++--- synapse/http/matrixfederationclient.py | 10 +++++++--- synapse/http/server.py | 8 +++++--- synapse/metrics/__init__.py | 3 ++- synapse/metrics/metric.py | 11 ++++++----- synapse/metrics/resource.py | 2 +- synapse/notifier.py | 15 +++++++++------ 9 files changed, 46 insertions(+), 27 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 99e386fa5..4dccd93d0 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -57,7 +57,8 @@ class TransactionQueue(object): # done self.pending_transactions = {} - metrics.register_callback("pending_destinations", + metrics.register_callback( + "pending_destinations", lambda: len(self.pending_transactions), ) @@ -67,10 +68,12 @@ class TransactionQueue(object): # destination -> list of tuple(edu, deferred) self.pending_edus_by_dest = edus = {} - metrics.register_callback("pending_pdus", + metrics.register_callback( + "pending_pdus", lambda: sum(map(len, pdus.values())), ) - metrics.register_callback("pending_edus", + metrics.register_callback( + "pending_edus", lambda: sum(map(len, edus.values())), ) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index c6d6aef53..731df0064 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -136,8 +136,9 @@ class PresenceHandler(BaseHandler): self._user_cachemap = {} self._user_cachemap_latest_serial = 0 - 
metrics.register_callback("userCachemap:size", - lambda: len(self._user_cachemap) + metrics.register_callback( + "userCachemap:size", + lambda: len(self._user_cachemap), ) def _get_or_make_usercache(self, user): diff --git a/synapse/http/client.py b/synapse/http/client.py index 01737a718..2ae1c4d3a 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -34,11 +34,13 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) -outgoing_requests_counter = metrics.register_counter("requests", +outgoing_requests_counter = metrics.register_counter( + "requests", labels=["method"], ) -incoming_responses_counter = metrics.register_counter("responses", - labels=["method","code"], +incoming_responses_counter = metrics.register_counter( + "responses", + labels=["method", "code"], ) @@ -64,9 +66,11 @@ class SimpleHttpClient(object): def _cb(response): incoming_responses_counter.inc(method, response.code) return response + def _eb(failure): incoming_responses_counter.inc(method, "ERR") return failure + d.addCallbacks(_cb, _eb) return d diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 11883d385..7fa295cad 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -43,11 +43,13 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) -outgoing_requests_counter = metrics.register_counter("requests", +outgoing_requests_counter = metrics.register_counter( + "requests", labels=["method"], ) -incoming_responses_counter = metrics.register_counter("responses", - labels=["method","code"], +incoming_responses_counter = metrics.register_counter( + "responses", + labels=["method", "code"], ) @@ -78,9 +80,11 @@ class MatrixFederationHttpAgent(_AgentBase): def _cb(response): incoming_responses_counter.inc(method, response.code) return response + def _eb(failure): incoming_responses_counter.inc(method, "ERR") return failure + d.addCallbacks(_cb, _eb) return d diff --git a/synapse/http/server.py b/synapse/http/server.py index a0d190ff7..d77cb7779 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -37,11 +37,13 @@ logger = logging.getLogger(__name__) metrics = synapse.metrics.get_metrics_for(__name__) -incoming_requests_counter = metrics.register_counter("requests", +incoming_requests_counter = metrics.register_counter( + "requests", labels=["method", "servlet"], ) -outgoing_responses_counter = metrics.register_counter("responses", - labels=["method","code"], +outgoing_responses_counter = metrics.register_counter( + "responses", + labels=["method", "code"], ) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 94164974f..7b9c9c8ba 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -85,7 +85,7 @@ def render_all(): strs += ["# FAILED to render %s" % name] logger.exception("Failed to render %s metric", name) - strs.append("") # to generate a final CRLF + strs.append("") # to generate a final CRLF return "\n".join(strs) @@ -96,6 +96,7 @@ def render_all(): rusage = None PAGE_SIZE = getpagesize() + def update_resource_metrics(): global rusage rusage = getrusage(RUSAGE_SELF) diff --git a/synapse/metrics/metric.py b/synapse/metrics/metric.py index 12460c99c..21b37748f 100644 --- a/synapse/metrics/metric.py +++ b/synapse/metrics/metric.py @@ -27,7 +27,7 @@ class BaseMetric(object): def __init__(self, name, labels=[]): self.name = name - self.labels = labels # OK not to clone as we never write 
it + self.labels = labels # OK not to clone as we never write it def dimension(self): return len(self.labels) @@ -66,8 +66,8 @@ class CounterMetric(BaseMetric): def inc_by(self, incr, *values): if len(values) != self.dimension(): - raise ValueError("Expected as many values to inc() as labels (%d)" % - (self.dimension()) + raise ValueError( + "Expected as many values to inc() as labels (%d)" % (self.dimension()) ) # TODO: should assert that the tag values are all strings @@ -135,10 +135,11 @@ class CacheMetric(object): def __init__(self, name, size_callback, labels=[]): self.name = name - self.hits = CounterMetric(name + ":hits", labels=labels) + self.hits = CounterMetric(name + ":hits", labels=labels) self.total = CounterMetric(name + ":total", labels=labels) - self.size = CallbackMetric(name + ":size", + self.size = CallbackMetric( + name + ":size", callback=size_callback, labels=labels, ) diff --git a/synapse/metrics/resource.py b/synapse/metrics/resource.py index ff7baab01..97ea797bf 100644 --- a/synapse/metrics/resource.py +++ b/synapse/metrics/resource.py @@ -26,7 +26,7 @@ class MetricsResource(Resource): isLeaf = True def __init__(self, hs): - Resource.__init__(self) # Resource is old-style, so no super() + Resource.__init__(self) # Resource is old-style, so no super() self.hs = hs diff --git a/synapse/notifier.py b/synapse/notifier.py index 88873d453..7121d659d 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -127,14 +127,17 @@ class Notifier(object): return len(all_listeners) metrics.register_callback("listeners", count_listeners) - metrics.register_callback("rooms", - lambda: count(bool, self.room_to_listeners.values()) + metrics.register_callback( + "rooms", + lambda: count(bool, self.room_to_listeners.values()), ) - metrics.register_callback("users", - lambda: count(bool, self.user_to_listeners.values()) + metrics.register_callback( + "users", + lambda: count(bool, self.user_to_listeners.values()), ) - metrics.register_callback("appservices", - lambda: count(bool, self.appservice_to_listeners.values()) + metrics.register_callback( + "appservices", + lambda: count(bool, self.appservice_to_listeners.values()), ) @log_function From 0eb7e6b9a8e4e38793b1e045ab5f0f0a4d4e6777 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 12 Mar 2015 16:39:52 +0000 Subject: [PATCH 074/112] Delete unused import of NOT_READY_YET --- synapse/metrics/resource.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/metrics/resource.py b/synapse/metrics/resource.py index 97ea797bf..0af4b3eb5 100644 --- a/synapse/metrics/resource.py +++ b/synapse/metrics/resource.py @@ -14,7 +14,6 @@ # limitations under the License. 
from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET import synapse.metrics From 05a056a409042d233972c608a8e526e8fd0af262 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 12 Mar 2015 16:45:05 +0000 Subject: [PATCH 075/112] Appease pyflakes --- synapse/metrics/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 7b9c9c8ba..dffb8a486 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -81,7 +81,7 @@ def render_all(): for name in sorted(all_metrics.keys()): try: strs += all_metrics[name].render() - except Exception as e: + except Exception: strs += ["# FAILED to render %s" % name] logger.exception("Failed to render %s metric", name) From bbb010a30fe4beb6104e7d185e722a76c88825ff Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 12 Mar 2015 16:53:12 +0000 Subject: [PATCH 076/112] More sacrifices to the pep8 gods. --- synapse/push/__init__.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index 5056374eb..0727f772a 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -133,10 +133,16 @@ class Pusher(object): ) continue if matches: - logger.info("%s matches for user %s, event %s", r['rule_id'], self.user_name, ev['event_id']) + logger.info( + "%s matches for user %s, event %s", + r['rule_id'], self.user_name, ev['event_id'] + ) defer.returnValue(actions) - logger.info("No rules match for user %s, event %s", self.user_name, ev['event_id']) + logger.info( + "No rules match for user %s, event %s", + self.user_name, ev['event_id'] + ) defer.returnValue(Pusher.DEFAULT_ACTIONS) @staticmethod From f55bd3f94bf98a9470f5cd2ccec1100dd64e573a Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 12 Mar 2015 18:56:53 +0000 Subject: [PATCH 077/112] bump dep on syweb 0.6.5 --- synapse/python_dependencies.py | 2 +- synapse/rest/appservice/v1/register.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 5fe8a825e..471d1ccd6 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) REQUIREMENTS = { "syutil>=0.0.3": ["syutil"], - "matrix_angular_sdk>=0.6.4": ["syweb>=0.6.4"], + "matrix_angular_sdk>=0.6.5": ["syweb>=0.6.5"], "Twisted==14.0.2": ["twisted==14.0.2"], "service_identity>=1.0.0": ["service_identity>=1.0.0"], "pyopenssl>=0.14": ["OpenSSL>=0.14"], diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py index a4f615977..ea24d88f7 100644 --- a/synapse/rest/appservice/v1/register.py +++ b/synapse/rest/appservice/v1/register.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2015 OpenMarket Ltd # -# Licensed under the Apache License, Version 2.0 (the "License"); +# Licensensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # @@ -89,7 +89,8 @@ def _parse_json(request): if type(content) != dict: raise SynapseError(400, "Content must be a JSON object.") return content - except ValueError: + except ValueError as e: + logger.warn(e) raise SynapseError(400, "Content not JSON.") From 58247c8b4b72e1f62d50ab3e84b51792ff4de8ba Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 11:39:57 +0000 Subject: [PATCH 078/112] Also bump dependency link version --- synapse/python_dependencies.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 471d1ccd6..8a5849d96 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -36,8 +36,8 @@ DEPENDENCY_LINKS = [ ), github_link( project="matrix-org/matrix-angular-sdk", - version="v0.6.4", - egg="matrix_angular_sdk-0.6.4", + version="v0.6.5", + egg="matrix_angular_sdk-0.6.5", ), ] From 58367a9da2539abdbfe4dc817fba5b179b95334b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 12:59:45 +0000 Subject: [PATCH 079/112] Disable registration by default --- synapse/config/registration.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/synapse/config/registration.py b/synapse/config/registration.py index cca8ab567..e603575da 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -31,3 +31,7 @@ class RegistrationConfig(Config): action='store_true', help="Disable registration of new users." ) + + @classmethod + def generate_config(cls, args, config_dir_path): + args.disable_registration = True From 69135f59aa87962b848f9f19cad6adc625821ba8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 15:23:37 +0000 Subject: [PATCH 080/112] Implement registering with shared secret. --- synapse/api/constants.py | 1 + synapse/config/registration.py | 20 +++++++++-- synapse/rest/client/v1/register.py | 57 ++++++++++++++++++++++++++++-- synapse/util/stringutils.py | 10 ++++++ 4 files changed, 83 insertions(+), 5 deletions(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 420f963d9..b16bf4247 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -60,6 +60,7 @@ class LoginType(object): EMAIL_IDENTITY = u"m.login.email.identity" RECAPTCHA = u"m.login.recaptcha" APPLICATION_SERVICE = u"m.login.application_service" + SHARED_SECRET = u"org.matrix.login.shared_secret" class EventTypes(object): diff --git a/synapse/config/registration.py b/synapse/config/registration.py index e603575da..6a0aaea92 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -15,23 +15,37 @@ from ._base import Config +from synapse.util.stringutils import random_string_with_symbols + class RegistrationConfig(Config): def __init__(self, args): super(RegistrationConfig, self).__init__(args) self.disable_registration = args.disable_registration + self.registration_shared_secret = args.registration_shared_secret @classmethod def add_arguments(cls, parser): super(RegistrationConfig, cls).add_arguments(parser) reg_group = parser.add_argument_group("registration") + reg_group.add_argument( "--disable-registration", - action='store_true', - help="Disable registration of new users." 
+ action='store_const', + const=True, + help="Disable registration of new users.", + ) + reg_group.add_argument( + "--registration-shared-secret", type=str, + help="If set, allows registration by anyone who also has the shared" + " secret, even if registration is otherwise disabled.", ) @classmethod def generate_config(cls, args, config_dir_path): - args.disable_registration = True + if args.disable_registration is None: + args.disable_registration = True + + if args.registration_shared_secret is None: + args.registration_shared_secret= random_string_with_symbols(50) diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index f5acfb945..a7c9c5bb6 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -110,14 +110,22 @@ class RegisterRestServlet(ClientV1RestServlet): login_type = register_json["type"] is_application_server = login_type == LoginType.APPLICATION_SERVICE - if self.disable_registration and not is_application_server: + is_using_shared_secret = login_type == LoginType.SHARED_SECRET + + can_register = ( + not self.disable_registration + or is_application_server + or is_using_shared_secret + ) + if not can_register: raise SynapseError(403, "Registration has been disabled") stages = { LoginType.RECAPTCHA: self._do_recaptcha, LoginType.PASSWORD: self._do_password, LoginType.EMAIL_IDENTITY: self._do_email_identity, - LoginType.APPLICATION_SERVICE: self._do_app_service + LoginType.APPLICATION_SERVICE: self._do_app_service, + LoginType.SHARED_SECRET: self._do_shared_secret, } session_info = self._get_session_info(request, session) @@ -304,6 +312,51 @@ class RegisterRestServlet(ClientV1RestServlet): "home_server": self.hs.hostname, }) + @defer.inlineCallbacks + def _do_shared_secret(self, request, register_json, session): + yield run_on_reactor() + + if "mac" not in register_json: + raise SynapseError(400, "Expected mac.") + if "user" not in register_json: + raise SynapseError(400, "Expected 'user' key.") + if "password" not in register_json: + raise SynapseError(400, "Expected 'password' key.") + + if not self.hs.config.registration_shared_secret: + raise SynapseError(400, "Shared secret registration is not enabled") + + user = register_json["user"].encode("utf-8") + + # str() because otherwise hmac complains that 'unicode' does not + # have the buffer interface + got_mac = str(register_json["mac"]) + + want_mac = hmac.new( + key=self.hs.config.registration_shared_secret, + msg=user, + digestmod=sha1, + ).hexdigest() + + password = register_json["password"].encode("utf-8") + + if compare_digest(want_mac, got_mac): + handler = self.handlers.registration_handler + user_id, token = yield handler.register( + localpart=user, + password=password, + ) + self._remove_session(session) + defer.returnValue({ + "user_id": user_id, + "access_token": token, + "home_server": self.hs.hostname, + }) + else: + raise SynapseError( + 400, "HMAC incorrect", + ) + def _parse_json(request): try: diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index ea53a8085..52e66beae 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -16,6 +16,10 @@ import random import string +_string_with_symbols = ( + string.digits + string.ascii_letters + ".,;:^&*-_+=#~@" +) + def origin_from_ucid(ucid): return ucid.split("@", 1)[1] @@ -23,3 +27,9 @@ def origin_from_ucid(ucid): def random_string(length): return ''.join(random.choice(string.ascii_letters) for _ in xrange(length)) + + +def 
random_string_with_symbols(length): + return ''.join( + random.choice(_string_with_symbols) for _ in xrange(length) + ) From dea236e4fa6dd9f42e2adc10858b118c814d28d4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 15:24:03 +0000 Subject: [PATCH 081/112] Add missing commas --- synapse/http/servlet.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index a4eb6c817..265559a3e 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -51,8 +51,8 @@ class RestServlet(object): pattern = self.PATTERN for method in ("GET", "PUT", "POST", "OPTIONS", "DELETE"): - if hasattr(self, "on_%s" % (method)): - method_handler = getattr(self, "on_%s" % (method)) + if hasattr(self, "on_%s" % (method,)): + method_handler = getattr(self, "on_%s" % (method,)) http_server.register_path(method, pattern, method_handler) else: raise NotImplementedError("RestServlet must register something.") From bcfce93ccdb28c044a8e778a4e113e053d6dfe62 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 15:25:30 +0000 Subject: [PATCH 082/112] Add 'register_new_user' script --- register_new_user | 149 ++++++++++++++++++++++++++++++++++++++++++++++ setup.py | 2 +- 2 files changed, 150 insertions(+), 1 deletion(-) create mode 100755 register_new_user diff --git a/register_new_user b/register_new_user new file mode 100755 index 000000000..e368c4481 --- /dev/null +++ b/register_new_user @@ -0,0 +1,149 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import argparse +import getpass +import hashlib +import hmac +import json +import sys +import urllib2 +import yaml + + +def request_registration(user, password, server_location, shared_secret): + mac = hmac.new( + key=shared_secret, + msg=user, + digestmod=hashlib.sha1, + ).hexdigest() + + data = { + "user": user, + "password": password, + "mac": mac, + "type": "org.matrix.login.shared_secret", + } + + server_location = server_location.rstrip("/") + + print "Sending registration request..." + + req = urllib2.Request( + "%s/_matrix/client/api/v1/register" % (server_location,), + data=json.dumps(data), + headers={'Content-Type': 'application/json'} + ) + try: + f = urllib2.urlopen(req) + f.read() + f.close() + print "Success." + except urllib2.HTTPError as e: + print "ERROR! 
Received %d %s" % (e.code, e.reason,) + if 400 <= e.code < 500: + if e.info().type == "application/json": + resp = json.load(e) + if "error" in resp: + print resp["error"] + sys.exit(1) + + +def register_new_user(user, password, server_location, shared_secret): + if not user: + try: + default_user = getpass.getuser() + except: + default_user = None + + if default_user: + user = raw_input("New user localpart [%s]: " % (default_user,)) + if not user: + user = default_user + else: + user = raw_input("New user localpart: ") + + if not user: + print "Invalid user name" + sys.exit(1) + + if not password: + password = getpass.getpass("Password: ") + + if not password: + print "Password cannot be blank." + sys.exit(1) + + confirm_password = getpass.getpass("Confirm password: ") + + if password != confirm_password: + print "Passwords do not match" + sys.exit(1) + + request_registration(user, password, server_location, shared_secret) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Used to register new users with a given home server when" + " registration has been disabled. The home server must be" + " configured with the 'registration_shared_secret' option" + " set.", + ) + parser.add_argument( + "-u", "--user", + default=None, + help="Local part of the new user. Will prompt if omitted.", + ) + parser.add_argument( + "-p", "--password", + default=None, + help="New password for user. Will prompt if omitted.", + ) + + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument( + "-c", "--config", + type=argparse.FileType('r'), + help="Path to server config file. Used to read in shared secret.", + ) + + group.add_argument( + "-k", "--shared-secret", + help="Shared secret as defined in server config file.", + ) + + parser.add_argument( + "server_url", + default="https://localhost:8480", + nargs='?', + help="URL to use to talk to the home server. Defaults to " + " 'https://localhost:8480'.", + ) + + args = parser.parse_args() + + if "config" in args and args.config: + config = yaml.safe_load(args.config) + secret = config.get("registration_shared_secret", None) + if not secret: + print "No 'registration_shared_secret' defined in config." 
+ sys.exit(1) + else: + secret = args.shared_secret + + register_new_user(args.user, args.password, args.server_url, secret) diff --git a/setup.py b/setup.py index 2d812fa38..a45dfb6e0 100755 --- a/setup.py +++ b/setup.py @@ -55,5 +55,5 @@ setup( include_package_data=True, zip_safe=False, long_description=long_description, - scripts=["synctl"], + scripts=["synctl", "register_new_user"], ) From 9266cb0a220f83061ccf99b9c031fb9383c55c7f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 15:26:00 +0000 Subject: [PATCH 083/112] PEP8 --- synapse/config/registration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 6a0aaea92..e015680d0 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -48,4 +48,4 @@ class RegistrationConfig(Config): args.disable_registration = True if args.registration_shared_secret is None: - args.registration_shared_secret= random_string_with_symbols(50) + args.registration_shared_secret = random_string_with_symbols(50) From 598c47a10835accdc2fc382fed8db803e7a33deb Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 15:29:38 +0000 Subject: [PATCH 084/112] Change default server url to match default ports --- register_new_user | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/register_new_user b/register_new_user index e368c4481..daddadc30 100755 --- a/register_new_user +++ b/register_new_user @@ -129,10 +129,10 @@ if __name__ == "__main__": parser.add_argument( "server_url", - default="https://localhost:8480", + default="https://localhost:8448", nargs='?', help="URL to use to talk to the home server. Defaults to " - " 'https://localhost:8480'.", + " 'https://localhost:8448'.", ) args = parser.parse_args() From 7393c5ce4c831640f771ca32c8f22f7f2fd7fba2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 15:33:21 +0000 Subject: [PATCH 085/112] Rename register script to 'register_new_matrix_user' --- register_new_user => register_new_matrix_user | 0 setup.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename register_new_user => register_new_matrix_user (100%) diff --git a/register_new_user b/register_new_matrix_user similarity index 100% rename from register_new_user rename to register_new_matrix_user diff --git a/setup.py b/setup.py index a45dfb6e0..45943adb2 100755 --- a/setup.py +++ b/setup.py @@ -55,5 +55,5 @@ setup( include_package_data=True, zip_safe=False, long_description=long_description, - scripts=["synctl", "register_new_user"], + scripts=["synctl", "register_new_matrix_user"], ) From 98a3825614328887ad1d855d2d1076496e49be6b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 16:49:18 +0000 Subject: [PATCH 086/112] Allow enabling of registration with --disable-registration false --- synapse/config/registration.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/synapse/config/registration.py b/synapse/config/registration.py index e015680d0..3fed8364c 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -17,12 +17,17 @@ from ._base import Config from synapse.util.stringutils import random_string_with_symbols +import distutils.util + class RegistrationConfig(Config): def __init__(self, args): super(RegistrationConfig, self).__init__(args) - self.disable_registration = args.disable_registration + + self.disable_registration = bool( + distutils.util.strtobool(str(args.disable_registration)) + ) 
self.registration_shared_secret = args.registration_shared_secret @classmethod @@ -32,8 +37,9 @@ class RegistrationConfig(Config): reg_group.add_argument( "--disable-registration", - action='store_const', const=True, + default=True, + nargs='?', help="Disable registration of new users.", ) reg_group.add_argument( From a1abee013c5cd9c1251f6544dcd0b89779c8e6e4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Mar 2015 17:06:21 +0000 Subject: [PATCH 087/112] Add note about disabling registration by default --- README.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.rst b/README.rst index c2af7c933..d1c0e9bd1 100644 --- a/README.rst +++ b/README.rst @@ -126,6 +126,17 @@ To set up your homeserver, run (in your virtualenv, as before):: Substituting your host and domain name as appropriate. +By default, registration of new users is disabled. You can either enable +registration in the config (it is then recommended to also set up CAPTCHA), or +you can use the command line to register new users:: + + $ source ~/.synapse/bin/activate + $ register_new_matrix_user -c homeserver.yaml https://localhost:8448 + New user localpart: erikj + Password: + Confirm password: + Success! + For reliable VoIP calls to be routed via this homeserver, you MUST configure a TURN server. See docs/turn-howto.rst for details. From 256fe08963895cf1078f4f57c1c2673529fa25cd Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 14 Mar 2015 00:09:21 +0000 Subject: [PATCH 088/112] uncommited WIP from MWC --- contrib/vertobot/bot.pl | 9 ++++----- contrib/vertobot/bridge.pl | 18 +++++++++++------- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/contrib/vertobot/bot.pl b/contrib/vertobot/bot.pl index 828fc4878..0430a38aa 100755 --- a/contrib/vertobot/bot.pl +++ b/contrib/vertobot/bot.pl @@ -175,13 +175,12 @@ sub on_room_message my $verto_connecting = $loop->new_future; $bot_verto->connect( %{ $CONFIG{"verto-bot"} }, - on_connected => sub { - warn("[Verto] connected to websocket"); - $verto_connecting->done($bot_verto) if not $verto_connecting->is_done; - }, on_connect_error => sub { die "Cannot connect to verto - $_[-1]" }, on_resolve_error => sub { die "Cannot resolve to verto - $_[-1]" }, -); +)->then( sub { + warn("[Verto] connected to websocket"); + $verto_connecting->done($bot_verto) if not $verto_connecting->is_done; +}); Future->needs_all( $bot_matrix->login( %{ $CONFIG{"matrix-bot"} } )->then( sub { diff --git a/contrib/vertobot/bridge.pl b/contrib/vertobot/bridge.pl index e1a07f665..a551850f4 100755 --- a/contrib/vertobot/bridge.pl +++ b/contrib/vertobot/bridge.pl @@ -86,7 +86,7 @@ sub create_virtual_user "user": "$localpart" } EOT - )->get; + )->get; warn $response->as_string if ($response->code != 200); } @@ -266,17 +266,21 @@ my $as_url = $CONFIG{"matrix-bot"}->{as_url}; Future->needs_all( $http->do_request( - method => "POST", - uri => URI->new( $CONFIG{"matrix"}->{server}."/_matrix/appservice/v1/register" ), - content_type => "application/json", - content => < "POST", + uri => URI->new( $CONFIG{"matrix"}->{server}."/_matrix/appservice/v1/register" ), + content_type => "application/json", + content => <then( sub{ + my ($response) = (@_); + warn $response->as_string if ($response->code != 200); + return Future->done; + }), $verto_connecting, )->get; From c8032aec1794cb3ad79b4b61892917768b372bd6 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 14 Mar 2015 00:12:20 +0000 Subject: [PATCH 089/112] actually uphold the bind_host parameter. 
in theory should make ipv6 binds work like bind_host: 'fe80::1%lo0' --- synapse/app/homeserver.py | 9 +++++++-- synapse/http/server.py | 7 ++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index aa7c722ef..15c454af7 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -225,13 +225,18 @@ class SynapseHomeServer(HomeServer): if not config.no_tls and config.bind_port is not None: reactor.listenSSL( - config.bind_port, Site(self.root_resource), self.tls_context_factory + config.bind_port, + Site(self.root_resource), + self.tls_context_factory, + interface=config.bind_host ) logger.info("Synapse now listening on port %d", config.bind_port) if config.unsecure_port is not None: reactor.listenTCP( - config.unsecure_port, Site(self.root_resource) + config.unsecure_port, + Site(self.root_resource), + interface=config.bind_host ) logger.info("Synapse now listening on port %d", config.unsecure_port) diff --git a/synapse/http/server.py b/synapse/http/server.py index d77cb7779..f1376ee24 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -86,6 +86,7 @@ class JsonResource(HttpServer, resource.Resource): self.clock = hs.get_clock() self.path_regexs = {} self.version_string = hs.version_string + self.hs = hs def register_path(self, method, path_pattern, callback): self.path_regexs.setdefault(method, []).append( @@ -99,7 +100,11 @@ class JsonResource(HttpServer, resource.Resource): port (int): The port to listen on. """ - reactor.listenTCP(port, server.Site(self)) + reactor.listenTCP( + port, + server.Site(self), + interface=self.hs.config.bind_host + ) # Gets called by twisted def render(self, request): From b677ff669233897c42bb55df6f9bb9c56d898eff Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Sat, 14 Mar 2015 00:22:41 +0000 Subject: [PATCH 090/112] add ToC and fix typoe --- README.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index c2af7c933..e4664ea76 100644 --- a/README.rst +++ b/README.rst @@ -1,3 +1,5 @@ +.. 
contents:: + Introduction ============ @@ -250,7 +252,8 @@ fix try re-installing from PyPI or directly from ArchLinux --------- -If running `$ synctl start` fails wit 'returned non-zero exit status 1', you will need to explicitly call Python2.7 - either running as:: +If running `$ synctl start` fails with 'returned non-zero exit status 1', +you will need to explicitly call Python2.7 - either running as:: $ python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml --pid-file homeserver.pid From ab8229479bddd89546ab486152344e80f01be820 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 00:17:25 +0000 Subject: [PATCH 091/112] Respect ban membership --- synapse/api/auth.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index b176db8ce..96963d743 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -166,6 +166,7 @@ class Auth(object): target = auth_events.get(key) target_in_room = target and target.membership == Membership.JOIN + target_banned = target and target.membership == Membership.BAN key = (EventTypes.JoinRules, "", ) join_rule_event = auth_events.get(key) @@ -194,6 +195,7 @@ class Auth(object): { "caller_in_room": caller_in_room, "caller_invited": caller_invited, + "target_banned": target_banned, "target_in_room": target_in_room, "membership": membership, "join_rule": join_rule, @@ -202,6 +204,11 @@ class Auth(object): } ) + if ban_level: + ban_level = int(ban_level) + else: + ban_level = 50 # FIXME (erikj): What should we do here? + if Membership.INVITE == membership: # TODO (erikj): We should probably handle this more intelligently # PRIVATE join rules. @@ -212,6 +219,10 @@ class Auth(object): 403, "%s not in room %s." % (event.user_id, event.room_id,) ) + elif target_banned: + raise AuthError( + 403, "%s is banned from the room" % (target_user_id,) + ) elif target_in_room: # the target is already in the room. raise AuthError(403, "%s is already in the room." % target_user_id) @@ -221,6 +232,8 @@ class Auth(object): # joined: It's a NOOP if event.user_id != target_user_id: raise AuthError(403, "Cannot force another user to join.") + elif target_banned: + raise AuthError(403, "You are banned from this room") elif join_rule == JoinRules.PUBLIC: pass elif join_rule == JoinRules.INVITE: @@ -238,6 +251,10 @@ class Auth(object): 403, "%s not in room %s." % (target_user_id, event.room_id,) ) + elif target_banned and user_level < ban_level: + raise AuthError( + 403, "You cannot unban user &s." % (target_user_id,) + ) elif target_user_id != event.user_id: if kick_level: kick_level = int(kick_level) @@ -249,11 +266,6 @@ class Auth(object): 403, "You cannot kick user %s." % target_user_id ) elif Membership.BAN == membership: - if ban_level: - ban_level = int(ban_level) - else: - ban_level = 50 # FIXME (erikj): What should we do here? - if user_level < ban_level: raise AuthError(403, "You don't have permission to ban") else: From ea8590cf6626364e9532860548a5f1ae3b172d80 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 00:18:08 +0000 Subject: [PATCH 092/112] Make context.auth_events grap auth events from current state. Otherwise auth is wrong. 
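The diff below derives the auth events from the room's current state, keyed by (event type, state key), rather than by the ids listed in the event's own auth_events. A rough standalone illustration of that filtering, using hard-coded Matrix event type strings in place of Synapse's EventTypes constants and an illustrative helper name::

    AUTH_EVENT_TYPES = (
        "m.room.create", "m.room.member", "m.room.power_levels", "m.room.join_rules",
    )

    def auth_events_from_state(current_state):
        """current_state maps (event_type, state_key) -> event."""
        return {
            key: event
            for key, event in current_state.items()
            if key[0] in AUTH_EVENT_TYPES
        }

    current_state = {
        ("m.room.create", ""): "create-event",
        ("m.room.member", "@alice:example.org"): "alice-join",
        ("m.room.topic", ""): "topic-event",  # not auth-relevant, gets dropped
    }

    assert ("m.room.topic", "") not in auth_events_from_state(current_state)
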
--- synapse/api/auth.py | 8 +++++++- synapse/state.py | 22 +++++----------------- 2 files changed, 12 insertions(+), 18 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 96963d743..4873cf9d1 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -28,6 +28,12 @@ import logging logger = logging.getLogger(__name__) +AuthEventTypes = ( + EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels, + EventTypes.JoinRules, +) + + class Auth(object): def __init__(self, hs): @@ -427,7 +433,7 @@ class Auth(object): context.auth_events = { k: v for k, v in context.current_state.items() - if v.event_id in auth_ids + if v.type in AuthEventTypes } def compute_auth_events(self, event, current_state): diff --git a/synapse/state.py b/synapse/state.py index 80cced351..345046cd8 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -21,6 +21,7 @@ from synapse.util.async import run_on_reactor from synapse.util.expiringcache import ExpiringCache from synapse.api.constants import EventTypes from synapse.api.errors import AuthError +from synapse.api.auth import AuthEventTypes from synapse.events.snapshot import EventContext from collections import namedtuple @@ -38,12 +39,6 @@ def _get_state_key_from_event(event): KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key")) -AuthEventTypes = ( - EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels, - EventTypes.JoinRules, -) - - SIZE_OF_CACHE = 1000 EVICTION_TIMEOUT_SECONDS = 20 @@ -187,17 +182,10 @@ class StateHandler(object): replaces = context.current_state[key] event.unsigned["replaces_state"] = replaces.event_id - if hasattr(event, "auth_events") and event.auth_events: - auth_ids = self.hs.get_auth().compute_auth_events( - event, context.current_state - ) - context.auth_events = { - k: v - for k, v in context.current_state.items() - if v.event_id in auth_ids - } - else: - context.auth_events = {} + context.auth_events = { + k: e for k, e in context.current_state.items() + if k[0] in AuthEventTypes + } context.prev_state_events = prev_state defer.returnValue(context) From 758d114cbce3b71f4253bf49669ec366185bfde9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 00:27:59 +0000 Subject: [PATCH 093/112] Send all membership events to the remote homeserver --- synapse/handlers/_base.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 1773fa20a..349a52b85 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -133,10 +133,9 @@ class BaseHandler(object): for k, s in context.current_state.items(): try: if k[0] == EventTypes.Member: - if s.content["membership"] == Membership.JOIN: - destinations.add( - UserID.from_string(s.state_key).domain - ) + destinations.add( + UserID.from_string(s.state_key).domain + ) except SynapseError: logger.warn( "Failed to get destination from event %s", s.event_id From e7ce5d8b062561b17df0441f5b1be38026b0d2b3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 00:30:59 +0000 Subject: [PATCH 094/112] Fix test --- tests/handlers/test_room.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py index 6417f7330..0da7eb877 100644 --- a/tests/handlers/test_room.py +++ b/tests/handlers/test_room.py @@ -219,7 +219,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase): yield room_handler.change_membership(event, context) self.federation.handle_new_event.assert_called_once_with( - 
event, destinations=set() + event, destinations=set(['red']) ) self.datastore.persist_event.assert_called_once_with( From b2e6ee5b43ebcd9e7ba37a56ed22b26c63b01370 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 13:06:23 +0000 Subject: [PATCH 095/112] Remove concept of context.auth_events, instead use context.current_state --- synapse/api/auth.py | 6 ------ synapse/events/snapshot.py | 3 +-- synapse/handlers/_base.py | 6 +++--- synapse/handlers/federation.py | 8 +++----- synapse/state.py | 17 ----------------- 5 files changed, 7 insertions(+), 33 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 4873cf9d1..90f9eb684 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -430,12 +430,6 @@ class Auth(object): builder.auth_events = auth_events_entries - context.auth_events = { - k: v - for k, v in context.current_state.items() - if v.type in AuthEventTypes - } - def compute_auth_events(self, event, current_state): if event.type == EventTypes.Create: return [] diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index 7e98bdef2..4ecadf087 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -16,8 +16,7 @@ class EventContext(object): - def __init__(self, current_state=None, auth_events=None): + def __init__(self, current_state=None): self.current_state = current_state - self.auth_events = auth_events self.state_group = None self.rejected = False diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 349a52b85..261335b27 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -90,8 +90,8 @@ class BaseHandler(object): event = builder.build() logger.debug( - "Created event %s with auth_events: %s, current state: %s", - event.event_id, context.auth_events, context.current_state, + "Created event %s with current state: %s", + event.event_id, context.current_state, ) defer.returnValue( @@ -106,7 +106,7 @@ class BaseHandler(object): # We now need to go and hit out to wherever we need to hit out to. 
if not suppress_auth: - self.auth.check(event, auth_events=context.auth_events) + self.auth.check(event, auth_events=context.current_state) yield self.store.persist_event(event, context=context) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index ae4e9b316..65cfacba2 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -464,11 +464,9 @@ class FederationHandler(BaseHandler): builder=builder, ) - self.auth.check(event, auth_events=context.auth_events) + self.auth.check(event, auth_events=context.current_state) - pdu = event - - defer.returnValue(pdu) + defer.returnValue(event) @defer.inlineCallbacks @log_function @@ -705,7 +703,7 @@ class FederationHandler(BaseHandler): ) if not auth_events: - auth_events = context.auth_events + auth_events = context.current_state logger.debug( "_handle_new_event: %s, auth_events: %s", diff --git a/synapse/state.py b/synapse/state.py index 345046cd8..ba2500d61 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -134,18 +134,6 @@ class StateHandler(object): } context.state_group = None - if hasattr(event, "auth_events") and event.auth_events: - auth_ids = self.hs.get_auth().compute_auth_events( - event, context.current_state - ) - context.auth_events = { - k: v - for k, v in context.current_state.items() - if v.event_id in auth_ids - } - else: - context.auth_events = {} - if event.is_state(): key = (event.type, event.state_key) if key in context.current_state: @@ -182,11 +170,6 @@ class StateHandler(object): replaces = context.current_state[key] event.unsigned["replaces_state"] = replaces.event_id - context.auth_events = { - k: e for k, e in context.current_state.items() - if k[0] in AuthEventTypes - } - context.prev_state_events = prev_state defer.returnValue(context) From 250e143084dd1e6d29c6378abaa3b5177323ebf9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 13:11:42 +0000 Subject: [PATCH 096/112] Use 403 instead of 400 --- synapse/rest/client/v1/register.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index a7c9c5bb6..86519fd9d 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -354,7 +354,7 @@ class RegisterRestServlet(ClientV1RestServlet): }) else: raise SynapseError( - 400, "HMAC incorrect", + 403, "HMAC incorrect", ) From 8bad40701b00cbbedd5bf1f4c32a2f7ac77b200b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 13:13:07 +0000 Subject: [PATCH 097/112] Comment. --- synapse/config/registration.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 3fed8364c..4401e774d 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -25,6 +25,9 @@ class RegistrationConfig(Config): def __init__(self, args): super(RegistrationConfig, self).__init__(args) + # `args.disable_registration` may either be a bool or a string depending + # on if the option was given a value (e.g. --disable-registration=false + # would set `args.disable_registration` to "false" not False.) self.disable_registration = bool( distutils.util.strtobool(str(args.disable_registration)) ) From c2c9471cbab2795edaced4d89ed9ef80ef129afe Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 13:16:37 +0000 Subject: [PATCH 098/112] Don't block waiting on waking up all the listeners when sending an event. 
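The hunk below replaces a yield with a plain Deferred plus an errback: the notification is still kicked off and failures are still logged, but event sending no longer waits for every listener to wake up. A stripped-down sketch of the same pattern, with a stub standing in for the real notifier::

    import logging

    from twisted.internet import defer

    logger = logging.getLogger(__name__)

    def notify_listeners(event_id):
        # stand-in for notifier.on_new_room_event(); may be slow or may fail
        return defer.succeed(None)

    @defer.inlineCallbacks
    def handle_new_event(event_id):
        d = notify_listeners(event_id)   # deliberately not yielded

        def log_failure(failure):
            logger.warn("Failed to notify about %s: %s", event_id, failure.value)

        d.addErrback(log_failure)

        # carry on immediately with the rest of the work, e.g. federation sends
        yield defer.succeed(None)

    handle_new_event("$example_event:example.org")
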
--- synapse/handlers/_base.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 1773fa20a..7f07f0521 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -142,7 +142,16 @@ class BaseHandler(object): "Failed to get destination from event %s", s.event_id ) - yield self.notifier.on_new_room_event(event, extra_users=extra_users) + # Don't block waiting on waking up all the listeners. + d = self.notifier.on_new_room_event(event, extra_users=extra_users) + + def log_failure(f): + logger.warn( + "Failed to notify about %s: %s", + event.event_id, f.value + ) + + d.addErrback(log_failure) yield federation_handler.handle_new_event( event, destinations=destinations, From 857810d2dd5e3ca6fe39b3bec7d76d75cb0c94ec Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 15:12:47 +0000 Subject: [PATCH 099/112] Revert incorrect changes to where we send events --- synapse/handlers/_base.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 261335b27..2a9d9ec13 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -133,9 +133,10 @@ class BaseHandler(object): for k, s in context.current_state.items(): try: if k[0] == EventTypes.Member: - destinations.add( - UserID.from_string(s.state_key).domain - ) + if s.content["membership"] == Membership.JOIN: + destinations.add( + UserID.from_string(s.state_key).domain + ) except SynapseError: logger.warn( "Failed to get destination from event %s", s.event_id From f1d2b94e0b6fbdde811a7deef3ab4ab7386a207f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 15:13:05 +0000 Subject: [PATCH 100/112] Copy dict of context.current_state before changing it. 
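The one-line fix below is about aliasing: building the state_events map directly from context.current_state meant the event's own (type, state_key) entry was written back into the shared dict, so later readers of the context saw state they should not. A toy, Synapse-independent demonstration of why the shallow copy matters::

    def add_state_event(current_state, key, event, copy_first):
        # copy_first=True mirrors the patched code: dict(context.current_state)
        state_events = dict(current_state) if copy_first else current_state
        state_events[key] = event
        return state_events

    shared = {("m.room.create", ""): "create-event"}
    add_state_event(shared, ("m.room.member", "@a:hs"), "join", copy_first=False)
    assert ("m.room.member", "@a:hs") in shared      # caller's dict was mutated

    shared = {("m.room.create", ""): "create-event"}
    add_state_event(shared, ("m.room.member", "@a:hs"), "join", copy_first=True)
    assert ("m.room.member", "@a:hs") not in shared  # caller's dict left alone
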
--- synapse/storage/state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 71db16d0e..456e4bd45 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -82,7 +82,7 @@ class StateStore(SQLBaseStore): if context.current_state is None: return - state_events = context.current_state + state_events = dict(context.current_state) if event.is_state(): state_events[(event.type, event.state_key)] = event From 6df319b6f07f2acce0c1b9aa19fd9f6005aee4ac Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Mar 2015 15:15:14 +0000 Subject: [PATCH 101/112] Fix tests --- tests/handlers/test_room.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py index 0da7eb877..6417f7330 100644 --- a/tests/handlers/test_room.py +++ b/tests/handlers/test_room.py @@ -219,7 +219,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase): yield room_handler.change_membership(event, context) self.federation.handle_new_event.assert_called_once_with( - event, destinations=set(['red']) + event, destinations=set() ) self.datastore.persist_event.assert_called_once_with( From be170b1426f31fb56fdb06e4b52ba3fdf617f246 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Mon, 16 Mar 2015 17:21:59 +0000 Subject: [PATCH 102/112] Add a metric for the scheduling latency of SQL queries --- synapse/storage/_base.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 40f2fc6d7..2979a8352 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -38,6 +38,8 @@ transaction_logger = logging.getLogger("synapse.storage.txn") metrics = synapse.metrics.get_metrics_for("synapse.storage") +sql_scheduling_timer = metrics.register_distribution("schedule_time") + sql_query_timer = metrics.register_distribution("query_time", labels=["verb"]) sql_txn_timer = metrics.register_distribution("transaction_time", labels=["desc"]) sql_getevents_timer = metrics.register_distribution("getEvents_time", labels=["desc"]) @@ -240,6 +242,8 @@ class SQLBaseStore(object): """Wraps the .runInteraction() method on the underlying db_pool.""" current_context = LoggingContext.current_context() + start_time = time.time() * 1000 + def inner_func(txn, *args, **kwargs): with LoggingContext("runInteraction") as context: current_context.copy_to(context) @@ -252,6 +256,7 @@ class SQLBaseStore(object): name = "%s-%x" % (desc, txn_id, ) + sql_scheduling_timer.inc_by(time.time() * 1000 - start_time) transaction_logger.debug("[TXN START] {%s}", name) try: return func(LoggingTransaction(txn, name), *args, **kwargs) From 3f7a31d3663049eeab6fff90e18d060966c51853 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Mon, 16 Mar 2015 18:31:29 +0000 Subject: [PATCH 103/112] Add a DistributionMetric to HTTP request/response processing time in the server --- synapse/http/server.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/synapse/http/server.py b/synapse/http/server.py index f1376ee24..dee49b9e1 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -46,6 +46,11 @@ outgoing_responses_counter = metrics.register_counter( labels=["method", "code"], ) +response_timer = metrics.register_distribution( + "response_time", + labels=["method", "servlet"] +) + class HttpServer(object): """ Interface for registering callbacks on a HTTP server @@ -169,6 +174,10 @@ class JsonResource(HttpServer, resource.Resource): code, 
response = yield callback(request, *args) self._send_response(request, code, response) + response_timer.inc_by( + self.clock.time_msec() - start, request.method, servlet_classname + ) + return # Huh. No one wanted to handle that? Fiiiiiine. Send 400. From 7564dac8cbb245581c4cba19717f1c30b431059e Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 17 Mar 2015 12:45:37 +0100 Subject: [PATCH 104/112] Wire up the webclient option It existed but was hardcoded to True. Give it an underscore for consistency. Also don't pull in syweb unless we're actually using the web client. --- synapse/app/homeserver.py | 8 +++++--- synapse/config/server.py | 4 +++- synapse/python_dependencies.py | 20 ++++++++++++++++---- 3 files changed, 24 insertions(+), 8 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 15c454af7..500cae05f 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -60,7 +60,6 @@ import re import resource import subprocess import sqlite3 -import syweb logger = logging.getLogger(__name__) @@ -83,6 +82,7 @@ class SynapseHomeServer(HomeServer): return AppServiceRestResource(self) def build_resource_for_web_client(self): + import syweb syweb_path = os.path.dirname(syweb.__file__) webclient_path = os.path.join(syweb_path, "webclient") return File(webclient_path) # TODO configurable? @@ -130,7 +130,7 @@ class SynapseHomeServer(HomeServer): True. """ config = self.get_config() - web_client = config.webclient + web_client = config.web_client # list containing (path_str, Resource) e.g: # [ ("/aaa/bbb/cc", Resource1), ("/aaa/dummy", Resource2) ] @@ -343,7 +343,8 @@ def setup(config_options): config.setup_logging() - check_requirements() + # check any extra requirements we have now we have a config + check_requirements(config) version_string = get_version_string() @@ -450,6 +451,7 @@ def run(hs): def main(): with LoggingContext("main"): + # check base requirements check_requirements() hs = setup(sys.argv[1:]) run(hs) diff --git a/synapse/config/server.py b/synapse/config/server.py index b042d4eed..58a828cc4 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -28,7 +28,7 @@ class ServerConfig(Config): self.unsecure_port = args.unsecure_port self.daemonize = args.daemonize self.pid_file = self.abspath(args.pid_file) - self.webclient = True + self.web_client = args.web_client self.manhole = args.manhole self.soft_file_limit = args.soft_file_limit @@ -68,6 +68,8 @@ class ServerConfig(Config): server_group.add_argument('--pid-file', default="homeserver.pid", help="When running as a daemon, the file to" " store the pid in") + server_group.add_argument('--web_client', default=True, type=bool, + help="Whether or not to serve a web client") server_group.add_argument("--manhole", metavar="PORT", dest="manhole", type=int, help="Turn on the twisted telnet manhole" diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 8a5849d96..e27ecbed2 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -5,7 +5,6 @@ logger = logging.getLogger(__name__) REQUIREMENTS = { "syutil>=0.0.3": ["syutil"], - "matrix_angular_sdk>=0.6.5": ["syweb>=0.6.5"], "Twisted==14.0.2": ["twisted==14.0.2"], "service_identity>=1.0.0": ["service_identity>=1.0.0"], "pyopenssl>=0.14": ["OpenSSL>=0.14"], @@ -18,6 +17,19 @@ REQUIREMENTS = { "pillow": ["PIL"], "pydenticon": ["pydenticon"], } +CONDITIONAL_REQUIREMENTS = { + "web_client": { + "matrix_angular_sdk>=0.6.5": ["syweb>=0.6.5"], + } +} + + +def 
requirements(config=None, include_conditional=False): + reqs = REQUIREMENTS.copy() + for key,req in CONDITIONAL_REQUIREMENTS.items(): + if (config and getattr(config, key)) or include_conditional: + reqs.update(req) + return reqs def github_link(project, version, egg): @@ -46,10 +58,10 @@ class MissingRequirementError(Exception): pass -def check_requirements(): +def check_requirements(config=None): """Checks that all the modules needed by synapse have been correctly installed and are at the correct version""" - for dependency, module_requirements in REQUIREMENTS.items(): + for dependency, module_requirements in requirements(config, include_conditional=False).items(): for module_requirement in module_requirements: if ">=" in module_requirement: module_name, required_version = module_requirement.split(">=") @@ -110,7 +122,7 @@ def list_requirements(): egg = link.split("#egg=")[1] linked.append(egg.split('-')[0]) result.append(link) - for requirement in REQUIREMENTS: + for requirement in requirements(include_conditional=True): is_linked = False for link in linked: if requirement.replace('-', '_').startswith(link): From 6d33f97703f95607da5777299404cb784e1930aa Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 17 Mar 2015 11:53:55 +0000 Subject: [PATCH 105/112] pep8 --- synapse/python_dependencies.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index e27ecbed2..6b6d5508b 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -26,7 +26,7 @@ CONDITIONAL_REQUIREMENTS = { def requirements(config=None, include_conditional=False): reqs = REQUIREMENTS.copy() - for key,req in CONDITIONAL_REQUIREMENTS.items(): + for key, req in CONDITIONAL_REQUIREMENTS.items(): if (config and getattr(config, key)) or include_conditional: reqs.update(req) return reqs @@ -61,7 +61,8 @@ class MissingRequirementError(Exception): def check_requirements(config=None): """Checks that all the modules needed by synapse have been correctly installed and are at the correct version""" - for dependency, module_requirements in requirements(config, include_conditional=False).items(): + for dependency, module_requirements in ( + requirements(config, include_conditional=False).items()): for module_requirement in module_requirements: if ">=" in module_requirement: module_name, required_version = module_requirement.split(">=") From 1489521ee5e5edd4d3d8735e1a3a5f6a9c7ee58b Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 17 Mar 2015 17:19:22 +0000 Subject: [PATCH 106/112] Be polite and ensure we use @functools.wraps() when creating a function decorator --- synapse/storage/_base.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 2979a8352..9125bb119 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -25,6 +25,7 @@ import synapse.metrics from twisted.internet import defer from collections import namedtuple, OrderedDict +import functools import simplejson as json import sys import time @@ -81,6 +82,7 @@ def cached(max_entries=1000): cache[key] = value + @functools.wraps(orig) @defer.inlineCallbacks def wrapped(self, key): if key in cache: From 93978c5e2beb22b097061e58182777e9c2257228 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Tue, 17 Mar 2015 17:24:51 +0000 Subject: [PATCH 107/112] @cached() annotate get_user_by_token() - achieves a minor DB performance improvement --- synapse/api/auth.py | 2 +- 
synapse/storage/registration.py | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 90f9eb684..64f605b96 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -388,7 +388,7 @@ class Auth(object): AuthError if no user by that token exists or the token is invalid. """ try: - ret = yield self.store.get_user_by_token(token=token) + ret = yield self.store.get_user_by_token(token) if not ret: raise StoreError(400, "Unknown token") user_info = { diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index adc8fc079..3c2f1d6a1 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -19,7 +19,7 @@ from sqlite3 import IntegrityError from synapse.api.errors import StoreError, Codes -from ._base import SQLBaseStore +from ._base import SQLBaseStore, cached class RegistrationStore(SQLBaseStore): @@ -91,6 +91,11 @@ class RegistrationStore(SQLBaseStore): "get_user_by_id", self.cursor_to_dict, query, user_id ) + @cached() + # TODO(paul): Currently there's no code to invalidate this cache. That + # means if/when we ever add internal ways to invalidate access tokens or + # change whether a user is a server admin, those will need to invoke + # store.get_user_by_token.invalidate(token) def get_user_by_token(self, token): """Get a user from the given access token. From bb246091585dc648177f70a022a185784b3ff862 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Mar 2015 11:19:47 +0000 Subject: [PATCH 108/112] Clean out event_forward_extremities table when the server rejoins the room --- synapse/handlers/federation.py | 2 ++ synapse/storage/event_federation.py | 12 ++++++++++++ 2 files changed, 14 insertions(+) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 65cfacba2..15ba417e0 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -290,6 +290,8 @@ class FederationHandler(BaseHandler): """ logger.debug("Joining %s to %s", joinee, room_id) + yield self.store.clean_room_for_join(room_id) + origin, pdu = yield self.replication_layer.make_join( target_hosts, room_id, diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 2deda8ac5..032334bfd 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -429,3 +429,15 @@ class EventFederationStore(SQLBaseStore): ) return events[:limit] + + def clean_room_for_join(self, room_id): + return self.runInteraction( + "clean_room_for_join", + self._clean_room_for_join_txn, + room_id, + ) + + def _clean_room_for_join_txn(self, txn, room_id): + query = "DELETE FROM event_forward_extremities WHERE room_id = ?" 
+ + txn.execute(query, (room_id,)) From 57976f646ffe60eeb5fafce646983641fbfd7944 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Mar 2015 11:30:04 +0000 Subject: [PATCH 109/112] Do more validation of incoming request --- synapse/rest/client/v1/register.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index 86519fd9d..ccc457924 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -316,11 +316,11 @@ class RegisterRestServlet(ClientV1RestServlet): def _do_shared_secret(self, request, register_json, session): yield run_on_reactor() - if "mac" not in register_json: + if not isinstance(register_json.get("mac", None), basestring): raise SynapseError(400, "Expected mac.") - if "user" not in register_json: + if not isinstance(register_json.get("user", None), basestring): raise SynapseError(400, "Expected 'user' key.") - if "password" not in register_json: + if not isinstance(register_json.get("password", None), basestring): raise SynapseError(400, "Expected 'password' key.") if not self.hs.config.registration_shared_secret: From f88db7ac0bc36974240db869606634b817471842 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Mar 2015 11:33:46 +0000 Subject: [PATCH 110/112] Factor out user id validation checks --- synapse/handlers/register.py | 8 ++++++++ synapse/rest/client/v1/register.py | 14 +++++--------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index cda4a8502..c25e32109 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -31,6 +31,7 @@ import base64 import bcrypt import json import logging +import urllib logger = logging.getLogger(__name__) @@ -63,6 +64,13 @@ class RegistrationHandler(BaseHandler): password_hash = bcrypt.hashpw(password, bcrypt.gensalt()) if localpart: + if localpart and urllib.quote(localpart) != localpart: + raise SynapseError( + 400, + "User ID must only contain characters which do not" + " require URL encoding." 
+ ) + user = UserID(localpart, self.hs.hostname) user_id = user.to_string() diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index ccc457924..a56834e36 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -27,7 +27,6 @@ from hashlib import sha1 import hmac import simplejson as json import logging -import urllib logger = logging.getLogger(__name__) @@ -263,14 +262,11 @@ class RegisterRestServlet(ClientV1RestServlet): ) password = register_json["password"].encode("utf-8") - desired_user_id = (register_json["user"].encode("utf-8") - if "user" in register_json else None) - if (desired_user_id - and urllib.quote(desired_user_id) != desired_user_id): - raise SynapseError( - 400, - "User ID must only contain characters which do not " + - "require URL encoding.") + desired_user_id = ( + register_json["user"].encode("utf-8") + if "user" in register_json else None + ) + handler = self.handlers.registration_handler (user_id, token) = yield handler.register( localpart=desired_user_id, From 6cc046302f634c0cca8371108b7442600c531d5e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Mar 2015 11:41:00 +0000 Subject: [PATCH 111/112] Bump version --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index f46a6df1f..e134fb241 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.8.0" +__version__ = "0.8.1" From 0a55a2b69229fff4e115f65a34e5d8db2ddd522d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Mar 2015 11:48:47 +0000 Subject: [PATCH 112/112] Update CHANGES --- CHANGES.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index f89542a2b..da31af960 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,12 @@ +Changes in synapse v0.8.1 (2015-03-18) +====================================== + +* Disable registration by default. New users can be added using the command + ``register_new_matrix_user`` or by enabling registration in the config. +* Add metrics to synapse. To enable metrics use config options + ``enable_metrics`` and ``metrics_port``. +* Fix bug where banning only kicked the user. + Changes in synapse v0.8.0 (2015-03-06) ======================================
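A note on the first changelog entry above: as the comment added in patch 097 explains, argparse hands the config a string whenever --disable-registration is given an explicit value, so the option is pushed through str(), strtobool() and bool() before use. A short standard-library-only sketch of why a naive coercion would get this wrong:

    from distutils.util import strtobool

    # "--disable-registration=false" arrives as the string "false", and a
    # non-empty string is truthy, so bool() alone would read it as True.
    assert bool("false") is True

    # The str()/strtobool()/bool() round trip used in the config gets it
    # right, and preserves an already-boolean default as well.
    assert bool(strtobool(str("false"))) is False
    assert bool(strtobool(str(True))) is True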