Merge pull request #555 from matrix-org/erikj/logcontext

Allow tagging of log contexts
This commit is contained in:
Erik Johnston 2016-02-03 15:20:00 +00:00
commit c8e4d5de7f
3 changed files with 42 additions and 16 deletions

View File

@ -18,6 +18,7 @@ from ._base import BaseHandler
from synapse.streams.config import PaginationConfig from synapse.streams.config import PaginationConfig
from synapse.api.constants import Membership, EventTypes from synapse.api.constants import Membership, EventTypes
from synapse.util import unwrapFirstError from synapse.util import unwrapFirstError
from synapse.util.logcontext import LoggingContext
from twisted.internet import defer from twisted.internet import defer
@ -140,6 +141,15 @@ class SyncHandler(BaseHandler):
A Deferred SyncResult. A Deferred SyncResult.
""" """
context = LoggingContext.current_context()
if context:
if since_token is None:
context.tag = "initial_sync"
elif full_state:
context.tag = "full_state_sync"
else:
context.tag = "incremental_sync"
if timeout == 0 or since_token is None or full_state: if timeout == 0 or since_token is None or full_state:
# we are going to return immediately, so don't bother calling # we are going to return immediately, so don't bother calling
# notifier.wait_for_events. # notifier.wait_for_events.

View File

@ -41,7 +41,7 @@ metrics = synapse.metrics.get_metrics_for(__name__)
incoming_requests_counter = metrics.register_counter( incoming_requests_counter = metrics.register_counter(
"requests", "requests",
labels=["method", "servlet"], labels=["method", "servlet", "tag"],
) )
outgoing_responses_counter = metrics.register_counter( outgoing_responses_counter = metrics.register_counter(
"responses", "responses",
@ -50,23 +50,23 @@ outgoing_responses_counter = metrics.register_counter(
response_timer = metrics.register_distribution( response_timer = metrics.register_distribution(
"response_time", "response_time",
labels=["method", "servlet"] labels=["method", "servlet", "tag"]
) )
response_ru_utime = metrics.register_distribution( response_ru_utime = metrics.register_distribution(
"response_ru_utime", labels=["method", "servlet"] "response_ru_utime", labels=["method", "servlet", "tag"]
) )
response_ru_stime = metrics.register_distribution( response_ru_stime = metrics.register_distribution(
"response_ru_stime", labels=["method", "servlet"] "response_ru_stime", labels=["method", "servlet", "tag"]
) )
response_db_txn_count = metrics.register_distribution( response_db_txn_count = metrics.register_distribution(
"response_db_txn_count", labels=["method", "servlet"] "response_db_txn_count", labels=["method", "servlet", "tag"]
) )
response_db_txn_duration = metrics.register_distribution( response_db_txn_duration = metrics.register_distribution(
"response_db_txn_duration", labels=["method", "servlet"] "response_db_txn_duration", labels=["method", "servlet", "tag"]
) )
@ -226,7 +226,6 @@ class JsonResource(HttpServer, resource.Resource):
servlet_classname = servlet_instance.__class__.__name__ servlet_classname = servlet_instance.__class__.__name__
else: else:
servlet_classname = "%r" % callback servlet_classname = "%r" % callback
incoming_requests_counter.inc(request.method, servlet_classname)
args = [ args = [
urllib.unquote(u).decode("UTF-8") if u else u for u in m.groups() urllib.unquote(u).decode("UTF-8") if u else u for u in m.groups()
@ -237,21 +236,33 @@ class JsonResource(HttpServer, resource.Resource):
code, response = callback_return code, response = callback_return
self._send_response(request, code, response) self._send_response(request, code, response)
response_timer.inc_by(
self.clock.time_msec() - start, request.method, servlet_classname
)
try: try:
context = LoggingContext.current_context() context = LoggingContext.current_context()
tag = ""
if context:
tag = context.tag
incoming_requests_counter.inc(request.method, servlet_classname, tag)
response_timer.inc_by(
self.clock.time_msec() - start, request.method,
servlet_classname, tag
)
ru_utime, ru_stime = context.get_resource_usage() ru_utime, ru_stime = context.get_resource_usage()
response_ru_utime.inc_by(ru_utime, request.method, servlet_classname) response_ru_utime.inc_by(
response_ru_stime.inc_by(ru_stime, request.method, servlet_classname) ru_utime, request.method, servlet_classname, tag
)
response_ru_stime.inc_by(
ru_stime, request.method, servlet_classname, tag
)
response_db_txn_count.inc_by( response_db_txn_count.inc_by(
context.db_txn_count, request.method, servlet_classname context.db_txn_count, request.method, servlet_classname, tag
) )
response_db_txn_duration.inc_by( response_db_txn_duration.inc_by(
context.db_txn_duration, request.method, servlet_classname context.db_txn_duration, request.method, servlet_classname, tag
) )
except: except:
pass pass

View File

@ -47,7 +47,8 @@ class LoggingContext(object):
""" """
__slots__ = [ __slots__ = [
"parent_context", "name", "usage_start", "usage_end", "main_thread", "__dict__" "parent_context", "name", "usage_start", "usage_end", "main_thread",
"__dict__", "tag",
] ]
thread_local = threading.local() thread_local = threading.local()
@ -72,6 +73,9 @@ class LoggingContext(object):
def add_database_transaction(self, duration_ms): def add_database_transaction(self, duration_ms):
pass pass
def __nonzero__(self):
return False
sentinel = Sentinel() sentinel = Sentinel()
def __init__(self, name=None): def __init__(self, name=None):
@ -83,6 +87,7 @@ class LoggingContext(object):
self.db_txn_duration = 0. self.db_txn_duration = 0.
self.usage_start = None self.usage_start = None
self.main_thread = threading.current_thread() self.main_thread = threading.current_thread()
self.tag = ""
def __str__(self): def __str__(self):
return "%s@%x" % (self.name, id(self)) return "%s@%x" % (self.name, id(self))