Merge pull request #6251 from matrix-org/michaelkaye/debug_guard_logging

Reduce debug logging overhead
Erik Johnston, 2019-10-25 10:05:44 +01:00 (committed by GitHub)
commit 44ab048cfe
7 changed files with 25 additions and 17 deletions

changelog.d/6251.misc (new file)

@@ -0,0 +1 @@
+Reduce impact of debug logging.
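The diffs below apply two related patterns: wrapping expensive debug-only serialisation in a logger.isEnabledFor(logging.DEBUG) guard, and passing arguments lazily to logger.debug (using %r) instead of eagerly formatting with % or repr(). A minimal standalone sketch of both patterns; log_event and the json.dumps call are illustrative stand-ins, not code from this PR:

import json
import logging

logger = logging.getLogger(__name__)


def log_event(event_dict):
    # Lazy %-style arguments: the %r substitution (and hence repr()) only
    # runs if a DEBUG record is actually emitted.
    logger.debug("processing event=%r", event_dict)

    # For genuinely expensive work, skip the computation entirely unless
    # DEBUG logging is enabled. The PR guards encode_canonical_json() calls
    # like this; json.dumps is used here only as a stand-in for such a call.
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("canonical form: %s", json.dumps(event_dict, sort_keys=True))

With the logger at INFO or above, neither the repr() nor the json.dumps() call is ever evaluated.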


@@ -125,8 +125,10 @@ def compute_event_signature(event_dict, signature_name, signing_key):
     redact_json = prune_event_dict(event_dict)
     redact_json.pop("age_ts", None)
     redact_json.pop("unsigned", None)
-    logger.debug("Signing event: %s", encode_canonical_json(redact_json))
+    if logger.isEnabledFor(logging.DEBUG):
+        logger.debug("Signing event: %s", encode_canonical_json(redact_json))
     redact_json = sign_json(redact_json, signature_name, signing_key)
-    logger.debug("Signed event: %s", encode_canonical_json(redact_json))
+    if logger.isEnabledFor(logging.DEBUG):
+        logger.debug("Signed event: %s", encode_canonical_json(redact_json))
     return redact_json["signatures"]


@@ -196,7 +196,7 @@ class FederationClient(FederationBase):
             dest, room_id, extremities, limit
         )

-        logger.debug("backfill transaction_data=%s", repr(transaction_data))
+        logger.debug("backfill transaction_data=%r", transaction_data)

         room_version = yield self.store.get_room_version(room_id)
         format_ver = room_version_to_event_format(room_version)


@@ -122,10 +122,10 @@ class TransportLayerClient(object):
             Deferred: Results in a dict received from the remote homeserver.
         """
         logger.debug(
-            "backfill dest=%s, room_id=%s, event_tuples=%s, limit=%s",
+            "backfill dest=%s, room_id=%s, event_tuples=%r, limit=%s",
             destination,
             room_id,
-            repr(event_tuples),
+            event_tuples,
             str(limit),
         )


@@ -112,9 +112,14 @@ class SyncRestServlet(RestServlet):
         full_state = parse_boolean(request, "full_state", default=False)

         logger.debug(
-            "/sync: user=%r, timeout=%r, since=%r,"
-            " set_presence=%r, filter_id=%r, device_id=%r"
-            % (user, timeout, since, set_presence, filter_id, device_id)
+            "/sync: user=%r, timeout=%r, since=%r, "
+            "set_presence=%r, filter_id=%r, device_id=%r",
+            user,
+            timeout,
+            since,
+            set_presence,
+            filter_id,
+            device_id,
         )

         request_key = (user, timeout, since, filter_id, full_state, device_id)


@@ -117,8 +117,10 @@ class PreviewUrlResource(DirectServeResource):
                     pattern = entry[attrib]
                     value = getattr(url_tuple, attrib)
                     logger.debug(
-                        ("Matching attrib '%s' with value '%s' against" " pattern '%s'")
-                        % (attrib, value, pattern)
+                        "Matching attrib '%s' with value '%s' against" " pattern '%s'",
+                        attrib,
+                        value,
+                        pattern,
                     )

                     if value is None:
@@ -186,7 +188,7 @@ class PreviewUrlResource(DirectServeResource):
             media_info = yield self._download_url(url, user)

-        logger.debug("got media_info of '%s'" % media_info)
+        logger.debug("got media_info of '%s'", media_info)

         if _is_media(media_info["media_type"]):
             file_id = media_info["filesystem_id"]
@@ -254,7 +256,7 @@ class PreviewUrlResource(DirectServeResource):
                 og["og:image:width"] = dims["width"]
                 og["og:image:height"] = dims["height"]
             else:
-                logger.warn("Couldn't get dims for %s" % og["og:image"])
+                logger.warn("Couldn't get dims for %s", og["og:image"])

             og["og:image"] = "mxc://%s/%s" % (
                 self.server_name,
@@ -268,7 +270,7 @@ class PreviewUrlResource(DirectServeResource):
             logger.warn("Failed to find any OG data in %s", url)
             og = {}

-        logger.debug("Calculated OG for %s as %s" % (url, og))
+        logger.debug("Calculated OG for %s as %s", url, og)

         jsonog = json.dumps(og)
@@ -297,7 +299,7 @@ class PreviewUrlResource(DirectServeResource):
         with self.media_storage.store_into_file(file_info) as (f, fname, finish):
             try:
-                logger.debug("Trying to get url '%s'" % url)
+                logger.debug("Trying to get url '%s'", url)
                 length, headers, uri, code = yield self.client.get_file(
                     url, output_stream=f, max_size=self.max_spider_size
                 )


@@ -364,9 +364,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
         )

     def _get_backfill_events(self, txn, room_id, event_list, limit):
-        logger.debug(
-            "_get_backfill_events: %s, %s, %s", room_id, repr(event_list), limit
-        )
+        logger.debug("_get_backfill_events: %s, %r, %s", room_id, event_list, limit)

         event_results = set()