Mirror of https://git.anonymousland.org/anonymousland/synapse.git, synced 2025-06-15 12:29:13 -04:00
Instrument the federation/backfill part of `/messages` (#13489)
Instrument the federation/backfill part of `/messages` so it's easier to follow what's going on in Jaeger when viewing a trace.

Split out from https://github.com/matrix-org/synapse/pull/13440
Follow-up from https://github.com/matrix-org/synapse/pull/13368
Part of https://github.com/matrix-org/synapse/issues/13356
parent 5ace5d7b15
commit 0a4efbc1dd

11 changed files with 220 additions and 33 deletions
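The diff below brings `start_active_span`, `tag_args`, and `trace` from `synapse.logging.opentracing` into the events storage layer: broadly, `trace` runs the decorated function inside a span named after it, and `tag_args` records the call's arguments as tags on the span, so both show up when viewing the trace in Jaeger. A minimal sketch of the general decorator pattern, using a stand-in tracer and a hypothetical `get_events_as_list` stub rather than Synapse's actual implementation:

import asyncio
import functools
from contextlib import contextmanager


# Stand-in "tracer" so the sketch is self-contained; Synapse's helpers report
# spans to an OpenTracing-compatible backend such as Jaeger instead of printing.
@contextmanager
def _span(name, tags=None):
    print(f"start span {name!r} tags={tags!r}")
    try:
        yield
    finally:
        print(f"finish span {name!r}")


def trace_sketch(func):
    """Run the decorated coroutine inside a span named after the function.

    Illustrative only: a tag_args-style decorator would attach the call's
    arguments as tags on the active span; here both are folded into one span.
    """

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        with _span(func.__qualname__, tags={"args": repr(args), "kwargs": repr(kwargs)}):
            return await func(*args, **kwargs)

    return wrapper


@trace_sketch
async def get_events_as_list(event_ids):
    # Hypothetical stub standing in for the decorated storage method.
    return [f"event:{event_id}" for event_id in event_ids]


asyncio.run(get_events_as_list(["$abc", "$def"]))

In the actual change the two decorators are simply stacked on the existing storage methods, as the @@ -430 and @@ -1424 hunks below show.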
@@ -54,6 +54,7 @@ from synapse.logging.context import (
     current_context,
     make_deferred_yieldable,
 )
+from synapse.logging.opentracing import start_active_span, tag_args, trace
 from synapse.metrics.background_process_metrics import (
     run_as_background_process,
     wrap_as_background_process,
@@ -430,6 +431,8 @@ class EventsWorkerStore(SQLBaseStore):
 
         return {e.event_id: e for e in events}
 
+    @trace
+    @tag_args
     async def get_events_as_list(
         self,
         event_ids: Collection[str],
@@ -1090,23 +1093,42 @@ class EventsWorkerStore(SQLBaseStore):
         """
         fetched_event_ids: Set[str] = set()
         fetched_events: Dict[str, _EventRow] = {}
-        events_to_fetch = event_ids
 
-        while events_to_fetch:
-            row_map = await self._enqueue_events(events_to_fetch)
+        async def _fetch_event_ids_and_get_outstanding_redactions(
+            event_ids_to_fetch: Collection[str],
+        ) -> Collection[str]:
+            """
+            Fetch all of the given event_ids and return any associated redaction event_ids
+            that we still need to fetch in the next iteration.
+            """
+            row_map = await self._enqueue_events(event_ids_to_fetch)
 
             # we need to recursively fetch any redactions of those events
             redaction_ids: Set[str] = set()
-            for event_id in events_to_fetch:
+            for event_id in event_ids_to_fetch:
                 row = row_map.get(event_id)
                 fetched_event_ids.add(event_id)
                 if row:
                     fetched_events[event_id] = row
                     redaction_ids.update(row.redactions)
 
-            events_to_fetch = redaction_ids.difference(fetched_event_ids)
-            if events_to_fetch:
-                logger.debug("Also fetching redaction events %s", events_to_fetch)
+            event_ids_to_fetch = redaction_ids.difference(fetched_event_ids)
+            return event_ids_to_fetch
+
+        # Grab the initial list of events requested
+        event_ids_to_fetch = await _fetch_event_ids_and_get_outstanding_redactions(
+            event_ids
+        )
+        # Then go and recursively find all of the associated redactions
+        with start_active_span("recursively fetching redactions"):
+            while event_ids_to_fetch:
+                logger.debug("Also fetching redaction events %s", event_ids_to_fetch)
+
+                event_ids_to_fetch = (
+                    await _fetch_event_ids_and_get_outstanding_redactions(
+                        event_ids_to_fetch
+                    )
+                )
 
         # build a map from event_id to EventBase
         event_map: Dict[str, EventBase] = {}
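The @@ -1090 hunk above extracts the body of the redaction-chasing loop into the nested `_fetch_event_ids_and_get_outstanding_redactions` helper so that the follow-up iterations can sit inside a single `start_active_span("recursively fetching redactions")` block and appear as one named unit when viewing the trace. Stripped of the Synapse storage machinery, the underlying worklist pattern looks roughly like the sketch below, where `fetch_rows` and the dict-shaped rows are stand-ins rather than Synapse code:

def fetch_with_redactions(initial_ids, fetch_rows):
    """Fetch the requested IDs plus, transitively, any redactions they reference.

    Illustrative sketch: fetch_rows stands in for a database lookup returning
    {event_id: row}, where each row carries a "redactions" list.
    """
    fetched_ids = set()    # every ID we have already asked for
    fetched_rows = {}      # ID -> row, for the rows that actually exist
    to_fetch = set(initial_ids)
    while to_fetch:
        row_map = fetch_rows(to_fetch)
        redaction_ids = set()
        for event_id in to_fetch:
            fetched_ids.add(event_id)
            row = row_map.get(event_id)
            if row is not None:
                fetched_rows[event_id] = row
                redaction_ids.update(row.get("redactions", ()))
        # Only chase redaction IDs we have not already fetched, so the loop terminates.
        to_fetch = redaction_ids - fetched_ids
    return fetched_rows


# Tiny usage example with an in-memory "database".
db = {"$a": {"redactions": ["$r1"]}, "$r1": {"redactions": []}}
rows = fetch_with_redactions(["$a"], lambda ids: {i: db[i] for i in ids if i in db})
assert set(rows) == {"$a", "$r1"}

The refactor keeps this logic intact; it only moves the loop body into the helper (renaming `events_to_fetch` to `event_ids_to_fetch` there) and runs the loop under the named span.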
@@ -1424,6 +1446,8 @@ class EventsWorkerStore(SQLBaseStore):
 
         return {r["event_id"] for r in rows}
 
+    @trace
+    @tag_args
     async def have_seen_events(
         self, room_id: str, event_ids: Iterable[str]
     ) -> Set[str]: