#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2014-2016 OpenMarket Ltd
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#

import argparse
import datetime
import html
import json
import sqlite3

import pydot

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import make_event_from_dict
from synapse.util.frozenutils import unfreeze


def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None:
    """
    Generate a dot and SVG file for a graph of events in the room based on the
    topological ordering by reading from a Synapse SQLite database.
    """
    conn = sqlite3.connect(db_name)

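    # Look up the room's version so the raw event JSON can be deserialised
    # with the correct event format.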
sql = "SELECT room_version FROM rooms WHERE room_id = ?"
|
|
|
|
c = conn.execute(sql, (room_id,))
|
|
|
|
room_version = KNOWN_ROOM_VERSIONS[c.fetchone()[0]]
|
|
|
|
|
2015-01-08 05:53:03 -05:00
|
|
|
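    # Fetch the raw JSON and internal metadata for every event in the room.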
    sql = (
        "SELECT json, internal_metadata FROM event_json as j "
        "INNER JOIN events as e ON e.event_id = j.event_id "
        "WHERE j.room_id = ?"
    )

    args = [room_id]

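    # Optionally restrict the query to the most recent `limit` events.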
    if limit:
        sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"

        args.append(limit)

    c = conn.execute(sql, args)

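    # Deserialise each row into an event object for this room version.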
    events = [
        make_event_from_dict(json.loads(e[0]), room_version, json.loads(e[1]))
        for e in c.fetchall()
    ]

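    # Sort by depth to approximate the topological ordering of the event DAG.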
    events.sort(key=lambda e: e.depth)

    node_map = {}
    state_groups = {}

    graph = pydot.Dot(graph_name="Test")

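    # Add one graph node per event, labelled with the event's key fields.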
    for event in events:
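        # Record the event's state group (if it has one) so that events
        # sharing a state group can be clustered later.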
        c = conn.execute(
            "SELECT state_group FROM event_to_state_groups WHERE event_id = ?",
            (event.event_id,),
        )

        res = c.fetchone()
        state_group = res[0] if res else None

        if state_group is not None:
            state_groups.setdefault(state_group, []).append(event.event_id)

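        # Convert origin_server_ts (milliseconds) into a human-readable timestamp.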
        t = datetime.datetime.fromtimestamp(
            float(event.origin_server_ts) / 1000
        ).strftime("%Y-%m-%d %H:%M:%S,%f")

        content = json.dumps(unfreeze(event.get_dict()["content"]))

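        # Build a Graphviz HTML-like label; the content is HTML-escaped so it
        # cannot break out of the label markup.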
        label = (
            "<"
            "<b>%(name)s </b><br/>"
            "Type: <b>%(type)s </b><br/>"
            "State key: <b>%(state_key)s </b><br/>"
            "Content: <b>%(content)s </b><br/>"
            "Time: <b>%(time)s </b><br/>"
            "Depth: <b>%(depth)s </b><br/>"
            "State group: %(state_group)s<br/>"
            ">"
        ) % {
            "name": event.event_id,
            "type": event.type,
            "state_key": event.get("state_key", None),
            "content": html.escape(content, quote=True),
            "time": t,
            "depth": event.depth,
            "state_group": state_group,
        }

        node = pydot.Node(name=event.event_id, label=label)

        node_map[event.event_id] = node
        graph.add_node(node)

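    # Add an edge from each event to each of its prev_events, creating
    # placeholder nodes for prev_events that were not among the fetched events.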
    for event in events:
        for prev_id in event.prev_event_ids():
            try:
                end_node = node_map[prev_id]
            except KeyError:
                end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")

                node_map[prev_id] = end_node
                graph.add_node(end_node)

            edge = pydot.Edge(node_map[event.event_id], end_node)
            graph.add_edge(edge)

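    # Group events that share a state group into a cluster, skipping groups
    # containing only a single event.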
    for group, event_ids in state_groups.items():
        if len(event_ids) <= 1:
            continue

        cluster = pydot.Cluster(str(group), label=f"<State Group: {str(group)}>")

        for event_id in event_ids:
            cluster.add_node(node_map[event_id])

        graph.add_subgraph(cluster)

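    # Render the graph: a raw dot file plus an SVG laid out with Graphviz's
    # `dot` program.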
graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
|
|
|
|
graph.write_svg("%s.svg" % file_prefix, prog="dot")
|
|
|
|
|
2014-12-10 11:10:25 -05:00
|
|
|
|
|
|
|
if __name__ == "__main__":
|
|
|
|
parser = argparse.ArgumentParser(
|
|
|
|
description="Generate a PDU graph for a given room by talking "
|
2022-06-10 08:30:14 -04:00
|
|
|
"to the given Synapse SQLite file to get the list of PDUs. \n"
|
2019-06-20 05:32:02 -04:00
|
|
|
"Requires pydot."
|
2014-12-10 11:10:25 -05:00
|
|
|
)
|
|
|
|
parser.add_argument(
|
2019-06-20 05:32:02 -04:00
|
|
|
"-p",
|
|
|
|
"--prefix",
|
|
|
|
dest="prefix",
|
2015-01-08 05:53:03 -05:00
|
|
|
help="String to prefix output files with",
|
2019-06-20 05:32:02 -04:00
|
|
|
default="graph_output",
|
2014-12-10 11:10:25 -05:00
|
|
|
)
|
2019-06-20 05:32:02 -04:00
|
|
|
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
|
|
|
parser.add_argument("db")
|
|
|
|
parser.add_argument("room")
|
2014-12-10 11:10:25 -05:00
|
|
|
|
|
|
|
args = parser.parse_args()
|
|
|
|
|
2015-01-08 05:53:03 -05:00
|
|
|
make_graph(args.db, args.room, args.prefix, args.limit)
|