Mirror of https://git.anonymousland.org/anonymousland/synapse.git (synced 2024-10-01 11:49:51 -04:00)
Remove more dead/broken dev scripts (#12355)
parent f0b03186d9
commit 80839a44f1
changelog.d/12355.misc (new file, 1 line)
@@ -0,0 +1 @@
+Remove broken and unused development scripts.
mypy.ini (3 lines removed)
@@ -24,11 +24,8 @@ files =
 exclude = (?x)
   ^(
   |scripts-dev/build_debian_packages.py
-  |scripts-dev/definitions.py
   |scripts-dev/federation_client.py
-  |scripts-dev/hash_history.py
   |scripts-dev/release.py
-  |scripts-dev/tail-synapse.py
   |synapse/_scripts/export_signing_key.py
   |synapse/_scripts/move_remote_media_to_new_store.py
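Aside: `exclude` here is one verbose regular expression. The `(?x)` flag makes the regex engine ignore unescaped whitespace and newlines, so each `|path` line simply adds one alternative inside the `^( ... )` group, and deleting a line puts that file back under mypy's checking. A minimal sketch of the mechanism, with two paths kept for illustration (the group is presumably closed with `)$` further down the file, outside this hunk):

import re

# Sketch of mypy's verbose-regex exclude: with (?x), whitespace and
# newlines are ignored, so each "|path" line is one alternative in
# the group.
exclude = re.compile(
    r"""(?x)
    ^(
    |scripts-dev/federation_client.py
    |scripts-dev/release.py
    )$
    """
)

assert exclude.match("scripts-dev/release.py")
assert not exclude.match("synapse/storage/databases/main/events.py")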
scripts-dev/definitions.py (deleted, 208 lines)
@@ -1,208 +0,0 @@
#! /usr/bin/python

import argparse
import ast
import os
import re
import sys

import yaml


class DefinitionVisitor(ast.NodeVisitor):
    def __init__(self):
        super().__init__()
        self.functions = {}
        self.classes = {}
        self.names = {}
        self.attrs = set()
        self.definitions = {
            "def": self.functions,
            "class": self.classes,
            "names": self.names,
            "attrs": self.attrs,
        }

    def visit_Name(self, node):
        self.names.setdefault(type(node.ctx).__name__, set()).add(node.id)

    def visit_Attribute(self, node):
        self.attrs.add(node.attr)
        for child in ast.iter_child_nodes(node):
            self.visit(child)

    def visit_ClassDef(self, node):
        visitor = DefinitionVisitor()
        self.classes[node.name] = visitor.definitions
        for child in ast.iter_child_nodes(node):
            visitor.visit(child)

    def visit_FunctionDef(self, node):
        visitor = DefinitionVisitor()
        self.functions[node.name] = visitor.definitions
        for child in ast.iter_child_nodes(node):
            visitor.visit(child)


def non_empty(defs):
    functions = {name: non_empty(f) for name, f in defs["def"].items()}
    classes = {name: non_empty(f) for name, f in defs["class"].items()}
    result = {}
    if functions:
        result["def"] = functions
    if classes:
        result["class"] = classes
    names = defs["names"]
    uses = []
    for name in names.get("Load", ()):
        if name not in names.get("Param", ()) and name not in names.get("Store", ()):
            uses.append(name)
    uses.extend(defs["attrs"])
    if uses:
        result["uses"] = uses
    result["names"] = names
    result["attrs"] = defs["attrs"]
    return result


def definitions_in_code(input_code):
    input_ast = ast.parse(input_code)
    visitor = DefinitionVisitor()
    visitor.visit(input_ast)
    definitions = non_empty(visitor.definitions)
    return definitions


def definitions_in_file(filepath):
    with open(filepath) as f:
        return definitions_in_code(f.read())


def defined_names(prefix, defs, names):
    for name, funcs in defs.get("def", {}).items():
        names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
        defined_names(prefix + name + ".", funcs, names)

    for name, funcs in defs.get("class", {}).items():
        names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
        defined_names(prefix + name + ".", funcs, names)


def used_names(prefix, item, defs, names):
    for name, funcs in defs.get("def", {}).items():
        used_names(prefix + name + ".", name, funcs, names)

    for name, funcs in defs.get("class", {}).items():
        used_names(prefix + name + ".", name, funcs, names)

    path = prefix.rstrip(".")
    for used in defs.get("uses", ()):
        if used in names:
            if item:
                names[item].setdefault("uses", []).append(used)
            names[used].setdefault("used", {}).setdefault(item, []).append(path)


if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="Find definitions.")
    parser.add_argument(
        "--unused", action="store_true", help="Only list unused definitions"
    )
    parser.add_argument(
        "--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
    )
    parser.add_argument(
        "--pattern", action="append", metavar="REGEXP", help="Search for a pattern"
    )
    parser.add_argument(
        "directories",
        nargs="+",
        metavar="DIR",
        help="Directories to search for definitions",
    )
    parser.add_argument(
        "--referrers",
        default=0,
        type=int,
        help="Include referrers up to the given depth",
    )
    parser.add_argument(
        "--referred",
        default=0,
        type=int,
        help="Include referred down to the given depth",
    )
    parser.add_argument(
        "--format", default="yaml", help="Output format, one of 'yaml' or 'dot'"
    )
    args = parser.parse_args()

    definitions = {}
    for directory in args.directories:
        for root, _, files in os.walk(directory):
            for filename in files:
                if filename.endswith(".py"):
                    filepath = os.path.join(root, filename)
                    definitions[filepath] = definitions_in_file(filepath)

    names = {}
    for filepath, defs in definitions.items():
        defined_names(filepath + ":", defs, names)

    for filepath, defs in definitions.items():
        used_names(filepath + ":", None, defs, names)

    patterns = [re.compile(pattern) for pattern in args.pattern or ()]
    ignore = [re.compile(pattern) for pattern in args.ignore or ()]

    result = {}
    for name, definition in names.items():
        if patterns and not any(pattern.match(name) for pattern in patterns):
            continue
        if ignore and any(pattern.match(name) for pattern in ignore):
            continue
        if args.unused and definition.get("used"):
            continue
        result[name] = definition

    referrer_depth = args.referrers
    referrers = set()
    while referrer_depth:
        referrer_depth -= 1
        for entry in result.values():
            for used_by in entry.get("used", ()):
                referrers.add(used_by)
        for name, definition in names.items():
            if name not in referrers:
                continue
            if ignore and any(pattern.match(name) for pattern in ignore):
                continue
            result[name] = definition

    referred_depth = args.referred
    referred = set()
    while referred_depth:
        referred_depth -= 1
        for entry in result.values():
            for uses in entry.get("uses", ()):
                referred.add(uses)
        for name, definition in names.items():
            if name not in referred:
                continue
            if ignore and any(pattern.match(name) for pattern in ignore):
                continue
            result[name] = definition

    if args.format == "yaml":
        yaml.dump(result, sys.stdout, default_flow_style=False)
    elif args.format == "dot":
        print("digraph {")
        for name, entry in result.items():
            print(name)
            for used_by in entry.get("used", ()):
                if used_by in result:
                    print(used_by, "->", name)
        print("}")
    else:
        raise ValueError("Unknown format %r" % (args.format))
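What the deleted tool did: recursively parse every .py file under the given directories with `ast`, record which names each scope defines and which it loads, then report definitions with no recorded uses (invoked, judging by the argparse setup above, as something like `python scripts-dev/definitions.py --unused synapse/`). A minimal self-contained sketch of that core idea:

import ast

# Tiny version of the unused-definition hunt: collect function names
# that are defined but never loaded anywhere in the module.
source = """
def used():
    pass

def unused():
    pass

used()
"""

tree = ast.parse(source)
defined = {node.name for node in ast.walk(tree) if isinstance(node, ast.FunctionDef)}
loaded = {
    node.id
    for node in ast.walk(tree)
    if isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load)
}
print(defined - loaded)  # {'unused'}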
scripts-dev/hash_history.py (deleted, 81 lines)
@@ -1,81 +0,0 @@
import sqlite3
import sys

from unpaddedbase64 import decode_base64, encode_base64

from synapse.crypto.event_signing import (
    add_event_pdu_content_hash,
    compute_pdu_event_reference_hash,
)
from synapse.federation.units import Pdu
from synapse.storage._base import SQLBaseStore
from synapse.storage.pdu import PduStore
from synapse.storage.signatures import SignatureStore


class Store:
    _get_pdu_tuples = PduStore.__dict__["_get_pdu_tuples"]
    _get_pdu_content_hashes_txn = SignatureStore.__dict__["_get_pdu_content_hashes_txn"]
    _get_prev_pdu_hashes_txn = SignatureStore.__dict__["_get_prev_pdu_hashes_txn"]
    _get_pdu_origin_signatures_txn = SignatureStore.__dict__[
        "_get_pdu_origin_signatures_txn"
    ]
    _store_pdu_content_hash_txn = SignatureStore.__dict__["_store_pdu_content_hash_txn"]
    _store_pdu_reference_hash_txn = SignatureStore.__dict__[
        "_store_pdu_reference_hash_txn"
    ]
    _store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
    simple_insert_txn = SQLBaseStore.__dict__["simple_insert_txn"]


store = Store()


def select_pdus(cursor):
    cursor.execute("SELECT pdu_id, origin FROM pdus ORDER BY depth ASC")

    ids = cursor.fetchall()

    pdu_tuples = store._get_pdu_tuples(cursor, ids)

    pdus = [Pdu.from_pdu_tuple(p) for p in pdu_tuples]

    reference_hashes = {}

    for pdu in pdus:
        try:
            if pdu.prev_pdus:
                print("PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
                for pdu_id, origin, hashes in pdu.prev_pdus:
                    ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
                    hashes[ref_alg] = encode_base64(ref_hsh)
                    store._store_prev_pdu_hash_txn(
                        cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh
                    )
                print("SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
            pdu = add_event_pdu_content_hash(pdu)
            ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
            reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
            store._store_pdu_reference_hash_txn(
                cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh
            )

            for alg, hsh_base64 in pdu.hashes.items():
                print(alg, hsh_base64)
                store._store_pdu_content_hash_txn(
                    cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64)
                )

        except Exception:
            print("FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus)


def main():
    conn = sqlite3.connect(sys.argv[1])
    cursor = conn.cursor()
    select_pdus(cursor)
    conn.commit()


if __name__ == "__main__":
    main()
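hash_history.py was a one-off migration that back-filled content and reference hashes for PDUs in an ancient SQLite schema; the `synapse.storage.pdu` module it imports was removed long ago, which is why the script counts as broken. The hashes it stored travel as unpadded base64, the Matrix wire convention. A quick sketch of that round trip, assuming the `unpaddedbase64` package imported above:

from unpaddedbase64 import decode_base64, encode_base64

# Unpadded base64 drops the trailing "=" padding on encode and
# tolerates its absence on decode, matching how Matrix serialises
# hashes and signatures.
digest = b"\x00\x01"
encoded = encode_base64(digest)
assert encoded == "AAE"          # standard base64 would give "AAE="
assert decode_base64(encoded) == digest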
scripts-dev/tail-synapse.py (deleted, 67 lines)
@@ -1,67 +0,0 @@
import collections
import json
import sys
import time

import requests

Entry = collections.namedtuple("Entry", "name position rows")

ROW_TYPES = {}


def row_type_for_columns(name, column_names):
    column_names = tuple(column_names)
    row_type = ROW_TYPES.get((name, column_names))
    if row_type is None:
        row_type = collections.namedtuple(name, column_names)
        ROW_TYPES[(name, column_names)] = row_type
    return row_type


def parse_response(content):
    streams = json.loads(content)
    result = {}
    for name, value in streams.items():
        row_type = row_type_for_columns(name, value["field_names"])
        position = value["position"]
        rows = [row_type(*row) for row in value["rows"]]
        result[name] = Entry(name, position, rows)
    return result


def replicate(server, streams):
    return parse_response(
        requests.get(
            server + "/_synapse/replication", verify=False, params=streams
        ).content
    )


def main():
    server = sys.argv[1]

    streams = None
    while not streams:
        try:
            streams = {
                row.name: row.position
                for row in replicate(server, {"streams": "-1"})["streams"].rows
            }
        except requests.exceptions.ConnectionError:
            time.sleep(0.1)

    while True:
        try:
            results = replicate(server, streams)
        except Exception:
            sys.stdout.write("connection_lost(" + repr(streams) + ")\n")
            break
        for update in results.values():
            for row in update.rows:
                sys.stdout.write(repr(row) + "\n")
            streams[update.name] = update.position


if __name__ == "__main__":
    main()
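tail-synapse.py polled an `/_synapse/replication` HTTP endpoint that current Synapse no longer serves, printing each new replication row and advancing its per-stream positions. The one reusable idea is in `parse_response`: rows arrive as plain JSON arrays, and a namedtuple type cached per (stream, columns) pair turns them into readable records. A self-contained sketch with a made-up payload:

import collections
import json

Entry = collections.namedtuple("Entry", "name position rows")

# Hypothetical payload shaped like the old replication response:
# each stream reports a position, its column names, and raw rows.
payload = json.dumps(
    {
        "events": {
            "position": 42,
            "field_names": ["event_id", "room_id"],
            "rows": [["$abc", "!room:example.com"]],
        }
    }
)

result = {}
for name, value in json.loads(payload).items():
    row_type = collections.namedtuple(name, value["field_names"])
    rows = [row_type(*row) for row in value["rows"]]
    result[name] = Entry(name, value["position"], rows)

print(result["events"].rows[0].event_id)  # -> $abc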