diff --git a/changelog.d/6951.misc b/changelog.d/6951.misc
new file mode 100644
index 000000000..378f52f0a
--- /dev/null
+++ b/changelog.d/6951.misc
@@ -0,0 +1 @@
+Revert #6937.
diff --git a/synapse/storage/data_stores/main/event_federation.py b/synapse/storage/data_stores/main/event_federation.py
index dcc375b84..60c67457b 100644
--- a/synapse/storage/data_stores/main/event_federation.py
+++ b/synapse/storage/data_stores/main/event_federation.py
@@ -26,7 +26,6 @@ from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
 from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
 from synapse.storage.data_stores.main.signatures import SignatureWorkerStore
 from synapse.storage.database import Database
-from synapse.storage.engines import PostgresEngine
 from synapse.util.caches.descriptors import cached
 
 logger = logging.getLogger(__name__)
@@ -67,33 +66,6 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
         else:
             results = set()
 
-        if isinstance(self.database_engine, PostgresEngine):
-            # For efficiency we make the database do this if we can.
-
-            # We need to be a little careful with querying large amounts at
-            # once, for some reason postgres really doesn't like it. We do this
-            # by only asking for auth chain of 500 events at a time.
-            event_ids = list(event_ids)
-            chunks = [event_ids[x : x + 500] for x in range(0, len(event_ids), 500)]
-            for chunk in chunks:
-                sql = """
-                    WITH RECURSIVE auth_chain(event_id) AS (
-                        SELECT auth_id FROM event_auth WHERE event_id = ANY(?)
-                        UNION
-                        SELECT auth_id FROM event_auth
-                        INNER JOIN auth_chain USING (event_id)
-                    )
-                    SELECT event_id FROM auth_chain
-                """
-                txn.execute(sql, (chunk,))
-
-                results.update(event_id for event_id, in txn)
-
-            return list(results)
-
-        # Database doesn't necessarily support recursive CTE, so we fall
-        # back to do doing it manually.
-
         base_sql = "SELECT auth_id FROM event_auth WHERE "
 
         front = set(event_ids)
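
Context for reviewers: with the Postgres-only recursive CTE removed, every database engine takes the manual fallback that starts at the end of the hunk above (the base_sql query plus the front set that the rest of the function loops over). The sketch below is a minimal, self-contained approximation of that kind of iterative breadth-first walk over the event_auth table, written against sqlite3 purely for illustration; get_auth_chain_ids and the demo rows are hypothetical names and data, not Synapse's actual _get_auth_chain_ids_txn. The reverted CTE pushed the same walk into Postgres in chunks of 500 starting events, whereas a Python loop like this one pays one query per level of the auth chain instead.

import sqlite3


def get_auth_chain_ids(txn, event_ids, include_given=False):
    """Illustrative sketch only: breadth-first walk of the event_auth table,
    repeatedly fetching the auth events of the current frontier until no new
    event IDs turn up. Not Synapse's actual _get_auth_chain_ids_txn.
    """
    results = set(event_ids) if include_given else set()

    front = set(event_ids)
    while front:
        # Fetch the direct auth events of everything currently in the frontier.
        placeholders = ", ".join("?" for _ in front)
        txn.execute(
            "SELECT auth_id FROM event_auth WHERE event_id IN (%s)" % placeholders,
            list(front),
        )
        new_ids = {auth_id for auth_id, in txn}

        # Keep only IDs we have not seen before, so the walk terminates even if
        # the auth graph is deep or contains cycles.
        front = new_ids - results
        results |= front

    return list(results)


# Tiny demo against an in-memory database: $create <- $A <- $B <- $C.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE event_auth (event_id TEXT, auth_id TEXT)")
conn.executemany(
    "INSERT INTO event_auth VALUES (?, ?)",
    [("$C", "$B"), ("$B", "$A"), ("$A", "$create")],
)
print(sorted(get_auth_chain_ids(conn.cursor(), ["$C"])))  # ['$A', '$B', '$create']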