Fix bug where we ignored event_edge_hashes table

This commit is contained in:
Erik Johnston 2014-12-15 13:55:22 +00:00
parent 23c7cb6220
commit c8dd3314d6
5 changed files with 10 additions and 7 deletions

View File

@@ -18,6 +18,9 @@ class dictobj(dict):
     def get_full_dict(self):
         return dict(self)
 
+    def get_pdu_json(self):
+        return dict(self)
+
 def main():
     parser = argparse.ArgumentParser()

View File

@@ -42,6 +42,7 @@ def prune_event(event):
         "auth_events",
         "origin",
         "origin_server_ts",
+        "membership",
     ]
 
     new_content = {}

View File

@@ -91,7 +91,6 @@ class DataStore(RoomMemberStore, RoomStore,
     def __init__(self, hs):
         super(DataStore, self).__init__(hs)
-        self.event_factory = hs.get_event_factory()
         self.hs = hs
 
         self.min_token_deferred = self._get_min_token()

View File

@@ -83,7 +83,6 @@ class SQLBaseStore(object):
     def __init__(self, hs):
        self.hs = hs
         self._db_pool = hs.get_db_pool()
-        self.event_factory = hs.get_event_factory()
         self._clock = hs.get_clock()
 
     @defer.inlineCallbacks

View File

@@ -177,14 +177,15 @@ class EventFederationStore(SQLBaseStore):
             retcols=["prev_event_id", "is_state"],
         )
 
+        hashes = self._get_prev_event_hashes_txn(txn, event_id)
+
         results = []
         for d in res:
-            hashes = self._get_event_reference_hashes_txn(
-                txn,
-                d["prev_event_id"]
-            )
+            edge_hash = self._get_event_reference_hashes_txn(txn, d["prev_event_id"])
+            edge_hash.update(hashes.get(d["prev_event_id"], {}))
             prev_hashes = {
-                k: encode_base64(v) for k, v in hashes.items()
+                k: encode_base64(v)
+                for k, v in edge_hash.items()
                 if k == "sha256"
             }
             results.append((d["prev_event_id"], prev_hashes, d["is_state"]))