2014-10-28 12:42:35 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-06 08:21:39 -05:00
|
|
|
# Copyright 2014, 2015 OpenMarket Ltd
|
2014-10-28 12:42:35 -04:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2015-05-14 08:45:48 -04:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
2015-05-01 08:06:26 -04:00
|
|
|
from ._base import SQLBaseStore, cached
|
2014-10-28 13:15:32 -04:00
|
|
|
from syutil.base64util import encode_base64
|
2014-10-28 12:42:35 -04:00
|
|
|
|
|
|
|
import logging
|
2015-05-21 10:37:43 -04:00
|
|
|
from Queue import PriorityQueue, Empty
|
2014-10-28 12:42:35 -04:00
|
|
|
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class EventFederationStore(SQLBaseStore):
|
2014-11-12 10:02:31 -05:00
|
|
|
""" Responsible for storing and serving up the various graphs associated
|
|
|
|
with an event. Including the main event graph and the auth chains for an
|
|
|
|
event.
|
|
|
|
|
|
|
|
Also has methods for getting the front (latest) and back (oldest) edges
|
|
|
|
of the event graphs. These are used to generate the parents for new events
|
|
|
|
and backfilling from another server respectively.
|
|
|
|
"""
|
2014-10-28 12:42:35 -04:00
|
|
|
|
2015-02-23 09:35:23 -05:00
|
|
|
def get_auth_chain(self, event_ids):
    """Fetch the full auth chain events for the given event IDs.

    Resolves the chain of auth event IDs first, then loads the
    corresponding events.
    """
    deferred = self.get_auth_chain_ids(event_ids)
    deferred.addCallback(self._get_events)
    return deferred
|
2014-11-07 10:35:53 -05:00
|
|
|
|
2014-12-16 13:57:36 -05:00
|
|
|
def get_auth_chain_ids(self, event_ids):
    """Fetch the IDs of all events in the auth chain of the given
    events (not including the given events themselves).
    """
    return self.runInteraction(
        "get_auth_chain_ids", self._get_auth_chain_ids_txn, event_ids
    )
|
|
|
|
|
2015-02-23 09:35:23 -05:00
|
|
|
def _get_auth_chain_ids_txn(self, txn, event_ids):
    """Breadth-first walk of the event_auth table collecting every
    auth event ID reachable from ``event_ids``.

    The starting events themselves are not included in the result.
    """
    sql = "SELECT auth_id FROM event_auth WHERE event_id = ?"

    seen = set()
    frontier = set(event_ids)
    while frontier:
        discovered = set()
        for eid in frontier:
            txn.execute(sql, (eid,))
            discovered.update(row[0] for row in txn.fetchall())

        # Only keep walking from IDs we have not already visited,
        # so cycles in the auth graph cannot loop forever.
        frontier = discovered - seen
        seen.update(frontier)

    return list(seen)
|
2014-11-07 05:42:31 -05:00
|
|
|
|
2014-10-31 05:59:02 -04:00
|
|
|
def get_oldest_events_in_room(self, room_id):
    """Return the backward-extremity event IDs for the given room."""
    return self.runInteraction(
        "get_oldest_events_in_room",
        self._get_oldest_events_in_room_txn,
        room_id,
    )
|
|
|
|
|
2015-05-11 13:01:31 -04:00
|
|
|
def get_oldest_events_with_depth_in_room(self, room_id):
    """Return a dict of backward-extremity event ID to depth for the
    given room.
    """
    return self.runInteraction(
        "get_oldest_events_with_depth_in_room",
        self.get_oldest_events_with_depth_in_room_txn,
        room_id,
    )
|
|
|
|
|
|
|
|
def get_oldest_events_with_depth_in_room_txn(self, txn, room_id):
    """Transaction body: map each backward extremity of the room to the
    maximum depth of the (known) events that point at it.
    """
    sql = (
        "SELECT b.event_id, MAX(e.depth) FROM events as e"
        " INNER JOIN event_edges as g"
        " ON g.event_id = e.event_id AND g.room_id = e.room_id"
        " INNER JOIN event_backward_extremities as b"
        " ON g.prev_event_id = b.event_id AND g.room_id = b.room_id"
        " WHERE b.room_id = ? AND g.is_state is ?"
        " GROUP BY b.event_id"
    )

    txn.execute(sql, (room_id, False,))

    # Rows are (event_id, max_depth) pairs; fold them into a dict.
    return {event_id: depth for event_id, depth in txn.fetchall()}
|
|
|
|
|
2014-10-31 05:59:02 -04:00
|
|
|
def _get_oldest_events_in_room_txn(self, txn, room_id):
    """Transaction body: select the room's rows from the
    event_backward_extremities table.
    """
    return self._simple_select_onecol_txn(
        txn,
        table="event_backward_extremities",
        keyvalues={"room_id": room_id},
        retcol="event_id",
    )
|
|
|
|
|
2014-10-29 12:59:24 -04:00
|
|
|
def get_latest_events_in_room(self, room_id):
    """Return (event_id, hashes, depth) tuples for the room's forward
    extremities.
    """
    return self.runInteraction(
        "get_latest_events_in_room",
        self._get_latest_events_in_room,
        room_id,
    )
|
|
|
|
|
2015-05-01 08:06:26 -04:00
|
|
|
@cached()
def get_latest_event_ids_in_room(self, room_id):
    """Return the forward-extremity event IDs for the room.

    Cached; invalidated by _handle_prev_events / _clean_room_for_join_txn.
    """
    return self._simple_select_onecol(
        table="event_forward_extremities",
        keyvalues={"room_id": room_id},
        retcol="event_id",
        desc="get_latest_event_ids_in_room",
    )
|
|
|
|
|
2014-10-28 12:42:35 -04:00
|
|
|
def _get_latest_events_in_room(self, txn, room_id):
    """Transaction body: return (event_id, sha256 reference hashes,
    depth) for every forward extremity of the room.
    """
    sql = (
        "SELECT e.event_id, e.depth FROM events as e "
        "INNER JOIN event_forward_extremities as f "
        "ON e.event_id = f.event_id "
        "AND e.room_id = f.room_id "
        "WHERE f.room_id = ?"
    )

    txn.execute(sql, (room_id, ))

    latest = []
    for event_id, depth in txn.fetchall():
        ref_hashes = self._get_event_reference_hashes_txn(txn, event_id)
        # Only the sha256 reference hash is exposed, base64-encoded.
        encoded = dict(
            (alg, encode_base64(digest))
            for alg, digest in ref_hashes.items()
            if alg == "sha256"
        )
        latest.append((event_id, encoded, depth))

    return latest
|
2014-10-28 12:42:35 -04:00
|
|
|
|
2014-11-06 10:10:55 -05:00
|
|
|
def _get_latest_state_in_room(self, txn, room_id, type, state_key):
    """Transaction body: return (event_id, sha256 reference hashes)
    for each state forward extremity matching the given room, event
    type and state key.
    """
    event_ids = self._simple_select_onecol_txn(
        txn,
        table="state_forward_extremities",
        keyvalues={
            "room_id": room_id,
            "type": type,
            "state_key": state_key,
        },
        retcol="event_id",
    )

    latest = []
    for event_id in event_ids:
        ref_hashes = self._get_event_reference_hashes_txn(txn, event_id)
        # Only the sha256 reference hash is exposed, base64-encoded.
        encoded = dict(
            (alg, encode_base64(digest))
            for alg, digest in ref_hashes.items()
            if alg == "sha256"
        )
        latest.append((event_id, encoded))

    return latest
|
|
|
|
|
2014-11-06 10:10:55 -05:00
|
|
|
def _get_prev_events(self, txn, event_id):
    """Return (prev_event_id, hashes) pairs for an event's non-state
    prev_events edges.

    Consistency fix: pass ``is_state=False`` (a bool) to match the
    sibling ``_get_prev_state``, instead of the integer ``0``.
    Behaviour is unchanged: ``_get_prev_events_and_state`` coerces the
    value with ``bool()`` before using it.
    """
    results = self._get_prev_events_and_state(
        txn,
        event_id,
        is_state=False,
    )

    # Drop the is_state column from the tuples.
    return [(e_id, h, ) for e_id, h, _ in results]
|
|
|
|
|
|
|
|
def _get_prev_state(self, txn, event_id):
    """Return (prev_event_id, hashes) pairs for an event's state edges."""
    edges = self._get_prev_events_and_state(
        txn,
        event_id,
        is_state=True,
    )

    # Drop the is_state column from the tuples.
    return [(e_id, h, ) for e_id, h, _ in edges]
|
|
|
|
|
|
|
|
def _get_prev_events_and_state(self, txn, event_id, is_state=None):
    """Fetch the event_edges rows for ``event_id``.

    Returns a list of (prev_event_id, hashes, is_state) tuples, where
    hashes maps "sha256" to the base64-encoded reference hash of the
    prev event. If ``is_state`` is given, only edges with a matching
    is_state flag are returned; otherwise all edges are.
    """
    keyvalues = {"event_id": event_id}
    if is_state is not None:
        # Coerce to a real bool so ints (0/1) behave identically.
        keyvalues["is_state"] = bool(is_state)

    rows = self._simple_select_list_txn(
        txn,
        table="event_edges",
        keyvalues=keyvalues,
        retcols=["prev_event_id", "is_state"],
    )

    # Hashes recorded on the edges themselves, keyed by prev event ID.
    recorded_hashes = self._get_prev_event_hashes_txn(txn, event_id)

    results = []
    for edge in rows:
        prev_id = edge["prev_event_id"]
        # Merge the prev event's own reference hashes with any hashes
        # recorded on the edge.
        combined = self._get_event_reference_hashes_txn(txn, prev_id)
        combined.update(recorded_hashes.get(prev_id, {}))
        encoded = dict(
            (alg, encode_base64(digest))
            for alg, digest in combined.items()
            if alg == "sha256"
        )
        results.append((prev_id, encoded, edge["is_state"]))

    return results
|
|
|
|
|
2014-11-06 13:42:18 -05:00
|
|
|
def _get_auth_events(self, txn, event_id):
    """Return (auth_event_id, hashes) pairs for the direct auth events
    of ``event_id``, where hashes maps "sha256" to the base64-encoded
    reference hash.
    """
    auth_ids = self._simple_select_onecol_txn(
        txn,
        table="event_auth",
        keyvalues={"event_id": event_id},
        retcol="auth_id",
    )

    results = []
    for auth_id in auth_ids:
        ref_hashes = self._get_event_reference_hashes_txn(txn, auth_id)
        # Only the sha256 reference hash is exposed, base64-encoded.
        encoded = dict(
            (alg, encode_base64(digest))
            for alg, digest in ref_hashes.items()
            if alg == "sha256"
        )
        results.append((auth_id, encoded))

    return results
|
|
|
|
|
2014-10-31 06:47:34 -04:00
|
|
|
def get_min_depth(self, room_id):
    """ For the given room, get the minimum depth we have seen for it.

    Fixes the "For hte given room" typo in the original docstring.
    """
    return self.runInteraction(
        "get_min_depth",
        self._get_min_depth_interaction,
        room_id,
    )
|
|
|
|
|
2014-10-28 12:42:35 -04:00
|
|
|
def _get_min_depth_interaction(self, txn, room_id):
    """Transaction body: look up the stored min_depth for a room.

    Returns the depth as an int, or None when no row exists.
    """
    stored = self._simple_select_one_onecol_txn(
        txn,
        table="room_depth",
        keyvalues={"room_id": room_id},
        retcol="min_depth",
        allow_none=True,
    )

    if stored is None:
        return None
    return int(stored)
|
|
|
|
|
|
|
|
def _update_min_depth_for_room_txn(self, txn, room_id, depth):
    """Lower the stored min_depth for a room to ``depth`` if it is
    smaller than the current value, or set it when no value is stored.
    """
    min_depth = self._get_min_depth_interaction(txn, room_id)

    # Bug fix: compare against None explicitly. The original used
    # ``if min_depth`` which treated a stored min_depth of 0 as
    # "no value stored", so a larger depth could overwrite it.
    do_insert = depth < min_depth if min_depth is not None else True

    if do_insert:
        self._simple_upsert_txn(
            txn,
            table="room_depth",
            keyvalues={
                "room_id": room_id,
            },
            values={
                "min_depth": depth,
            },
        )
|
|
|
|
|
|
|
|
def _handle_prev_events(self, txn, outlier, event_id, prev_events,
                        room_id):
    """
    For the given event, update the event edges table and forward and
    backward extremities tables.

    Args:
        txn: database transaction
        outlier (bool): if True, only the edges are recorded; the
            extremity tables are left untouched.
        event_id (str): the new event.
        prev_events (list): (event_id, hashes) pairs the new event
            points at; only the IDs are used here.
        room_id (str)
    """
    # Record one event_edges row per prev event (non-state edges).
    self._simple_insert_many_txn(
        txn,
        table="event_edges",
        values=[
            {
                "event_id": event_id,
                "prev_event_id": e_id,
                "room_id": room_id,
                "is_state": False,
            }
            for e_id, _ in prev_events
        ],
    )

    # Update the extremities table if this is not an outlier.
    if not outlier:
        # The prev events are no longer "latest" in the room: drop them
        # from the forward extremities.
        for e_id, _ in prev_events:
            # TODO (erikj): This could be done as a bulk insert
            self._simple_delete_txn(
                txn,
                table="event_forward_extremities",
                keyvalues={
                    "event_id": e_id,
                    "room_id": room_id,
                }
            )

        # We only insert as a forward extremity the new event if there are
        # no other events that reference it as a prev event
        query = (
            "SELECT 1 FROM event_edges WHERE prev_event_id = ?"
        )

        txn.execute(query, (event_id,))

        if not txn.fetchone():
            query = (
                "INSERT INTO event_forward_extremities"
                " (event_id, room_id)"
                " VALUES (?, ?)"
            )

            txn.execute(query, (event_id, room_id))

        # Insert each prev event as a backward extremity, unless it is
        # already one, or we already have the (non-outlier) event itself.
        query = (
            "INSERT INTO event_backward_extremities (event_id, room_id)"
            " SELECT ?, ? WHERE NOT EXISTS ("
            " SELECT 1 FROM event_backward_extremities"
            " WHERE event_id = ? AND room_id = ?"
            " )"
            " AND NOT EXISTS ("
            " SELECT 1 FROM events WHERE event_id = ? AND room_id = ? "
            " AND outlier = ?"
            " )"
        )

        txn.executemany(query, [
            (e_id, room_id, e_id, room_id, e_id, room_id, False)
            for e_id, _ in prev_events
        ])

        # We now have this event, so it can no longer be a backward
        # extremity of the room.
        query = (
            "DELETE FROM event_backward_extremities"
            " WHERE event_id = ? AND room_id = ?"
        )
        txn.execute(query, (event_id, room_id))

        # Invalidate the forward-extremities cache once the txn commits.
        txn.call_after(
            self.get_latest_event_ids_in_room.invalidate, room_id
        )
|
2015-05-01 08:06:26 -04:00
|
|
|
|
2014-10-31 05:59:02 -04:00
|
|
|
def get_backfill_events(self, room_id, event_list, limit):
    """Get a list of Events for a given room that occurred before (and
    including) the events in event_list. Return a list of max size
    `limit`, sorted by depth, newest first.

    (Docstring fix: the original said "for a given topic" and listed a
    nonexistent ``txn`` argument.)

    Args:
        room_id (str)
        event_list (list): event IDs to walk backwards from
        limit (int)
    """
    return self.runInteraction(
        "get_backfill_events",
        self._get_backfill_events, room_id, event_list, limit
    ).addCallback(
        self._get_events
    ).addCallback(
        lambda l: sorted(l, key=lambda e: -e.depth)
    )
|
2014-10-31 05:59:02 -04:00
|
|
|
|
|
|
|
def _get_backfill_events(self, txn, room_id, event_list, limit):
    """Transaction body for get_backfill_events.

    Walks the event graph backwards from ``event_list``, visiting
    deepest events first, and returns a set of at most ``limit``
    event IDs.
    """
    logger.debug(
        "_get_backfill_events: %s, %s, %s",
        room_id, repr(event_list), limit
    )

    event_results = set()

    # We want to make sure that we do a breadth-first, "depth" ordered
    # search.

    # Find the prev events (and their depths) of a given event; the
    # join against events restricts results to events we actually have.
    query = (
        "SELECT depth, prev_event_id FROM event_edges"
        " INNER JOIN events"
        " ON prev_event_id = events.event_id"
        " AND event_edges.room_id = events.room_id"
        " WHERE event_edges.room_id = ? AND event_edges.event_id = ?"
        " AND event_edges.is_state = ?"
        " LIMIT ?"
    )

    # Depths are negated so the min-heap PriorityQueue pops the
    # deepest (most recent) event first.
    queue = PriorityQueue()

    for event_id in event_list:
        depth = self._simple_select_one_onecol_txn(
            txn,
            table="events",
            keyvalues={
                "event_id": event_id,
            },
            retcol="depth"
        )

        queue.put((-depth, event_id))

    while not queue.empty() and len(event_results) < limit:
        try:
            _, event_id = queue.get_nowait()
        except Empty:
            break

        # An event can be queued more than once via different children;
        # only process it the first time.
        if event_id in event_results:
            continue

        event_results.add(event_id)

        txn.execute(
            query,
            (room_id, event_id, False, limit - len(event_results))
        )

        # Queue each unseen prev event, keyed by its (negated) depth.
        for row in txn.fetchall():
            if row[1] not in event_results:
                queue.put((-row[0], row[1]))

    return event_results
|
2015-02-19 12:24:14 -05:00
|
|
|
|
2015-05-14 08:45:48 -04:00
|
|
|
@defer.inlineCallbacks
def get_missing_events(self, room_id, earliest_events, latest_events,
                       limit, min_depth):
    """Fetch up to ``limit`` events on paths between ``latest_events``
    and ``earliest_events``, filtered to depth >= ``min_depth`` and
    sorted by increasing depth.
    """
    ids = yield self.runInteraction(
        "get_missing_events",
        self._get_missing_events,
        room_id, earliest_events, latest_events, limit, min_depth
    )

    fetched = yield self._get_events(ids)

    # Discard anything shallower than min_depth, then order by depth.
    deep_enough = [ev for ev in fetched if ev.depth >= min_depth]
    deep_enough.sort(key=lambda e: e.depth)

    defer.returnValue(deep_enough[:limit])
|
|
|
|
|
2015-02-19 12:24:14 -05:00
|
|
|
def _get_missing_events(self, txn, room_id, earliest_events, latest_events,
                        limit, min_depth):
    """Transaction body for get_missing_events.

    Breadth-first walk backwards from ``latest_events``, stopping at
    ``earliest_events``, collecting at most ``limit`` event IDs.
    """
    earliest_events = set(earliest_events)
    frontier = set(latest_events) - earliest_events

    event_results = set()

    query = (
        "SELECT prev_event_id FROM event_edges "
        "WHERE room_id = ? AND event_id = ? AND is_state = ? "
        "LIMIT ?"
    )

    while frontier and len(event_results) < limit:
        discovered = set()
        for event_id in frontier:
            txn.execute(
                query,
                (room_id, event_id, False, limit - len(event_results))
            )
            discovered.update(e_id for e_id, in txn.fetchall())

        # Never walk past the earliest events, and never revisit.
        discovered -= earliest_events
        discovered -= event_results

        frontier = discovered
        event_results |= discovered

    return event_results
|
2015-03-18 07:19:47 -04:00
|
|
|
|
|
|
|
def clean_room_for_join(self, room_id):
    """Reset the forward extremities of a room, ready for a re-join."""
    return self.runInteraction(
        "clean_room_for_join",
        self._clean_room_for_join_txn,
        room_id,
    )
|
|
|
|
|
|
|
|
def _clean_room_for_join_txn(self, txn, room_id):
    """Transaction body: drop all forward extremities for the room and
    invalidate the corresponding cache entry after commit.
    """
    sql = "DELETE FROM event_forward_extremities WHERE room_id = ?"
    txn.execute(sql, (room_id,))
    txn.call_after(self.get_latest_event_ids_in_room.invalidate, room_id)
|