Handle sending events and device messages over federation

Erik Johnston 2016-11-17 15:46:44 +00:00
parent ed787cf09e
commit f8ee66250a
14 changed files with 189 additions and 59 deletions


@@ -453,7 +453,7 @@ class ReplicationResource(Resource):
             )
             upto_token = _position_from_rows(to_device_rows, current_position)
             writer.write_header_and_rows("to_device", to_device_rows, (
-                "position", "user_id", "device_id", "message_json"
+                "position", "entity",
             ), position=upto_token)
 
     @defer.inlineCallbacks
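
The change above slims the "to_device" replication rows from four columns to two: instead of shipping the full message (user_id, device_id, message_json), the stream now carries only a position and an entity, where the entity is either a local user ID or a remote destination. A minimal sketch of how a consumer might interpret the new rows; the handle_to_device_row helper and the sample values are illustrative, not part of this commit:

def handle_to_device_row(row):
    # Each row is (position, entity). Matrix user IDs always start
    # with "@", so anything else is taken to be a remote server name.
    stream_id, entity = row

    if entity.startswith("@"):
        print("local user %s has new device messages (pos %d)" % (entity, stream_id))
    else:
        print("destination %s has queued federation messages (pos %d)" % (entity, stream_id))

# Hypothetical rows as they might appear on the stream.
handle_to_device_row((453, "@alice:example.com"))
handle_to_device_row((454, "matrix.org"))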


@@ -38,6 +38,7 @@ class SlavedDeviceInboxStore(BaseSlavedStore):
     get_new_messages_for_device = DataStore.get_new_messages_for_device.__func__
     get_new_device_msgs_for_remote = DataStore.get_new_device_msgs_for_remote.__func__
     delete_messages_for_device = DataStore.delete_messages_for_device.__func__
+    delete_device_msgs_for_remote = DataStore.delete_device_msgs_for_remote.__func__
 
     def stream_positions(self):
         result = super(SlavedDeviceInboxStore, self).stream_positions()
@@ -50,9 +51,15 @@ class SlavedDeviceInboxStore(BaseSlavedStore):
             self._device_inbox_id_gen.advance(int(stream["position"]))
             for row in stream["rows"]:
                 stream_id = row[0]
-                user_id = row[1]
-                self._device_inbox_stream_cache.entity_has_changed(
-                    user_id, stream_id
-                )
+                entity = row[1]
+
+                if entity.startswith("@"):
+                    self._device_inbox_stream_cache.entity_has_changed(
+                        entity, stream_id
+                    )
+                else:
+                    self._device_federation_outbox_stream_cache.entity_has_changed(
+                        entity, stream_id
+                    )
 
         return super(SlavedDeviceInboxStore, self).process_replication(result)
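
The branch on entity.startswith("@") feeds two separate StreamChangeCache instances: one tracking which local users have new inbox messages, the other tracking which remote destinations have pending outbox messages. A rough sketch of the cache contract being relied on; the cache names and stream positions here are made up, and the constructor signature is recalled from synapse.util.caches.stream_change_cache of this era, so details may differ:

from synapse.util.caches.stream_change_cache import StreamChangeCache

# One cache per direction: local user inboxes vs. remote outboxes.
inbox_cache = StreamChangeCache("DeviceInboxStreamChangeCache", 0)
outbox_cache = StreamChangeCache("DeviceFederationOutboxStreamChangeCache", 0)

# Replication reports that an entity changed at a given stream position.
inbox_cache.entity_has_changed("@alice:example.com", 10)
outbox_cache.entity_has_changed("matrix.org", 11)

# Readers can then cheaply ask "has anything changed since position P?"
# and skip a database query when the answer is no.
assert inbox_cache.has_entity_changed("@alice:example.com", 5)
assert not outbox_cache.has_entity_changed("matrix.org", 11)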


@@ -26,6 +26,11 @@ from synapse.storage.stream import StreamStore
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 
 import ujson as json
+import logging
+
+logger = logging.getLogger(__name__)
+
 
 # So, um, we want to borrow a load of functions intended for reading from
 # a DataStore, but we don't want to take functions that either write to the
@@ -180,6 +185,8 @@ class SlavedEventStore(BaseSlavedStore):
         EventFederationStore.__dict__["_get_forward_extremeties_for_room"]
     )
 
+    get_all_new_events_stream = DataStore.get_all_new_events_stream.__func__
+
     def stream_positions(self):
         result = super(SlavedEventStore, self).stream_positions()
         result["events"] = self._stream_id_gen.get_current_token()
@@ -194,6 +201,10 @@ class SlavedEventStore(BaseSlavedStore):
         stream = result.get("events")
         if stream:
             self._stream_id_gen.advance(int(stream["position"]))
+
+            if stream["rows"]:
+                logger.info("Got %d event rows", len(stream["rows"]))
+
             for row in stream["rows"]:
                 self._process_replication_row(
                     row, backfilled=False, state_resets=state_resets
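
get_all_new_events_stream is pulled across with the same .__func__ idiom used throughout these slaved stores: under Python 2, which Synapse targeted at the time, DataStore.method is an unbound method, and .__func__ extracts the raw function so it can be re-bound as a method of the slave class. A toy, self-contained illustration of the idiom (Python 2; on Python 3 the .__func__ is unnecessary because class attributes are already plain functions):

class DataStore(object):
    def get_all_new_events_stream(self, from_id, to_id):
        # Stands in for a real database query.
        return ["event-%d" % i for i in range(from_id, to_id)]

class SlavedEventStore(object):
    # Borrow the read-only query so the worker answers it locally,
    # without inheriting the whole (write-capable) DataStore.
    get_all_new_events_stream = DataStore.get_all_new_events_stream.__func__

slave = SlavedEventStore()
print(slave.get_all_new_events_stream(0, 3))  # ['event-0', 'event-1', 'event-2']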


@@ -25,8 +25,8 @@ class TransactionStore(BaseSlavedStore):
     ].orig
     _get_destination_retry_timings = DataStore._get_destination_retry_timings.__func__
 
-    def prep_send_transaction(self, transaction_id, destination, origin_server_ts):
-        return []
+    prep_send_transaction = DataStore.prep_send_transaction.__func__
+    delivered_txn = DataStore.delivered_txn.__func__
 
     # For now, don't record the destination retry timings
     def set_destination_retry_timings(*args, **kwargs):
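
Previously the slaved TransactionStore stubbed prep_send_transaction to report no prior transaction, since no worker ever sent federation traffic; with federation sending now able to run on a worker, the real DataStore implementations of prep_send_transaction and delivered_txn are borrowed so sent transactions are actually recorded. Retry timings, by contrast, are still deliberately dropped. The hunk above is cut off; a plausible shape for that no-op override, where the defer.succeed(None) body is an assumption rather than quoted from the commit:

from twisted.internet import defer

class TransactionStore(object):
    # Swallow the write but keep the Deferred-returning contract, so
    # callers written against the master store work unchanged.
    def set_destination_retry_timings(self, *args, **kwargs):
        return defer.succeed(None)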