Revert "Make all process_replication_rows methods async (#13304)" (#13312)

This reverts commit 5d4028f217.
Erik Johnston 2022-07-18 14:28:14 +01:00 committed by GitHub
parent cf5fa5063d
commit f721f1baba
14 changed files with 25 additions and 40 deletions
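
The change being reverted is purely signature-level: process_replication_rows on the worker stores goes back to being a plain synchronous method that delegates to super(), and its callers stop awaiting it. As a rough illustration of the restored pattern, here is a minimal, self-contained sketch; _IdGen, _StreamChangeCache, BaseStore, ExampleWorkerStore and the "example_stream" name are hypothetical stand-ins rather than Synapse's real classes, and only the method signature and the super() delegation mirror the diffs below.

from typing import Any, Iterable, Set, Tuple


class _IdGen:
    """Stand-in for Synapse's stream ID generators."""

    def __init__(self) -> None:
        self._current = 0

    def advance(self, instance_name: str, token: int) -> None:
        self._current = max(self._current, token)

    def get_current_token(self) -> int:
        return self._current


class _StreamChangeCache:
    """Stand-in for Synapse's stream-change caches."""

    def __init__(self) -> None:
        self.changed: Set[Tuple[str, int]] = set()

    def entity_has_changed(self, entity: str, token: int) -> None:
        self.changed.add((entity, token))


class BaseStore:
    def process_replication_rows(
        self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
    ) -> None:
        # Nothing to do in the base class; subclasses call up via super().
        pass


class ExampleWorkerStore(BaseStore):
    def __init__(self) -> None:
        self._id_gen = _IdGen()
        self._stream_cache = _StreamChangeCache()

    # Plain `def`, not `async def`: this is the shape the revert restores.
    def process_replication_rows(
        self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
    ) -> None:
        if stream_name == "example_stream":  # hypothetical stream name
            self._id_gen.advance(instance_name, token)
            for row in rows:
                self._stream_cache.entity_has_changed(row.user_id, token)
        return super().process_replication_rows(stream_name, instance_name, token, rows)


store = ExampleWorkerStore()
store.process_replication_rows("example_stream", "worker1", 7, [])
assert store._id_gen.get_current_token() == 7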


@@ -49,7 +49,7 @@ class SlavedDeviceStore(DeviceWorkerStore, BaseSlavedStore):
     def get_device_stream_token(self) -> int:
         return self._device_list_id_gen.get_current_token()
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
     ) -> None:
         if stream_name == DeviceListsStream.NAME:
@@ -59,9 +59,7 @@ class SlavedDeviceStore(DeviceWorkerStore, BaseSlavedStore):
             self._device_list_id_gen.advance(instance_name, token)
             for row in rows:
                 self._user_signature_stream_cache.entity_has_changed(row.user_id, token)
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)
 
     def _invalidate_caches_for_devices(
         self, token: int, rows: Iterable[DeviceListsStream.DeviceListsStreamRow]


@@ -24,7 +24,7 @@ class SlavedPushRuleStore(SlavedEventStore, PushRulesWorkerStore):
     def get_max_push_rules_stream_id(self) -> int:
         return self._push_rules_stream_id_gen.get_current_token()
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
     ) -> None:
         if stream_name == PushRulesStream.NAME:
@@ -33,6 +33,4 @@ class SlavedPushRuleStore(SlavedEventStore, PushRulesWorkerStore):
                 self.get_push_rules_for_user.invalidate((row.user_id,))
                 self.get_push_rules_enabled_for_user.invalidate((row.user_id,))
                 self.push_rules_stream_cache.entity_has_changed(row.user_id, token)
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)


@@ -40,11 +40,9 @@ class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore):
     def get_pushers_stream_token(self) -> int:
         return self._pushers_id_gen.get_current_token()
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
     ) -> None:
         if stream_name == PushersStream.NAME:
             self._pushers_id_gen.advance(instance_name, token)
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)


@@ -144,15 +144,13 @@ class ReplicationDataHandler:
             token: stream token for this batch of rows
             rows: a list of Stream.ROW_TYPE objects as returned by Stream.parse_row.
         """
-        await self.store.process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        self.store.process_replication_rows(stream_name, instance_name, token, rows)
 
         if self.send_handler:
             await self.send_handler.process_replication_rows(stream_name, token, rows)
 
         if stream_name == TypingStream.NAME:
-            await self._typing_handler.process_replication_rows(token, rows)
+            self._typing_handler.process_replication_rows(token, rows)
             self.notifier.on_new_event(
                 StreamKeyType.TYPING, token, rooms=[row.room_id for row in rows]
             )
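
On the caller side, the hunk above amounts to dropping the await in front of the store hook while still awaiting the genuinely asynchronous per-stream work. A minimal sketch of that dispatch shape, assuming hypothetical _Store, _SendHandler and _DataHandler stand-ins rather than Synapse's real ReplicationDataHandler:

import asyncio
from typing import Any, Iterable, List


class _Store:
    def process_replication_rows(
        self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
    ) -> None:
        print(f"store caught up {stream_name} to token {token}")


class _SendHandler:
    async def process_replication_rows(
        self, stream_name: str, token: int, rows: Iterable[Any]
    ) -> None:
        print(f"async send-handler work for {stream_name} at token {token}")


class _DataHandler:
    def __init__(self) -> None:
        self.store = _Store()
        self.send_handler = _SendHandler()

    async def on_rdata(
        self, stream_name: str, instance_name: str, token: int, rows: List[Any]
    ) -> None:
        # The store hook is synchronous again, so it is called without `await`.
        self.store.process_replication_rows(stream_name, instance_name, token, rows)

        if self.send_handler:
            await self.send_handler.process_replication_rows(stream_name, token, rows)


asyncio.run(_DataHandler().on_rdata("example_stream", "worker1", 42, []))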