1f36ff69e8
This allows us to have the logic on both master and workers, which is necessary to move event persistence off master. We also combine the instantiation of ID generators from DataStore and slave stores to the base worker stores. This allows us to select which process writes events independently of the master/worker splits.
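A minimal sketch of that idea (the class name and constructor shape here are hypothetical; only StreamIdGenerator, its module path, and the events table/column names are taken from Synapse itself): a shared worker base store instantiates the ID generator, and both the master DataStore and the slave stores inherit it, so either process can be chosen as the event writer.

    from synapse.storage.util.id_generators import StreamIdGenerator

    class BaseEventsWorkerStore:
        # Mixed into both the master DataStore and the worker stores, so
        # whichever process is picked as the event writer has the generator.
        def __init__(self, db_conn):
            self._stream_id_gen = StreamIdGenerator(
                db_conn, "events", "stream_ordering"
            )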
# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

from synapse.storage.data_stores.main.event_federation import EventFederationWorkerStore
from synapse.storage.data_stores.main.event_push_actions import (
    EventPushActionsWorkerStore,
)
from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
from synapse.storage.data_stores.main.relations import RelationsWorkerStore
from synapse.storage.data_stores.main.roommember import RoomMemberWorkerStore
from synapse.storage.data_stores.main.signatures import SignatureWorkerStore
from synapse.storage.data_stores.main.state import StateGroupWorkerStore
from synapse.storage.data_stores.main.stream import StreamWorkerStore
from synapse.storage.data_stores.main.user_erasure_store import UserErasureWorkerStore
from synapse.storage.database import Database
from synapse.util.caches.stream_change_cache import StreamChangeCache

from ._base import BaseSlavedStore

logger = logging.getLogger(__name__)

# So, um, we want to borrow a load of functions intended for reading from
# a DataStore, but we don't want to take functions that either write to the
# DataStore or are cached and don't have cache invalidation logic.
#
# Rather than write duplicate versions of those functions, or lift them to
# a common base class, we're going to grab the underlying __func__ object from
# the method descriptor on the DataStore and chuck them into our class.
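#
# For example (illustrative only -- "get_event" stands in for whichever
# cached method is being borrowed, not a literal attribute of this class):
#
#     class SlavedFooStore(BaseSlavedStore):
#         # Reading from __dict__ bypasses the descriptor protocol, so the
#         # cache wrapper is copied wholesale rather than being bound to a
#         # DataStore instance.
#         get_event = DataStore.__dict__["get_event"]
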
class SlavedEventStore(
    EventFederationWorkerStore,
    RoomMemberWorkerStore,
    EventPushActionsWorkerStore,
    StreamWorkerStore,
    StateGroupWorkerStore,
    EventsWorkerStore,
    SignatureWorkerStore,
    UserErasureWorkerStore,
    RelationsWorkerStore,
    BaseSlavedStore,
):
    def __init__(self, database: Database, db_conn, hs):
        super(SlavedEventStore, self).__init__(database, db_conn, hs)

        events_max = self._stream_id_gen.get_current_token()
        curr_state_delta_prefill, min_curr_state_delta_id = self.db.get_cache_dict(
            db_conn,
            "current_state_delta_stream",
            entity_column="room_id",
            stream_column="stream_id",
            max_value=events_max,  # As we share the stream id with events token
            limit=1000,
        )
        self._curr_state_delta_stream_cache = StreamChangeCache(
            "_curr_state_delta_stream_cache",
            min_curr_state_delta_id,
            prefilled_cache=curr_state_delta_prefill,
        )
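        # (StreamChangeCache answers "has this entity possibly changed since
        # stream position X?" without a DB hit; prefilling it from
        # current_state_delta_stream via get_cache_dict avoids starting with
        # a cold cache after a restart.)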
    # Cached functions can't be accessed through a class instance so we need
    # to reach inside the __dict__ to extract them.

    def get_room_max_stream_ordering(self):
        return self._stream_id_gen.get_current_token()

    def get_room_min_stream_ordering(self):
        return self._backfill_id_gen.get_current_token()