Merge pull request #6487 from matrix-org/erikj/pass_in_db

Pass in Database object to data stores.

Commit: 30e9adf32f
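
The change is mechanical but wide-reaching: instead of each SQLBaseStore quietly building its own Database(hs), a single Database object is now constructed up front and threaded through every store constructor. A minimal sketch of the before/after constructor shape, using a hypothetical store class (only the imports and the database: Database parameter come from the diff below):

    from synapse.storage._base import SQLBaseStore
    from synapse.storage.database import Database


    class ExampleStore(SQLBaseStore):  # hypothetical store, for illustration only
        # Before: __init__(self, db_conn, hs), with SQLBaseStore creating its
        # own Database(hs) internally.
        def __init__(self, database: Database, db_conn, hs):
            # The Database wrapper is passed through unchanged; the raw db_conn
            # is still available for anything that reads the connection directly.
            super(ExampleStore, self).__init__(database, db_conn, hs)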
@@ -1,7 +1,7 @@
 # Configuration file used for testing the 'synapse_port_db' script.
 # Tells the script to connect to the postgresql database that will be available in the
 # CI's Docker setup at the point where this file is considered.
-server_name: "test"
+server_name: "localhost:8800"

 signing_key_path: "/src/.buildkite/test.signing.key"

@@ -1,7 +1,7 @@
 # Configuration file used for testing the 'synapse_port_db' script.
 # Tells the 'update_database' script to connect to the test SQLite database to upgrade its
 # schema and run background updates on it.
-server_name: "test"
+server_name: "localhost:8800"

 signing_key_path: "/src/.buildkite/test.signing.key"

changelog.d/6487.misc (new file, 1 line)
@@ -0,0 +1 @@
+Pass in `Database` object to data stores.
@@ -55,6 +55,7 @@ from synapse.storage.data_stores.main.stats import StatsStore
 from synapse.storage.data_stores.main.user_directory import (
     UserDirectoryBackgroundUpdateStore,
 )
+from synapse.storage.database import Database
 from synapse.storage.engines import create_engine
 from synapse.storage.prepare_database import prepare_database
 from synapse.util import Clock

@@ -139,39 +140,6 @@ class Store(
     UserDirectoryBackgroundUpdateStore,
     StatsStore,
 ):
-    def __init__(self, db_conn, hs):
-        super().__init__(db_conn, hs)
-        self.db_pool = hs.get_db_pool()
-
-    @defer.inlineCallbacks
-    def runInteraction(self, desc, func, *args, **kwargs):
-        def r(conn):
-            try:
-                i = 0
-                N = 5
-                while True:
-                    try:
-                        txn = conn.cursor()
-                        return func(
-                            LoggingTransaction(txn, desc, self.database_engine, [], []),
-                            *args,
-                            **kwargs
-                        )
-                    except self.database_engine.module.DatabaseError as e:
-                        if self.database_engine.is_deadlock(e):
-                            logger.warning("[TXN DEADLOCK] {%s} %d/%d", desc, i, N)
-                            if i < N:
-                                i += 1
-                                conn.rollback()
-                                continue
-                        raise
-            except Exception as e:
-                logger.debug("[TXN FAIL] {%s} %s", desc, e)
-                raise
-
-        with PreserveLoggingContext():
-            return (yield self.db_pool.runWithConnection(r))
-
     def execute(self, f, *args, **kwargs):
         return self.db.runInteraction(f.__name__, f, *args, **kwargs)

@@ -512,7 +480,7 @@ class Porter(object):

         hs = MockHomeserver(self.hs_config, engine, conn, db_pool)

-        store = Store(conn, hs)
+        store = Store(Database(hs), conn, hs)

         yield store.db.runInteraction(
             "%s_engine.check_database" % config["name"], engine.check_database,
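With a real Database object handed to Store, the port script no longer needs the hand-rolled transaction/deadlock-retry loop deleted above; transactions go through store.db.runInteraction, the same wrapper the deleted code reimplemented. A sketch of the call pattern inside one of the script's @defer.inlineCallbacks methods (the _count_users helper is invented for illustration):

    def _count_users(txn):
        # Runs inside a single transaction managed by the Database wrapper.
        txn.execute("SELECT COUNT(*) FROM users")
        return txn.fetchone()[0]

    user_count = yield store.db.runInteraction("count_users", _count_users)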
@@ -40,6 +40,7 @@ from synapse.replication.slave.storage.transactions import SlavedTransactionStor
 from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.replication.tcp.streams._base import ReceiptsStream
 from synapse.server import HomeServer
+from synapse.storage.database import Database
 from synapse.storage.engines import create_engine
 from synapse.types import ReadReceipt
 from synapse.util.async_helpers import Linearizer

@@ -59,8 +60,8 @@ class FederationSenderSlaveStore(
     SlavedDeviceStore,
     SlavedPresenceStore,
 ):
-    def __init__(self, db_conn, hs):
-        super(FederationSenderSlaveStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(FederationSenderSlaveStore, self).__init__(database, db_conn, hs)

         # We pull out the current federation stream position now so that we
         # always have a known value for the federation position in memory so
@@ -68,9 +68,9 @@ from synapse.rest.key.v2 import KeyApiV2Resource
 from synapse.rest.media.v0.content_repository import ContentRepoResource
 from synapse.rest.well_known import WellKnownResource
 from synapse.server import HomeServer
-from synapse.storage import DataStore, are_all_users_on_domain
+from synapse.storage import DataStore
 from synapse.storage.engines import IncorrectDatabaseSetup, create_engine
-from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
+from synapse.storage.prepare_database import UpgradeDatabaseException
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.manhole import manhole

@@ -294,22 +294,6 @@ class SynapseHomeServer(HomeServer):
             else:
                 logger.warning("Unrecognized listener type: %s", listener["type"])

-    def run_startup_checks(self, db_conn, database_engine):
-        all_users_native = are_all_users_on_domain(
-            db_conn.cursor(), database_engine, self.hostname
-        )
-        if not all_users_native:
-            quit_with_error(
-                "Found users in database not native to %s!\n"
-                "You cannot changed a synapse server_name after it's been configured"
-                % (self.hostname,)
-            )
-
-        try:
-            database_engine.check_database(db_conn.cursor())
-        except IncorrectDatabaseSetup as e:
-            quit_with_error(str(e))
-

 # Gauges to expose monthly active user control metrics
 current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")

@@ -357,16 +341,12 @@ def setup(config_options):

     synapse.config.logger.setup_logging(hs, config, use_worker_options=False)

-    logger.info("Preparing database: %s...", config.database_config["name"])
+    logger.info("Setting up server")

     try:
-        with hs.get_db_conn(run_new_connection=False) as db_conn:
-            prepare_database(db_conn, database_engine, config=config)
-            database_engine.on_new_connection(db_conn)
-
-            hs.run_startup_checks(db_conn, database_engine)
-
-            db_conn.commit()
+        hs.setup()
+    except IncorrectDatabaseSetup as e:
+        quit_with_error(str(e))
     except UpgradeDatabaseException:
         sys.stderr.write(
             "\nFailed to upgrade database.\n"

@@ -375,9 +355,6 @@ def setup(config_options):
         )
         sys.exit(1)

-    logger.info("Database prepared in %s.", config.database_config["name"])
-
-    hs.setup()
     hs.setup_master()

     @defer.inlineCallbacks
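The removed run_startup_checks does not simply disappear: the server_name check moves into the main DataStore constructor (see the are_all_users_on_domain hunks further down, where it now raises an exception), and the engine check moves into DataStores, so setup(config_options) only has to translate the database-setup errors it can still see. A condensed sketch of the resulting flow, using only names that appear in this diff (the error message is abbreviated):

    try:
        hs.setup()  # builds DataStores -> Database -> DataStore, preparing the schema
    except IncorrectDatabaseSetup as e:
        # raised from database.engine.check_database(...) in DataStores.__init__
        quit_with_error(str(e))
    except UpgradeDatabaseException:
        sys.stderr.write("\nFailed to upgrade database.\n")
        sys.exit(1)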
@@ -43,6 +43,7 @@ from synapse.replication.tcp.streams.events import (
 from synapse.rest.client.v2_alpha import user_directory
 from synapse.server import HomeServer
 from synapse.storage.data_stores.main.user_directory import UserDirectoryStore
+from synapse.storage.database import Database
 from synapse.storage.engines import create_engine
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 from synapse.util.httpresourcetree import create_resource_tree

@@ -60,8 +61,8 @@ class UserDirectorySlaveStore(
     UserDirectoryStore,
     BaseSlavedStore,
 ):
-    def __init__(self, db_conn, hs):
-        super(UserDirectorySlaveStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(UserDirectorySlaveStore, self).__init__(database, db_conn, hs)

         events_max = self._stream_id_gen.get_current_token()
         curr_state_delta_prefill, min_curr_state_delta_id = self.db.get_cache_dict(
@@ -20,6 +20,7 @@ import six

 from synapse.storage._base import SQLBaseStore
 from synapse.storage.data_stores.main.cache import CURRENT_STATE_CACHE_NAME
+from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine

 from ._slaved_id_tracker import SlavedIdTracker

@@ -35,8 +36,8 @@ def __func__(inp):


 class BaseSlavedStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(BaseSlavedStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(BaseSlavedStore, self).__init__(database, db_conn, hs)
         if isinstance(self.database_engine, PostgresEngine):
             self._cache_id_gen = SlavedIdTracker(
                 db_conn, "cache_invalidation_stream", "stream_id"
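Every slaved store below gets the same treatment, so the recipe is worth stating once: accept database: Database as the new first constructor argument, keep using the raw db_conn for anything that reads the connection directly (typically the SlavedIdTracker positions), and forward database unchanged to the parent class. A sketch with placeholder names (the class and stream names are invented):

    from synapse.replication.slave.storage._base import BaseSlavedStore
    from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
    from synapse.storage.database import Database


    class SlavedExampleStore(BaseSlavedStore):  # hypothetical worker store
        def __init__(self, database: Database, db_conn, hs):
            # Id trackers still read their starting position straight off db_conn...
            self._example_id_gen = SlavedIdTracker(db_conn, "example_stream", "stream_id")
            # ...while the shared Database object is passed through unchanged.
            super(SlavedExampleStore, self).__init__(database, db_conn, hs)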
@@ -18,15 +18,16 @@ from synapse.replication.slave.storage._base import BaseSlavedStore
 from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
 from synapse.storage.data_stores.main.account_data import AccountDataWorkerStore
 from synapse.storage.data_stores.main.tags import TagsWorkerStore
+from synapse.storage.database import Database


 class SlavedAccountDataStore(TagsWorkerStore, AccountDataWorkerStore, BaseSlavedStore):
-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):
         self._account_data_id_gen = SlavedIdTracker(
             db_conn, "account_data_max_stream_id", "stream_id"
         )

-        super(SlavedAccountDataStore, self).__init__(db_conn, hs)
+        super(SlavedAccountDataStore, self).__init__(database, db_conn, hs)

     def get_max_account_data_stream_id(self):
         return self._account_data_id_gen.get_current_token()
@@ -14,6 +14,7 @@
 # limitations under the License.

 from synapse.storage.data_stores.main.client_ips import LAST_SEEN_GRANULARITY
+from synapse.storage.database import Database
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.caches.descriptors import Cache

@@ -21,8 +22,8 @@ from ._base import BaseSlavedStore


 class SlavedClientIpStore(BaseSlavedStore):
-    def __init__(self, db_conn, hs):
-        super(SlavedClientIpStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SlavedClientIpStore, self).__init__(database, db_conn, hs)

         self.client_ip_last_seen = Cache(
             name="client_ip_last_seen", keylen=4, max_entries=50000 * CACHE_SIZE_FACTOR
@@ -16,13 +16,14 @@
 from synapse.replication.slave.storage._base import BaseSlavedStore
 from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
 from synapse.storage.data_stores.main.deviceinbox import DeviceInboxWorkerStore
+from synapse.storage.database import Database
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.caches.stream_change_cache import StreamChangeCache


 class SlavedDeviceInboxStore(DeviceInboxWorkerStore, BaseSlavedStore):
-    def __init__(self, db_conn, hs):
-        super(SlavedDeviceInboxStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SlavedDeviceInboxStore, self).__init__(database, db_conn, hs)
         self._device_inbox_id_gen = SlavedIdTracker(
             db_conn, "device_max_stream_id", "stream_id"
         )
@@ -18,12 +18,13 @@ from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
 from synapse.replication.tcp.streams._base import DeviceListsStream, UserSignatureStream
 from synapse.storage.data_stores.main.devices import DeviceWorkerStore
 from synapse.storage.data_stores.main.end_to_end_keys import EndToEndKeyWorkerStore
+from synapse.storage.database import Database
 from synapse.util.caches.stream_change_cache import StreamChangeCache


 class SlavedDeviceStore(EndToEndKeyWorkerStore, DeviceWorkerStore, BaseSlavedStore):
-    def __init__(self, db_conn, hs):
-        super(SlavedDeviceStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SlavedDeviceStore, self).__init__(database, db_conn, hs)

         self.hs = hs

@@ -31,6 +31,7 @@ from synapse.storage.data_stores.main.signatures import SignatureWorkerStore
 from synapse.storage.data_stores.main.state import StateGroupWorkerStore
 from synapse.storage.data_stores.main.stream import StreamWorkerStore
 from synapse.storage.data_stores.main.user_erasure_store import UserErasureWorkerStore
+from synapse.storage.database import Database

 from ._base import BaseSlavedStore
 from ._slaved_id_tracker import SlavedIdTracker

@@ -59,13 +60,13 @@ class SlavedEventStore(
     RelationsWorkerStore,
     BaseSlavedStore,
 ):
-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):
         self._stream_id_gen = SlavedIdTracker(db_conn, "events", "stream_ordering")
         self._backfill_id_gen = SlavedIdTracker(
             db_conn, "events", "stream_ordering", step=-1
         )

-        super(SlavedEventStore, self).__init__(db_conn, hs)
+        super(SlavedEventStore, self).__init__(database, db_conn, hs)

         # Cached functions can't be accessed through a class instance so we need
         # to reach inside the __dict__ to extract them.
@@ -14,13 +14,14 @@
 # limitations under the License.

 from synapse.storage.data_stores.main.filtering import FilteringStore
+from synapse.storage.database import Database

 from ._base import BaseSlavedStore


 class SlavedFilteringStore(BaseSlavedStore):
-    def __init__(self, db_conn, hs):
-        super(SlavedFilteringStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SlavedFilteringStore, self).__init__(database, db_conn, hs)

     # Filters are immutable so this cache doesn't need to be expired
     get_user_filter = FilteringStore.__dict__["get_user_filter"]
@@ -14,6 +14,7 @@
 # limitations under the License.

 from synapse.storage import DataStore
+from synapse.storage.database import Database
 from synapse.util.caches.stream_change_cache import StreamChangeCache

 from ._base import BaseSlavedStore, __func__

@@ -21,8 +22,8 @@ from ._slaved_id_tracker import SlavedIdTracker


 class SlavedGroupServerStore(BaseSlavedStore):
-    def __init__(self, db_conn, hs):
-        super(SlavedGroupServerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SlavedGroupServerStore, self).__init__(database, db_conn, hs)

         self.hs = hs

@@ -15,6 +15,7 @@

 from synapse.storage import DataStore
 from synapse.storage.data_stores.main.presence import PresenceStore
+from synapse.storage.database import Database
 from synapse.util.caches.stream_change_cache import StreamChangeCache

 from ._base import BaseSlavedStore, __func__

@@ -22,8 +23,8 @@ from ._slaved_id_tracker import SlavedIdTracker


 class SlavedPresenceStore(BaseSlavedStore):
-    def __init__(self, db_conn, hs):
-        super(SlavedPresenceStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SlavedPresenceStore, self).__init__(database, db_conn, hs)
         self._presence_id_gen = SlavedIdTracker(db_conn, "presence_stream", "stream_id")

         self._presence_on_startup = self._get_active_presence(db_conn)
@@ -15,17 +15,18 @@
 # limitations under the License.

 from synapse.storage.data_stores.main.push_rule import PushRulesWorkerStore
+from synapse.storage.database import Database

 from ._slaved_id_tracker import SlavedIdTracker
 from .events import SlavedEventStore


 class SlavedPushRuleStore(SlavedEventStore, PushRulesWorkerStore):
-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):
         self._push_rules_stream_id_gen = SlavedIdTracker(
             db_conn, "push_rules_stream", "stream_id"
         )
-        super(SlavedPushRuleStore, self).__init__(db_conn, hs)
+        super(SlavedPushRuleStore, self).__init__(database, db_conn, hs)

     def get_push_rules_stream_token(self):
         return (
@@ -15,14 +15,15 @@
 # limitations under the License.

 from synapse.storage.data_stores.main.pusher import PusherWorkerStore
+from synapse.storage.database import Database

 from ._base import BaseSlavedStore
 from ._slaved_id_tracker import SlavedIdTracker


 class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore):
-    def __init__(self, db_conn, hs):
-        super(SlavedPusherStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SlavedPusherStore, self).__init__(database, db_conn, hs)
         self._pushers_id_gen = SlavedIdTracker(
             db_conn, "pushers", "id", extra_tables=[("deleted_pushers", "stream_id")]
         )
@@ -15,6 +15,7 @@
 # limitations under the License.

 from synapse.storage.data_stores.main.receipts import ReceiptsWorkerStore
+from synapse.storage.database import Database

 from ._base import BaseSlavedStore
 from ._slaved_id_tracker import SlavedIdTracker

@@ -29,14 +30,14 @@ from ._slaved_id_tracker import SlavedIdTracker


 class SlavedReceiptsStore(ReceiptsWorkerStore, BaseSlavedStore):
-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):
         # We instantiate this first as the ReceiptsWorkerStore constructor
         # needs to be able to call get_max_receipt_stream_id
         self._receipts_id_gen = SlavedIdTracker(
             db_conn, "receipts_linearized", "stream_id"
         )

-        super(SlavedReceiptsStore, self).__init__(db_conn, hs)
+        super(SlavedReceiptsStore, self).__init__(database, db_conn, hs)

     def get_max_receipt_stream_id(self):
         return self._receipts_id_gen.get_current_token()
@@ -14,14 +14,15 @@
 # limitations under the License.

 from synapse.storage.data_stores.main.room import RoomWorkerStore
+from synapse.storage.database import Database

 from ._base import BaseSlavedStore
 from ._slaved_id_tracker import SlavedIdTracker


 class RoomStore(RoomWorkerStore, BaseSlavedStore):
-    def __init__(self, db_conn, hs):
-        super(RoomStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RoomStore, self).__init__(database, db_conn, hs)
         self._public_room_id_gen = SlavedIdTracker(
             db_conn, "public_room_list_stream", "stream_id"
         )
@@ -238,8 +238,7 @@ class HomeServer(object):
     def setup(self):
         logger.info("Setting up.")
         with self.get_db_conn() as conn:
-            datastore = self.DATASTORE_CLASS(conn, self)
-            self.datastores = DataStores(datastore, conn, self)
+            self.datastores = DataStores(self.DATASTORE_CLASS, conn, self)
             conn.commit()
         self.start_time = int(self.get_clock().time())
         logger.info("Finished setting up.")
@@ -49,15 +49,3 @@ class Storage(object):
         self.persistence = EventsPersistenceStorage(hs, stores)
         self.purge_events = PurgeEventsStorage(hs, stores)
         self.state = StateGroupStorage(hs, stores)
-
-
-def are_all_users_on_domain(txn, database_engine, domain):
-    sql = database_engine.convert_param_style(
-        "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
-    )
-    pat = "%:" + domain
-    txn.execute(sql, (pat,))
-    num_not_matching = txn.fetchall()[0][0]
-    if num_not_matching == 0:
-        return True
-    return False
@@ -37,11 +37,11 @@ class SQLBaseStore(object):
     per data store (and not one per physical database).
     """

-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):
         self.hs = hs
         self._clock = hs.get_clock()
         self.database_engine = hs.database_engine
-        self.db = Database(hs)  # In future this will be passed in
+        self.db = database
         self.rand = random.SystemRandom()

     def _invalidate_state_caches(self, room_id, members_changed):
@@ -379,7 +379,7 @@ class BackgroundUpdater(object):
             logger.debug("[SQL] %s", sql)
             c.execute(sql)

-        if isinstance(self.db.database_engine, engines.PostgresEngine):
+        if isinstance(self.db.engine, engines.PostgresEngine):
             runner = create_index_psql
         elif psql_only:
             runner = None
@@ -13,6 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from synapse.storage.database import Database
+from synapse.storage.prepare_database import prepare_database
+

 class DataStores(object):
     """The various data stores.

@@ -20,7 +23,14 @@ class DataStores(object):
     These are low level interfaces to physical databases.
     """

-    def __init__(self, main_store, db_conn, hs):
-        # Note we pass in the main store here as workers use a different main
+    def __init__(self, main_store_class, db_conn, hs):
+        # Note we pass in the main store class here as workers use a different main
         # store.
-        self.main = main_store
+        database = Database(hs)
+
+        # Check that db is correctly configured.
+        database.engine.check_database(db_conn.cursor())
+
+        prepare_database(db_conn, database.engine, config=hs.config)
+
+        self.main = main_store_class(database, db_conn, hs)
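Put together with the HomeServer.setup() hunk above, database startup now funnels through this constructor. A condensed sketch of the order of operations, using only names from this diff (shown as straight-line code rather than the real method body):

    # In HomeServer.setup():
    #     with self.get_db_conn() as conn:
    #         self.datastores = DataStores(self.DATASTORE_CLASS, conn, self)
    #         conn.commit()
    #
    # which, inside DataStores.__init__, amounts to:
    database = Database(hs)                                        # build the wrapper once
    database.engine.check_database(db_conn.cursor())               # sanity-check the configured engine
    prepare_database(db_conn, database.engine, config=hs.config)   # create/upgrade the schema
    main = main_store_class(database, db_conn, hs)                 # hand the same Database to the main store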
@@ -20,6 +20,7 @@ import logging
 import time

 from synapse.api.constants import PresenceState
+from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.util.id_generators import (
     ChainedIdGenerator,

@@ -111,10 +112,20 @@ class DataStore(
     RelationsStore,
     CacheInvalidationStore,
 ):
-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):
         self.hs = hs
         self._clock = hs.get_clock()
-        self.database_engine = hs.database_engine
+        self.database_engine = database.engine
+
+        all_users_native = are_all_users_on_domain(
+            db_conn.cursor(), database.engine, hs.hostname
+        )
+        if not all_users_native:
+            raise Exception(
+                "Found users in database not native to %s!\n"
+                "You cannot changed a synapse server_name after it's been configured"
+                % (hs.hostname,)
+            )

         self._stream_id_gen = StreamIdGenerator(
             db_conn,

@@ -169,7 +180,7 @@ class DataStore(
         else:
             self._cache_id_gen = None

-        super(DataStore, self).__init__(db_conn, hs)
+        super(DataStore, self).__init__(database, db_conn, hs)

         self._presence_on_startup = self._get_active_presence(db_conn)

@@ -554,3 +565,15 @@ class DataStore(
             retcols=["name", "password_hash", "is_guest", "admin", "user_type"],
             desc="search_users",
         )
+
+
+def are_all_users_on_domain(txn, database_engine, domain):
+    sql = database_engine.convert_param_style(
+        "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
+    )
+    pat = "%:" + domain
+    txn.execute(sql, (pat,))
+    num_not_matching = txn.fetchall()[0][0]
+    if num_not_matching == 0:
+        return True
+    return False
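The are_all_users_on_domain helper, moved here from the synapse.storage package, is the guard behind the "you cannot change server_name" error raised in DataStore.__init__ above: it counts rows in the users table whose name does not end in ":<server_name>". A small illustration of the same query with a concrete, made-up domain:

    def users_are_native(txn, database_engine, domain="example.com"):
        # Equivalent to are_all_users_on_domain above; with this domain the
        # LIKE pattern becomes "%:example.com".
        sql = database_engine.convert_param_style(
            "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
        )
        txn.execute(sql, ("%:" + domain,))
        return txn.fetchall()[0][0] == 0  # True -> startup may continue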
|
@ -22,6 +22,7 @@ from canonicaljson import json
|
|||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.storage._base import SQLBaseStore
|
from synapse.storage._base import SQLBaseStore
|
||||||
|
from synapse.storage.database import Database
|
||||||
from synapse.storage.util.id_generators import StreamIdGenerator
|
from synapse.storage.util.id_generators import StreamIdGenerator
|
||||||
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
|
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
|
||||||
from synapse.util.caches.stream_change_cache import StreamChangeCache
|
from synapse.util.caches.stream_change_cache import StreamChangeCache
|
||||||
@ -38,13 +39,13 @@ class AccountDataWorkerStore(SQLBaseStore):
|
|||||||
# the abstract methods being implemented.
|
# the abstract methods being implemented.
|
||||||
__metaclass__ = abc.ABCMeta
|
__metaclass__ = abc.ABCMeta
|
||||||
|
|
||||||
def __init__(self, db_conn, hs):
|
def __init__(self, database: Database, db_conn, hs):
|
||||||
account_max = self.get_max_account_data_stream_id()
|
account_max = self.get_max_account_data_stream_id()
|
||||||
self._account_data_stream_cache = StreamChangeCache(
|
self._account_data_stream_cache = StreamChangeCache(
|
||||||
"AccountDataAndTagsChangeCache", account_max
|
"AccountDataAndTagsChangeCache", account_max
|
||||||
)
|
)
|
||||||
|
|
||||||
super(AccountDataWorkerStore, self).__init__(db_conn, hs)
|
super(AccountDataWorkerStore, self).__init__(database, db_conn, hs)
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def get_max_account_data_stream_id(self):
|
def get_max_account_data_stream_id(self):
|
||||||
@ -270,12 +271,12 @@ class AccountDataWorkerStore(SQLBaseStore):
|
|||||||
|
|
||||||
|
|
||||||
class AccountDataStore(AccountDataWorkerStore):
|
class AccountDataStore(AccountDataWorkerStore):
|
||||||
def __init__(self, db_conn, hs):
|
def __init__(self, database: Database, db_conn, hs):
|
||||||
self._account_data_id_gen = StreamIdGenerator(
|
self._account_data_id_gen = StreamIdGenerator(
|
||||||
db_conn, "account_data_max_stream_id", "stream_id"
|
db_conn, "account_data_max_stream_id", "stream_id"
|
||||||
)
|
)
|
||||||
|
|
||||||
super(AccountDataStore, self).__init__(db_conn, hs)
|
super(AccountDataStore, self).__init__(database, db_conn, hs)
|
||||||
|
|
||||||
def get_max_account_data_stream_id(self):
|
def get_max_account_data_stream_id(self):
|
||||||
"""Get the current max stream id for the private user data stream
|
"""Get the current max stream id for the private user data stream
|
||||||
|
@@ -24,6 +24,7 @@ from synapse.appservice import AppServiceTransaction
 from synapse.config.appservice import load_appservices
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
+from synapse.storage.database import Database

 logger = logging.getLogger(__name__)

@@ -48,13 +49,13 @@ def _make_exclusive_regex(services_cache):


 class ApplicationServiceWorkerStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):
         self.services_cache = load_appservices(
             hs.hostname, hs.config.app_service_config_files
         )
         self.exclusive_user_regex = _make_exclusive_regex(self.services_cache)

-        super(ApplicationServiceWorkerStore, self).__init__(db_conn, hs)
+        super(ApplicationServiceWorkerStore, self).__init__(database, db_conn, hs)

     def get_app_services(self):
         return self.services_cache
@@ -21,6 +21,7 @@ from twisted.internet import defer

 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.storage._base import SQLBaseStore
+from synapse.storage.database import Database
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.caches.descriptors import Cache

@@ -33,8 +34,8 @@ LAST_SEEN_GRANULARITY = 120 * 1000


 class ClientIpBackgroundUpdateStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(ClientIpBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(ClientIpBackgroundUpdateStore, self).__init__(database, db_conn, hs)

         self.db.updates.register_background_index_update(
             "user_ips_device_index",

@@ -363,13 +364,13 @@ class ClientIpBackgroundUpdateStore(SQLBaseStore):


 class ClientIpStore(ClientIpBackgroundUpdateStore):
-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):

         self.client_ip_last_seen = Cache(
             name="client_ip_last_seen", keylen=4, max_entries=50000 * CACHE_SIZE_FACTOR
         )

-        super(ClientIpStore, self).__init__(db_conn, hs)
+        super(ClientIpStore, self).__init__(database, db_conn, hs)

         self.user_ips_max_age = hs.config.user_ips_max_age

@@ -21,6 +21,7 @@ from twisted.internet import defer

 from synapse.logging.opentracing import log_kv, set_tag, trace
 from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
+from synapse.storage.database import Database
 from synapse.util.caches.expiringcache import ExpiringCache

 logger = logging.getLogger(__name__)

@@ -210,8 +211,8 @@ class DeviceInboxWorkerStore(SQLBaseStore):
 class DeviceInboxBackgroundUpdateStore(SQLBaseStore):
     DEVICE_INBOX_STREAM_ID = "device_inbox_stream_drop"

-    def __init__(self, db_conn, hs):
-        super(DeviceInboxBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(DeviceInboxBackgroundUpdateStore, self).__init__(database, db_conn, hs)

         self.db.updates.register_background_index_update(
             "device_inbox_stream_index",

@@ -241,8 +242,8 @@ class DeviceInboxBackgroundUpdateStore(SQLBaseStore):
 class DeviceInboxStore(DeviceInboxWorkerStore, DeviceInboxBackgroundUpdateStore):
     DEVICE_INBOX_STREAM_ID = "device_inbox_stream_drop"

-    def __init__(self, db_conn, hs):
-        super(DeviceInboxStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(DeviceInboxStore, self).__init__(database, db_conn, hs)

         # Map of (user_id, device_id) to the last stream_id that has been
         # deleted up to. This is so that we can no op deletions.
@@ -31,6 +31,7 @@ from synapse.logging.opentracing import (
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
+from synapse.storage.database import Database
 from synapse.types import get_verify_key_from_cross_signing_key
 from synapse.util import batch_iter
 from synapse.util.caches.descriptors import (

@@ -642,8 +643,8 @@ class DeviceWorkerStore(SQLBaseStore):


 class DeviceBackgroundUpdateStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(DeviceBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(DeviceBackgroundUpdateStore, self).__init__(database, db_conn, hs)

         self.db.updates.register_background_index_update(
             "device_lists_stream_idx",

@@ -692,8 +693,8 @@ class DeviceBackgroundUpdateStore(SQLBaseStore):


 class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
-    def __init__(self, db_conn, hs):
-        super(DeviceStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(DeviceStore, self).__init__(database, db_conn, hs)

         # Map of (user_id, device_id) -> bool. If there is an entry that implies
         # the device exists.
@@ -28,6 +28,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
 from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
 from synapse.storage.data_stores.main.signatures import SignatureWorkerStore
+from synapse.storage.database import Database
 from synapse.util.caches.descriptors import cached

 logger = logging.getLogger(__name__)

@@ -491,8 +492,8 @@ class EventFederationStore(EventFederationWorkerStore):

     EVENT_AUTH_STATE_ONLY = "event_auth_state_only"

-    def __init__(self, db_conn, hs):
-        super(EventFederationStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(EventFederationStore, self).__init__(database, db_conn, hs)

         self.db.updates.register_background_update_handler(
             self.EVENT_AUTH_STATE_ONLY, self._background_delete_non_state_event_auth
@@ -24,6 +24,7 @@ from twisted.internet import defer

 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import LoggingTransaction, SQLBaseStore
+from synapse.storage.database import Database
 from synapse.util.caches.descriptors import cachedInlineCallbacks

 logger = logging.getLogger(__name__)

@@ -68,8 +69,8 @@ def _deserialize_action(actions, is_highlight):


 class EventPushActionsWorkerStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(EventPushActionsWorkerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(EventPushActionsWorkerStore, self).__init__(database, db_conn, hs)

         # These get correctly set by _find_stream_orderings_for_times_txn
         self.stream_ordering_month_ago = None

@@ -611,8 +612,8 @@ class EventPushActionsWorkerStore(SQLBaseStore):
 class EventPushActionsStore(EventPushActionsWorkerStore):
     EPA_HIGHLIGHT_INDEX = "epa_highlight_index"

-    def __init__(self, db_conn, hs):
-        super(EventPushActionsStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(EventPushActionsStore, self).__init__(database, db_conn, hs)

         self.db.updates.register_background_index_update(
             self.EPA_HIGHLIGHT_INDEX,
@@ -41,6 +41,7 @@ from synapse.storage._base import make_in_list_sql_clause
 from synapse.storage.data_stores.main.event_federation import EventFederationStore
 from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
 from synapse.storage.data_stores.main.state import StateGroupWorkerStore
+from synapse.storage.database import Database
 from synapse.types import RoomStreamToken, get_domain_from_id
 from synapse.util import batch_iter
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks

@@ -95,8 +96,8 @@ def _retry_on_integrity_error(func):
 class EventsStore(
     StateGroupWorkerStore, EventFederationStore, EventsWorkerStore,
 ):
-    def __init__(self, db_conn, hs):
-        super(EventsStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(EventsStore, self).__init__(database, db_conn, hs)

         # Collect metrics on the number of forward extremities that exist.
         # Counter of number of extremities to count
@@ -23,6 +23,7 @@ from twisted.internet import defer

 from synapse.api.constants import EventContentFields
 from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
+from synapse.storage.database import Database

 logger = logging.getLogger(__name__)

@@ -33,8 +34,8 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
     EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url"
     DELETE_SOFT_FAILED_EXTREMITIES = "delete_soft_failed_extremities"

-    def __init__(self, db_conn, hs):
-        super(EventsBackgroundUpdatesStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(EventsBackgroundUpdatesStore, self).__init__(database, db_conn, hs)

         self.db.updates.register_background_update_handler(
             self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
@@ -33,6 +33,7 @@ from synapse.events.utils import prune_event
 from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
+from synapse.storage.database import Database
 from synapse.types import get_domain_from_id
 from synapse.util import batch_iter
 from synapse.util.caches.descriptors import Cache

@@ -55,8 +56,8 @@ _EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))


 class EventsWorkerStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(EventsWorkerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(EventsWorkerStore, self).__init__(database, db_conn, hs)

         self._get_event_cache = Cache(
             "*getEvent*", keylen=3, max_entries=hs.config.event_cache_size
@@ -13,11 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from synapse.storage._base import SQLBaseStore
+from synapse.storage.database import Database


 class MediaRepositoryBackgroundUpdateStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(MediaRepositoryBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(MediaRepositoryBackgroundUpdateStore, self).__init__(
+            database, db_conn, hs
+        )

         self.db.updates.register_background_index_update(
             update_name="local_media_repository_url_idx",

@@ -31,8 +34,8 @@ class MediaRepositoryBackgroundUpdateStore(SQLBaseStore):
 class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
     """Persistence for attachments and avatars"""

-    def __init__(self, db_conn, hs):
-        super(MediaRepositoryStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(MediaRepositoryStore, self).__init__(database, db_conn, hs)

     def get_local_media(self, media_id):
         """Get the metadata for a local piece of media
|
@ -17,6 +17,7 @@ import logging
|
|||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.storage._base import SQLBaseStore
|
from synapse.storage._base import SQLBaseStore
|
||||||
|
from synapse.storage.database import Database
|
||||||
from synapse.util.caches.descriptors import cached
|
from synapse.util.caches.descriptors import cached
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -27,13 +28,13 @@ LAST_SEEN_GRANULARITY = 60 * 60 * 1000
|
|||||||
|
|
||||||
|
|
||||||
class MonthlyActiveUsersStore(SQLBaseStore):
|
class MonthlyActiveUsersStore(SQLBaseStore):
|
||||||
def __init__(self, dbconn, hs):
|
def __init__(self, database: Database, db_conn, hs):
|
||||||
super(MonthlyActiveUsersStore, self).__init__(None, hs)
|
super(MonthlyActiveUsersStore, self).__init__(database, db_conn, hs)
|
||||||
self._clock = hs.get_clock()
|
self._clock = hs.get_clock()
|
||||||
self.hs = hs
|
self.hs = hs
|
||||||
# Do not add more reserved users than the total allowable number
|
# Do not add more reserved users than the total allowable number
|
||||||
self.db.new_transaction(
|
self.db.new_transaction(
|
||||||
dbconn,
|
db_conn,
|
||||||
"initialise_mau_threepids",
|
"initialise_mau_threepids",
|
||||||
[],
|
[],
|
||||||
[],
|
[],
|
||||||
|
@@ -27,6 +27,7 @@ from synapse.storage.data_stores.main.appservice import ApplicationServiceWorker
 from synapse.storage.data_stores.main.pusher import PusherWorkerStore
 from synapse.storage.data_stores.main.receipts import ReceiptsWorkerStore
 from synapse.storage.data_stores.main.roommember import RoomMemberWorkerStore
+from synapse.storage.database import Database
 from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException
 from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList
 from synapse.util.caches.stream_change_cache import StreamChangeCache

@@ -72,8 +73,8 @@ class PushRulesWorkerStore(
     # the abstract methods being implemented.
     __metaclass__ = abc.ABCMeta

-    def __init__(self, db_conn, hs):
-        super(PushRulesWorkerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(PushRulesWorkerStore, self).__init__(database, db_conn, hs)

         push_rules_prefill, push_rules_id = self.db.get_cache_dict(
             db_conn,
|
@@ -22,6 +22,7 @@ from canonicaljson import json
 from twisted.internet import defer

 from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
+from synapse.storage.database import Database
 from synapse.storage.util.id_generators import StreamIdGenerator
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
 from synapse.util.caches.stream_change_cache import StreamChangeCache

@@ -38,8 +39,8 @@ class ReceiptsWorkerStore(SQLBaseStore):
     # the abstract methods being implemented.
     __metaclass__ = abc.ABCMeta

-    def __init__(self, db_conn, hs):
-        super(ReceiptsWorkerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(ReceiptsWorkerStore, self).__init__(database, db_conn, hs)

         self._receipts_stream_cache = StreamChangeCache(
             "ReceiptsRoomChangeCache", self.get_max_receipt_stream_id()

@@ -315,14 +316,14 @@ class ReceiptsWorkerStore(SQLBaseStore):


 class ReceiptsStore(ReceiptsWorkerStore):
-    def __init__(self, db_conn, hs):
+    def __init__(self, database: Database, db_conn, hs):
         # We instantiate this first as the ReceiptsWorkerStore constructor
         # needs to be able to call get_max_receipt_stream_id
         self._receipts_id_gen = StreamIdGenerator(
             db_conn, "receipts_linearized", "stream_id"
         )

-        super(ReceiptsStore, self).__init__(db_conn, hs)
+        super(ReceiptsStore, self).__init__(database, db_conn, hs)

     def get_max_receipt_stream_id(self):
         return self._receipts_id_gen.get_current_token()
|
@@ -27,6 +27,7 @@ from synapse.api.constants import UserTypes
 from synapse.api.errors import Codes, StoreError, SynapseError, ThreepidValidationError
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import SQLBaseStore
+from synapse.storage.database import Database
 from synapse.types import UserID
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks


@@ -36,8 +37,8 @@ logger = logging.getLogger(__name__)


 class RegistrationWorkerStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(RegistrationWorkerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RegistrationWorkerStore, self).__init__(database, db_conn, hs)

         self.config = hs.config
         self.clock = hs.get_clock()

@@ -794,8 +795,8 @@ class RegistrationWorkerStore(SQLBaseStore):


 class RegistrationBackgroundUpdateStore(RegistrationWorkerStore):
-    def __init__(self, db_conn, hs):
-        super(RegistrationBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RegistrationBackgroundUpdateStore, self).__init__(database, db_conn, hs)

         self.clock = hs.get_clock()
         self.config = hs.config

@@ -920,8 +921,8 @@ class RegistrationBackgroundUpdateStore(RegistrationWorkerStore):


 class RegistrationStore(RegistrationBackgroundUpdateStore):
-    def __init__(self, db_conn, hs):
-        super(RegistrationStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RegistrationStore, self).__init__(database, db_conn, hs)

         self._account_validity = hs.config.account_validity

|
@@ -29,6 +29,7 @@ from synapse.api.constants import EventTypes
 from synapse.api.errors import StoreError
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.data_stores.main.search import SearchStore
+from synapse.storage.database import Database
 from synapse.types import ThirdPartyInstanceID
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks


@@ -361,8 +362,8 @@ class RoomWorkerStore(SQLBaseStore):


 class RoomBackgroundUpdateStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(RoomBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RoomBackgroundUpdateStore, self).__init__(database, db_conn, hs)

         self.config = hs.config


@@ -440,8 +441,8 @@ class RoomBackgroundUpdateStore(SQLBaseStore):


 class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore, SearchStore):
-    def __init__(self, db_conn, hs):
-        super(RoomStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RoomStore, self).__init__(database, db_conn, hs)

         self.config = hs.config

|
@@ -32,6 +32,7 @@ from synapse.storage._base import (
     make_in_list_sql_clause,
 )
 from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
+from synapse.storage.database import Database
 from synapse.storage.engines import Sqlite3Engine
 from synapse.storage.roommember import (
     GetRoomsForUserWithStreamOrdering,

@@ -54,8 +55,8 @@ _CURRENT_STATE_MEMBERSHIP_UPDATE_NAME = "current_state_events_membership"


 class RoomMemberWorkerStore(EventsWorkerStore):
-    def __init__(self, db_conn, hs):
-        super(RoomMemberWorkerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RoomMemberWorkerStore, self).__init__(database, db_conn, hs)

         # Is the current_state_events.membership up to date? Or is the
         # background update still running?

@@ -835,8 +836,8 @@ class RoomMemberWorkerStore(EventsWorkerStore):


 class RoomMemberBackgroundUpdateStore(SQLBaseStore):
-    def __init__(self, db_conn, hs):
-        super(RoomMemberBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RoomMemberBackgroundUpdateStore, self).__init__(database, db_conn, hs)
         self.db.updates.register_background_update_handler(
             _MEMBERSHIP_PROFILE_UPDATE_NAME, self._background_add_membership_profile
         )

@@ -991,8 +992,8 @@ class RoomMemberBackgroundUpdateStore(SQLBaseStore):


 class RoomMemberStore(RoomMemberWorkerStore, RoomMemberBackgroundUpdateStore):
-    def __init__(self, db_conn, hs):
-        super(RoomMemberStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(RoomMemberStore, self).__init__(database, db_conn, hs)

     def _store_room_members_txn(self, txn, events, backfilled):
         """Store a room member in the database.
|
@@ -25,6 +25,7 @@ from twisted.internet import defer

 from synapse.api.errors import SynapseError
 from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
+from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine, Sqlite3Engine

 logger = logging.getLogger(__name__)

@@ -42,8 +43,8 @@ class SearchBackgroundUpdateStore(SQLBaseStore):
     EVENT_SEARCH_USE_GIST_POSTGRES_NAME = "event_search_postgres_gist"
     EVENT_SEARCH_USE_GIN_POSTGRES_NAME = "event_search_postgres_gin"

-    def __init__(self, db_conn, hs):
-        super(SearchBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SearchBackgroundUpdateStore, self).__init__(database, db_conn, hs)

         if not hs.config.enable_search:
             return

@@ -342,8 +343,8 @@ class SearchBackgroundUpdateStore(SQLBaseStore):


 class SearchStore(SearchBackgroundUpdateStore):
-    def __init__(self, db_conn, hs):
-        super(SearchStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(SearchStore, self).__init__(database, db_conn, hs)

     def store_event_search_txn(self, txn, event, key, value):
         """Add event to the search table
|
@@ -28,6 +28,7 @@ from synapse.events import EventBase
 from synapse.events.snapshot import EventContext
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
+from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.state import StateFilter
 from synapse.util.caches import get_cache_factor_for, intern_string

@@ -213,8 +214,8 @@ class StateGroupWorkerStore(
     STATE_GROUP_INDEX_UPDATE_NAME = "state_group_state_type_index"
     CURRENT_STATE_INDEX_UPDATE_NAME = "current_state_members_idx"

-    def __init__(self, db_conn, hs):
-        super(StateGroupWorkerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(StateGroupWorkerStore, self).__init__(database, db_conn, hs)

         # Originally the state store used a single DictionaryCache to cache the
         # event IDs for the state types in a given state group to avoid hammering

@@ -1029,8 +1030,8 @@ class StateBackgroundUpdateStore(StateGroupBackgroundUpdateStore):
     CURRENT_STATE_INDEX_UPDATE_NAME = "current_state_members_idx"
     EVENT_STATE_GROUP_INDEX_UPDATE_NAME = "event_to_state_groups_sg_index"

-    def __init__(self, db_conn, hs):
-        super(StateBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(StateBackgroundUpdateStore, self).__init__(database, db_conn, hs)
         self.db.updates.register_background_update_handler(
             self.STATE_GROUP_DEDUPLICATION_UPDATE_NAME,
             self._background_deduplicate_state,

@@ -1245,8 +1246,8 @@ class StateStore(StateGroupWorkerStore, StateBackgroundUpdateStore):
     * `state_groups_state`: Maps state group to state events.
     """

-    def __init__(self, db_conn, hs):
-        super(StateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(StateStore, self).__init__(database, db_conn, hs)

     def _store_event_state_mappings_txn(
         self, txn, events_and_contexts: Iterable[Tuple[EventBase, EventContext]]
|
@@ -22,6 +22,7 @@ from twisted.internet.defer import DeferredLock

 from synapse.api.constants import EventTypes, Membership
 from synapse.storage.data_stores.main.state_deltas import StateDeltasStore
+from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine
 from synapse.util.caches.descriptors import cached


@@ -58,8 +59,8 @@ TYPE_TO_ORIGIN_TABLE = {"room": ("rooms", "room_id"), "user": ("users", "name")}


 class StatsStore(StateDeltasStore):
-    def __init__(self, db_conn, hs):
-        super(StatsStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(StatsStore, self).__init__(database, db_conn, hs)

         self.server_name = hs.hostname
         self.clock = self.hs.get_clock()
|
@@ -47,6 +47,7 @@ from twisted.internet import defer
 from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
+from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine
 from synapse.types import RoomStreamToken
 from synapse.util.caches.stream_change_cache import StreamChangeCache

@@ -251,8 +252,8 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):

     __metaclass__ = abc.ABCMeta

-    def __init__(self, db_conn, hs):
-        super(StreamWorkerStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(StreamWorkerStore, self).__init__(database, db_conn, hs)

         events_max = self.get_room_max_stream_ordering()
         event_cache_prefill, min_event_val = self.db.get_cache_dict(
|
@@ -24,6 +24,7 @@ from twisted.internet import defer

 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import SQLBaseStore, db_to_json
+from synapse.storage.database import Database
 from synapse.util.caches.expiringcache import ExpiringCache

 # py2 sqlite has buffer hardcoded as only binary type, so we must use it,

@@ -52,8 +53,8 @@ class TransactionStore(SQLBaseStore):
     """A collection of queries for handling PDUs.
     """

-    def __init__(self, db_conn, hs):
-        super(TransactionStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(TransactionStore, self).__init__(database, db_conn, hs)

         self._clock.looping_call(self._start_cleanup_transactions, 30 * 60 * 1000)

|
@@ -21,6 +21,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, JoinRules
 from synapse.storage.data_stores.main.state import StateFilter
 from synapse.storage.data_stores.main.state_deltas import StateDeltasStore
+from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine, Sqlite3Engine
 from synapse.types import get_domain_from_id, get_localpart_from_id
 from synapse.util.caches.descriptors import cached

@@ -37,8 +38,8 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore):
     # add_users_who_share_private_rooms?
     SHARE_PRIVATE_WORKING_SET = 500

-    def __init__(self, db_conn, hs):
-        super(UserDirectoryBackgroundUpdateStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(UserDirectoryBackgroundUpdateStore, self).__init__(database, db_conn, hs)

         self.server_name = hs.hostname


@@ -549,8 +550,8 @@ class UserDirectoryStore(UserDirectoryBackgroundUpdateStore):
     # add_users_who_share_private_rooms?
     SHARE_PRIVATE_WORKING_SET = 500

-    def __init__(self, db_conn, hs):
-        super(UserDirectoryStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(UserDirectoryStore, self).__init__(database, db_conn, hs)

     def remove_from_user_dir(self, user_id):
         def _remove_from_user_dir_txn(txn):
|
@@ -234,7 +234,7 @@ class Database(object):
         # to watch it
         self._txn_perf_counters = PerformanceCounters()

-        self.database_engine = hs.database_engine
+        self.engine = hs.database_engine

         # A set of tables that are not safe to use native upserts in.
         self._unsafe_to_upsert_tables = set(UNIQUE_INDEX_BACKGROUND_UPDATES.keys())

@@ -242,10 +242,10 @@ class Database(object):
         # We add the user_directory_search table to the blacklist on SQLite
         # because the existing search table does not have an index, making it
         # unsafe to use native upserts.
-        if isinstance(self.database_engine, Sqlite3Engine):
+        if isinstance(self.engine, Sqlite3Engine):
             self._unsafe_to_upsert_tables.add("user_directory_search")

-        if self.database_engine.can_native_upsert:
+        if self.engine.can_native_upsert:
             # Check ASAP (and then later, every 1s) to see if we have finished
             # background updates of tables that aren't safe to update.
             self._clock.call_later(

@@ -331,7 +331,7 @@ class Database(object):
                 cursor = LoggingTransaction(
                     conn.cursor(),
                     name,
-                    self.database_engine,
+                    self.engine,
                     after_callbacks,
                     exception_callbacks,
                 )

@@ -339,7 +339,7 @@ class Database(object):
                     r = func(cursor, *args, **kwargs)
                     conn.commit()
                     return r
-                except self.database_engine.module.OperationalError as e:
+                except self.engine.module.OperationalError as e:
                     # This can happen if the database disappears mid
                     # transaction.
                     logger.warning(

@@ -353,20 +353,20 @@ class Database(object):
                         i += 1
                         try:
                             conn.rollback()
-                        except self.database_engine.module.Error as e1:
+                        except self.engine.module.Error as e1:
                             logger.warning(
                                 "[TXN EROLL] {%s} %s", name, exception_to_unicode(e1)
                             )
                         continue
                     raise
-                except self.database_engine.module.DatabaseError as e:
-                    if self.database_engine.is_deadlock(e):
+                except self.engine.module.DatabaseError as e:
+                    if self.engine.is_deadlock(e):
                         logger.warning("[TXN DEADLOCK] {%s} %d/%d", name, i, N)
                         if i < N:
                             i += 1
                             try:
                                 conn.rollback()
-                            except self.database_engine.module.Error as e1:
+                            except self.engine.module.Error as e1:
                                 logger.warning(
                                     "[TXN EROLL] {%s} %s",
                                     name,

@@ -494,7 +494,7 @@ class Database(object):
             sql_scheduling_timer.observe(sched_duration_sec)
             context.add_database_scheduled(sched_duration_sec)

-            if self.database_engine.is_connection_closed(conn):
+            if self.engine.is_connection_closed(conn):
                 logger.debug("Reconnecting closed database connection")
                 conn.reconnect()


@@ -561,7 +561,7 @@ class Database(object):
         """
         try:
             yield self.runInteraction(desc, self.simple_insert_txn, table, values)
-        except self.database_engine.module.IntegrityError:
+        except self.engine.module.IntegrityError:
             # We have to do or_ignore flag at this layer, since we can't reuse
             # a cursor after we receive an error from the db.
             if not or_ignore:

@@ -660,7 +660,7 @@ class Database(object):
                     lock=lock,
                 )
                 return result
-            except self.database_engine.module.IntegrityError as e:
+            except self.engine.module.IntegrityError as e:
                 attempts += 1
                 if attempts >= 5:
                     # don't retry forever, because things other than races

@@ -692,10 +692,7 @@ class Database(object):
            upserts return True if a new entry was created, False if an existing
            one was updated.
        """
-        if (
-            self.database_engine.can_native_upsert
-            and table not in self._unsafe_to_upsert_tables
-        ):
+        if self.engine.can_native_upsert and table not in self._unsafe_to_upsert_tables:
            return self.simple_upsert_txn_native_upsert(
                txn, table, keyvalues, values, insertion_values=insertion_values
            )

@@ -726,7 +723,7 @@ class Database(object):
         """
         # We need to lock the table :(, unless we're *really* careful
         if lock:
-            self.database_engine.lock_table(txn, table)
+            self.engine.lock_table(txn, table)

         def _getwhere(key):
             # If the value we're passing in is None (aka NULL), we need to use

@@ -828,10 +825,7 @@ class Database(object):
        Returns:
            None
        """
-        if (
-            self.database_engine.can_native_upsert
-            and table not in self._unsafe_to_upsert_tables
-        ):
+        if self.engine.can_native_upsert and table not in self._unsafe_to_upsert_tables:
            return self.simple_upsert_many_txn_native_upsert(
                txn, table, key_names, key_values, value_names, value_values
            )

@@ -1301,7 +1295,7 @@ class Database(object):
            "limit": limit,
        }

-        sql = self.database_engine.convert_param_style(sql)
+        sql = self.engine.convert_param_style(sql)

        txn = db_conn.cursor()
        txn.execute(sql, (int(max_value),))
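
Within `Database` itself the change is a rename: the engine handle moves from `self.database_engine` to `self.engine`, and two multi-line `if` conditions are folded onto one line. For context, most of the renamed references sit inside the transaction retry loop; below is a stripped-down, self-contained sketch of that retry-on-deadlock shape, with hypothetical names, not the actual Synapse implementation.

# Hypothetical sketch of a retry-on-deadlock transaction runner, illustrating
# the control flow that the renamed `engine` attribute is used in above.
def run_transaction(conn, func, is_deadlock, max_retries=5):
    """Run func(cursor) in a transaction, retrying if a deadlock is reported."""
    attempt = 0
    while True:
        cursor = conn.cursor()
        try:
            result = func(cursor)
            conn.commit()
            return result
        except Exception as exc:
            conn.rollback()
            if is_deadlock(exc) and attempt < max_retries:
                attempt += 1
                continue  # re-run the whole transaction from scratch
            raise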
|
@@ -20,6 +20,7 @@ from synapse.replication.tcp.client import (
     ReplicationClientHandler,
 )
 from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
+from synapse.storage.database import Database

 from tests import unittest
 from tests.server import FakeTransport

@@ -42,7 +43,9 @@ class BaseSlavedStoreTestCase(unittest.HomeserverTestCase):

         self.master_store = self.hs.get_datastore()
         self.storage = hs.get_storage()
-        self.slaved_store = self.STORE_TYPE(self.hs.get_db_conn(), self.hs)
+        self.slaved_store = self.STORE_TYPE(
+            Database(hs), self.hs.get_db_conn(), self.hs
+        )
         self.event_id = 0

         server_factory = ReplicationStreamProtocolFactory(self.hs)
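
Tests that build a store by hand follow the same calling convention: wrap the homeserver in a `Database` and pass it as the first constructor argument, exactly as the hunk above does. A small helper sketch of that pattern follows; the `build_store_for_test` name is illustrative and not part of the test suite.

# Illustrative helper only; mirrors the construction shown in the hunk above.
def build_store_for_test(hs, store_cls):
    from synapse.storage.database import Database

    database = Database(hs)      # the new first argument
    db_conn = hs.get_db_conn()   # the raw connection is still passed alongside
    return store_cls(database, db_conn, hs)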
|
@@ -28,6 +28,7 @@ from synapse.storage.data_stores.main.appservice import (
     ApplicationServiceStore,
     ApplicationServiceTransactionStore,
 )
+from synapse.storage.database import Database

 from tests import unittest
 from tests.utils import setup_test_homeserver

@@ -54,7 +55,8 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
         self._add_appservice("token2", "as2", "some_url", "some_hs_token", "bob")
         self._add_appservice("token3", "as3", "some_url", "some_hs_token", "bob")
         # must be done after inserts
-        self.store = ApplicationServiceStore(hs.get_db_conn(), hs)
+        database = Database(hs)
+        self.store = ApplicationServiceStore(database, hs.get_db_conn(), hs)

     def tearDown(self):
         # TODO: suboptimal that we need to create files for tests!

@@ -123,7 +125,8 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):

         self.as_yaml_files = []

-        self.store = TestTransactionStore(hs.get_db_conn(), hs)
+        database = Database(hs)
+        self.store = TestTransactionStore(database, hs.get_db_conn(), hs)

     def _add_service(self, url, as_token, id):
         as_yaml = dict(

@@ -382,8 +385,8 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):

 # required for ApplicationServiceTransactionStoreTestCase tests
 class TestTransactionStore(ApplicationServiceTransactionStore, ApplicationServiceStore):
-    def __init__(self, db_conn, hs):
-        super(TestTransactionStore, self).__init__(db_conn, hs)
+    def __init__(self, database: Database, db_conn, hs):
+        super(TestTransactionStore, self).__init__(database, db_conn, hs)


 class ApplicationServiceStoreConfigTestCase(unittest.TestCase):

@@ -416,7 +419,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         hs.config.event_cache_size = 1
         hs.config.password_providers = []

-        ApplicationServiceStore(hs.get_db_conn(), hs)
+        ApplicationServiceStore(Database(hs), hs.get_db_conn(), hs)

     @defer.inlineCallbacks
     def test_duplicate_ids(self):

@@ -432,7 +435,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         hs.config.password_providers = []

         with self.assertRaises(ConfigError) as cm:
-            ApplicationServiceStore(hs.get_db_conn(), hs)
+            ApplicationServiceStore(Database(hs), hs.get_db_conn(), hs)

         e = cm.exception
         self.assertIn(f1, str(e))

@@ -453,7 +456,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         hs.config.password_providers = []

         with self.assertRaises(ConfigError) as cm:
-            ApplicationServiceStore(hs.get_db_conn(), hs)
+            ApplicationServiceStore(Database(hs), hs.get_db_conn(), hs)

         e = cm.exception
         self.assertIn(f1, str(e))
|
@@ -21,6 +21,7 @@ from mock import Mock
 from twisted.internet import defer

 from synapse.storage._base import SQLBaseStore
+from synapse.storage.database import Database
 from synapse.storage.engines import create_engine

 from tests import unittest

@@ -59,7 +60,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
             "test", db_pool=self.db_pool, config=config, database_engine=fake_engine
         )

-        self.datastore = SQLBaseStore(None, hs)
+        self.datastore = SQLBaseStore(Database(hs), None, hs)

     @defer.inlineCallbacks
     def test_insert_1col(self):
|
@@ -16,7 +16,6 @@

 from twisted.internet import defer

-from synapse.storage.data_stores.main.profile import ProfileStore
 from synapse.types import UserID

 from tests import unittest

@@ -28,7 +27,7 @@ class ProfileStoreTestCase(unittest.TestCase):
     def setUp(self):
         hs = yield setup_test_homeserver(self.addCleanup)

-        self.store = ProfileStore(hs.get_db_conn(), hs)
+        self.store = hs.get_datastore()

         self.u_frank = UserID.from_string("@frank:test")

|
@@ -15,8 +15,6 @@

 from twisted.internet import defer

-from synapse.storage.data_stores.main.user_directory import UserDirectoryStore
-
 from tests import unittest
 from tests.utils import setup_test_homeserver


@@ -29,7 +27,7 @@ class UserDirectoryStoreTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def setUp(self):
         self.hs = yield setup_test_homeserver(self.addCleanup)
-        self.store = UserDirectoryStore(self.hs.get_db_conn(), self.hs)
+        self.store = self.hs.get_datastore()

         # alice and bob are both in !room_id. bobby is not but shares
         # a homeserver with alice.
|
@@ -33,6 +33,8 @@ class MessageAcceptTests(unittest.TestCase):
         self.reactor.advance(0.1)
         self.room_id = self.successResultOf(room)["room_id"]

+        self.store = self.homeserver.get_datastore()
+
         # Figure out what the most recent event is
         most_recent = self.successResultOf(
             maybeDeferred(

@@ -77,10 +79,7 @@ class MessageAcceptTests(unittest.TestCase):
         # Make sure we actually joined the room
         self.assertEqual(
             self.successResultOf(
-                maybeDeferred(
-                    self.homeserver.get_datastore().get_latest_event_ids_in_room,
-                    self.room_id,
-                )
+                maybeDeferred(self.store.get_latest_event_ids_in_room, self.room_id)
             )[0],
             "$join:test.serv",
         )

@@ -100,10 +99,7 @@ class MessageAcceptTests(unittest.TestCase):

         # Figure out what the most recent event is
         most_recent = self.successResultOf(
-            maybeDeferred(
-                self.homeserver.get_datastore().get_latest_event_ids_in_room,
-                self.room_id,
-            )
+            maybeDeferred(self.store.get_latest_event_ids_in_room, self.room_id)
         )[0]

         # Now lie about an event

@@ -141,7 +137,5 @@ class MessageAcceptTests(unittest.TestCase):
         )

         # Make sure the invalid event isn't there
-        extrem = maybeDeferred(
-            self.homeserver.get_datastore().get_latest_event_ids_in_room, self.room_id
-        )
+        extrem = maybeDeferred(self.store.get_latest_event_ids_in_room, self.room_id)
         self.assertEqual(self.successResultOf(extrem)[0], "$join:test.serv")
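
The last three hunks only shorten the call sites: the datastore is looked up once in `setUp` and cached on `self.store`, so each `maybeDeferred(...)` call fits on one line. As a reminder of what `maybeDeferred` is doing there, here is a tiny self-contained example; the function and values are illustrative and unrelated to the Synapse test data.

from twisted.internet.defer import maybeDeferred


def get_latest_event_ids(room_id):
    # A plain function; maybeDeferred wraps its return value in a Deferred,
    # which is why the tests can treat sync and async stores uniformly.
    return ["$join:test.serv"]


d = maybeDeferred(get_latest_event_ids, "!room:test.serv")
d.addCallback(lambda ids: print(ids[0]))  # prints "$join:test.serv"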
|