2015-10-13 06:38:48 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-05 13:01:18 -05:00
|
|
|
# Copyright 2014 - 2016 OpenMarket Ltd
|
2018-03-28 09:03:37 -04:00
|
|
|
# Copyright 2018 New Vector Ltd
|
2015-10-13 06:38:48 -04:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
import imp
|
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import re
|
2019-12-19 10:07:37 -05:00
|
|
|
from collections import Counter
|
2020-12-30 08:09:53 -05:00
|
|
|
from typing import Generator, Iterable, List, Optional, TextIO, Tuple
|
2015-10-13 06:38:48 -04:00
|
|
|
|
2019-10-21 11:08:40 -04:00
|
|
|
import attr
|
2020-12-30 08:09:53 -05:00
|
|
|
from typing_extensions import Counter as CounterType
|
2019-10-21 11:08:40 -04:00
|
|
|
|
2020-09-07 06:41:50 -04:00
|
|
|
from synapse.config.homeserver import HomeServerConfig
|
2020-10-02 10:20:45 -04:00
|
|
|
from synapse.storage.database import LoggingDatabaseConnection
|
2020-09-07 06:41:50 -04:00
|
|
|
from synapse.storage.engines import BaseDatabaseEngine
|
2019-06-03 08:02:47 -04:00
|
|
|
from synapse.storage.engines.postgres import PostgresEngine
|
2020-10-02 10:20:45 -04:00
|
|
|
from synapse.storage.types import Cursor
|
2020-09-07 06:41:50 -04:00
|
|
|
from synapse.types import Collection
|
2019-06-03 08:02:47 -04:00
|
|
|
|
2015-10-13 06:38:48 -04:00
|
|
|
logger = logging.getLogger(__name__)


# Remember to update this number every time a change is made to database
# schema files, so the users will be informed on server restarts.
# XXX: If you're about to bump this to 59 (or higher) please create an update
# that drops the unused `cache_invalidation_stream` table, as per #7436!
# XXX: Also add an update to drop `account_data_max_stream_id` as per #7656!
SCHEMA_VERSION = 58

# Absolute path of the directory containing this module; the `schema`
# directories used below are resolved relative to it.
dir_path = os.path.abspath(os.path.dirname(__file__))
|
|
|
|
|
|
|
|
|
|
|
|
class PrepareDatabaseException(Exception):
    """Raised when preparing the database schema fails."""
|
|
|
|
|
|
|
|
|
|
|
|
class UpgradeDatabaseException(PrepareDatabaseException):
    """Raised when upgrading an existing database schema fails."""
|
|
|
|
|
|
|
|
|
2020-09-07 08:04:10 -04:00
|
|
|
# Error messages raised when a worker process finds the database schema in a
# state that, per the checks below, only the main synapse process may fix.

OUTDATED_SCHEMA_ON_WORKER_ERROR = (
    "Expected database schema version %i but got %i: run the main synapse process to "
    "upgrade the database schema before starting worker processes."
)

EMPTY_DATABASE_ON_WORKER_ERROR = (
    "Uninitialised database: run the main synapse process to prepare the database "
    "schema before starting worker processes."
)

UNAPPLIED_DELTA_ON_WORKER_ERROR = (
    "Database schema delta %s has not been applied: run the main synapse process to "
    "upgrade the database schema before starting worker processes."
)
|
|
|
|
|
|
|
|
|
2020-09-07 06:41:50 -04:00
|
|
|
def prepare_database(
    db_conn: LoggingDatabaseConnection,
    database_engine: BaseDatabaseEngine,
    config: Optional[HomeServerConfig],
    databases: Collection[str] = ("main", "state"),
) -> None:
    """Prepares a physical database for usage. Will either create all necessary tables
    or upgrade from an older schema version.

    If `config` is None then prepare_database will assert that no upgrade is
    necessary, *or* will create a fresh database if the database is empty.

    Args:
        db_conn: an open connection to the database to be prepared
        database_engine: the database engine (sqlite or postgres) in use
        config:
            application config, or None if we are connecting to an existing
            database which we expect to be configured already
        databases: The name of the databases that will be used
            with this physical database. Defaults to all databases.

    Raises:
        UpgradeDatabaseException: if run from a worker process while the
            database is uninitialised or not at the current schema version.
        ValueError: if `config` is None but the database is non-empty, or if
            the schema is newer than this version of the code understands.
    """

    try:
        cur = db_conn.cursor(txn_name="prepare_database")

        # sqlite does not automatically start transactions for DDL / SELECT statements,
        # so we start one before running anything. This ensures that any upgrades
        # are either applied completely, or not at all.
        #
        # (psycopg2 automatically starts a transaction as soon as we run any statements
        # at all, so this is redundant but harmless there.)
        cur.execute("BEGIN TRANSACTION")

        logger.info("%r: Checking existing schema version", databases)
        version_info = _get_or_create_schema_state(cur, database_engine)

        if version_info:
            user_version, delta_files, upgraded = version_info
            logger.info(
                "%r: Existing schema is %i (+%i deltas)",
                databases,
                user_version,
                len(delta_files),
            )

            # config should only be None when we are preparing an in-memory SQLite db,
            # which should be empty.
            if config is None:
                raise ValueError(
                    "config==None in prepare_database, but database is not empty"
                )

            # if it's a worker app, refuse to upgrade the database, to avoid multiple
            # workers doing it at once.
            if config.worker_app is not None and user_version != SCHEMA_VERSION:
                raise UpgradeDatabaseException(
                    OUTDATED_SCHEMA_ON_WORKER_ERROR % (SCHEMA_VERSION, user_version)
                )

            _upgrade_existing_database(
                cur,
                user_version,
                delta_files,
                upgraded,
                database_engine,
                config,
                databases=databases,
            )
        else:
            logger.info("%r: Initialising new database", databases)

            # if it's a worker app, refuse to upgrade the database, to avoid multiple
            # workers doing it at once.
            if config and config.worker_app is not None:
                raise UpgradeDatabaseException(EMPTY_DATABASE_ON_WORKER_ERROR)

            _setup_new_database(cur, database_engine, databases=databases)

        # check if any of our configured dynamic modules want a database
        if config is not None:
            _apply_module_schemas(cur, database_engine, config)

        cur.close()
        db_conn.commit()
    except Exception:
        # Roll back any partially-applied schema changes so the next attempt
        # starts from a clean state, then let the caller see the error.
        db_conn.rollback()
        raise
|
|
|
|
|
|
|
|
|
2020-12-30 08:09:53 -05:00
|
|
|
def _setup_new_database(
    cur: Cursor, database_engine: BaseDatabaseEngine, databases: Collection[str]
) -> None:
    """Sets up the physical database by finding a base set of "full schemas" and
    then applying any necessary deltas, including schemas from the given data
    stores.

    The "full_schemas" directory has subdirectories named after versions. This
    function searches for the highest version less than or equal to
    `SCHEMA_VERSION` and executes all .sql files in that directory.

    The function will then apply all deltas for all versions after the base
    version.

    Example directory structure:

        schema/
            delta/
                ...
            full_schemas/
                3/
                    test.sql
                    ...
                11/
                    foo.sql
                    bar.sql
                ...

    In the example foo.sql and bar.sql would be run, and then any delta files
    for versions strictly greater than 11.

    Note: we apply the full schemas and deltas from the top level `schema/`
    folder as well those in the data stores specified.

    Args:
        cur: a database cursor
        database_engine
        databases: The names of the databases to instantiate on the given physical database.

    Raises:
        PrepareDatabaseException: if no usable full-schema directory is found.
    """

    # We're about to set up a brand new database so we check that its
    # configured to our liking.
    database_engine.check_new_database(cur)

    current_dir = os.path.join(dir_path, "schema", "full_schemas")

    # First we find the highest full schema version we have
    valid_versions = []

    for filename in os.listdir(current_dir):
        try:
            ver = int(filename)
        except ValueError:
            # Non-numeric entries in full_schemas/ are not version
            # directories; ignore them.
            continue

        if ver <= SCHEMA_VERSION:
            valid_versions.append(ver)

    if not valid_versions:
        raise PrepareDatabaseException(
            "Could not find a suitable base set of full schemas"
        )

    max_current_ver = max(valid_versions)

    logger.debug("Initialising schema v%d", max_current_ver)

    # Now lets find all the full schema files, both in the global schema and
    # in data store schemas.
    directories = [os.path.join(current_dir, str(max_current_ver))]
    directories.extend(
        os.path.join(
            dir_path,
            "databases",
            database,
            "schema",
            "full_schemas",
            str(max_current_ver),
        )
        for database in databases
    )

    directory_entries = []  # type: List[_DirectoryListing]
    for directory in directories:
        directory_entries.extend(
            _DirectoryListing(file_name, os.path.join(directory, file_name))
            for file_name in os.listdir(directory)
        )

    # Engine-specific schema files carry an extra suffix, e.g. `.sql.postgres`.
    if isinstance(database_engine, PostgresEngine):
        specific = "postgres"
    else:
        specific = "sqlite"

    # Sort so files are applied in a deterministic order regardless of the
    # filesystem's directory listing order.
    directory_entries.sort()
    for entry in directory_entries:
        if entry.file_name.endswith(".sql") or entry.file_name.endswith(
            ".sql." + specific
        ):
            logger.debug("Applying schema %s", entry.absolute_path)
            executescript(cur, entry.absolute_path)

    # Record the base version; `upgraded` is False since it came from a full
    # schema dump rather than from applying deltas.
    cur.execute(
        "INSERT INTO schema_version (version, upgraded) VALUES (?,?)",
        (max_current_ver, False),
    )

    # Apply any deltas for versions after the base version.
    _upgrade_existing_database(
        cur,
        current_version=max_current_ver,
        applied_delta_files=[],
        upgraded=False,
        database_engine=database_engine,
        config=None,
        databases=databases,
        is_empty=True,
    )
|
|
|
|
|
|
|
|
|
2019-04-03 05:07:29 -04:00
|
|
|
def _upgrade_existing_database(
    cur: Cursor,
    current_version: int,
    applied_delta_files: List[str],
    upgraded: bool,
    database_engine: BaseDatabaseEngine,
    config: Optional[HomeServerConfig],
    databases: Collection[str],
    is_empty: bool = False,
) -> None:
    """Upgrades an existing physical database.

    Delta files can either be SQL stored in *.sql files, or python modules
    in *.py.

    There can be multiple delta files per version. Synapse will keep track of
    which delta files have been applied, and will apply any that haven't been
    even if there has been no version bump. This is useful for development
    where orthogonal schema changes may happen on separate branches.

    Different delta files for the same version *must* be orthogonal and give
    the same result when applied in any order. No guarantees are made on the
    order of execution of these scripts.

    This is a no-op if current_version == SCHEMA_VERSION.

    Example directory structure:

        schema/
            delta/
                11/
                    foo.sql
                    ...
                12/
                    foo.sql
                    bar.py
                ...
            full_schemas/
                ...

    In the example, if current_version is 11, then foo.sql will be run if and
    only if `upgraded` is True. Then `foo.sql` and `bar.py` would be run in
    some arbitrary order.

    Note: we apply the delta files from the specified data stores as well as
    those in the top-level schema. We apply all delta files across data stores
    for a version before applying those in the next version.

    Args:
        cur
        current_version: The current version of the schema.
        applied_delta_files: A list of deltas that have already been applied.
        upgraded: Whether the current version was generated by having
            applied deltas or from full schema file. If `False` the function
            will never apply delta files for the given `current_version`, since
            the current_version was generated from a full schema dump which
            already incorporates those deltas.
        database_engine
        config:
            None if we are initialising a blank database, otherwise the application
            config
        databases: The names of the databases to instantiate
            on the given physical database.
        is_empty: Is this a blank database? I.e. do we need to run the
            upgrade portions of the delta scripts.
    """
    if is_empty:
        assert not applied_delta_files
    else:
        assert config

    # Workers are never allowed to apply deltas themselves; the checks below
    # raise if any unapplied delta is found while running as a worker.
    is_worker = config and config.worker_app is not None

    if current_version > SCHEMA_VERSION:
        raise ValueError(
            "Cannot use this database as it is too "
            + "new for the server to understand"
        )

    # some of the deltas assume that config.server_name is set correctly, so now
    # is a good time to run the sanity check.
    if not is_empty and "main" in databases:
        from synapse.storage.databases.main import check_database_before_upgrade

        assert config is not None
        check_database_before_upgrade(cur, database_engine, config)

    # If the current version came from a full schema dump (upgraded=False),
    # its own deltas are already included, so start at the next version.
    start_ver = current_version
    if not upgraded:
        start_ver += 1

    logger.debug("applied_delta_files: %s", applied_delta_files)

    if isinstance(database_engine, PostgresEngine):
        specific_engine_extension = ".postgres"
    else:
        specific_engine_extension = ".sqlite"

    specific_engine_extensions = (".sqlite", ".postgres")

    for v in range(start_ver, SCHEMA_VERSION + 1):
        logger.info("Applying schema deltas for v%d", v)

        # We need to search both the global and per data store schema
        # directories for schema updates.

        # First we find the directories to search in
        delta_dir = os.path.join(dir_path, "schema", "delta", str(v))
        directories = [delta_dir]
        for database in databases:
            directories.append(
                os.path.join(dir_path, "databases", database, "schema", "delta", str(v))
            )

        # Used to check if we have any duplicate file names
        file_name_counter = Counter()  # type: CounterType[str]

        # Now find which directories have anything of interest.
        directory_entries = []  # type: List[_DirectoryListing]
        for directory in directories:
            logger.debug("Looking for schema deltas in %s", directory)
            try:
                file_names = os.listdir(directory)
                directory_entries.extend(
                    _DirectoryListing(file_name, os.path.join(directory, file_name))
                    for file_name in file_names
                )

                for file_name in file_names:
                    file_name_counter[file_name] += 1
            except FileNotFoundError:
                # Data stores can have empty entries for a given version delta.
                pass
            except OSError:
                raise UpgradeDatabaseException(
                    "Could not open delta dir for version %d: %s" % (v, directory)
                )

        duplicates = {
            file_name for file_name, count in file_name_counter.items() if count > 1
        }
        if duplicates:
            # We don't support using the same file name in the same delta version.
            raise PrepareDatabaseException(
                "Found multiple delta files with the same name in v%d: %s"
                % (v, duplicates,)
            )

        # We sort to ensure that we apply the delta files in a consistent
        # order (to avoid bugs caused by inconsistent directory listing order)
        directory_entries.sort()
        for entry in directory_entries:
            file_name = entry.file_name
            relative_path = os.path.join(str(v), file_name)
            absolute_path = entry.absolute_path

            logger.debug("Found file: %s (%s)", relative_path, absolute_path)
            if relative_path in applied_delta_files:
                continue

            root_name, ext = os.path.splitext(file_name)

            if ext == ".py":
                # This is a python upgrade module. We need to import into some
                # package and then execute its `run_upgrade` function.
                if is_worker:
                    raise PrepareDatabaseException(
                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
                    )

                module_name = "synapse.storage.v%d_%s" % (v, root_name)
                with open(absolute_path) as python_file:
                    module = imp.load_source(module_name, absolute_path, python_file)  # type: ignore
                logger.info("Running script %s", relative_path)
                module.run_create(cur, database_engine)  # type: ignore
                if not is_empty:
                    module.run_upgrade(cur, database_engine, config=config)  # type: ignore
            elif ext == ".pyc" or file_name == "__pycache__":
                # Sometimes .pyc files turn up anyway even though we've
                # disabled their generation; e.g. from distribution package
                # installers. Silently skip it
                continue
            elif ext == ".sql":
                # A plain old .sql file, just read and execute it
                if is_worker:
                    raise PrepareDatabaseException(
                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
                    )
                logger.info("Applying schema %s", relative_path)
                executescript(cur, absolute_path)
            elif ext == specific_engine_extension and root_name.endswith(".sql"):
                # A .sql file specific to our engine; just read and execute it
                if is_worker:
                    raise PrepareDatabaseException(
                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
                    )
                logger.info("Applying engine-specific schema %s", relative_path)
                executescript(cur, absolute_path)
            elif ext in specific_engine_extensions and root_name.endswith(".sql"):
                # A .sql file for a different engine; skip it.
                continue
            else:
                # Not a valid delta file.
                logger.warning(
                    "Found directory entry that did not end in .py or .sql: %s",
                    relative_path,
                )
                continue

            # Mark as done.
            cur.execute(
                "INSERT INTO applied_schema_deltas (version, file) VALUES (?,?)",
                (v, relative_path),
            )

        # Record that we are now (at least) at version v, reached via deltas.
        cur.execute("DELETE FROM schema_version")
        cur.execute(
            "INSERT INTO schema_version (version, upgraded) VALUES (?,?)",
            (v, True),
        )

    logger.info("Schema now up to date")
|
|
|
|
|
2015-10-13 06:38:48 -04:00
|
|
|
|
2020-12-30 08:09:53 -05:00
|
|
|
def _apply_module_schemas(
    txn: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig
) -> None:
    """Apply the database schemas declared by dynamic modules, if any.

    Args:
        txn: database cursor
        database_engine: the database engine in use
        config: application config; its password providers are inspected
            for a ``get_db_schema_files`` hook
    """
    for (provider, _provider_config) in config.password_providers:
        # Providers without the hook have no schema of their own.
        if not hasattr(provider, "get_db_schema_files"):
            continue
        module_name = "%s.%s" % (provider.__module__, provider.__name__)
        _apply_module_schema_files(
            txn, database_engine, module_name, provider.get_db_schema_files()
        )
|
|
|
|
|
|
|
|
|
2020-12-30 08:09:53 -05:00
|
|
|
def _apply_module_schema_files(
    cur: Cursor,
    database_engine: BaseDatabaseEngine,
    modname: str,
    names_and_streams: Iterable[Tuple[str, TextIO]],
) -> None:
    """Apply the schema files for a single dynamic module.

    Args:
        cur: database cursor
        database_engine: synapse database engine class
        modname: fully qualified name of the module
        names_and_streams: the names and streams of schemas to be applied

    Raises:
        PrepareDatabaseException: if a schema file is not a .sql file.
    """
    # Work out which of this module's schema files have already been applied.
    cur.execute(
        "SELECT file FROM applied_module_schemas WHERE module_name = ?", (modname,),
    )
    already_applied = {applied for applied, in cur}

    for (schema_name, schema_stream) in names_and_streams:
        if schema_name in already_applied:
            continue

        _root, extension = os.path.splitext(schema_name)
        if extension != ".sql":
            raise PrepareDatabaseException(
                "only .sql files are currently supported for module schemas"
            )

        logger.info("applying schema %s for %s", schema_name, modname)
        execute_statements_from_stream(cur, schema_stream)

        # Record the file so it is not applied again on a later startup.
        cur.execute(
            "INSERT INTO applied_module_schemas (module_name, file) VALUES (?,?)",
            (modname, schema_name),
        )
|
|
|
|
|
|
|
|
|
2020-12-30 08:09:53 -05:00
|
|
|
def get_statements(f: Iterable[str]) -> Generator[str, None, None]:
    """Split an SQL script into individual statements.

    ``--`` and ``//`` line comments and ``/* ... */`` block comments
    (including those spanning multiple lines) are stripped, and the remaining
    text is split on semicolons.

    Args:
        f: an iterable of lines of SQL.

    Yields:
        each complete (semicolon-terminated) statement, stripped of
        surrounding whitespace. Trailing text with no terminating semicolon
        is discarded.
    """
    statement_buffer = ""
    in_comment = False  # If we're in a /* ... */ style comment

    for line in f:
        line = line.strip()

        if in_comment:
            # Check if this line contains an end to the comment
            comments = line.split("*/", 1)
            if len(comments) == 1:
                continue
            line = comments[1]
            in_comment = False

        # Remove inline block comments. The match must be non-greedy: with a
        # greedy `.*`, a line such as `a /*x*/ b /*y*/ c` would have the real
        # SQL between the two comments (` b `) deleted as well.
        line = re.sub(r"/\*.*?\*/", " ", line)

        # Does this line start a comment?
        comments = line.split("/*", 1)
        if len(comments) > 1:
            line = comments[0]
            in_comment = True

        # Deal with line comments
        line = line.split("--", 1)[0]
        line = line.split("//", 1)[0]

        # Find *all* semicolons. We need to treat first and last entry
        # specially.
        statements = line.split(";")

        # We must prepend statement_buffer to the first statement
        first_statement = "%s %s" % (statement_buffer.strip(), statements[0].strip())
        statements[0] = first_statement

        # Every entry, except the last, is a full statement
        for statement in statements[:-1]:
            yield statement.strip()

        # The last entry did *not* end in a semicolon, so we store it for the
        # next semicolon we find
        statement_buffer = statements[-1].strip()
|
|
|
|
|
|
|
|
|
2020-12-30 08:09:53 -05:00
|
|
|
def executescript(txn: Cursor, schema_path: str) -> None:
    """Run every SQL statement in the file at *schema_path* against *txn*."""
    with open(schema_path, "r") as schema_file:
        execute_statements_from_stream(txn, schema_file)
|
|
|
|
|
|
|
|
|
2020-12-30 08:09:53 -05:00
|
|
|
def execute_statements_from_stream(cur: Cursor, f: TextIO) -> None:
    """Parse SQL statements out of the stream *f* and execute each on *cur*."""
    for sql_statement in get_statements(f):
        cur.execute(sql_statement)
|
2015-10-13 06:38:48 -04:00
|
|
|
|
|
|
|
|
2020-12-30 08:09:53 -05:00
|
|
|
def _get_or_create_schema_state(
    txn: Cursor, database_engine: BaseDatabaseEngine
) -> Optional[Tuple[int, List[str], bool]]:
    """Read the schema state, creating the bookkeeping tables if necessary.

    Returns:
        None if no schema version is recorded (i.e. the database is fresh);
        otherwise a tuple of (schema version, names of applied delta files,
        the `upgraded` flag from the schema_version table).
    """
    # Bluntly try creating the schema_version tables.
    schema_path = os.path.join(dir_path, "schema", "schema_version.sql")
    executescript(txn, schema_path)

    txn.execute("SELECT version, upgraded FROM schema_version")
    row = txn.fetchone()
    current_version = int(row[0]) if row else None

    if not current_version:
        return None

    txn.execute(
        "SELECT file FROM applied_schema_deltas WHERE version >= ?",
        (current_version,),
    )
    applied_deltas = [delta for delta, in txn]
    return current_version, applied_deltas, bool(row[1])
|
2019-10-21 11:08:40 -04:00
|
|
|
|
|
|
|
|
2020-09-14 12:50:06 -04:00
|
|
|
@attr.s(slots=True)
class _DirectoryListing:
    """Helper class to store schema file name and the
    absolute path to it.

    These entries get sorted, so for consistency we want to ensure that
    `file_name` attr is kept first.
    """

    # Base name of the schema file. Declared first so the attrs-generated
    # ordering sorts primarily on it.
    file_name = attr.ib(type=str)
    # Full filesystem path to the schema file.
    absolute_path = attr.ib(type=str)
|