Merge remote-tracking branch 'origin/release-v1.20.0' into develop

Richard van der Hoff 2020-09-08 09:58:07 +01:00
commit 8d6f97f932
11 changed files with 122 additions and 36 deletions

View file

@@ -47,9 +47,14 @@ class Databases:
             engine = create_engine(database_config.config)
             with make_conn(database_config, engine) as db_conn:
-                logger.info("Preparing database %r...", db_name)
+                logger.info("[database config %r]: Checking database server", db_name)
                 engine.check_database(db_conn)
+                logger.info(
+                    "[database config %r]: Preparing for databases %r",
+                    db_name,
+                    database_config.databases,
+                )
                 prepare_database(
                     db_conn, engine, hs.config, databases=database_config.databases,
                 )
@@ -57,7 +62,9 @@ class Databases:
                 database = DatabasePool(hs, database_config, engine)
                 if "main" in database_config.databases:
-                    logger.info("Starting 'main' data store")
+                    logger.info(
+                        "[database config %r]: Starting 'main' database", db_name
+                    )
                     # Sanity check we don't try and configure the main store on
                     # multiple databases.
@@ -72,7 +79,9 @@ class Databases:
                         persist_events = PersistEventsStore(hs, database, main)
                 if "state" in database_config.databases:
-                    logger.info("Starting 'state' data store")
+                    logger.info(
+                        "[database config %r]: Starting 'state' database", db_name
+                    )
                     # Sanity check we don't try and configure the state store on
                     # multiple databases.
@@ -85,7 +94,7 @@ class Databases:
                 self.databases.append(database)
-                logger.info("Database %r prepared", db_name)
+                logger.info("[database config %r]: prepared", db_name)
             # Closing the context manager doesn't close the connection.
             # psycopg will close the connection when the object gets GCed, but *only*
@@ -98,10 +107,10 @@ class Databases:
         # Sanity check that we have actually configured all the required stores.
         if not main:
-            raise Exception("No 'main' data store configured")
+            raise Exception("No 'main' database configured")
         if not state:
-            raise Exception("No 'state' data store configured")
+            raise Exception("No 'state' database configured")
         # We use local variables here to ensure that the databases do not have
         # optional types.
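
If it helps to see the new messages in isolation, here is a minimal, runnable sketch of the startup flow the changes in this file describe. The start_databases helper and the {"master": [...]} config are hypothetical scaffolding; only the log message formats and the final 'main'/'state' sanity checks are taken from the diff above.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def start_databases(database_configs):
    # database_configs: hypothetical mapping of config name -> list of logical
    # databases hosted on that connection, e.g. {"master": ["main", "state"]}.
    main = None
    state = None

    for db_name, databases in database_configs.items():
        logger.info("[database config %r]: Checking database server", db_name)
        logger.info(
            "[database config %r]: Preparing for databases %r", db_name, databases
        )

        if "main" in databases:
            logger.info("[database config %r]: Starting 'main' database", db_name)
            main = object()  # stand-in for the real 'main' store

        if "state" in databases:
            logger.info("[database config %r]: Starting 'state' database", db_name)
            state = object()  # stand-in for the real 'state' store

        logger.info("[database config %r]: prepared", db_name)

    # The same sanity checks as in the diff: both stores must be configured somewhere.
    if not main:
        raise Exception("No 'main' database configured")
    if not state:
        raise Exception("No 'state' database configured")


start_databases({"master": ["main", "state"]})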

View file

@@ -29,6 +29,7 @@ from synapse.storage.util.id_generators import (
     MultiWriterIdGenerator,
     StreamIdGenerator,
 )
+from synapse.types import get_domain_from_id
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 from .account_data import AccountDataStore
@@ -591,21 +592,24 @@ def check_database_before_upgrade(cur, database_engine, config: HomeServerConfig
     """Called before upgrading an existing database to check that it is broadly sane
     compared with the configuration.
     """
-    domain = config.server_name
+    logger.info("Checking database for consistency with configuration...")
-    sql = database_engine.convert_param_style(
-        "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
-    )
-    pat = "%:" + domain
-    cur.execute(sql, (pat,))
-    num_not_matching = cur.fetchall()[0][0]
-    if num_not_matching == 0:
+    # if there are any users in the database, check that the username matches our
+    # configured server name.
+    cur.execute("SELECT name FROM users LIMIT 1")
+    rows = cur.fetchall()
+    if not rows:
         return
+    user_domain = get_domain_from_id(rows[0][0])
+    if user_domain == config.server_name:
+        return
     raise Exception(
         "Found users in database not native to %s!\n"
-        "You cannot changed a synapse server_name after it's been configured"
-        % (domain,)
+        "You cannot change a synapse server_name after it's been configured"
+        % (config.server_name,)
     )
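
Pulled out of the diff, the new consistency check amounts to the sketch below. The signature is simplified for illustration (it takes the fetched rows and the server name directly rather than a database cursor and a HomeServerConfig), and get_domain_from_id is re-implemented in reduced form instead of being imported from synapse.types.

def get_domain_from_id(user_id: str) -> str:
    # Simplified stand-in for synapse.types.get_domain_from_id: a Matrix user ID
    # looks like "@localpart:domain", so the domain is everything after the first ":".
    return user_id.split(":", 1)[1]


def check_user_domain(rows, server_name: str) -> None:
    # rows: result of "SELECT name FROM users LIMIT 1" (a list of 1-tuples).
    if not rows:
        # Empty database: nothing to check.
        return

    user_domain = get_domain_from_id(rows[0][0])
    if user_domain == server_name:
        return

    raise Exception(
        "Found users in database not native to %s!\n"
        "You cannot change a synapse server_name after it's been configured"
        % (server_name,)
    )


# Passes: the stored user belongs to the configured server name.
check_user_domain([("@alice:example.com",)], "example.com")

# Raises: the database was created for a different server_name.
# check_user_domain([("@alice:other.org",)], "example.com")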

View file

@@ -177,7 +177,12 @@ class EventPushActionsWorkerStore(SQLBaseStore):
         if row:
             notif_count += row[0]
-            unread_count += row[1]
+            if row[1] is not None:
+                # The unread_count column of event_push_summary is NULLable, so we need
+                # to make sure we don't try increasing the unread counts if it's NULL
+                # for this row.
+                unread_count += row[1]
         return {
             "notify_count": notif_count,