Add logging on startup/shutdown (#8448)
This is so we can tell what is going on when things are taking a while to start up. The main change here is to ensure that transactions that are created during startup get correctly logged like normal transactions.
parent ec10bdd32b
commit e3debf9682

25 changed files with 152 additions and 113 deletions
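As a rough illustration of the idea only (not Synapse's actual implementation), the sketch below wraps a DB-API connection so that cursors opened during startup carry a transaction name and are logged like normal transactions. The class name LoggingConnectionSketch and the use of sqlite3 are assumptions made for this example; the real call sites appear in the diff below as db_conn.cursor(txn_name=...) and make_conn(..., "startup").

# Minimal sketch, assuming a plain DB-API connection; names here are
# illustrative and do not match Synapse's real classes.
import logging
import sqlite3

logger = logging.getLogger(__name__)


class LoggingConnectionSketch:
    """Wraps a connection so ad-hoc startup cursors get a transaction name."""

    def __init__(self, conn, default_txn_name="startup"):
        self._conn = conn
        self._default_txn_name = default_txn_name

    def cursor(self, txn_name=None):
        name = txn_name or self._default_txn_name
        # Log the named transaction so slow startup queries show up in the logs.
        logger.info("[TXN START] %s", name)
        return self._conn.cursor()

    def commit(self):
        self._conn.commit()

    def close(self):
        self._conn.close()


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    db_conn = LoggingConnectionSketch(sqlite3.connect(":memory:"))
    cur = db_conn.cursor(txn_name="check_database")
    cur.execute("SELECT 1")
    cur.close()
    db_conn.close()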
@@ -46,7 +46,7 @@ class Databases:
             db_name = database_config.name
             engine = create_engine(database_config.config)

-            with make_conn(database_config, engine) as db_conn:
+            with make_conn(database_config, engine, "startup") as db_conn:
                 logger.info("[database config %r]: Checking database server", db_name)
                 engine.check_database(db_conn)

@@ -284,7 +284,6 @@ class DataStore(
             " last_user_sync_ts, status_msg, currently_active FROM presence_stream"
             " WHERE state != ?"
         )
-        sql = self.database_engine.convert_param_style(sql)

         txn = db_conn.cursor()
         txn.execute(sql, (PresenceState.OFFLINE,))

@@ -20,7 +20,7 @@ from typing import Dict, List, Optional, Tuple, Union
 import attr

 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.storage._base import LoggingTransaction, SQLBaseStore, db_to_json
+from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.storage.database import DatabasePool
 from synapse.util import json_encoder
 from synapse.util.caches.descriptors import cached

@@ -74,11 +74,7 @@ class EventPushActionsWorkerStore(SQLBaseStore):
         self.stream_ordering_month_ago = None
         self.stream_ordering_day_ago = None

-        cur = LoggingTransaction(
-            db_conn.cursor(),
-            name="_find_stream_orderings_for_times_txn",
-            database_engine=self.database_engine,
-        )
+        cur = db_conn.cursor(txn_name="_find_stream_orderings_for_times_txn")
         self._find_stream_orderings_for_times_txn(cur)
         cur.close()

@@ -214,7 +214,6 @@ class MonthlyActiveUsersStore(MonthlyActiveUsersWorkerStore):
         self._mau_stats_only = hs.config.mau_stats_only

         # Do not add more reserved users than the total allowable number
-        # cur = LoggingTransaction(
         self.db_pool.new_transaction(
             db_conn,
             "initialise_mau_threepids",

@@ -21,12 +21,7 @@ from synapse.events import EventBase
 from synapse.events.snapshot import EventContext
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.storage._base import (
-    LoggingTransaction,
-    SQLBaseStore,
-    db_to_json,
-    make_in_list_sql_clause,
-)
+from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
 from synapse.storage.database import DatabasePool
 from synapse.storage.databases.main.events_worker import EventsWorkerStore
 from synapse.storage.engines import Sqlite3Engine

@@ -60,10 +55,8 @@ class RoomMemberWorkerStore(EventsWorkerStore):
         # background update still running?
         self._current_state_events_membership_up_to_date = False

-        txn = LoggingTransaction(
-            db_conn.cursor(),
-            name="_check_safe_current_state_events_membership_updated",
-            database_engine=self.database_engine,
+        txn = db_conn.cursor(
+            txn_name="_check_safe_current_state_events_membership_updated"
         )
         self._check_safe_current_state_events_membership_updated_txn(txn)
         txn.close()

@@ -66,16 +66,15 @@ def run_create(cur, database_engine, *args, **kwargs):
         row[8] = bytes(row[8]).decode("utf-8")
         row[11] = bytes(row[11]).decode("utf-8")
         cur.execute(
-            database_engine.convert_param_style(
-                """
-                INSERT into pushers2 (
-                id, user_name, access_token, profile_tag, kind,
-                app_id, app_display_name, device_display_name,
-                pushkey, ts, lang, data, last_token, last_success,
-                failing_since
-                ) values (%s)"""
-                % (",".join(["?" for _ in range(len(row))]))
-            ),
+            """
+            INSERT into pushers2 (
+            id, user_name, access_token, profile_tag, kind,
+            app_id, app_display_name, device_display_name,
+            pushkey, ts, lang, data, last_token, last_success,
+            failing_since
+            ) values (%s)
+            """
+            % (",".join(["?" for _ in range(len(row))])),
             row,
         )
         count += 1

@@ -71,8 +71,6 @@ def run_create(cur, database_engine, *args, **kwargs):
             " VALUES (?, ?)"
         )

-        sql = database_engine.convert_param_style(sql)
-
         cur.execute(sql, ("event_search", progress_json))


@@ -50,8 +50,6 @@ def run_create(cur, database_engine, *args, **kwargs):
             " VALUES (?, ?)"
         )

-        sql = database_engine.convert_param_style(sql)
-
         cur.execute(sql, ("event_origin_server_ts", progress_json))


@@ -59,9 +59,7 @@ def run_upgrade(cur, database_engine, config, *args, **kwargs):
     user_chunks = (user_ids[i : i + 100] for i in range(0, len(user_ids), n))
     for chunk in user_chunks:
         cur.execute(
-            database_engine.convert_param_style(
-                "UPDATE users SET appservice_id = ? WHERE name IN (%s)"
-                % (",".join("?" for _ in chunk),)
-            ),
+            "UPDATE users SET appservice_id = ? WHERE name IN (%s)"
+            % (",".join("?" for _ in chunk),),
             [as_id] + chunk,
         )

@@ -65,16 +65,15 @@ def run_create(cur, database_engine, *args, **kwargs):
         row = list(row)
         row[12] = token_to_stream_ordering(row[12])
         cur.execute(
-            database_engine.convert_param_style(
-                """
-                INSERT into pushers2 (
-                id, user_name, access_token, profile_tag, kind,
-                app_id, app_display_name, device_display_name,
-                pushkey, ts, lang, data, last_stream_ordering, last_success,
-                failing_since
-                ) values (%s)"""
-                % (",".join(["?" for _ in range(len(row))]))
-            ),
+            """
+            INSERT into pushers2 (
+            id, user_name, access_token, profile_tag, kind,
+            app_id, app_display_name, device_display_name,
+            pushkey, ts, lang, data, last_stream_ordering, last_success,
+            failing_since
+            ) values (%s)
+            """
+            % (",".join(["?" for _ in range(len(row))])),
             row,
         )
         count += 1

@@ -55,8 +55,6 @@ def run_create(cur, database_engine, *args, **kwargs):
             " VALUES (?, ?)"
         )

-        sql = database_engine.convert_param_style(sql)
-
         cur.execute(sql, ("event_search_order", progress_json))


@@ -50,8 +50,6 @@ def run_create(cur, database_engine, *args, **kwargs):
             " VALUES (?, ?)"
         )

-        sql = database_engine.convert_param_style(sql)
-
         cur.execute(sql, ("event_fields_sender_url", progress_json))


@@ -23,8 +23,5 @@ def run_create(cur, database_engine, *args, **kwargs):

 def run_upgrade(cur, database_engine, *args, **kwargs):
     cur.execute(
-        database_engine.convert_param_style(
-            "UPDATE remote_media_cache SET last_access_ts = ?"
-        ),
-        (int(time.time() * 1000),),
+        "UPDATE remote_media_cache SET last_access_ts = ?", (int(time.time() * 1000),),
     )

@@ -1,6 +1,8 @@
 import logging
+from io import StringIO

 from synapse.storage.engines import PostgresEngine
+from synapse.storage.prepare_database import execute_statements_from_stream

 logger = logging.getLogger(__name__)

@@ -46,7 +48,4 @@ def run_create(cur, database_engine, *args, **kwargs):
         select_clause,
     )

-    if isinstance(database_engine, PostgresEngine):
-        cur.execute(sql)
-    else:
-        cur.executescript(sql)
+    execute_statements_from_stream(cur, StringIO(sql))

@@ -68,7 +68,6 @@ def run_upgrade(cur, database_engine, config, *args, **kwargs):
         INNER JOIN room_memberships AS r USING (event_id)
         WHERE type = 'm.room.member' AND state_key LIKE ?
     """
-    sql = database_engine.convert_param_style(sql)
     cur.execute(sql, ("%:" + config.server_name,))

     cur.execute(