#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2016 OpenMarket Ltd
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
import json
import logging
from synapse.storage.database import LoggingTransaction
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
from synapse.storage.prepare_database import get_statements
logger = logging.getLogger(__name__)
ALTER_TABLE = """
2016-04-21 17:09:48 +00:00
ALTER TABLE event_search ADD COLUMN origin_server_ts BIGINT;
ALTER TABLE event_search ADD COLUMN stream_ordering BIGINT;
2016-04-21 15:41:39 +00:00
"""
def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
    """Add ordering columns to `event_search` and schedule their backfill.

    Adds `origin_server_ts` and `stream_ordering` columns to the
    `event_search` table, then inserts a row into `background_updates` so
    the "event_search_order" background update populates them for existing
    events.

    Args:
        cur: an open transaction cursor for the main database.
        database_engine: the active database engine. This delta is a
            no-op on engines other than Postgres.
    """
    if not isinstance(database_engine, PostgresEngine):
        return

    for statement in get_statements(ALTER_TABLE.splitlines()):
        cur.execute(statement)

    # A single aggregate query returns exactly one row, so one round-trip
    # and fetchone() suffice (the original issued two queries and used
    # fetchall()).
    cur.execute("SELECT MIN(stream_ordering), MAX(stream_ordering) FROM events")
    row = cur.fetchone()
    min_stream_id, max_stream_id = (None, None) if row is None else row

    # Both bounds are NULL iff the events table is empty, in which case
    # there is nothing to backfill and no background update is needed.
    if min_stream_id is not None and max_stream_id is not None:
        progress = {
            "target_min_stream_id_inclusive": min_stream_id,
            "max_stream_id_exclusive": max_stream_id + 1,
            "rows_inserted": 0,
            "have_added_indexes": False,
        }
        progress_json = json.dumps(progress)

        sql = (
            "INSERT into background_updates (update_name, progress_json)"
            " VALUES (?, ?)"
        )
        cur.execute(sql, ("event_search_order", progress_json))