# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from twisted.internet import defer

from .appservice import (
    ApplicationServiceStore, ApplicationServiceTransactionStore
)
from ._base import Cache
from .directory import DirectoryStore
from .events import EventsStore
from .presence import PresenceStore
from .profile import ProfileStore
from .registration import RegistrationStore
from .room import RoomStore
from .roommember import RoomMemberStore
from .stream import StreamStore
from .transactions import TransactionStore
from .keys import KeyStore
from .event_federation import EventFederationStore
from .pusher import PusherStore
from .push_rule import PushRuleStore
from .media_repository import MediaRepositoryStore
from .rejections import RejectionsStore

from .state import StateStore
from .signatures import SignatureStore
from .filtering import FilteringStore

import fnmatch
import imp
import logging
import os
import re


logger = logging.getLogger(__name__)


# Remember to update this number every time a change is made to database
# schema files, so the users will be informed on server restarts.
SCHEMA_VERSION = 17

dir_path = os.path.abspath(os.path.dirname(__file__))


# Number of msec of granularity to store the user IP 'last seen' time. Smaller
# times give more inserts into the database even for readonly API hits.
# 120 seconds == 2 minutes
LAST_SEEN_GRANULARITY = 120 * 1000


class DataStore(RoomMemberStore, RoomStore,
                RegistrationStore, StreamStore, ProfileStore,
                PresenceStore, TransactionStore,
                DirectoryStore, KeyStore, StateStore, SignatureStore,
                ApplicationServiceStore,
                EventFederationStore,
                MediaRepositoryStore,
                RejectionsStore,
                FilteringStore,
                PusherStore,
                PushRuleStore,
                ApplicationServiceTransactionStore,
                EventsStore,
                ):

    def __init__(self, hs):
        super(DataStore, self).__init__(hs)
        self.hs = hs

        self.min_token_deferred = self._get_min_token()
        self.min_token = None
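
        # In-memory cache of the last-seen time for each
        # (user, access_token, device_id, ip) tuple, used below to
        # rate-limit writes to the user_ips table.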
        self.client_ip_last_seen = Cache(
            name="client_ip_last_seen",
            keylen=4,
        )

    @defer.inlineCallbacks
    def insert_client_ip(self, user, access_token, device_id, ip, user_agent):
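        """Records the time at which the given user/token/device was last
        seen from the given IP and user agent, writing to the database at
        most once per LAST_SEEN_GRANULARITY msec for each key.
        """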
        now = int(self._clock.time_msec())
        key = (user.to_string(), access_token, device_id, ip)

        try:
            last_seen = self.client_ip_last_seen.get(*key)
        except KeyError:
            last_seen = None

        # Rate-limited inserts
        if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
            defer.returnValue(None)

        self.client_ip_last_seen.prefill(*key + (now,))

        # It's safe not to lock here: a) no unique constraint,
        # b) LAST_SEEN_GRANULARITY makes concurrent updates incredibly unlikely
        yield self._simple_upsert(
            "user_ips",
            keyvalues={
                "user_id": user.to_string(),
                "access_token": access_token,
                "ip": ip,
                "user_agent": user_agent,
            },
            values={
                "device_id": device_id,
                "last_seen": now,
            },
            desc="insert_client_ip",
            lock=False,
        )

    def get_user_ip_and_agents(self, user):
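        """Fetches the IP, user agent, device and last-seen records stored
        for the given user.
        """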
        return self._simple_select_list(
            table="user_ips",
            keyvalues={"user_id": user.to_string()},
            retcols=[
                "device_id", "access_token", "ip", "user_agent", "last_seen"
            ],
            desc="get_user_ip_and_agents",
        )


def read_schema(path):
    """ Read the named database schema.

    Args:
        path: Path of the database schema.
    Returns:
        A string containing the database schema.
    """
    with open(path) as schema_file:
        return schema_file.read()


class PrepareDatabaseException(Exception):
    pass


class UpgradeDatabaseException(PrepareDatabaseException):
    pass


def prepare_database(db_conn, database_engine):
    """Prepares a database for usage. Will either create all necessary tables
    or upgrade from an older schema version.
    """
    try:
        cur = db_conn.cursor()
        version_info = _get_or_create_schema_state(cur, database_engine)

        if version_info:
            user_version, delta_files, upgraded = version_info
            _upgrade_existing_database(
                cur, user_version, delta_files, upgraded, database_engine
            )
        else:
            _setup_new_database(cur, database_engine)

        # cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))

        cur.close()
        db_conn.commit()
    except:
        db_conn.rollback()
        raise
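

# A minimal usage sketch (an illustration, not part of this module): for a
# sqlite3 database the engine object only needs to provide
# `convert_param_style`, which for sqlite's native "?" paramstyle can be the
# identity. `ExampleSqliteEngine` below is hypothetical; the real engine
# classes live elsewhere in synapse.storage.
#
#     import sqlite3
#
#     class ExampleSqliteEngine(object):
#         def convert_param_style(self, sql):
#             return sql
#
#     db_conn = sqlite3.connect("homeserver.db")
#     prepare_sqlite3_database(db_conn)
#     prepare_database(db_conn, ExampleSqliteEngine())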


def _setup_new_database(cur, database_engine):
    """Sets up the database by finding a base set of "full schemas" and then
    applying any necessary deltas.

    The "full_schemas" directory has subdirectories named after versions. This
    function searches for the highest version less than or equal to
    `SCHEMA_VERSION` and executes all .sql files in that directory.

    The function will then apply all deltas for all versions after the base
    version.

    Example directory structure:

        schema/
            delta/
                ...
            full_schemas/
                3/
                    test.sql
                    ...
                11/
                    foo.sql
                    bar.sql
                    ...

    In the example, foo.sql and bar.sql would be run, and then any delta files
    for versions strictly greater than 11.
    """
    current_dir = os.path.join(dir_path, "schema", "full_schemas")
    directory_entries = os.listdir(current_dir)

    valid_dirs = []
    pattern = re.compile(r"^(\d+)(\.sql)?$")
    for filename in directory_entries:
        match = pattern.match(filename)
        abs_path = os.path.join(current_dir, filename)
        if match and os.path.isdir(abs_path):
            ver = int(match.group(1))
            if ver <= SCHEMA_VERSION:
                valid_dirs.append((ver, abs_path))
        else:
            logger.warn("Unexpected entry in 'full_schemas': %s", filename)

    if not valid_dirs:
        raise PrepareDatabaseException(
            "Could not find a suitable base set of full schemas"
        )

    max_current_ver, sql_dir = max(valid_dirs, key=lambda x: x[0])

    logger.debug("Initialising schema v%d", max_current_ver)

    directory_entries = os.listdir(sql_dir)

    for filename in fnmatch.filter(directory_entries, "*.sql"):
        sql_loc = os.path.join(sql_dir, filename)
        logger.debug("Applying schema %s", sql_loc)
        executescript(cur, sql_loc)

    cur.execute(
        database_engine.convert_param_style(
            "INSERT INTO schema_version (version, upgraded)"
            " VALUES (?,?)"
        ),
        (max_current_ver, False,)
    )

    _upgrade_existing_database(
        cur,
        current_version=max_current_ver,
        applied_delta_files=[],
        upgraded=False,
        database_engine=database_engine,
    )


def _upgrade_existing_database(cur, current_version, applied_delta_files,
                               upgraded, database_engine):
    """Upgrades an existing database.

    Delta files can either be SQL stored in *.sql files, or python modules
    in *.py.

    There can be multiple delta files per version. Synapse will keep track of
    which delta files have been applied, and will apply any that haven't been
    even if there has been no version bump. This is useful for development
    where orthogonal schema changes may happen on separate branches.

    Different delta files for the same version *must* be orthogonal and give
    the same result when applied in any order. No guarantees are made on the
    order of execution of these scripts.

    This is a no-op if current_version == SCHEMA_VERSION.

    Example directory structure:

        schema/
            delta/
                11/
                    foo.sql
                    ...
                12/
                    foo.sql
                    bar.py
                ...
            full_schemas/
                ...

    In the example, if current_version is 11, then `11/foo.sql` will be run
    if and only if `upgraded` is True. Then `12/foo.sql` and `12/bar.py`
    would be run in some arbitrary order.

    Args:
        cur (Cursor)
        current_version (int): The current version of the schema.
        applied_delta_files (list): A list of deltas that have already been
            applied.
        upgraded (bool): Whether the current version was generated by having
            applied deltas or from full schema file. If `False` the function
            will never apply delta files for the given `current_version`,
            since the current_version wasn't generated by applying those
            delta files.
    """

    if current_version > SCHEMA_VERSION:
        raise ValueError(
            "Cannot use this database as it is too " +
            "new for the server to understand"
        )

    start_ver = current_version
    if not upgraded:
        start_ver += 1

    logger.debug("applied_delta_files: %s", applied_delta_files)

    for v in range(start_ver, SCHEMA_VERSION + 1):
        logger.debug("Upgrading schema to v%d", v)

        delta_dir = os.path.join(dir_path, "schema", "delta", str(v))

        try:
            directory_entries = os.listdir(delta_dir)
        except OSError:
            logger.exception("Could not open delta dir for version %d", v)
            raise UpgradeDatabaseException(
                "Could not open delta dir for version %d" % (v,)
            )

        directory_entries.sort()
        for file_name in directory_entries:
            relative_path = os.path.join(str(v), file_name)
            logger.debug("Found file: %s", relative_path)
            if relative_path in applied_delta_files:
                continue

            absolute_path = os.path.join(
                dir_path, "schema", "delta", relative_path,
            )
            root_name, ext = os.path.splitext(file_name)
            if ext == ".py":
                # This is a python upgrade module. We need to import into some
                # package and then execute its `run_upgrade` function.
                module_name = "synapse.storage.v%d_%s" % (
                    v, root_name
                )
                with open(absolute_path) as python_file:
                    module = imp.load_source(
                        module_name, absolute_path, python_file
                    )
                logger.debug("Running script %s", relative_path)
                module.run_upgrade(cur)
            elif ext == ".sql":
                # A plain old .sql file, just read and execute it
                logger.debug("Applying schema %s", relative_path)
                executescript(cur, absolute_path)
            else:
                # Not a valid delta file.
                logger.warn(
                    "Found directory entry that did not end in .py or"
                    " .sql: %s",
                    relative_path,
                )
                continue

            # Mark as done.
            cur.execute(
                database_engine.convert_param_style(
                    "INSERT INTO applied_schema_deltas (version, file)"
                    " VALUES (?,?)",
                ),
                (v, relative_path)
            )

        cur.execute("DELETE FROM schema_version")
        cur.execute(
            database_engine.convert_param_style(
                "INSERT INTO schema_version (version, upgraded)"
                " VALUES (?,?)",
            ),
            (v, True)
        )
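

# A sketch of a python delta module (a hypothetical example, not shipped with
# this package): a file such as schema/delta/12/bar.py must expose a
# `run_upgrade` function, which _upgrade_existing_database calls with the
# open cursor:
#
#     def run_upgrade(cur):
#         # Table and column names here are made up for illustration.
#         cur.execute("ALTER TABLE example ADD COLUMN new_col TEXT")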


def get_statements(f):
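    """Generator which splits the lines of a SQL script into individual
    statements.

    Strips `--` and `//` line comments and `/* ... */` block comments, and
    yields each complete semicolon-terminated statement without its trailing
    semicolon. For example, the two lines

        CREATE TABLE foo(
            id INTEGER);  -- trailing comment

    yield the single statement "CREATE TABLE foo( id INTEGER)".
    """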
    statement_buffer = ""
    in_comment = False  # If we're in a /* ... */ style comment

    for line in f:
        line = line.strip()

        if in_comment:
            # Check if this line contains an end to the comment
            comments = line.split("*/", 1)
            if len(comments) == 1:
                continue
            line = comments[1]

            in_comment = False

        # Remove inline block comments
        line = re.sub(r"/\*.*\*/", " ", line)

        # Does this line start a comment?
        comments = line.split("/*", 1)
        if len(comments) > 1:
            line = comments[0]
            in_comment = True

        # Deal with line comments
        line = line.split("--", 1)[0]
        line = line.split("//", 1)[0]

        # Find *all* semicolons. We need to treat first and last entry
        # specially.
        statements = line.split(";")

        # We must prepend statement_buffer to the first statement
        first_statement = "%s %s" % (
            statement_buffer.strip(),
            statements[0].strip()
        )
        statements[0] = first_statement

        # Every entry, except the last, is a full statement
        for statement in statements[:-1]:
            yield statement.strip()

        # The last entry did *not* end in a semicolon, so we store it for the
        # next semicolon we find
        statement_buffer = statements[-1].strip()


def executescript(txn, schema_path):
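    """Reads the SQL file at `schema_path` and executes each of its
    statements in turn on the given cursor.
    """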
    with open(schema_path, 'r') as f:
        for statement in get_statements(f):
            txn.execute(statement)


def _get_or_create_schema_state(txn, database_engine):
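    """Ensures the schema_version tables exist, then returns a
    (current_version, applied_delta_files, upgraded) tuple describing the
    database's schema state, or None if no version has been recorded yet.
    """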
    # Bluntly try creating the schema_version tables.
    schema_path = os.path.join(
        dir_path, "schema", "schema_version.sql",
    )
    executescript(txn, schema_path)

    txn.execute("SELECT version, upgraded FROM schema_version")
    row = txn.fetchone()
    current_version = int(row[0]) if row else None
    upgraded = bool(row[1]) if row else None

    if current_version:
        txn.execute(
            database_engine.convert_param_style(
                "SELECT file FROM applied_schema_deltas WHERE version >= ?"
            ),
            (current_version,)
        )
        applied_deltas = [d for d, in txn.fetchall()]
        return current_version, applied_deltas, upgraded

    return None


def prepare_sqlite3_database(db_conn):
    """This function should be called before `prepare_database` on sqlite3
    databases.

    Since we changed the way we store the current schema version and handle
    updates to schemas, we need a way to upgrade from the old method to the
    new. This only affects sqlite databases since they were the only ones
    supported at the time.
    """
    with db_conn:
        schema_path = os.path.join(
            dir_path, "schema", "schema_version.sql",
        )
        create_schema = read_schema(schema_path)
        db_conn.executescript(create_schema)

        c = db_conn.execute("SELECT * FROM schema_version")
        rows = c.fetchall()
        c.close()

        if not rows:
            c = db_conn.execute("PRAGMA user_version")
            row = c.fetchone()
            c.close()

            if row and row[0]:
                db_conn.execute(
                    "REPLACE INTO schema_version (version, upgraded)"
                    " VALUES (?,?)",
                    (row[0], False)
                )


def are_all_users_on_domain(txn, database_engine, domain):
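    """Checks whether every user in the users table has a Matrix ID on the
    given domain, i.e. a name ending in ":<domain>". Returns True if so,
    False otherwise.
    """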
    sql = database_engine.convert_param_style(
        "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
    )
    pat = "%:" + domain
    txn.execute(sql, (pat,))
    num_not_matching = txn.fetchall()[0][0]
    return num_not_matching == 0