# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
2018-07-09 02:09:20 -04:00
|
|
|
import abc
|
|
|
|
import logging
|
2020-08-12 09:29:06 -04:00
|
|
|
from typing import List, Optional, Tuple
|
2018-07-09 02:09:20 -04:00
|
|
|
|
2015-12-01 13:41:32 -05:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
2020-07-16 11:32:19 -04:00
|
|
|
from synapse.storage._base import SQLBaseStore, db_to_json
|
2020-08-05 16:38:57 -04:00
|
|
|
from synapse.storage.database import DatabasePool
|
2018-02-23 05:31:16 -05:00
|
|
|
from synapse.storage.util.id_generators import StreamIdGenerator
|
2020-08-12 09:29:06 -04:00
|
|
|
from synapse.types import JsonDict
|
2020-08-07 08:02:55 -04:00
|
|
|
from synapse.util import json_encoder
|
2020-08-12 09:29:06 -04:00
|
|
|
from synapse.util.caches.descriptors import _CacheContext, cached
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.util.caches.stream_change_cache import StreamChangeCache
|
2015-12-01 13:41:32 -05:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2018-02-16 07:08:42 -05:00
|
|
|
class AccountDataWorkerStore(SQLBaseStore, metaclass=abc.ABCMeta):
    """This is an abstract base class where subclasses must implement
    `get_max_account_data_stream_id` which can be called in the initializer.

    Note: `metaclass=abc.ABCMeta` is passed in the class header; the old
    `__metaclass__` class attribute is a Python 2 idiom and has no effect
    on Python 3, so it would not actually prevent instantiation.
    """

    def __init__(self, database: DatabasePool, db_conn, hs):
        # Seed the stream-change cache with the current maximum stream ID so
        # that `has_entity_changed` answers correctly from the start.
        account_max = self.get_max_account_data_stream_id()
        self._account_data_stream_cache = StreamChangeCache(
            "AccountDataAndTagsChangeCache", account_max
        )

        super().__init__(database, db_conn, hs)

    @abc.abstractmethod
    def get_max_account_data_stream_id(self):
        """Get the current max stream ID for account data stream

        Returns:
            int
        """
        raise NotImplementedError()

    @cached()
    def get_account_data_for_user(self, user_id):
        """Get all the client account_data for a user.

        Args:
            user_id(str): The user to get the account_data for.

        Returns:
            A deferred pair of a dict of global account_data and a dict
            mapping from room_id string to per room account_data dicts.
        """

        def get_account_data_for_user_txn(txn):
            rows = self.db_pool.simple_select_list_txn(
                txn,
                "account_data",
                {"user_id": user_id},
                ["account_data_type", "content"],
            )

            global_account_data = {
                row["account_data_type"]: db_to_json(row["content"]) for row in rows
            }

            rows = self.db_pool.simple_select_list_txn(
                txn,
                "room_account_data",
                {"user_id": user_id},
                ["room_id", "account_data_type", "content"],
            )

            by_room = {}
            for row in rows:
                room_data = by_room.setdefault(row["room_id"], {})
                room_data[row["account_data_type"]] = db_to_json(row["content"])

            return global_account_data, by_room

        return self.db_pool.runInteraction(
            "get_account_data_for_user", get_account_data_for_user_txn
        )

    @cached(num_args=2, max_entries=5000)
    async def get_global_account_data_by_type_for_user(
        self, data_type: str, user_id: str
    ) -> Optional[JsonDict]:
        """Get the global account_data of a given type for a user.

        Args:
            data_type: The type of account_data to fetch.
            user_id: The user to get the account_data for.

        Returns:
            The account data, or None if there is none of that type.
        """
        result = await self.db_pool.simple_select_one_onecol(
            table="account_data",
            keyvalues={"user_id": user_id, "account_data_type": data_type},
            retcol="content",
            desc="get_global_account_data_by_type_for_user",
            allow_none=True,
        )

        if result:
            return db_to_json(result)
        else:
            return None

    @cached(num_args=2)
    def get_account_data_for_room(self, user_id, room_id):
        """Get all the client account_data for a user for a room.

        Args:
            user_id(str): The user to get the account_data for.
            room_id(str): The room to get the account_data for.

        Returns:
            A deferred dict of the room account_data
        """

        def get_account_data_for_room_txn(txn):
            rows = self.db_pool.simple_select_list_txn(
                txn,
                "room_account_data",
                {"user_id": user_id, "room_id": room_id},
                ["account_data_type", "content"],
            )

            return {
                row["account_data_type"]: db_to_json(row["content"]) for row in rows
            }

        return self.db_pool.runInteraction(
            "get_account_data_for_room", get_account_data_for_room_txn
        )

    @cached(num_args=3, max_entries=5000)
    def get_account_data_for_room_and_type(self, user_id, room_id, account_data_type):
        """Get the client account_data of given type for a user for a room.

        Args:
            user_id(str): The user to get the account_data for.
            room_id(str): The room to get the account_data for.
            account_data_type (str): The account data type to get.

        Returns:
            A deferred of the room account_data for that type, or None if
            there isn't any set.
        """

        def get_account_data_for_room_and_type_txn(txn):
            content_json = self.db_pool.simple_select_one_onecol_txn(
                txn,
                table="room_account_data",
                keyvalues={
                    "user_id": user_id,
                    "room_id": room_id,
                    "account_data_type": account_data_type,
                },
                retcol="content",
                allow_none=True,
            )

            return db_to_json(content_json) if content_json else None

        return self.db_pool.runInteraction(
            "get_account_data_for_room_and_type", get_account_data_for_room_and_type_txn
        )

    async def get_updated_global_account_data(
        self, last_id: int, current_id: int, limit: int
    ) -> List[Tuple[int, str, str]]:
        """Get the global account_data that has changed, for the account_data stream

        Args:
            last_id: the last stream_id from the previous batch.
            current_id: the maximum stream_id to return up to
            limit: the maximum number of rows to return

        Returns:
            A list of tuples of stream_id int, user_id string,
            and type string.
        """
        # Fast path: nothing can have changed in an empty stream range.
        if last_id == current_id:
            return []

        def get_updated_global_account_data_txn(txn):
            sql = (
                "SELECT stream_id, user_id, account_data_type"
                " FROM account_data WHERE ? < stream_id AND stream_id <= ?"
                " ORDER BY stream_id ASC LIMIT ?"
            )
            txn.execute(sql, (last_id, current_id, limit))
            return txn.fetchall()

        return await self.db_pool.runInteraction(
            "get_updated_global_account_data", get_updated_global_account_data_txn
        )

    async def get_updated_room_account_data(
        self, last_id: int, current_id: int, limit: int
    ) -> List[Tuple[int, str, str, str]]:
        """Get the room account_data that has changed, for the account_data stream

        Args:
            last_id: the last stream_id from the previous batch.
            current_id: the maximum stream_id to return up to
            limit: the maximum number of rows to return

        Returns:
            A list of tuples of stream_id int, user_id string,
            room_id string and type string.
        """
        # Fast path: nothing can have changed in an empty stream range.
        if last_id == current_id:
            return []

        def get_updated_room_account_data_txn(txn):
            sql = (
                "SELECT stream_id, user_id, room_id, account_data_type"
                " FROM room_account_data WHERE ? < stream_id AND stream_id <= ?"
                " ORDER BY stream_id ASC LIMIT ?"
            )
            txn.execute(sql, (last_id, current_id, limit))
            return txn.fetchall()

        return await self.db_pool.runInteraction(
            "get_updated_room_account_data", get_updated_room_account_data_txn
        )

    def get_updated_account_data_for_user(self, user_id, stream_id):
        """Get all the client account_data for a user that has changed

        Args:
            user_id(str): The user to get the account_data for.
            stream_id(int): The point in the stream since which to get updates

        Returns:
            A deferred pair of a dict of global account_data and a dict
            mapping from room_id string to per room account_data dicts.
        """

        def get_updated_account_data_for_user_txn(txn):
            sql = (
                "SELECT account_data_type, content FROM account_data"
                " WHERE user_id = ? AND stream_id > ?"
            )

            txn.execute(sql, (user_id, stream_id))

            global_account_data = {row[0]: db_to_json(row[1]) for row in txn}

            sql = (
                "SELECT room_id, account_data_type, content FROM room_account_data"
                " WHERE user_id = ? AND stream_id > ?"
            )

            txn.execute(sql, (user_id, stream_id))

            account_data_by_room = {}
            for row in txn:
                room_account_data = account_data_by_room.setdefault(row[0], {})
                room_account_data[row[1]] = db_to_json(row[2])

            return global_account_data, account_data_by_room

        # Cheap in-memory check first: if nothing for this user has changed
        # since `stream_id` we can avoid hitting the database at all.
        changed = self._account_data_stream_cache.has_entity_changed(
            user_id, int(stream_id)
        )
        if not changed:
            return defer.succeed(({}, {}))

        return self.db_pool.runInteraction(
            "get_updated_account_data_for_user", get_updated_account_data_for_user_txn
        )

    @cached(num_args=2, cache_context=True, max_entries=5000)
    async def is_ignored_by(
        self, ignored_user_id: str, ignorer_user_id: str, cache_context: _CacheContext
    ) -> bool:
        """Whether `ignorer_user_id` has `ignored_user_id` in their
        m.ignored_user_list account data.

        Args:
            ignored_user_id: The user who may be ignored.
            ignorer_user_id: The user whose ignore list is consulted.
            cache_context: Supplied by the cache machinery; used so this
                entry is invalidated together with the underlying account
                data cache.

        Returns:
            True if `ignored_user_id` appears in the ignorer's ignored_users
            mapping.
        """
        ignored_account_data = await self.get_global_account_data_by_type_for_user(
            "m.ignored_user_list",
            ignorer_user_id,
            on_invalidate=cache_context.invalidate,
        )
        if not ignored_account_data:
            return False

        return ignored_user_id in ignored_account_data.get("ignored_users", {})
|
2018-02-16 07:08:42 -05:00
|
|
|
|
|
|
|
|
|
|
|
class AccountDataStore(AccountDataWorkerStore):
    """Writable account-data store: layers mutation methods on top of
    `AccountDataWorkerStore` and owns the account-data stream ID generator.
    """

    def __init__(self, database: DatabasePool, db_conn, hs):
        # All three tables draw stream IDs from the same sequence, so the
        # generator must take the maximum across them on startup.
        shared_stream_tables = [
            ("room_account_data", "stream_id"),
            ("room_tags_revisions", "stream_id"),
        ]
        self._account_data_id_gen = StreamIdGenerator(
            db_conn,
            "account_data_max_stream_id",
            "stream_id",
            extra_tables=shared_stream_tables,
        )

        super().__init__(database, db_conn, hs)

    def get_max_account_data_stream_id(self) -> int:
        """Get the current max stream id for the private user data stream

        Returns:
            The maximum stream ID.
        """
        return self._account_data_id_gen.get_current_token()

    async def add_account_data_to_room(
        self, user_id: str, room_id: str, account_data_type: str, content: JsonDict
    ) -> int:
        """Upsert a piece of per-room account_data for a user.

        Args:
            user_id: The user to add a tag for.
            room_id: The room to add a tag for.
            account_data_type: The type of account_data to add.
            content: A json object to associate with the tag.

        Returns:
            The maximum stream ID.
        """
        serialized_content = json_encoder.encode(content)

        with self._account_data_id_gen.get_next() as next_id:
            # No lock needed: room_account_data has a unique constraint on
            # (user_id, room_id, account_data_type), so simple_upsert will
            # retry if there is a conflict.
            await self.db_pool.simple_upsert(
                desc="add_room_account_data",
                table="room_account_data",
                keyvalues={
                    "user_id": user_id,
                    "room_id": room_id,
                    "account_data_type": account_data_type,
                },
                values={"stream_id": next_id, "content": serialized_content},
                lock=False,
            )

            # The upsert above may succeed while the max-stream-id update
            # below fails, in which case a stream id might be reused on
            # restart and the update above might not get propagated. That is
            # no worse than losing the whole update, which is the risk a
            # single combined transaction would carry.
            await self._update_max_stream_id(next_id)

            self._account_data_stream_cache.entity_has_changed(user_id, next_id)
            self.get_account_data_for_user.invalidate((user_id,))
            self.get_account_data_for_room.invalidate((user_id, room_id))
            self.get_account_data_for_room_and_type.prefill(
                (user_id, room_id, account_data_type), content
            )

        return self._account_data_id_gen.get_current_token()

    async def add_account_data_for_user(
        self, user_id: str, account_data_type: str, content: JsonDict
    ) -> int:
        """Upsert a piece of global account_data for a user.

        Args:
            user_id: The user to add a tag for.
            account_data_type: The type of account_data to add.
            content: A json object to associate with the tag.

        Returns:
            The maximum stream ID.
        """
        serialized_content = json_encoder.encode(content)

        with self._account_data_id_gen.get_next() as next_id:
            # No lock needed: account_data has a unique constraint on
            # (user_id, account_data_type), so simple_upsert will retry if
            # there is a conflict.
            await self.db_pool.simple_upsert(
                desc="add_user_account_data",
                table="account_data",
                keyvalues={"user_id": user_id, "account_data_type": account_data_type},
                values={"stream_id": next_id, "content": serialized_content},
                lock=False,
            )

            # The upsert above may succeed while the max-stream-id update
            # below fails, in which case a stream id might be reused on
            # restart and the update above might not get propagated. That is
            # no worse than losing the whole update, which is the risk a
            # single combined transaction would carry.
            #
            # Note: This is only here for backwards compat to allow admins to
            # roll back to a previous Synapse version. Next time we update the
            # database version we can remove this table.
            await self._update_max_stream_id(next_id)

            self._account_data_stream_cache.entity_has_changed(user_id, next_id)
            self.get_account_data_for_user.invalidate((user_id,))
            self.get_global_account_data_by_type_for_user.invalidate(
                (account_data_type, user_id)
            )

        return self._account_data_id_gen.get_current_token()

    def _update_max_stream_id(self, next_id: int):
        """Advance the persisted max stream_id if `next_id` is larger.

        Args:
            next_id: The the revision to advance to.
        """

        # Note: This is only here for backwards compat to allow admins to
        # roll back to a previous Synapse version. Next time we update the
        # database version we can remove this table.

        def _advance_txn(txn):
            txn.execute(
                "UPDATE account_data_max_stream_id"
                " SET stream_id = ?"
                " WHERE stream_id < ?",
                (next_id, next_id),
            )

        return self.db_pool.runInteraction(
            "update_account_data_max_stream_id", _advance_txn
        )
|