2015-10-28 12:06:57 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-06 23:26:29 -05:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2018-02-23 05:33:55 -05:00
|
|
|
# Copyright 2018 New Vector Ltd
|
2015-10-28 12:06:57 -04:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
import logging
|
2018-02-16 07:08:42 -05:00
|
|
|
|
2018-06-28 09:49:57 -04:00
|
|
|
from canonicaljson import json
|
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
from twisted.internet import defer
|
2015-10-28 12:06:57 -04:00
|
|
|
|
2019-10-21 07:56:42 -04:00
|
|
|
from synapse.storage.data_stores.main.account_data import AccountDataWorkerStore
|
2018-07-09 02:09:20 -04:00
|
|
|
from synapse.util.caches.descriptors import cached
|
2018-04-28 07:57:00 -04:00
|
|
|
|
2015-10-28 12:06:57 -04:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2018-02-16 07:08:42 -05:00
|
|
|
class TagsWorkerStore(AccountDataWorkerStore):
    """Read-side storage access for per-user, per-room tags.

    Tags live in the ``room_tags`` table (one row per user/room/tag, with a
    JSON-encoded content column) and changes are versioned through
    ``room_tags_revisions`` using the account-data stream.
    """

    @cached()
    def get_tags_for_user(self, user_id):
        """Get all the tags for a user.

        Results are cached per user and invalidated by the writers in
        TagsStore.

        Args:
            user_id(str): The user to get the tags for.
        Returns:
            A deferred dict mapping from room_id strings to dicts mapping from
            tag strings to tag content.
        """

        deferred = self.db.simple_select_list(
            "room_tags", {"user_id": user_id}, ["room_id", "tag", "content"]
        )

        # Post-process the flat row list into the nested
        # {room_id: {tag: content}} shape once the select completes.
        @deferred.addCallback
        def tags_by_room(rows):
            tags_by_room = {}
            for row in rows:
                room_tags = tags_by_room.setdefault(row["room_id"], {})
                # content is stored as a JSON string; decode for callers.
                room_tags[row["tag"]] = json.loads(row["content"])
            return tags_by_room

        return deferred

    @defer.inlineCallbacks
    def get_all_updated_tags(self, last_id, current_id, limit):
        """Get all the client tags that have changed on the server

        Args:
            last_id(int): The position to fetch from.
            current_id(int): The position to fetch up to.
            limit(int): Maximum number of revision rows to fetch.
        Returns:
            A deferred list of tuples of stream_id int, user_id string,
            room_id string, tag string and content string.
        """
        # Nothing can have changed between identical stream positions.
        if last_id == current_id:
            return []

        def get_all_updated_tags_txn(txn):
            sql = (
                "SELECT stream_id, user_id, room_id"
                " FROM room_tags_revisions as r"
                " WHERE ? < stream_id AND stream_id <= ?"
                " ORDER BY stream_id ASC LIMIT ?"
            )
            txn.execute(sql, (last_id, current_id, limit))
            return txn.fetchall()

        tag_ids = yield self.db.runInteraction(
            "get_all_updated_tags", get_all_updated_tags_txn
        )

        def get_tag_content(txn, tag_ids):
            sql = "SELECT tag, content FROM room_tags WHERE user_id=? AND room_id=?"
            results = []
            for stream_id, user_id, room_id in tag_ids:
                txn.execute(sql, (user_id, room_id))
                tags = []
                for tag, content in txn:
                    # content is already a JSON-encoded string (see
                    # get_tags_for_user, which json.loads it), so the
                    # {tag: content} object is assembled by hand rather
                    # than via json.dumps to avoid double-encoding.
                    tags.append(json.dumps(tag) + ":" + content)
                tag_json = "{" + ",".join(tags) + "}"
                results.append((stream_id, user_id, room_id, tag_json))

            return results

        # Fetch tag contents in batches so a single transaction doesn't
        # have to look up an unbounded number of rooms.
        batch_size = 50
        results = []
        for i in range(0, len(tag_ids), batch_size):
            tags = yield self.db.runInteraction(
                "get_all_updated_tag_content",
                get_tag_content,
                tag_ids[i : i + batch_size],
            )
            results.extend(tags)

        return results

    @defer.inlineCallbacks
    def get_updated_tags(self, user_id, stream_id):
        """Get all the tags for the rooms where the tags have changed since the
        given version

        Args:
            user_id(str): The user to get the tags for.
            stream_id(int): The earliest update to get for the user.
        Returns:
            A deferred dict mapping from room_id strings to lists of tag
            strings for all the rooms that changed since the stream_id token.
        """

        def get_updated_tags_txn(txn):
            sql = (
                "SELECT room_id from room_tags_revisions"
                " WHERE user_id = ? AND stream_id > ?"
            )
            txn.execute(sql, (user_id, stream_id))
            room_ids = [row[0] for row in txn]
            return room_ids

        # Cheap in-memory check first: if the stream cache says nothing for
        # this user changed since stream_id, skip the database entirely.
        changed = self._account_data_stream_cache.has_entity_changed(
            user_id, int(stream_id)
        )
        if not changed:
            return {}

        room_ids = yield self.db.runInteraction(
            "get_updated_tags", get_updated_tags_txn
        )

        results = {}
        if room_ids:
            # Reuse the (cached) full tag map and project out just the
            # changed rooms; a room with all tags removed maps to {}.
            tags_by_room = yield self.get_tags_for_user(user_id)
            for room_id in room_ids:
                results[room_id] = tags_by_room.get(room_id, {})

        return results

    def get_tags_for_room(self, user_id, room_id):
        """Get all the tags for the given room

        Args:
            user_id(str): The user to get tags for
            room_id(str): The room to get tags for
        Returns:
            A deferred dict mapping tag strings to decoded tag content.
        """
        return self.db.simple_select_list(
            table="room_tags",
            keyvalues={"user_id": user_id, "room_id": room_id},
            retcols=("tag", "content"),
            desc="get_tags_for_room",
        ).addCallback(
            # Decode each row's JSON content into a {tag: content} dict.
            lambda rows: {row["tag"]: json.loads(row["content"]) for row in rows}
        )
|
2015-10-28 12:06:57 -04:00
|
|
|
|
2018-02-16 07:08:42 -05:00
|
|
|
|
|
|
|
class TagsStore(TagsWorkerStore):
    """Write-side storage access for per-user, per-room tags.

    Mutations bump the account-data stream via ``_account_data_id_gen`` and
    record the new position in ``room_tags_revisions`` so readers can page
    changes.
    """

    @defer.inlineCallbacks
    def add_tag_to_room(self, user_id, room_id, tag, content):
        """Add a tag to a room for a user.

        Args:
            user_id(str): The user to add a tag for.
            room_id(str): The room to add a tag for.
            tag(str): The tag name to add.
            content(dict): A json object to associate with the tag.
        Returns:
            A deferred that completes, with the new account-data stream
            position, once the tag has been added.
        """
        # Serialise once up front; the content column stores JSON text.
        content_json = json.dumps(content)

        def add_tag_txn(txn, next_id):
            # Upsert so re-tagging an already-tagged room just replaces
            # the content rather than failing on the unique key.
            self.db.simple_upsert_txn(
                txn,
                table="room_tags",
                keyvalues={"user_id": user_id, "room_id": room_id, "tag": tag},
                values={"content": content_json},
            )
            # Record the change at the new stream position (same txn).
            self._update_revision_txn(txn, user_id, room_id, next_id)

        with self._account_data_id_gen.get_next() as next_id:
            yield self.db.runInteraction("add_tag", add_tag_txn, next_id)

        # The cached full tag map for this user is now stale.
        self.get_tags_for_user.invalidate((user_id,))

        result = self._account_data_id_gen.get_current_token()
        return result

    @defer.inlineCallbacks
    def remove_tag_from_room(self, user_id, room_id, tag):
        """Remove a tag from a room for a user.

        Args:
            user_id(str): The user to remove a tag for.
            room_id(str): The room to remove a tag from.
            tag(str): The tag name to remove.
        Returns:
            A deferred that completes, with the new account-data stream
            position, once the tag has been removed
        """

        def remove_tag_txn(txn, next_id):
            sql = (
                "DELETE FROM room_tags "
                " WHERE user_id = ? AND room_id = ? AND tag = ?"
            )
            txn.execute(sql, (user_id, room_id, tag))
            # A removal is also a revision: bump the room's stream position
            # in the same transaction so readers see the change.
            self._update_revision_txn(txn, user_id, room_id, next_id)

        with self._account_data_id_gen.get_next() as next_id:
            yield self.db.runInteraction("remove_tag", remove_tag_txn, next_id)

        # Invalidate the per-user cache maintained by TagsWorkerStore.
        self.get_tags_for_user.invalidate((user_id,))

        result = self._account_data_id_gen.get_current_token()
        return result

    def _update_revision_txn(self, txn, user_id, room_id, next_id):
        """Update the latest revision of the tags for the given user and room.

        Must be called inside the same transaction as the tag mutation.

        Args:
            txn: The database cursor
            user_id(str): The ID of the user.
            room_id(str): The ID of the room.
            next_id(int): The the revision to advance to.
        """

        # Only notify the in-memory stream cache after the transaction
        # commits, so readers never observe a position the DB doesn't have.
        txn.call_after(
            self._account_data_stream_cache.entity_has_changed, user_id, next_id
        )

        # Note: This is only here for backwards compat to allow admins to
        # roll back to a previous Synapse version. Next time we update the
        # database version we can remove this table.
        update_max_id_sql = (
            "UPDATE account_data_max_stream_id"
            " SET stream_id = ?"
            " WHERE stream_id < ?"
        )
        txn.execute(update_max_id_sql, (next_id, next_id))

        update_sql = (
            "UPDATE room_tags_revisions"
            " SET stream_id = ?"
            " WHERE user_id = ?"
            " AND room_id = ?"
        )
        txn.execute(update_sql, (next_id, user_id, room_id))

        # UPDATE-then-INSERT upsert: if no revision row existed yet,
        # insert one, tolerating a concurrent inserter.
        if txn.rowcount == 0:
            insert_sql = (
                "INSERT INTO room_tags_revisions (user_id, room_id, stream_id)"
                " VALUES (?, ?, ?)"
            )
            try:
                txn.execute(insert_sql, (user_id, room_id, next_id))
            except self.database_engine.module.IntegrityError:
                # Ignore insertion errors. It doesn't matter if the row wasn't
                # inserted because if two updates happend concurrently the one
                # with the higher stream_id will not be reported to a client
                # unless the previous update has completed. It doesn't matter
                # which stream_id ends up in the table, as long as it is higher
                # than the id that the client has.
                pass