From b8ca494ee9e42e5b1aca8958088bd35cc5707437 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 10 Jul 2017 15:44:15 +0100
Subject: [PATCH 001/223] Initial group server implementation

---
 synapse/federation/transport/client.py       |  34 ++
 synapse/federation/transport/server.py       | 143 ++++++-
 synapse/groups/__init__.py                   |   0
 synapse/groups/attestations.py               | 120 ++++++
 synapse/groups/groups_server.py              | 382 ++++++++++++++++++
 synapse/handlers/room_list.py                |  18 +-
 synapse/http/server.py                       |   4 +-
 synapse/server.py                            |  14 +
 synapse/storage/__init__.py                  |   3 +-
 synapse/storage/group_server.py              | 280 +++++++++++++
 .../storage/schema/delta/43/group_server.sql |  77 ++++
 11 files changed, 1064 insertions(+), 11 deletions(-)
 create mode 100644 synapse/groups/__init__.py
 create mode 100644 synapse/groups/attestations.py
 create mode 100644 synapse/groups/groups_server.py
 create mode 100644 synapse/storage/group_server.py
 create mode 100644 synapse/storage/schema/delta/43/group_server.sql

diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 52b2a717d..17b93a28a 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -471,3 +471,37 @@ class TransportLayerClient(object):
         )
 
         defer.returnValue(content)
+
+    @log_function
+    def invite_to_group_notification(self, destination, group_id, user_id, content):
+        path = PREFIX + "/groups/local/%s/users/%s/invite" % (group_id, user_id)
+
+        return self.client.post_json(
+            destination=destination,
+            path=path,
+            data=content,
+            ignore_backoff=True,
+        )
+
+    @log_function
+    def remove_user_from_group_notification(self, destination, group_id, user_id,
+                                            content):
+        path = PREFIX + "/groups/local/%s/users/%s/remove" % (group_id, user_id)
+
+        return self.client.post_json(
+            destination=destination,
+            path=path,
+            data=content,
+            ignore_backoff=True,
+        )
+
+    @log_function
+    def renew_group_attestation(self, destination, group_id, user_id, content):
+        path = PREFIX + "/groups/%s/renew_attestation/%s" % (group_id, user_id)
+
+        return self.client.post_json(
+            destination=destination,
+            path=path,
+            data=content,
+            ignore_backoff=True,
+        )
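
The three transport methods above share one shape: a fire-and-forget POST to
whichever homeserver sits on the other side of the relationship. A rough
sketch of how the group server drives the invite notification (the IDs and
server names here are made up; `transport` stands for the
TransportLayerClient instance the homeserver already holds):

    # Sketch only -- hypothetical IDs; mirrors how invite_to_group in
    # groups_server.py (below) calls invite_to_group_notification.
    from synapse.types import get_domain_from_id

    group_id = "+sample:group.example.com"   # hypothetical group ID
    user_id = "@bob:user.example.org"        # hypothetical remote user

    transport.invite_to_group_notification(
        destination=get_domain_from_id(user_id),  # the user's server
        group_id=group_id,
        user_id=user_id,
        content={"inviter": "@admin:group.example.com"},
    )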
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index a78f01e44..e6b0f432f 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -25,7 +25,7 @@ from synapse.http.servlet import (
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.versionstring import get_version_string
 from synapse.util.logcontext import preserve_fn
-from synapse.types import ThirdPartyInstanceID
+from synapse.types import ThirdPartyInstanceID, get_domain_from_id
 
 import functools
 import logging
@@ -609,6 +609,115 @@ class FederationVersionServlet(BaseFederationServlet):
         }))
 
 
+class FederationGroupsProfileServlet(BaseFederationServlet):
+    PATH = "/groups/(?P<group_id>[^/]*)/profile$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id):
+        requester_user_id = content["requester_user_id"]
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        new_content = yield self.handler.get_group_profile(
+            group_id, requester_user_id
+        )
+
+        defer.returnValue((200, new_content))
+
+
+class FederationGroupsRoomsServlet(BaseFederationServlet):
+    PATH = "/groups/(?P<group_id>[^/]*)/rooms$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id):
+        requester_user_id = content["requester_user_id"]
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        new_content = yield self.handler.get_rooms_in_group(
+            group_id, requester_user_id
+        )
+
+        defer.returnValue((200, new_content))
+
+
+class FederationGroupsUsersServlet(BaseFederationServlet):
+    PATH = "/groups/(?P<group_id>[^/]*)/users$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id):
+        requester_user_id = content["requester_user_id"]
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        new_content = yield self.handler.get_users_in_group(
+            group_id, requester_user_id
+        )
+
+        defer.returnValue((200, new_content))
+
+
+class FederationGroupsInviteServlet(BaseFederationServlet):
+    PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/invite$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id, user_id):
+        requester_user_id = content["requester_user_id"]
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        new_content = yield self.handler.invite_to_group(
+            group_id, user_id, requester_user_id, content,
+        )
+
+        defer.returnValue((200, new_content))
+
+
+class FederationGroupsAcceptInviteServlet(BaseFederationServlet):
+    PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/accept_invite$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id, user_id):
+        if get_domain_from_id(user_id) != origin:
+            raise SynapseError(403, "user_id doesn't match origin")
+
+        new_content = yield self.handler.accept_invite(
+            group_id, user_id, content,
+        )
+
+        defer.returnValue((200, new_content))
+
+
+class FederationGroupsRemoveUserServlet(BaseFederationServlet):
+    PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/remove$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id, user_id):
+        requester_user_id = content["requester_user_id"]
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        new_content = yield self.handler.remove_user_from_group(
+            group_id, user_id, requester_user_id, content,
+        )
+
+        defer.returnValue((200, new_content))
+
+
+class FederationGroupsRenewAttestaionServlet(BaseFederationServlet):
+    PATH = "/groups/(?P<group_id>[^/]*)/renew_attestation/(?P<user_id>[^/]*)$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id, user_id):
+        # We don't need to check auth here as we check the attestation signatures
+
+        new_content = yield self.handler.on_renew_group_attestation(
+            origin, content, group_id, user_id
+        )
+
+        defer.returnValue((200, new_content))
+
+
 FEDERATION_SERVLET_CLASSES = (
     FederationSendServlet,
     FederationPullServlet,
@@ -635,11 +744,27 @@ FEDERATION_SERVLET_CLASSES = (
     FederationVersionServlet,
 )
 
+
 ROOM_LIST_CLASSES = (
     PublicRoomList,
 )
 
+GROUP_SERVER_SERVLET_CLASSES = (
+    FederationGroupsProfileServlet,
+    FederationGroupsRoomsServlet,
+    FederationGroupsUsersServlet,
+    FederationGroupsInviteServlet,
+    FederationGroupsAcceptInviteServlet,
+    FederationGroupsRemoveUserServlet,
+)
+
+
+GROUP_ATTESTATION_SERVLET_CLASSES = (
+    FederationGroupsRenewAttestaionServlet,
+)
+
+
 def register_servlets(hs, resource, authenticator, ratelimiter):
     for servletclass in FEDERATION_SERVLET_CLASSES:
         servletclass(
@@ -656,3 +781,19 @@ def register_servlets(hs, resource, authenticator, ratelimiter):
             ratelimiter=ratelimiter,
             server_name=hs.hostname,
         ).register(resource)
+
+    for servletclass in GROUP_SERVER_SERVLET_CLASSES:
+        servletclass(
+            handler=hs.get_groups_server_handler(),
+            authenticator=authenticator,
+            ratelimiter=ratelimiter,
+            server_name=hs.hostname,
+        ).register(resource)
+
+    for servletclass in GROUP_ATTESTATION_SERVLET_CLASSES:
+        servletclass(
+            handler=hs.get_groups_attestation_renewer(),
+            authenticator=authenticator,
+            ratelimiter=ratelimiter,
+            server_name=hs.hostname,
+        ).register(resource)
diff --git a/synapse/groups/__init__.py b/synapse/groups/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
new file mode 100644
index 000000000..d83076a9b
--- /dev/null
+++ b/synapse/groups/attestations.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 Vector Creations Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import SynapseError
+from synapse.types import get_domain_from_id
+from synapse.util.logcontext import preserve_fn
+
+from signedjson.sign import sign_json
+
+
+DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000
+MIN_ATTESTATION_LENGTH_MS = 1 * 60 * 60 * 1000
+UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000
+
+
+class GroupAttestationSigning(object):
+    def __init__(self, hs):
+        self.keyring = hs.get_keyring()
+        self.clock = hs.get_clock()
+        self.server_name = hs.hostname
+        self.signing_key = hs.config.signing_key[0]
+
+    @defer.inlineCallbacks
+    def verify_attestation(self, attestation, group_id, user_id, server_name=None):
+        if not server_name:
+            if get_domain_from_id(group_id) == self.server_name:
+                server_name = get_domain_from_id(user_id)
+            else:
+                server_name = get_domain_from_id(group_id)
+
+        if user_id != attestation["user_id"]:
+            raise SynapseError(400, "Attestation has incorrect user_id")
+
+        if group_id != attestation["group_id"]:
+            raise SynapseError(400, "Attestation has incorrect group_id")
+
+        valid_until_ms = attestation["valid_until_ms"]
+        if valid_until_ms - self.clock.time_msec() < MIN_ATTESTATION_LENGTH_MS:
+            raise SynapseError(400, "Attestation not valid for long enough")
+
+        yield self.keyring.verify_json_for_server(server_name, attestation)
+
+    def create_attestation(self, group_id, user_id):
+        return sign_json({
+            "group_id": group_id,
+            "user_id": user_id,
+            "valid_until_ms": self.clock.time_msec() + DEFAULT_ATTESTATION_LENGTH_MS,
+        }, self.server_name, self.signing_key)
+
+
+class GroupAttestionRenewer(object):
+    def __init__(self, hs):
+        self.clock = hs.get_clock()
+        self.store = hs.get_datastore()
+        self.assestations = hs.get_groups_attestation_signing()
+        self.transport_client = hs.get_federation_transport_client()
+
+        self._renew_attestations_loop = self.clock.looping_call(
+            self._renew_attestations, 30 * 60 * 1000,
+        )
+
+    @defer.inlineCallbacks
+    def on_renew_attestation(self, group_id, user_id, content):
+        attestation = content["attestation"]
+
+        yield self.attestations.verify_attestation(
+            attestation,
+            user_id=user_id,
+            group_id=group_id,
+        )
+
+        yield self.store.update_remote_attestion(group_id, user_id, attestation)
+
+        defer.returnValue({})
+
+    @defer.inlineCallbacks
+    def _renew_attestations(self):
+        now = self.clock.time_msec()
+
+        rows = yield self.store.get_attestations_need_renewals(
+            now + UPDATE_ATTESTATION_TIME_MS
+        )
+
+        @defer.inlineCallbacks
+        def _renew_attestation(self, group_id, user_id):
+            attestation = self.attestations.create_attestation(group_id, user_id)
+
+            if self.hs.is_mine_id(group_id):
+                destination = get_domain_from_id(user_id)
+            else:
+                destination = get_domain_from_id(group_id)
+
+            yield self.transport_client.renew_group_attestation(
+                destination, group_id, user_id,
+                content={"attestation": attestation},
+            )
+
+            yield self.store.update_attestation_renewal(
+                group_id, user_id, attestation
+            )
+
+        for row in rows:
+            group_id = row["group_id"]
+            user_id = row["user_id"]
+
+            preserve_fn(_renew_attestation)(group_id, user_id)
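
For orientation, an attestation as produced by create_attestation is just the
dict below run through signedjson's sign_json, which adds a "signatures"
entry under the signing server's name. The key name and signature value here
are illustrative only:

    # Illustrative output of create_attestation; valid_until_ms is
    # now + DEFAULT_ATTESTATION_LENGTH_MS (three days).
    {
        "group_id": "+sample:group.example.com",
        "user_id": "@bob:user.example.org",
        "valid_until_ms": 1499954655000,
        "signatures": {
            "group.example.com": {"ed25519:a1b2": "base64signature..."},
        },
    }

verify_attestation then checks that the IDs match, that the attestation still
has at least MIN_ATTESTATION_LENGTH_MS left to live, and that the expected
server's signature verifies via the keyring.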
diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
new file mode 100644
index 000000000..195f1eae5
--- /dev/null
+++ b/synapse/groups/groups_server.py
@@ -0,0 +1,382 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 Vector Creations Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from synapse.api.errors import SynapseError
+from synapse.types import UserID, get_domain_from_id
+
+
+import functools
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+# TODO: Allow users to "knock" or simply join depending on rules
+# TODO: Federation admin APIs
+# TODO: is_privileged flag to users and is_public to users and rooms
+# TODO: Audit log for admins (profile updates, membership changes, users who tried
+# to join but were rejected, etc)
+# TODO: Flairs
+
+
+UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000
+
+
+def check_group_is_ours(and_exists=False):
+    def g(func):
+        @functools.wraps(func)
+        @defer.inlineCallbacks
+        def h(self, group_id, *args, **kwargs):
+            if not self.is_mine_id(group_id):
+                raise SynapseError(400, "Group not on this server")
+            if and_exists:
+                group = yield self.store.get_group(group_id)
+                if not group:
+                    raise SynapseError(404, "Unknown group")
+
+            res = yield func(self, group_id, *args, **kwargs)
+            defer.returnValue(res)
+
+        return h
+    return g
+
+
+class GroupsServerHandler(object):
+    def __init__(self, hs):
+        self.hs = hs
+        self.store = hs.get_datastore()
+        self.room_list_handler = hs.get_room_list_handler()
+        self.auth = hs.get_auth()
+        self.clock = hs.get_clock()
+        self.keyring = hs.get_keyring()
+        self.is_mine_id = hs.is_mine_id
+        self.signing_key = hs.config.signing_key[0]
+        self.server_name = hs.hostname
+        self.attestations = hs.get_groups_attestation_signing()
+        self.transport_client = hs.get_federation_transport_client()
+
+        # Ensure attestations get renewed
+        hs.get_groups_attestation_renewer()
+
+    @check_group_is_ours()
+    @defer.inlineCallbacks
+    def get_group_profile(self, group_id, requester_user_id):
+        group_description = yield self.store.get_group(group_id)
+
+        if group_description:
+            defer.returnValue(group_description)
+        else:
+            raise SynapseError(404, "Unknown group")
+
+    @check_group_is_ours(and_exists=True)
+    @defer.inlineCallbacks
+    def get_users_in_group(self, group_id, requester_user_id):
+        is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
+
+        user_results = yield self.store.get_users_in_group(
+            group_id, include_private=is_user_in_group,
+        )
+
+        chunk = []
+        for user_result in user_results:
+            g_user_id = user_result["user_id"]
+            is_public = user_result["is_public"]
+
+            entry = {"user_id": g_user_id}
+
+            # TODO: Get profile information
+
+            if not is_public:
+                entry["is_public"] = False
+
+            if not self.is_mine_id(requester_user_id):
+                attestation = yield self.store.get_remote_attestation(group_id, g_user_id)
+                if not attestation:
+                    continue
+
+                entry["attestation"] = attestation
+            else:
+                entry["attestation"] = self.attestations.create_attestation(
+                    group_id, g_user_id,
+                )
+
+            chunk.append(entry)
+
+        # TODO: If admin add lists of users whose attestations have timed out
+
+        defer.returnValue({
+            "chunk": chunk,
+            "total_user_count_estimate": len(user_results),
+        })
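
Schematically, get_users_in_group returns a payload like the following
(values illustrative, attestations elided; see the sketch after
attestations.py above). Entries for which no valid remote attestation is
stored are silently dropped when the requester is remote:

    # Illustrative response shape:
    {
        "chunk": [
            {"user_id": "@alice:group.example.com", "attestation": "..."},
            {"user_id": "@bob:user.example.org", "is_public": False,
             "attestation": "..."},
        ],
        "total_user_count_estimate": 2,
    }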
+    @check_group_is_ours(and_exists=True)
+    @defer.inlineCallbacks
+    def get_rooms_in_group(self, group_id, requester_user_id):
+        is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
+
+        room_results = yield self.store.get_rooms_in_group(
+            group_id, include_private=is_user_in_group,
+        )
+
+        chunk = []
+        for room_result in room_results:
+            room_id = room_result["room_id"]
+            is_public = room_result["is_public"]
+
+            joined_users = yield self.store.get_users_in_room(room_id)
+            entry = yield self.room_list_handler.generate_room_entry(
+                room_id, len(joined_users),
+                with_alias=False, allow_private=True,
+            )
+
+            if not entry:
+                continue
+
+            if not is_public:
+                entry["is_public"] = False
+
+            chunk.append(entry)
+
+        chunk.sort(key=lambda e: -e["num_joined_members"])
+
+        defer.returnValue({
+            "chunk": chunk,
+            "total_room_count_estimate": len(room_results),
+        })
+
+    @check_group_is_ours(and_exists=True)
+    @defer.inlineCallbacks
+    def add_room(self, group_id, requester_user_id, room_id, content):
+        is_admin = yield self.store.is_user_admin_in_group(group_id, requester_user_id)
+        if not is_admin:
+            raise SynapseError(403, "User is not admin in group")
+
+        # TODO: Check if room has already been added
+
+        visibility = content.get("visibility")
+        if visibility:
+            vis_type = visibility["type"]
+            if vis_type not in ("public", "private"):
+                raise SynapseError(
+                    400, "Synapse only supports 'public'/'private' visibility"
+                )
+            is_public = vis_type == "public"
+        else:
+            is_public = True
+
+        yield self.store.add_room_to_group(group_id, room_id, is_public=is_public)
+
+        defer.returnValue({})
+
+    @check_group_is_ours(and_exists=True)
+    @defer.inlineCallbacks
+    def invite_to_group(self, group_id, user_id, requester_user_id, content):
+        is_admin = yield self.store.is_user_admin_in_group(
+            group_id, requester_user_id
+        )
+        if not is_admin:
+            raise SynapseError(403, "User is not admin in group")
+
+        # TODO: Check if user knocked
+        # TODO: Check if user is already invited
+
+        group = yield self.store.get_group(group_id)
+        content = {
+            "profile": {
+                "name": group["name"],
+                "avatar_url": group["avatar_url"],
+            },
+            "inviter": requester_user_id,
+        }
+
+        if self.hs.is_mine_id(user_id):
+            raise NotImplementedError()
+        else:
+            local_attestation = self.attestations.create_attestation(group_id, user_id)
+            content.update({
+                "attestation": local_attestation,
+            })
+
+            res = yield self.transport_client.invite_to_group_notification(
+                get_domain_from_id(user_id), group_id, user_id, content
+            )
+
+        if res["state"] == "join":
+            if not self.hs.is_mine_id(user_id):
+                remote_attestation = res["attestation"]
+
+                yield self.attestations.verify_attestation(
+                    remote_attestation,
+                    user_id=user_id,
+                    group_id=group_id,
+                )
+            else:
+                remote_attestation = None
+
+            yield self.store.add_user_to_group(
+                group_id, user_id,
+                is_admin=False,
+                is_public=False,  # TODO
+                local_attestation=local_attestation,
+                remote_attestation=remote_attestation,
+            )
+        elif res["state"] == "invite":
+            yield self.store.add_group_invite(
+                group_id, user_id,
+            )
+            defer.returnValue({
+                "state": "invite"
+            })
+        elif res["state"] == "reject":
+            defer.returnValue({
+                "state": "reject"
+            })
+        else:
+            raise SynapseError(502, "Unknown state returned by HS")
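
The remote side answers the invite notification with a small state machine,
which the code above dispatches on. Schematically (payloads illustrative):

    # What we POST to the user's server, roughly:
    #   {"profile": {"name": ..., "avatar_url": ...},
    #    "inviter": requester_user_id,
    #    "attestation": our signed attestation for the user}
    #
    # What it may answer:
    #   {"state": "join", "attestation": its attestation}  -> add member now
    #   {"state": "invite"}                                 -> record pending invite
    #   {"state": "reject"}                                 -> pass rejection back
    # Anything else is treated as a bad gateway (502).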
+    @check_group_is_ours(and_exists=True)
+    @defer.inlineCallbacks
+    def accept_invite(self, group_id, user_id, content):
+        if not self.store.is_user_invited_to_local_group(group_id, user_id):
+            raise SynapseError(403, "User not invited to group")
+
+        if not self.hs.is_mine_id(user_id):
+            remote_attestation = content["attestation"]
+
+            yield self.attestations.verify_attestation(
+                remote_attestation,
+                user_id=user_id,
+                group_id=group_id,
+            )
+        else:
+            remote_attestation = None
+
+        local_attestation = self.attestations.create_attestation(group_id, user_id)
+
+        visibility = content.get("visibility")
+        if visibility:
+            vis_type = visibility["type"]
+            if vis_type not in ("public", "private"):
+                raise SynapseError(
+                    400, "Synapse only supports 'public'/'private' visibility"
+                )
+            is_public = vis_type == "public"
+        else:
+            is_public = True
+
+        yield self.store.add_user_to_group(
+            group_id, user_id,
+            is_admin=False,
+            is_public=is_public,
+            local_attestation=local_attestation,
+            remote_attestation=remote_attestation,
+        )
+
+        defer.returnValue({
+            "state": "join",
+            "attestation": local_attestation,
+        })
+
+    @check_group_is_ours(and_exists=True)
+    @defer.inlineCallbacks
+    def knock(self, group_id, user_id, content):
+        pass
+
+    @check_group_is_ours(and_exists=True)
+    @defer.inlineCallbacks
+    def accept_knock(self, group_id, user_id, content):
+        pass
+
+    @check_group_is_ours(and_exists=True)
+    @defer.inlineCallbacks
+    def remove_user_from_group(self, group_id, user_id, requester_user_id, content):
+        is_kick = False
+        if requester_user_id != user_id:
+            is_admin = yield self.store.is_user_admin_in_group(
+                group_id, requester_user_id
+            )
+            if not is_admin:
+                raise SynapseError(403, "User is not admin in group")
+
+            is_kick = True
+
+        yield self.store.remove_user_to_group(
+            group_id, user_id,
+        )
+
+        if is_kick:
+            if self.hs.is_mine_id(user_id):
+                raise NotImplementedError()
+            else:
+                yield self.transport_client.remove_user_from_group_notification(
+                    get_domain_from_id(user_id), group_id, user_id, {}
+                )
+
+        defer.returnValue({})
+
+    @check_group_is_ours()
+    @defer.inlineCallbacks
+    def create_group(self, group_id, user_id, content):
+        logger.info("Attempting to create group with ID: %r", group_id)
+        group = yield self.store.get_group(group_id)
+        if group:
+            raise SynapseError(400, "Group already exists")
+
+        is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id))
+        if not is_admin and not group_id.startswith("+u/"):
+            raise SynapseError(403, "Group ID must start with '+u/' or be a server admin")
+
+        profile = content.get("profile", {})
+        name = profile.get("name")
+        avatar_url = profile.get("avatar_url")
+        short_description = profile.get("short_description")
+        long_description = profile.get("long_description")
+
+        yield self.store.create_group(
+            group_id,
+            user_id,
+            name=name,
+            avatar_url=avatar_url,
+            short_description=short_description,
+            long_description=long_description,
+        )
+
+        if not self.hs.is_mine_id(user_id):
+            remote_attestation = content["attestation"]
+
+            yield self.attestations.verify_attestation(
+                remote_attestation,
+                user_id=user_id,
+                group_id=group_id,
+            )
+
+            local_attestation = self.attestations.create_attestation(group_id, user_id)
+        else:
+            local_attestation = None
+            remote_attestation = None
+
+        yield self.store.add_user_to_group(
+            group_id, user_id,
+            is_admin=True,
+            is_public=True,  # TODO
+            local_attestation=local_attestation,
+            remote_attestation=remote_attestation,
+        )
+
+        defer.returnValue({
+            "group_id": group_id,
+        })
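
For reference, the body create_group expects is just an optional profile
wrapper; every field may be omitted (the values below are made up):

    # Hypothetical request body for create_group:
    {
        "profile": {
            "name": "Sample Group",
            "avatar_url": "mxc://group.example.com/abc123",
            "short_description": "One-line summary",
            "long_description": "A longer free-form description.",
        },
    }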
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index 516cd9a6a..41e1781df 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -276,13 +276,14 @@ class RoomListHandler(BaseHandler):
             # We've already got enough, so lets just drop it.
             return
 
-        result = yield self._generate_room_entry(room_id, num_joined_users)
+        result = yield self.generate_room_entry(room_id, num_joined_users)
 
         if result and _matches_room_entry(result, search_filter):
             chunk.append(result)
 
     @cachedInlineCallbacks(num_args=1, cache_context=True)
-    def _generate_room_entry(self, room_id, num_joined_users, cache_context):
+    def generate_room_entry(self, room_id, num_joined_users, cache_context,
+                            with_alias=True, allow_private=False):
         """Returns the entry for a room
         """
         result = {
@@ -316,14 +317,15 @@ class RoomListHandler(BaseHandler):
         join_rules_event = current_state.get((EventTypes.JoinRules, ""))
         if join_rules_event:
             join_rule = join_rules_event.content.get("join_rule", None)
-            if join_rule and join_rule != JoinRules.PUBLIC:
+            if not allow_private and join_rule and join_rule != JoinRules.PUBLIC:
                 defer.returnValue(None)
 
-        aliases = yield self.store.get_aliases_for_room(
-            room_id, on_invalidate=cache_context.invalidate
-        )
-        if aliases:
-            result["aliases"] = aliases
+        if with_alias:
+            aliases = yield self.store.get_aliases_for_room(
+                room_id, on_invalidate=cache_context.invalidate
+            )
+            if aliases:
+                result["aliases"] = aliases
 
         name_event = yield current_state.get((EventTypes.Name, ""))
         if name_event:
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 7ef3d526b..8a27e3b42 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -145,7 +145,9 @@ def wrap_request_handler(request_handler, include_metrics=False):
                         "error": "Internal server error",
                         "errcode": Codes.UNKNOWN,
                     },
-                    send_cors=True
+                    send_cors=True,
+                    pretty_print=_request_user_agent_is_curl(request),
+                    version_string=self.version_string,
                 )
             finally:
                 try:
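
The room_list change is what lets the groups server reuse the public room
directory's entry builder for rooms that would normally be filtered out:
generate_room_entry (now public) gains with_alias and allow_private knobs,
and the groups server calls it as

    entry = yield self.room_list_handler.generate_room_entry(
        room_id, len(joined_users),
        with_alias=False, allow_private=True,
    )

so private rooms are no longer dropped and the alias lookup is skipped.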
diff --git a/synapse/server.py b/synapse/server.py
index a38e5179e..d857cca84 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -50,6 +50,8 @@ from synapse.handlers.initial_sync import InitialSyncHandler
 from synapse.handlers.receipts import ReceiptsHandler
 from synapse.handlers.read_marker import ReadMarkerHandler
 from synapse.handlers.user_directory import UserDirectoyHandler
+from synapse.groups.groups_server import GroupsServerHandler
+from synapse.groups.attestations import GroupAttestionRenewer, GroupAttestationSigning
 from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory
 from synapse.http.matrixfederationclient import MatrixFederationHttpClient
 from synapse.notifier import Notifier
@@ -139,6 +141,9 @@ class HomeServer(object):
         'read_marker_handler',
         'action_generator',
         'user_directory_handler',
+        'groups_server_handler',
+        'groups_attestation_signing',
+        'groups_attestation_renewer',
     ]
 
     def __init__(self, hostname, **kwargs):
@@ -309,6 +314,15 @@ class HomeServer(object):
     def build_user_directory_handler(self):
         return UserDirectoyHandler(self)
 
+    def build_groups_server_handler(self):
+        return GroupsServerHandler(self)
+
+    def build_groups_attestation_signing(self):
+        return GroupAttestationSigning(self)
+
+    def build_groups_attestation_renewer(self):
+        return GroupAttestionRenewer(self)
+
     def remove_pusher(self, app_id, push_key, user_id):
         return self.get_pusherpool().remove_pusher(app_id, push_key, user_id)
 
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index b92472df3..fdee9f1ad 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -37,7 +37,7 @@ from .media_repository import MediaRepositoryStore
 from .rejections import RejectionsStore
 from .event_push_actions import EventPushActionsStore
 from .deviceinbox import DeviceInboxStore
-
+from .group_server import GroupServerStore
 from .state import StateStore
 from .signatures import SignatureStore
 from .filtering import FilteringStore
@@ -88,6 +88,7 @@ class DataStore(RoomMemberStore, RoomStore,
                 DeviceStore, DeviceInboxStore,
                 UserDirectoryStore,
+                GroupServerStore,
                 ):
 
     def __init__(self, db_conn, hs):
diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py
new file mode 100644
index 000000000..01d9a982c
--- /dev/null
+++ b/synapse/storage/group_server.py
@@ -0,0 +1,280 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 Vector Creations Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet import defer
+
+from ._base import SQLBaseStore
+
+import ujson as json
+
+
+class GroupServerStore(SQLBaseStore):
+    def get_group(self, group_id):
+        return self._simple_select_one(
+            table="groups",
+            keyvalues={
+                "group_id": group_id,
+            },
+            retcols=("name", "short_description", "long_description", "avatar_url",),
+            allow_none=True,
+            desc="get_group",
+        )
+
+    def get_users_in_group(self, group_id, include_private=False):
+        # TODO: Pagination
+
+        keyvalues = {
+            "group_id": group_id,
+        }
+        if not include_private:
+            keyvalues["is_public"] = True
+
+        return self._simple_select_list(
+            table="group_users",
+            keyvalues=keyvalues,
+            retcols=("user_id", "is_public",),
+            desc="get_users_in_group",
+        )
+
+    def get_rooms_in_group(self, group_id, include_private=False):
+        # TODO: Pagination
+
+        keyvalues = {
+            "group_id": group_id,
+        }
+        if not include_private:
+            keyvalues["is_public"] = True
+
+        return self._simple_select_list(
+            table="group_rooms",
+            keyvalues=keyvalues,
+            retcols=("room_id", "is_public",),
+            desc="get_rooms_in_group",
+        )
+
+    def is_user_in_group(self, user_id, group_id):
+        return self._simple_select_one_onecol(
+            table="group_users",
+            keyvalues={
+                "group_id": group_id,
+                "user_id": user_id,
+            },
+            retcol="user_id",
+            allow_none=True,
+            desc="is_user_in_group",
+        ).addCallback(lambda r: bool(r))
+
+    def is_user_admin_in_group(self, group_id, user_id):
+        return self._simple_select_one_onecol(
+            table="group_users",
+            keyvalues={
+                "group_id": group_id,
+                "user_id": user_id,
+            },
+            retcol="is_admin",
+            allow_none=True,
+            desc="is_user_admin_in_group",
+        )
+
+    def add_group_invite(self, group_id, user_id):
+        return self._simple_insert(
+            table="group_invites",
+            values={
+                "group_id": group_id,
+                "user_id": user_id,
+            },
+            desc="add_group_invite",
+        )
+
+    def is_user_invited_to_local_group(self, group_id, user_id):
+        return self._simple_select_one_onecol(
+            table="group_invites",
+            keyvalues={
+                "group_id": group_id,
+                "user_id": user_id,
+            },
+            retcol="user_id",
+            desc="is_user_invited_to_local_group",
+            allow_none=True,
+        )
+
+    def add_user_to_group(self, group_id, user_id, is_admin=False, is_public=True,
+                          local_attestation=None, remote_attestation=None):
+        def _add_user_to_group_txn(txn):
+            self._simple_insert_txn(
+                txn,
+                table="group_users",
+                values={
+                    "group_id": group_id,
+                    "user_id": user_id,
+                    "is_admin": is_admin,
+                    "is_public": is_public,
+                },
+            )
+
+            self._simple_delete_txn(
+                txn,
+                table="group_invites",
+                keyvalues={
+                    "group_id": group_id,
+                    "user_id": user_id,
+                },
+            )
+
+            if local_attestation:
+                self._simple_insert_txn(
+                    txn,
+                    table="group_attestations_renewals",
+                    values={
+                        "group_id": group_id,
+                        "user_id": user_id,
+                        "valid_until_ms": local_attestation["valid_until_ms"],
+                    },
+                )
+            if remote_attestation:
+                self._simple_insert_txn(
+                    txn,
+                    table="group_attestations_remote",
+                    values={
+                        "group_id": group_id,
+                        "user_id": user_id,
+                        "valid_until_ms": remote_attestation["valid_until_ms"],
+                        "attestation": json.dumps(remote_attestation),
+                    },
+                )
+
+        return self.runInteraction(
+            "add_user_to_group", _add_user_to_group_txn
+        )
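
A typical call, as made from accept_invite above. The two attestation dicts
land in the two attestation tables, which is how the renewal loop later knows
which attestations this server must renew itself:

    yield self.store.add_user_to_group(
        group_id, user_id,
        is_admin=False,
        is_public=is_public,
        local_attestation=local_attestation,    # ours; group_attestations_renewals
        remote_attestation=remote_attestation,  # theirs; group_attestations_remote
    )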
+    def remove_user_to_group(self, group_id, user_id):
+        def _remove_user_to_group_txn(txn):
+            self._simple_delete_txn(
+                txn,
+                table="group_users",
+                keyvalues={
+                    "group_id": group_id,
+                    "user_id": user_id,
+                },
+            )
+            self._simple_delete_txn(
+                txn,
+                table="group_invites",
+                keyvalues={
+                    "group_id": group_id,
+                    "user_id": user_id,
+                },
+            )
+            self._simple_delete_txn(
+                txn,
+                table="group_attestations_renewals",
+                keyvalues={
+                    "group_id": group_id,
+                    "user_id": user_id,
+                },
+            )
+            self._simple_delete_txn(
+                txn,
+                table="group_attestations_remote",
+                keyvalues={
+                    "group_id": group_id,
+                    "user_id": user_id,
+                },
+            )
+        return self.runInteraction("remove_user_to_group", _remove_user_to_group_txn)
+
+    def add_room_to_group(self, group_id, room_id, is_public):
+        return self._simple_insert(
+            table="group_rooms",
+            values={
+                "group_id": group_id,
+                "room_id": room_id,
+                "is_public": is_public,
+            },
+            desc="add_room_to_group",
+        )
+
+    @defer.inlineCallbacks
+    def create_group(self, group_id, user_id, name, avatar_url, short_description,
+                     long_description,):
+        yield self._simple_insert(
+            table="groups",
+            values={
+                "group_id": group_id,
+                "name": name,
+                "avatar_url": avatar_url,
+                "short_description": short_description,
+                "long_description": long_description,
+            },
+            desc="create_group",
+        )
+
+    def get_attestations_need_renewals(self, valid_until_ms):
+        def _get_attestations_need_renewals_txn(txn):
+            sql = """
+                SELECT group_id, user_id FROM group_attestations_renewals
+                WHERE valid_until_ms <= ?
+            """
+            txn.execute(sql, (valid_until_ms,))
+            return self.cursor_to_dict(txn)
+        return self.runInteraction(
+            "get_attestations_need_renewals", _get_attestations_need_renewals_txn
+        )
+
+    def update_attestation_renewal(self, group_id, user_id, attestation):
+        return self._simple_update_one(
+            table="group_attestations_renewals",
+            keyvalues={
+                "group_id": group_id,
+                "user_id": user_id,
+            },
+            updatevalues={
+                "valid_until_ms": attestation["valid_until_ms"],
+            },
+            desc="update_attestation_renewal",
+        )
+
+    def update_remote_attestion(self, group_id, user_id, attestation):
+        return self._simple_update_one(
+            table="group_attestations_remote",
+            keyvalues={
+                "group_id": group_id,
+                "user_id": user_id,
+            },
+            updatevalues={
+                "valid_until_ms": attestation["valid_until_ms"],
+                "attestation": json.dumps(attestation)
+            },
+            desc="update_remote_attestion",
+        )
+
+    @defer.inlineCallbacks
+    def get_remote_attestation(self, group_id, user_id):
+        row = yield self._simple_select_one(
+            table="group_attestations_remote",
+            keyvalues={
+                "group_id": group_id,
+                "user_id": user_id,
+            },
+            retcols=("valid_until_ms", "attestation"),
+            desc="get_remote_attestation",
+            allow_none=True,
+        )
+
+        now = int(self._clock.time_msec())
+        if row and now < row["valid_until_ms"]:
+            defer.returnValue(json.loads(row["attestation"]))
+
+        defer.returnValue(None)
diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql
new file mode 100644
index 000000000..6f1a94199
--- /dev/null
+++ b/synapse/storage/schema/delta/43/group_server.sql
@@ -0,0 +1,77 @@
+/* Copyright 2017 Vector Creations Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE TABLE groups (
+    group_id TEXT NOT NULL,
+    name TEXT,
+    avatar_url TEXT,
+    short_description TEXT,
+    long_description TEXT
+);
+
+CREATE UNIQUE INDEX groups_idx ON groups(group_id);
+
+
+CREATE TABLE group_users (
+    group_id TEXT NOT NULL,
+    user_id TEXT NOT NULL,
+    is_admin BOOLEAN NOT NULL,
+    is_public BOOLEAN NOT NULL
+);
+
+
+CREATE INDEX groups_users_g_idx ON group_users(group_id, user_id);
+CREATE INDEX groups_users_u_idx ON group_users(user_id);
+
+
+CREATE TABLE group_invites (
+    group_id TEXT NOT NULL,
+    user_id TEXT NOT NULL
+);
+
+CREATE INDEX groups_invites_g_idx ON group_invites(group_id, user_id);
+CREATE INDEX groups_invites_u_idx ON group_invites(user_id);
+
+
+CREATE TABLE group_rooms (
+    group_id TEXT NOT NULL,
+    room_id TEXT NOT NULL,
+    is_public BOOLEAN NOT NULL
+);
+
+CREATE INDEX groups_rooms_g_idx ON group_rooms(group_id, room_id);
+CREATE INDEX groups_rooms_r_idx ON group_rooms(room_id);
+
+
+CREATE TABLE group_attestations_renewals (
+    group_id TEXT NOT NULL,
+    user_id TEXT NOT NULL,
+    valid_until_ms BIGINT NOT NULL
+);
+
+CREATE INDEX group_attestations_renewals_g_idx ON group_attestations_renewals(group_id, user_id);
+CREATE INDEX group_attestations_renewals_u_idx ON group_attestations_renewals(user_id);
+CREATE INDEX group_attestations_renewals_v_idx ON group_attestations_renewals(valid_until_ms);
+
+CREATE TABLE group_attestations_remote (
+    group_id TEXT NOT NULL,
+    user_id TEXT NOT NULL,
+    valid_until_ms BIGINT NOT NULL,
+    attestation TEXT NOT NULL
+);
+
+CREATE INDEX group_attestations_remote_g_idx ON group_attestations_remote(group_id, user_id);
+CREATE INDEX group_attestations_remote_u_idx ON group_attestations_remote(user_id);
+CREATE INDEX group_attestations_remote_v_idx ON group_attestations_remote(valid_until_ms);
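
Taken together, the attestation constants give the following cadence (simple
arithmetic on the values defined in attestations.py above):

    DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000  # 259,200,000 ms = 3 days
    UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000     #  86,400,000 ms = 1 day

    # Every 30 minutes, _renew_attestations picks up rows where
    #     valid_until_ms <= now + UPDATE_ATTESTATION_TIME_MS
    # i.e. attestations in their final day of validity, and re-signs them
    # for another three days.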
From 83936293eb3ddb8998191b537153eaeec5e7afb0 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 11 Jul 2017 09:58:59 +0100
Subject: [PATCH 002/223] Comments

---
 synapse/groups/attestations.py                   |  29 ++++-
 synapse/groups/groups_server.py                  | 108 ++++++++++++------
 synapse/storage/group_server.py                  |  32 +++++-
 .../storage/schema/delta/43/group_server.sql     |   6 +-
 4 files changed, 132 insertions(+), 43 deletions(-)

diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
index d83076a9b..6937fa44c 100644
--- a/synapse/groups/attestations.py
+++ b/synapse/groups/attestations.py
@@ -28,6 +28,8 @@ UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000
 
 
 class GroupAttestationSigning(object):
+    """Creates and verifies group attestations.
+    """
     def __init__(self, hs):
         self.keyring = hs.get_keyring()
         self.clock = hs.get_clock()
@@ -36,11 +38,20 @@ class GroupAttestationSigning(object):
 
     @defer.inlineCallbacks
     def verify_attestation(self, attestation, group_id, user_id, server_name=None):
+        """Verifies that the given attestation matches the given paramaters.
+
+        An optional server_name can be supplied to explicitly set which server's
+        signature is expected. Otherwise assumes that either the group_id or user_id
+        is local and uses the other's server as the one to check.
+        """
+
         if not server_name:
             if get_domain_from_id(group_id) == self.server_name:
                 server_name = get_domain_from_id(user_id)
-            else:
+            elif get_domain_from_id(user_id) == self.server_name:
                 server_name = get_domain_from_id(group_id)
+            else:
+                raise Exception("Expected eitehr group_id or user_id to be local")
 
         if user_id != attestation["user_id"]:
             raise SynapseError(400, "Attestation has incorrect user_id")
@@ -48,6 +59,7 @@ class GroupAttestationSigning(object):
         if group_id != attestation["group_id"]:
             raise SynapseError(400, "Attestation has incorrect group_id")
 
+        # TODO:
         valid_until_ms = attestation["valid_until_ms"]
         if valid_until_ms - self.clock.time_msec() < MIN_ATTESTATION_LENGTH_MS:
             raise SynapseError(400, "Attestation not valid for long enough")
@@ -55,6 +67,9 @@ class GroupAttestationSigning(object):
         yield self.keyring.verify_json_for_server(server_name, attestation)
 
     def create_attestation(self, group_id, user_id):
+        """Create an attestation for the group_id and user_id with default
+        validity length.
+        """
         return sign_json({
             "group_id": group_id,
             "user_id": user_id,
@@ -63,11 +78,15 @@ class GroupAttestationSigning(object):
 
 
 class GroupAttestionRenewer(object):
+    """Responsible for sending and receiving attestation updates.
+    """
+
     def __init__(self, hs):
         self.clock = hs.get_clock()
         self.store = hs.get_datastore()
         self.assestations = hs.get_groups_attestation_signing()
         self.transport_client = hs.get_federation_transport_client()
+        self.is_mine_id = hs.is_mind_id
 
         self._renew_attestations_loop = self.clock.looping_call(
             self._renew_attestations, 30 * 60 * 1000,
@@ -75,8 +94,13 @@ class GroupAttestionRenewer(object):
 
     @defer.inlineCallbacks
     def on_renew_attestation(self, group_id, user_id, content):
+        """When a remote updates an attestation
+        """
         attestation = content["attestation"]
 
+        if not self.is_mine_id(group_id) and not self.is_mine_id(user_id):
+            raise SynapseError(400, "Neither user nor group are on this server")
+
         yield self.attestations.verify_attestation(
             attestation,
             user_id=user_id,
@@ -89,6 +113,9 @@ class GroupAttestionRenewer(object):
 
     @defer.inlineCallbacks
     def _renew_attestations(self):
+        """Called periodically to check if we need to update any of our attestations
+        """
+
         now = self.clock.time_msec()
 
         rows = yield self.store.get_attestations_need_renewals(
diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index 195f1eae5..44083100f 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -19,7 +19,6 @@ from synapse.api.errors import SynapseError
 from synapse.types import UserID, get_domain_from_id
 
 
-import functools
 import logging
 
 logger = logging.getLogger(__name__)
@@ -33,28 +32,6 @@ logger = logging.getLogger(__name__)
 # TODO: Flairs
 
 
-UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000
-
-
-def check_group_is_ours(and_exists=False):
-    def g(func):
-        @functools.wraps(func)
-        @defer.inlineCallbacks
-        def h(self, group_id, *args, **kwargs):
-            if not self.is_mine_id(group_id):
-                raise SynapseError(400, "Group not on this server")
-            if and_exists:
-                group = yield self.store.get_group(group_id)
-                if not group:
-                    raise SynapseError(404, "Unknown group")
-
-            res = yield func(self, group_id, *args, **kwargs)
-            defer.returnValue(res)
-
-        return h
-    return g
-
-
 class GroupsServerHandler(object):
     def __init__(self, hs):
         self.hs = hs
@@ -72,9 +49,28 @@ class GroupsServerHandler(object):
         # Ensure attestations get renewed
         hs.get_groups_attestation_renewer()
 
-    @check_group_is_ours()
+    @defer.inlineCallbacks
+    def check_group_is_ours(self, group_id, and_exists=False):
+        """Check that the group is ours, and optionally if it exists.
+
+        If group does exist then return group.
+        """
+        if not self.is_mine_id(group_id):
+            raise SynapseError(400, "Group not on this server")
+
+        group = yield self.store.get_group(group_id)
+        if and_exists and not group:
+            raise SynapseError(404, "Unknown group")
+
+        defer.returnValue(group)
+
     @defer.inlineCallbacks
     def get_group_profile(self, group_id, requester_user_id):
+        """Get the group profile as seen by requester_user_id
+        """
+
+        yield self.check_group_is_ours(group_id)
+
         group_description = yield self.store.get_group(group_id)
 
         if group_description:
@@ -82,9 +78,13 @@ class GroupsServerHandler(object):
         else:
             raise SynapseError(404, "Unknown group")
 
-    @check_group_is_ours(and_exists=True)
     @defer.inlineCallbacks
     def get_users_in_group(self, group_id, requester_user_id):
+        """Get the users in group as seen by requester_user_id
+        """
+
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
         is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
 
         user_results = yield self.store.get_users_in_group(
@@ -123,9 +123,13 @@ class GroupsServerHandler(object):
             "total_user_count_estimate": len(user_results),
         })
 
-    @check_group_is_ours(and_exists=True)
     @defer.inlineCallbacks
     def get_rooms_in_group(self, group_id, requester_user_id):
+        """Get the rooms in group as seen by requester_user_id
+        """
+
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
         is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
 
         room_results = yield self.store.get_rooms_in_group(
@@ -158,9 +162,13 @@ class GroupsServerHandler(object):
             "total_room_count_estimate": len(room_results),
         })
 
-    @check_group_is_ours(and_exists=True)
     @defer.inlineCallbacks
     def add_room(self, group_id, requester_user_id, room_id, content):
+        """Add room to group
+        """
+
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
         is_admin = yield self.store.is_user_admin_in_group(group_id, requester_user_id)
         if not is_admin:
             raise SynapseError(403, "User is not admin in group")
@@ -182,9 +190,13 @@ class GroupsServerHandler(object):
 
         defer.returnValue({})
 
-    @check_group_is_ours(and_exists=True)
     @defer.inlineCallbacks
     def invite_to_group(self, group_id, user_id, requester_user_id, content):
+        """Invite user to group
+        """
+
+        group = yield self.check_group_is_ours(group_id, and_exists=True)
+
         is_admin = yield self.store.is_user_admin_in_group(
             group_id, requester_user_id
         )
@@ -194,7 +206,6 @@ class GroupsServerHandler(object):
         # TODO: Check if user knocked
         # TODO: Check if user is already invited
 
-        group = yield self.store.get_group(group_id)
         content = {
             "profile": {
                 "name": group["name"],
@@ -248,9 +259,16 @@ class GroupsServerHandler(object):
         else:
             raise SynapseError(502, "Unknown state returned by HS")
 
-    @check_group_is_ours(and_exists=True)
     @defer.inlineCallbacks
     def accept_invite(self, group_id, user_id, content):
+        """User tries to accept an invite to the group.
+
+        This is different from them asking to join, and so should error if no
+        invite exists (and they're not a member of the group)
+        """
+
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
         if not self.store.is_user_invited_to_local_group(group_id, user_id):
             raise SynapseError(403, "User not invited to group")
 
@@ -291,19 +309,33 @@ class GroupsServerHandler(object):
             "attestation": local_attestation,
         })
 
-    @check_group_is_ours(and_exists=True)
     @defer.inlineCallbacks
     def knock(self, group_id, user_id, content):
-        pass
+        """A user requests becoming a member of the group
+        """
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
+        raise NotImplementedError()
 
-    @check_group_is_ours(and_exists=True)
     @defer.inlineCallbacks
     def accept_knock(self, group_id, user_id, content):
-        pass
+        """Accept a user's knock to the room.
+
+        Errors if the user hasn't knocked, rather than inviting them.
+        """
+
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
+        raise NotImplementedError()
 
-    @check_group_is_ours(and_exists=True)
     @defer.inlineCallbacks
     def remove_user_from_group(self, group_id, user_id, requester_user_id, content):
+        """Remove a user from the group; either a user is leaving or an admin
+        kicked them.
+        """
+
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
         is_kick = False
         if requester_user_id != user_id:
             is_admin = yield self.store.is_user_admin_in_group(
@@ -314,7 +346,7 @@ class GroupsServerHandler(object):
 
             is_kick = True
 
-        yield self.store.remove_user_to_group(
+        yield self.store.remove_user_from_group(
             group_id, user_id,
         )
 
@@ -328,11 +360,11 @@ class GroupsServerHandler(object):
 
         defer.returnValue({})
 
-    @check_group_is_ours()
     @defer.inlineCallbacks
     def create_group(self, group_id, user_id, content):
+        group = yield self.check_group_is_ours(group_id)
+
         logger.info("Attempting to create group with ID: %r", group_id)
-        group = yield self.store.get_group(group_id)
         if group:
             raise SynapseError(400, "Group already exists")
 
diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py
index 01d9a982c..327d77086 100644
--- a/synapse/storage/group_server.py
+++ b/synapse/storage/group_server.py
@@ -89,6 +89,8 @@ class GroupServerStore(SQLBaseStore):
         )
 
     def add_group_invite(self, group_id, user_id):
+        """Record that the group server has invited a user
+        """
         return self._simple_insert(
             table="group_invites",
             values={
@@ -99,6 +101,8 @@ class GroupServerStore(SQLBaseStore):
         )
 
     def is_user_invited_to_local_group(self, group_id, user_id):
+        """Has the group server invited a user?
+        """
         return self._simple_select_one_onecol(
             table="group_invites",
             keyvalues={
@@ -112,6 +116,19 @@ class GroupServerStore(SQLBaseStore):
 
     def add_user_to_group(self, group_id, user_id, is_admin=False, is_public=True,
                           local_attestation=None, remote_attestation=None):
+        """Add a user to the group server.
+
+        Args:
+            group_id (str)
+            user_id (str)
+            is_admin (bool)
+            is_public (bool)
+            local_attestation (dict): The attestation the GS created to give
+                to the remote server. Optional if the user and group are on the
+                same server
+            remote_attestation (dict): The attestation given to GS by remote
+                server. Optional if the user and group are on the same server
+        """
         def _add_user_to_group_txn(txn):
             self._simple_insert_txn(
                 txn,
@@ -159,8 +176,8 @@ class GroupServerStore(SQLBaseStore):
             "add_user_to_group", _add_user_to_group_txn
         )
 
-    def remove_user_to_group(self, group_id, user_id):
-        def _remove_user_to_group_txn(txn):
+    def remove_user_from_group(self, group_id, user_id):
+        def _remove_user_from_group_txn(txn):
             self._simple_delete_txn(
                 txn,
                 table="group_users",
@@ -193,7 +210,7 @@ class GroupServerStore(SQLBaseStore):
                     "user_id": user_id,
                 },
             )
-        return self.runInteraction("remove_user_to_group", _remove_user_to_group_txn)
+        return self.runInteraction("remove_user_from_group", _remove_user_from_group_txn)
 
     def add_room_to_group(self, group_id, room_id, is_public):
         return self._simple_insert(
@@ -222,6 +239,8 @@ class GroupServerStore(SQLBaseStore):
         )
 
     def get_attestations_need_renewals(self, valid_until_ms):
+        """Get all attestations that need to be renewed until given time
+        """
        def _get_attestations_need_renewals_txn(txn):
             sql = """
                 SELECT group_id, user_id FROM group_attestations_renewals
@@ -234,6 +253,8 @@ class GroupServerStore(SQLBaseStore):
         )
 
     def update_attestation_renewal(self, group_id, user_id, attestation):
+        """Update an attestation that we have renewed
+        """
         return self._simple_update_one(
             table="group_attestations_renewals",
             keyvalues={
@@ -247,6 +268,8 @@ class GroupServerStore(SQLBaseStore):
         )
 
     def update_remote_attestion(self, group_id, user_id, attestation):
+        """Update an attestation that a remote has renewed
+        """
         return self._simple_update_one(
             table="group_attestations_remote",
             keyvalues={
@@ -262,6 +285,9 @@ class GroupServerStore(SQLBaseStore):
 
     @defer.inlineCallbacks
     def get_remote_attestation(self, group_id, user_id):
+        """Get the attestation that proves the remote agrees that the user is
+        in the group.
+        """
         row = yield self._simple_select_one(
             table="group_attestations_remote",
             keyvalues={
diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql
index 6f1a94199..5dc7a497e 100644
--- a/synapse/storage/schema/delta/43/group_server.sql
+++ b/synapse/storage/schema/delta/43/group_server.sql
@@ -24,6 +24,7 @@ CREATE TABLE groups (
 
 CREATE UNIQUE INDEX groups_idx ON groups(group_id);
 
+-- list of users the group server thinks are joined
 CREATE TABLE group_users (
     group_id TEXT NOT NULL,
     user_id TEXT NOT NULL,
@@ -35,7 +36,7 @@ CREATE TABLE group_users (
 CREATE INDEX groups_users_g_idx ON group_users(group_id, user_id);
 CREATE INDEX groups_users_u_idx ON group_users(user_id);
 
-
+-- list of users the group server thinks are invited
 CREATE TABLE group_invites (
     group_id TEXT NOT NULL,
     user_id TEXT NOT NULL
@@ -55,6 +56,7 @@ CREATE INDEX groups_rooms_g_idx ON group_rooms(group_id, room_id);
 CREATE INDEX groups_rooms_r_idx ON group_rooms(room_id);
 
 
+-- List of attestations we've given out and need to renew
 CREATE TABLE group_attestations_renewals (
     group_id TEXT NOT NULL,
     user_id TEXT NOT NULL,
@@ -65,6 +67,8 @@ CREATE INDEX group_attestations_renewals_g_idx ON group_attestations_renewals(gr
 CREATE INDEX group_attestations_renewals_u_idx ON group_attestations_renewals(user_id);
 CREATE INDEX group_attestations_renewals_v_idx ON group_attestations_renewals(valid_until_ms);
 
+
+-- List of attestations we've received from remotes and are interested in.
 CREATE TABLE group_attestations_remote (
     group_id TEXT NOT NULL,
     user_id TEXT NOT NULL,
From 429925a5e9d24bef0533d936d2bca8a149b2ad1c Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 11 Jul 2017 11:44:08 +0100
Subject: [PATCH 003/223] Lift out visibility parsing

---
 synapse/groups/groups_server.py | 41 +++++++++++++++++----------------
 1 file changed, 21 insertions(+), 20 deletions(-)

diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index 44083100f..1ac946abc 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -175,16 +175,7 @@ class GroupsServerHandler(object):
 
         # TODO: Check if room has already been added
 
-        visibility = content.get("visibility")
-        if visibility:
-            vis_type = visibility["type"]
-            if vis_type not in ("public", "private"):
-                raise SynapseError(
-                    400, "Synapse only supports 'public'/'private' visibility"
-                )
-            is_public = vis_type == "public"
-        else:
-            is_public = True
+        is_public = _parse_visibility_from_contents(content)
 
         yield self.store.add_room_to_group(group_id, room_id, is_public=is_public)
 
@@ -285,16 +276,7 @@ class GroupsServerHandler(object):
 
         local_attestation = self.attestations.create_attestation(group_id, user_id)
 
-        visibility = content.get("visibility")
-        if visibility:
-            vis_type = visibility["type"]
-            if vis_type not in ("public", "private"):
-                raise SynapseError(
-                    400, "Synapse only supports 'public'/'private' visibility"
-                )
-            is_public = vis_type == "public"
-        else:
-            is_public = True
+        is_public = _parse_visibility_from_contents(content)
 
         yield self.store.add_user_to_group(
             group_id, user_id,
@@ -412,3 +394,22 @@ class GroupsServerHandler(object):
         defer.returnValue({
             "group_id": group_id,
         })
+
+
+def _parse_visibility_from_contents(content):
+    """Given a content for a request parse out whether the entity should be
+    public or not
+    """
+
+    visibility = content.get("visibility")
+    if visibility:
+        vis_type = visibility["type"]
+        if vis_type not in ("public", "private"):
+            raise SynapseError(
+                400, "Synapse only supports 'public'/'private' visibility"
+            )
+        is_public = vis_type == "public"
+    else:
+        is_public = True
+
+    return is_public

From 8ba89f1050c523506b99be376b9b052fd68e5bd5 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 11 Jul 2017 11:45:32 +0100
Subject: [PATCH 004/223] Remove u/ requirement

---
 synapse/groups/groups_server.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index 1ac946abc..61fe0d49d 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -350,9 +350,10 @@ class GroupsServerHandler(object):
         if group:
             raise SynapseError(400, "Group already exists")
 
+        # TODO: Add config to enforce that only server admins can create rooms
         is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id))
-        if not is_admin and not group_id.startswith("+u/"):
-            raise SynapseError(403, "Group ID must start with '+u/' or be a server admin")
+        if not is_admin:
+            raise SynapseError(403, "Only server admin can create group on this server")
 
         profile = content.get("profile", {})
         name = profile.get("name")
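
The lifted-out helper from PATCH 003 behaves as follows (a quick behavioural
check, not part of the series itself):

    _parse_visibility_from_contents({})                                   # True (default public)
    _parse_visibility_from_contents({"visibility": {"type": "public"}})   # True
    _parse_visibility_from_contents({"visibility": {"type": "private"}})  # False
    _parse_visibility_from_contents({"visibility": {"type": "secret"}})   # raises SynapseError(400)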
From 6322fbbd41b3a44bc67982fd56999c317df08c08 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 11 Jul 2017 11:52:03 +0100
Subject: [PATCH 005/223] Comment

---
 synapse/federation/transport/client.py | 12 ++++++++++++
 synapse/federation/transport/server.py | 14 ++++++++++++++
 2 files changed, 26 insertions(+)

diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 17b93a28a..d0f8da751 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -474,6 +474,10 @@ class TransportLayerClient(object):
 
     @log_function
     def invite_to_group_notification(self, destination, group_id, user_id, content):
+        """Sent by group server to inform a user's server that they have been
+        invited.
+        """
+
         path = PREFIX + "/groups/local/%s/users/%s/invite" % (group_id, user_id)
 
         return self.client.post_json(
@@ -486,6 +490,10 @@ class TransportLayerClient(object):
     @log_function
     def remove_user_from_group_notification(self, destination, group_id, user_id,
                                             content):
+        """Sent by group server to inform a user's server that they have been
+        kicked from the group.
+        """
+
         path = PREFIX + "/groups/local/%s/users/%s/remove" % (group_id, user_id)
 
         return self.client.post_json(
@@ -497,6 +505,10 @@ class TransportLayerClient(object):
 
     @log_function
     def renew_group_attestation(self, destination, group_id, user_id, content):
+        """Sent by either a group server or a user's server to periodically update
+        the attestations
+        """
+
         path = PREFIX + "/groups/%s/renew_attestation/%s" % (group_id, user_id)
 
         return self.client.post_json(
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index e6b0f432f..2286f6f8f 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -610,6 +610,8 @@ class FederationVersionServlet(BaseFederationServlet):
 
 
 class FederationGroupsProfileServlet(BaseFederationServlet):
+    """Get the basic profile of a group on behalf of a user
+    """
     PATH = "/groups/(?P<group_id>[^/]*)/profile$"
 
     @defer.inlineCallbacks
@@ -626,6 +628,8 @@ class FederationGroupsProfileServlet(BaseFederationServlet):
 
 
 class FederationGroupsRoomsServlet(BaseFederationServlet):
+    """Get the rooms in a group on behalf of a user
+    """
     PATH = "/groups/(?P<group_id>[^/]*)/rooms$"
 
     @defer.inlineCallbacks
@@ -642,6 +646,8 @@ class FederationGroupsRoomsServlet(BaseFederationServlet):
 
 
 class FederationGroupsUsersServlet(BaseFederationServlet):
+    """Get the users in a group on behalf of a user
+    """
     PATH = "/groups/(?P<group_id>[^/]*)/users$"
 
     @defer.inlineCallbacks
@@ -658,6 +664,8 @@ class FederationGroupsUsersServlet(BaseFederationServlet):
 
 
 class FederationGroupsInviteServlet(BaseFederationServlet):
+    """Ask a group server to invite someone to the group
+    """
     PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/invite$"
 
     @defer.inlineCallbacks
@@ -674,6 +682,8 @@ class FederationGroupsInviteServlet(BaseFederationServlet):
 
 
 class FederationGroupsAcceptInviteServlet(BaseFederationServlet):
+    """Accept an invitation from the group server
+    """
    PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/accept_invite$"
 
     @defer.inlineCallbacks
@@ -689,6 +699,8 @@ class FederationGroupsAcceptInviteServlet(BaseFederationServlet):
 
 
 class FederationGroupsRemoveUserServlet(BaseFederationServlet):
+    """Leave or kick a user from the group
+    """
     PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/remove$"
 
     @defer.inlineCallbacks
@@ -705,6 +717,8 @@ class FederationGroupsRemoveUserServlet(BaseFederationServlet):
 
 
 class FederationGroupsRenewAttestaionServlet(BaseFederationServlet):
+    """A group or user's server renews their attestation
+    """
     PATH = "/groups/(?P<group_id>[^/]*)/renew_attestation/(?P<user_id>[^/]*)$"
 
     @defer.inlineCallbacks
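
These docstrings pin down the calling conventions: every endpoint is a POST,
and for the group-server endpoints the acting user rides in the JSON body
rather than in the path. Assuming the usual federation prefix (an assumption;
PREFIX is not shown in these hunks), a profile lookup looks roughly like:

    # Hypothetical wire format, prefix assumed to be /_matrix/federation/v1:
    #
    #   POST /_matrix/federation/v1/groups/%2Bsample%3Agroup.example.com/profile
    #   {"requester_user_id": "@bob:user.example.org"}
    #
    # The servlet rejects the request with a 403 unless requester_user_id's
    # domain matches the origin that signed the federation request.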
synapse/groups/attestations.py | 4 ++-- synapse/groups/groups_server.py | 6 +++++- synapse/storage/schema/delta/43/group_server.sql | 4 ++-- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 6937fa44c..0741b55c1 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -38,7 +38,7 @@ class GroupAttestationSigning(object): @defer.inlineCallbacks def verify_attestation(self, attestation, group_id, user_id, server_name=None): - """Verifies that the given attestation matches the given paramaters. + """Verifies that the given attestation matches the given parameters. An optional server_name can be supplied to explicitly set which server's signature is expected. Otherwise assumes that either the group_id or user_id @@ -51,7 +51,7 @@ class GroupAttestationSigning(object): elif get_domain_from_id(user_id) == self.server_name: server_name = get_domain_from_id(group_id) else: - raise Exception("Expected eitehr group_id or user_id to be local") + raise Exception("Expected either group_id or user_id to be local") if user_id != attestation["user_id"]: raise SynapseError(400, "Attestation has incorrect user_id") diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 61fe0d49d..414c95e3f 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -80,7 +80,9 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def get_users_in_group(self, group_id, requester_user_id): - """Get the users in group as seen by requester_user_id + """Get the users in group as seen by requester_user_id. + + The ordering is arbitrary at the moment """ yield self.check_group_is_ours(group_id, and_exists=True) @@ -126,6 +128,8 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def get_rooms_in_group(self, group_id, requester_user_id): """Get the rooms in group as seen by requester_user_id + + This returns rooms in order of decreasing number of joined users """ yield self.check_group_is_ours(group_id, and_exists=True) diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index 5dc7a497e..bfe8c2ca4 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -28,7 +28,7 @@ CREATE UNIQUE INDEX groups_idx ON groups(group_id); CREATE TABLE group_users ( group_id TEXT NOT NULL, user_id TEXT NOT NULL, - is_admin BOOLEAN NOT NULL, + is_admin BOOLEAN NOT NULL, -- whether the users membership can be seen by everyone is_public BOOLEAN NOT NULL ); @@ -49,7 +49,7 @@ CREATE INDEX groups_invites_u_idx ON group_invites(user_id); CREATE TABLE group_rooms ( group_id TEXT NOT NULL, room_id TEXT NOT NULL, - is_public BOOLEAN NOT NULL + is_public BOOLEAN NOT NULL -- whether the room can be seen by everyone ); CREATE INDEX groups_rooms_g_idx ON group_rooms(group_id, room_id); From e52c391cd452077fc219fad0db8b9e5499251e5b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 11 Jul 2017 14:25:46 +0100 Subject: [PATCH 007/223] Rename column to attestation_json --- synapse/storage/group_server.py | 8 ++++---- synapse/storage/schema/delta/43/group_server.sql | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 327d77086..105ab9920 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -168,7 +168,7 @@ class GroupServerStore(SQLBaseStore): 
"group_id": group_id, "user_id": user_id, "valid_until_ms": remote_attestation["valid_until_ms"], - "attestation": json.dumps(remote_attestation), + "attestation_json": json.dumps(remote_attestation), }, ) @@ -278,7 +278,7 @@ class GroupServerStore(SQLBaseStore): }, updatevalues={ "valid_until_ms": attestation["valid_until_ms"], - "attestation": json.dumps(attestation) + "attestation_json": json.dumps(attestation) }, desc="update_remote_attestion", ) @@ -294,13 +294,13 @@ class GroupServerStore(SQLBaseStore): "group_id": group_id, "user_id": user_id, }, - retcols=("valid_until_ms", "attestation"), + retcols=("valid_until_ms", "attestation_json"), desc="get_remote_attestation", allow_none=True, ) now = int(self._clock.time_msec()) if row and now < row["valid_until_ms"]: - defer.returnValue(json.loads(row["attestation"])) + defer.returnValue(json.loads(row["attestation_json"])) defer.returnValue(None) diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index bfe8c2ca4..b55b0a8de 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -73,7 +73,7 @@ CREATE TABLE group_attestations_remote ( group_id TEXT NOT NULL, user_id TEXT NOT NULL, valid_until_ms BIGINT NOT NULL, - attestation TEXT NOT NULL + attestation_json TEXT NOT NULL ); CREATE INDEX group_attestations_remote_g_idx ON group_attestations_remote(group_id, user_id); From 26752df503880b565a49350e0dd8881f9b2285e9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 11 Jul 2017 14:29:03 +0100 Subject: [PATCH 008/223] Typo --- synapse/groups/attestations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 0741b55c1..9ac09366d 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -86,7 +86,7 @@ class GroupAttestionRenewer(object): self.store = hs.get_datastore() self.assestations = hs.get_groups_attestation_signing() self.transport_client = hs.get_federation_transport_client() - self.is_mine_id = hs.is_mind_id + self.is_mine_id = hs.is_mine_id self._renew_attestations_loop = self.clock.looping_call( self._renew_attestations, 30 * 60 * 1000, From bbb739d24a448c500dbc56c9cedf262d42c7f2f4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 11 Jul 2017 14:31:36 +0100 Subject: [PATCH 009/223] Comment --- synapse/groups/attestations.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 9ac09366d..5ef7a12cb 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -22,8 +22,10 @@ from synapse.util.logcontext import preserve_fn from signedjson.sign import sign_json +# Default validity duration for new attestations we create DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000 -MIN_ATTESTATION_LENGTH_MS = 1 * 60 * 60 * 1000 + +# Start trying to update our attestations when they come this close to expiring UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000 @@ -58,11 +60,12 @@ class GroupAttestationSigning(object): if group_id != attestation["group_id"]: raise SynapseError(400, "Attestation has incorrect group_id") - - # TODO: valid_until_ms = attestation["valid_until_ms"] - if valid_until_ms - self.clock.time_msec() < MIN_ATTESTATION_LENGTH_MS: - raise SynapseError(400, "Attestation not valid for long enough") + + # TODO: We also want to check that *new* attestations that people 
give
+        # us to store are valid for at least a little while.
+        if valid_until_ms < self.clock.time_msec():
+            raise SynapseError(400, "Attestation expired")
 
         yield self.keyring.verify_json_for_server(server_name, attestation)

From fe4e885f54353f82c5c56d1d2a31593124d39e8a Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 11 Jul 2017 14:35:07 +0100
Subject: [PATCH 010/223] Add federation API for adding room to group

---
 synapse/federation/transport/server.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 2286f6f8f..5d6ff7923 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -645,6 +645,24 @@ class FederationGroupsRoomsServlet(BaseFederationServlet):
         defer.returnValue((200, new_content))
 
 
+class FederationGroupsAddRoomsServlet(BaseFederationServlet):
+    """Add room to group
+    """
+    PATH = "/groups/(?P[^/]*)/room/(?)$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id, room_id):
+        requester_user_id = content["requester_user_id"]
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        new_content = yield self.handler.add_room(
+            group_id, requester_user_id, room_id, content
+        )
+
+        defer.returnValue((200, new_content))
+
+
 class FederationGroupsUsersServlet(BaseFederationServlet):
     """Get the users in a group on behalf of a user
     """

From 410b4e14a176293ee1f41f24a641db031c6192a4 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 11 Jul 2017 15:44:18 +0100
Subject: [PATCH 011/223] Move comment

---
 synapse/storage/schema/delta/43/group_server.sql | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql
index b55b0a8de..cf0659c51 100644
--- a/synapse/storage/schema/delta/43/group_server.sql
+++ b/synapse/storage/schema/delta/43/group_server.sql
@@ -28,8 +28,8 @@ CREATE UNIQUE INDEX groups_idx ON groups(group_id);
 CREATE TABLE group_users (
     group_id TEXT NOT NULL,
     user_id TEXT NOT NULL,
-    is_admin BOOLEAN NOT NULL, -- whether the users membership can be seen by everyone
-    is_public BOOLEAN NOT NULL
+    is_admin BOOLEAN NOT NULL,
+    is_public BOOLEAN NOT NULL -- whether the users membership can be seen by everyone
 );

From 6d586dc05c35f1c0159b1eff3d83d7e3973b425d Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 12 Jul 2017 09:58:37 +0100
Subject: [PATCH 012/223] Comment

---
 synapse/storage/schema/delta/43/group_server.sql | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql
index cf0659c51..c223ee275 100644
--- a/synapse/storage/schema/delta/43/group_server.sql
+++ b/synapse/storage/schema/delta/43/group_server.sql
@@ -15,7 +15,7 @@
 CREATE TABLE groups (
     group_id TEXT NOT NULL,
-    name TEXT,
+    name TEXT, -- the display name of the group
     avatar_url TEXT,
     short_description TEXT,
     long_description TEXT

From a62406aaa5c4ef3780e42c9de443a2cc1e82cd9a Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 10 Jul 2017 15:44:40 +0100
Subject: [PATCH 013/223] Add group summary APIs

---
 synapse/federation/transport/server.py          |  17 +
 synapse/groups/groups_server.py                 | 256 ++++-
 synapse/storage/group_server.py                 | 643 ++++++++++++++++++
 .../storage/schema/delta/43/group_server.sql    |  56 ++
 4 files changed, 970 insertions(+), 2 deletions(-)

diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 5d6ff7923..bbb66190e 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -627,6 +627,22 @@ class FederationGroupsProfileServlet(BaseFederationServlet):
         defer.returnValue((200, new_content))
 
 
+class FederationGroupsSummaryServlet(BaseFederationServlet):
+    PATH = "/groups/(?P[^/]*)/summary$"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id):
+        requester_user_id = content["requester_user_id"]
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        new_content = yield self.handler.get_group_summary(
+            group_id, requester_user_id
+        )
+
+        defer.returnValue((200, new_content))
+
+
 class FederationGroupsRoomsServlet(BaseFederationServlet):
     """Get the rooms in a group on behalf of a user
     """
@@ -784,6 +800,7 @@ ROOM_LIST_CLASSES = (
 
 GROUP_SERVER_SERVLET_CLASSES = (
     FederationGroupsProfileServlet,
+    FederationGroupsSummaryServlet,
    FederationGroupsRoomsServlet,
     FederationGroupsUsersServlet,
     FederationGroupsInviteServlet,

diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index 414c95e3f..29a911e18 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -64,6 +64,255 @@ class GroupsServerHandler(object):
 
         defer.returnValue(group)
 
+    @defer.inlineCallbacks
+    def get_group_summary(self, group_id, requester_user_id):
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
+        is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
+
+        profile = yield self.get_group_profile(group_id, requester_user_id)
+
+        users, roles = yield self.store.get_users_for_summary_by_role(
+            group_id, include_private=is_user_in_group,
+        )
+
+        # TODO: Add profiles to users
+        # TODO: Add attestations to users
+
+        rooms, categories = yield self.store.get_rooms_for_summary_by_category(
+            group_id, include_private=is_user_in_group,
+        )
+
+        for room_entry in rooms:
+            room_id = room_entry["room_id"]
+            joined_users = yield self.store.get_users_in_room(room_id)
+            entry = yield self.room_list_handler.generate_room_entry(
+                room_id, len(joined_users),
+                with_alias=False, allow_private=True,
+            )
+            entry.pop("room_id", None)
+
+            room_entry["profile"] = entry
+
+        rooms.sort(key=lambda e: e.get("order", 0))
+
+        for entry in users:
+            user_id = entry["user_id"]
+
+            if not self.is_mine_id(requester_user_id):
+                attestation = yield self.store.get_remote_attestation(group_id, user_id)
+                if not attestation:
+                    continue
+
+                entry["attestation"] = attestation
+            else:
+                entry["attestation"] = self.attestations.create_attestation(
+                    group_id, user_id,
+                )
+
+        users.sort(key=lambda e: e.get("order", 0))
+
+        defer.returnValue({
+            "profile": profile,
+            "users_section": {
+                "users": users,
+                "roles": roles,
+                "total_user_count_estimate": 0,  # TODO
+            },
+            "rooms_section": {
+                "rooms": rooms,
+                "categories": categories,
+                "total_room_count_estimate": 0,  # TODO
+            },
+        })
+
+    @defer.inlineCallbacks
+    def update_group_summary_room(self, group_id, user_id, room_id, category_id, content):
+        yield self.check_group_is_ours(group_id, and_exists=True)
+
+        is_admin = yield self.store.is_user_admin_in_group(group_id, user_id)
+        if not is_admin:
+            raise SynapseError(403, "User is not admin in group")
+
+        order = content.get("order", None)
+
+        is_public = _parse_visibility_from_contents(content)
+
+        yield
self.store.add_room_to_summary( + group_id=group_id, + room_id=room_id, + category_id=category_id, + order=order, + is_public=is_public, + ) + + defer.returnValue({}) + + @defer.inlineCallbacks + def delete_group_summary_room(self, group_id, user_id, room_id, category_id): + yield self.check_group_is_ours(group_id, and_exists=True) + + is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) + if not is_admin: + raise SynapseError(403, "User is not admin in group") + + yield self.store.remove_room_from_summary( + group_id=group_id, + room_id=room_id, + category_id=category_id, + ) + + defer.returnValue({}) + + @defer.inlineCallbacks + def get_group_categories(self, group_id, user_id): + yield self.check_group_is_ours(group_id, and_exists=True) + + categories = yield self.store.get_group_categories( + group_id=group_id, + ) + defer.returnValue({"categories": categories}) + + @defer.inlineCallbacks + def get_group_category(self, group_id, user_id, category_id): + yield self.check_group_is_ours(group_id, and_exists=True) + + res = yield self.store.get_group_category( + group_id=group_id, + category_id=category_id, + ) + + defer.returnValue(res) + + @defer.inlineCallbacks + def update_group_category(self, group_id, user_id, category_id, content): + yield self.check_group_is_ours(group_id, and_exists=True) + + is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) + if not is_admin: + raise SynapseError(403, "User is not admin in group") + + is_public = _parse_visibility_from_contents(content) + profile = content.get("profile") + + yield self.store.upsert_group_category( + group_id=group_id, + category_id=category_id, + is_public=is_public, + profile=profile, + ) + + defer.returnValue({}) + + @defer.inlineCallbacks + def delete_group_category(self, group_id, user_id, category_id): + yield self.check_group_is_ours(group_id, and_exists=True) + + is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) + if not is_admin: + raise SynapseError(403, "User is not admin in group") + + yield self.store.remove_group_category( + group_id=group_id, + category_id=category_id, + ) + + defer.returnValue({}) + + @defer.inlineCallbacks + def get_group_roles(self, group_id, user_id): + yield self.check_group_is_ours(group_id, and_exists=True) + + roles = yield self.store.get_group_roles( + group_id=group_id, + ) + defer.returnValue({"roles": roles}) + + @defer.inlineCallbacks + def get_group_role(self, group_id, user_id, role_id): + yield self.check_group_is_ours(group_id, and_exists=True) + + res = yield self.store.get_group_role( + group_id=group_id, + role_id=role_id, + ) + defer.returnValue(res) + + @defer.inlineCallbacks + def update_group_role(self, group_id, user_id, role_id, content): + yield self.check_group_is_ours(group_id, and_exists=True) + + is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) + if not is_admin: + raise SynapseError(403, "User is not admin in group") + + is_public = _parse_visibility_from_contents(content) + + profile = content.get("profile") + + yield self.store.upsert_group_role( + group_id=group_id, + role_id=role_id, + is_public=is_public, + profile=profile, + ) + + defer.returnValue({}) + + @defer.inlineCallbacks + def delete_group_role(self, group_id, user_id, role_id): + yield self.check_group_is_ours(group_id, and_exists=True) + + is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) + if not is_admin: + raise SynapseError(403, "User is not admin in group") + + yield self.store.remove_group_role( + 
group_id=group_id, + role_id=role_id, + ) + + defer.returnValue({}) + + @defer.inlineCallbacks + def update_group_summary_user(self, group_id, requester_user_id, user_id, role_id, + content): + yield self.check_group_is_ours(group_id, and_exists=True) + + is_admin = yield self.store.is_user_admin_in_group(group_id, requester_user_id) + if not is_admin: + raise SynapseError(403, "User is not admin in group") + + order = content.get("order", None) + + is_public = _parse_visibility_from_contents(content) + + yield self.store.add_user_to_summary( + group_id=group_id, + user_id=user_id, + role_id=role_id, + order=order, + is_public=is_public, + ) + + defer.returnValue({}) + + @defer.inlineCallbacks + def delete_group_summary_user(self, group_id, requester_user_id, user_id, role_id): + yield self.check_group_is_ours(group_id, and_exists=True) + + is_admin = yield self.store.is_user_admin_in_group(group_id, requester_user_id) + if not is_admin: + raise SynapseError(403, "User is not admin in group") + + yield self.store.remove_user_from_summary( + group_id=group_id, + user_id=user_id, + role_id=role_id, + ) + + defer.returnValue({}) + @defer.inlineCallbacks def get_group_profile(self, group_id, requester_user_id): """Get the group profile as seen by requester_user_id @@ -210,7 +459,9 @@ class GroupsServerHandler(object): } if self.hs.is_mine_id(user_id): - raise NotImplementedError() + groups_local = self.hs.get_groups_local_handler() + res = yield groups_local.on_invite(group_id, user_id, content) + local_attestation = None else: local_attestation = self.attestations.create_attestation(group_id, user_id) content.update({ @@ -338,7 +589,8 @@ class GroupsServerHandler(object): if is_kick: if self.hs.is_mine_id(user_id): - raise NotImplementedError() + groups_local = self.hs.get_groups_local_handler() + yield groups_local.user_removed_from_group(group_id, user_id, {}) else: yield self.transport_client.remove_user_from_group_notification( get_domain_from_id(user_id), group_id, user_id, {} diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 105ab9920..f4818ff17 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -15,11 +15,16 @@ from twisted.internet import defer +from synapse.api.errors import SynapseError + from ._base import SQLBaseStore import ujson as json +_DEFAULT_CATEGORY_ID = "default" + + class GroupServerStore(SQLBaseStore): def get_group(self, group_id): return self._simple_select_one( @@ -64,6 +69,492 @@ class GroupServerStore(SQLBaseStore): desc="get_rooms_in_group", ) + def get_rooms_for_summary_by_category(self, group_id, include_private=False): + def _get_rooms_for_summary_txn(txn): + keyvalues = { + "group_id": group_id, + } + if not include_private: + keyvalues["is_public"] = True + + sql = """ + SELECT room_id, is_public, category_id, room_order + FROM group_summary_rooms + WHERE group_id = ? + """ + + if not include_private: + sql += " AND is_public = ?" + txn.execute(sql, (group_id, True)) + else: + txn.execute(sql, (group_id,)) + + rooms = [ + { + "room_id": row[0], + "is_public": row[1], + "category_id": row[2] if row[2] != _DEFAULT_CATEGORY_ID else None, + "order": row[3], + } + for row in txn + ] + + sql = """ + SELECT category_id, is_public, profile, cat_order + FROM group_summary_room_categories + INNER JOIN group_room_categories USING (group_id, category_id) + WHERE group_id = ? + """ + + if not include_private: + sql += " AND is_public = ?" 
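# Note (editorial aside, not part of the patch): _get_rooms_for_summary_txn
# reads two result sets in one transaction: first the summary rooms
# themselves, then the categories they reference, joining
# group_summary_room_categories to group_room_categories to pick up each
# category's profile JSON. The optional is_public filter is appended to both
# queries in the same way.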
+ txn.execute(sql, (group_id, True)) + else: + txn.execute(sql, (group_id,)) + + categories = { + row[0]: { + "is_public": row[1], + "profile": json.loads(row[2]), + "order": row[3], + } + for row in txn + } + + return rooms, categories + return self.runInteraction( + "get_rooms_for_summary", _get_rooms_for_summary_txn + ) + + def add_room_to_summary(self, group_id, room_id, category_id, order, is_public): + return self.runInteraction( + "add_room_to_summary", self._add_room_to_summary_txn, + group_id, room_id, category_id, order, is_public, + ) + + def _add_room_to_summary_txn(self, txn, group_id, room_id, category_id, order, + is_public): + if category_id is None: + category_id = _DEFAULT_CATEGORY_ID + else: + cat_exists = self._simple_select_one_onecol_txn( + txn, + table="group_room_categories", + keyvalues={ + "group_id": group_id, + "category_id": category_id, + }, + retcol="group_id", + allow_none=True, + ) + if not cat_exists: + raise SynapseError(400, "Category doesn't exist") + + # TODO: Check room is part of group already + cat_exists = self._simple_select_one_onecol_txn( + txn, + table="group_summary_room_categories", + keyvalues={ + "group_id": group_id, + "category_id": category_id, + }, + retcol="group_id", + allow_none=True, + ) + if not cat_exists: + txn.execute(""" + INSERT INTO group_summary_room_categories + (group_id, category_id, cat_order) + SELECT ?, ?, COALESCE(MAX(cat_order), 1) + FROM group_summary_room_categories + WHERE group_id = ? AND category_id = ? + """, (group_id, category_id, group_id, category_id)) + + existing = self._simple_select_one_txn( + txn, + table="group_summary_rooms", + keyvalues={ + "group_id": group_id, + "room_id": room_id, + "category_id": category_id, + }, + retcols=("room_order", "is_public",), + allow_none=True, + ) + + if order is not None: + sql = """ + UPDATE group_summary_rooms SET room_order = room_order + 1 + WHERE group_id = ? AND category_id = ? AND room_order >= ? + """ + txn.execute(sql, (group_id, category_id, order,)) + elif not existing: + sql = """ + SELECT COALESCE(MAX(room_order), 0) + 1 FROM group_summary_rooms + WHERE group_id = ? AND category_id = ? 
+ """ + txn.execute(sql, (group_id, category_id,)) + order, = txn.fetchone() + + if existing: + to_update = {} + if order is not None: + to_update["room_order"] = order + if is_public is not None: + to_update["is_public"] = is_public + self._simple_update_txn( + txn, + table="group_summary_rooms", + keyvalues={ + "group_id": group_id, + "category_id": category_id, + "room_id": room_id, + }, + values=to_update, + ) + else: + if is_public is None: + is_public = True + + self._simple_insert_txn( + txn, + table="group_summary_rooms", + values={ + "group_id": group_id, + "category_id": category_id, + "room_id": room_id, + "room_order": order, + "is_public": is_public, + }, + ) + + def remove_room_from_summary(self, group_id, room_id, category_id): + if category_id is None: + category_id = _DEFAULT_CATEGORY_ID + + return self._simple_delete( + table="group_summary_rooms", + keyvalues={ + "group_id": group_id, + "category_id": category_id, + "room_id": room_id, + }, + desc="remove_room_from_summary", + ) + + @defer.inlineCallbacks + def get_group_categories(self, group_id): + rows = yield self._simple_select_list( + table="group_room_categories", + keyvalues={ + "group_id": group_id, + }, + retcols=("category_id", "is_public", "profile"), + desc="get_group_categories", + ) + + defer.returnValue({ + row["category_id"]: { + "is_public": row["is_public"], + "profile": json.loads(row["profile"]), + } + for row in rows + }) + + @defer.inlineCallbacks + def get_group_category(self, group_id, category_id): + category = yield self._simple_select_one( + table="group_room_categories", + keyvalues={ + "group_id": group_id, + "category_id": category_id, + }, + retcols=("is_public", "profile"), + desc="get_group_category", + ) + + category["profile"] = json.loads(category["profile"]) + + defer.returnValue(category) + + def upsert_group_category(self, group_id, category_id, profile, is_public): + insertion_values = {} + update_values = {"category_id": category_id} # This cannot be empty + + if profile is None: + insertion_values["profile"] = "{}" + else: + update_values["profile"] = json.dumps(profile) + + if is_public is None: + insertion_values["is_public"] = True + else: + update_values["is_public"] = is_public + + return self._simple_upsert( + table="group_room_categories", + keyvalues={ + "group_id": group_id, + "category_id": category_id, + }, + values=update_values, + insertion_values=insertion_values, + desc="upsert_group_category", + ) + + def remove_group_category(self, group_id, category_id): + return self._simple_delete( + table="group_room_categories", + keyvalues={ + "group_id": group_id, + "category_id": category_id, + }, + desc="remove_group_category", + ) + + @defer.inlineCallbacks + def get_group_roles(self, group_id): + rows = yield self._simple_select_list( + table="group_roles", + keyvalues={ + "group_id": group_id, + }, + retcols=("role_id", "is_public", "profile"), + desc="get_group_roles", + ) + + defer.returnValue({ + row["role_id"]: { + "is_public": row["is_public"], + "profile": json.loads(row["profile"]), + } + for row in rows + }) + + @defer.inlineCallbacks + def get_group_role(self, group_id, role_id): + role = yield self._simple_select_one( + table="group_roles", + keyvalues={ + "group_id": group_id, + "role_id": role_id, + }, + retcols=("is_public", "profile"), + desc="get_group_role", + ) + + role["profile"] = json.loads(role["profile"]) + + defer.returnValue(role) + + def upsert_group_role(self, group_id, role_id, profile, is_public): + insertion_values = {} + update_values = 
{"role_id": role_id} # This cannot be empty + + if profile is None: + insertion_values["profile"] = "{}" + else: + update_values["profile"] = json.dumps(profile) + + if is_public is None: + insertion_values["is_public"] = True + else: + update_values["is_public"] = is_public + + return self._simple_upsert( + table="group_roles", + keyvalues={ + "group_id": group_id, + "role_id": role_id, + }, + values=update_values, + insertion_values=insertion_values, + desc="upsert_group_role", + ) + + def remove_group_role(self, group_id, role_id): + return self._simple_delete( + table="group_roles", + keyvalues={ + "group_id": group_id, + "role_id": role_id, + }, + desc="remove_group_role", + ) + + def add_user_to_summary(self, group_id, user_id, role_id, order, is_public): + return self.runInteraction( + "add_user_to_summary", self._add_user_to_summary_txn, + group_id, user_id, role_id, order, is_public, + ) + + def _add_user_to_summary_txn(self, txn, group_id, user_id, role_id, order, + is_public): + if role_id is None: + role_id = _DEFAULT_CATEGORY_ID + else: + role_exists = self._simple_select_one_onecol_txn( + txn, + table="group_roles", + keyvalues={ + "group_id": group_id, + "role_id": role_id, + }, + retcol="group_id", + allow_none=True, + ) + if not role_exists: + raise SynapseError(400, "Role doesn't exist") + + # TODO: Check room is part of group already + role_exists = self._simple_select_one_onecol_txn( + txn, + table="group_summary_roles", + keyvalues={ + "group_id": group_id, + "role_id": role_id, + }, + retcol="group_id", + allow_none=True, + ) + if not role_exists: + txn.execute(""" + INSERT INTO group_summary_roles + (group_id, role_id, role_order) + SELECT ?, ?, COALESCE(MAX(role_order), 1) + FROM group_summary_roles + WHERE group_id = ? AND role_id = ? + """, (group_id, role_id, group_id, role_id)) + + existing = self._simple_select_one_txn( + txn, + table="group_summary_users", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + "role_id": role_id, + }, + retcols=("user_order", "is_public",), + allow_none=True, + ) + + if order is not None: + sql = """ + UPDATE group_summary_users SET user_order = user_order + 1 + WHERE group_id = ? AND role_id = ? AND user_order >= ? + """ + txn.execute(sql, (group_id, role_id, order,)) + elif not existing: + sql = """ + SELECT COALESCE(MAX(user_order), 0) + 1 FROM group_summary_users + WHERE group_id = ? AND role_id = ? 
+ """ + txn.execute(sql, (group_id, role_id,)) + order, = txn.fetchone() + + if existing: + to_update = {} + if order is not None: + to_update["user_order"] = order + if is_public is not None: + to_update["is_public"] = is_public + self._simple_update_txn( + txn, + table="group_summary_users", + keyvalues={ + "group_id": group_id, + "role_id": role_id, + "user_id": user_id, + }, + values=to_update, + ) + else: + if is_public is None: + is_public = True + + self._simple_insert_txn( + txn, + table="group_summary_users", + values={ + "group_id": group_id, + "role_id": role_id, + "user_id": user_id, + "user_order": order, + "is_public": is_public, + }, + ) + + def remove_user_from_summary(self, group_id, user_id, role_id): + if role_id is None: + role_id = _DEFAULT_CATEGORY_ID + + return self._simple_delete( + table="group_summary_users", + keyvalues={ + "group_id": group_id, + "role_id": role_id, + "user_id": user_id, + }, + desc="remove_user_from_summary", + ) + + def get_users_for_summary_by_role(self, group_id, include_private=False): + def _get_users_for_summary_txn(txn): + keyvalues = { + "group_id": group_id, + } + if not include_private: + keyvalues["is_public"] = True + + sql = """ + SELECT user_id, is_public, role_id, user_order + FROM group_summary_users + WHERE group_id = ? + """ + + if not include_private: + sql += " AND is_public = ?" + txn.execute(sql, (group_id, True)) + else: + txn.execute(sql, (group_id,)) + + users = [ + { + "user_id": row[0], + "is_public": row[1], + "role_id": row[2] if row[2] != _DEFAULT_CATEGORY_ID else None, + "order": row[3], + } + for row in txn + ] + + sql = """ + SELECT role_id, is_public, profile, role_order + FROM group_summary_roles + INNER JOIN group_roles USING (group_id, role_id) + WHERE group_id = ? + """ + + if not include_private: + sql += " AND is_public = ?" + txn.execute(sql, (group_id, True)) + else: + txn.execute(sql, (group_id,)) + + roles = { + row[0]: { + "is_public": row[1], + "profile": json.loads(row[2]), + "order": row[3], + } + for row in txn + } + + return users, roles + return self.runInteraction( + "get_users_for_summary_by_role", _get_users_for_summary_txn + ) + def is_user_in_group(self, user_id, group_id): return self._simple_select_one_onecol( table="group_users", @@ -223,6 +714,103 @@ class GroupServerStore(SQLBaseStore): desc="add_room_to_group", ) + @defer.inlineCallbacks + def register_user_group_membership(self, group_id, user_id, membership, + is_admin=False, content={}, + local_attestation=None, + remote_attestation=None, + ): + def _register_user_group_membership_txn(txn, next_id): + # TODO: Upsert? + self._simple_delete_txn( + txn, + table="local_group_membership", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + ) + self._simple_insert_txn( + txn, + table="local_group_membership", + values={ + "group_id": group_id, + "user_id": user_id, + "is_admin": is_admin, + "membership": membership, + "content": json.dumps(content), + }, + ) + self._simple_delete_txn( + txn, + table="local_group_updates", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + "type": "membership", + }, + ) + self._simple_insert_txn( + txn, + table="local_group_updates", + values={ + "stream_id": next_id, + "group_id": group_id, + "user_id": user_id, + "type": "membership", + "content": json.dumps({"membership": membership, "content": content}), + } + ) + self._group_updates_stream_cache.entity_has_changed(user_id, next_id) + + # TODO: Insert profile to ensuer it comes down stream if its a join. 
+
+            if membership == "join":
+                if local_attestation:
+                    self._simple_insert_txn(
+                        txn,
+                        table="group_attestations_renewals",
+                        values={
+                            "group_id": group_id,
+                            "user_id": user_id,
+                            "valid_until_ms": local_attestation["valid_until_ms"],
+                        }
+                    )
+                if remote_attestation:
+                    self._simple_insert_txn(
+                        txn,
+                        table="group_attestations_remote",
+                        values={
+                            "group_id": group_id,
+                            "user_id": user_id,
+                            "valid_until_ms": remote_attestation["valid_until_ms"],
+                            "attestation_json": json.dumps(remote_attestation),
+                        }
+                    )
+            else:
+                self._simple_delete_txn(
+                    txn,
+                    table="group_attestations_renewals",
+                    keyvalues={
+                        "group_id": group_id,
+                        "user_id": user_id,
+                    },
+                )
+                self._simple_delete_txn(
+                    txn,
+                    table="group_attestations_remote",
+                    keyvalues={
+                        "group_id": group_id,
+                        "user_id": user_id,
+                    },
+                )
+
+        with self._group_updates_id_gen.get_next() as next_id:
+            yield self.runInteraction(
+                "register_user_group_membership",
+                _register_user_group_membership_txn, next_id,
+            )
+
     @defer.inlineCallbacks
     def create_group(self, group_id, user_id, name, avatar_url,
                      short_description, long_description,):
@@ -238,6 +826,61 @@ class GroupServerStore(SQLBaseStore):
             desc="create_group",
         )
 
+    def get_joined_groups(self, user_id):
+        return self._simple_select_onecol(
+            table="local_group_membership",
+            keyvalues={
+                "user_id": user_id,
+                "membership": "join",
+            },
+            retcol="group_id",
+            desc="get_joined_groups",
+        )
+
+    def get_all_groups_for_user(self, user_id, now_token):
+        def _get_all_groups_for_user_txn(txn):
+            sql = """
+                SELECT group_id, type, membership, u.content
+                FROM local_group_updates AS u
+                INNER JOIN local_group_membership USING (group_id, user_id)
+                WHERE user_id = ? AND membership != 'leave'
+                    AND stream_id <= ?
+            """
+            txn.execute(sql, (user_id, now_token,))
+            return self.cursor_to_dict(txn)
+        return self.runInteraction(
+            "get_all_groups_for_user", _get_all_groups_for_user_txn,
+        )
+
+    def get_groups_changes_for_user(self, user_id, from_token, to_token):
+        from_token = int(from_token)
+        has_changed = self._group_updates_stream_cache.has_entity_changed(
+            user_id, from_token,
+        )
+        if not has_changed:
+            return []
+
+        def _get_groups_changes_for_user_txn(txn):
+            sql = """
+                SELECT group_id, membership, type, u.content
+                FROM local_group_updates AS u
+                INNER JOIN local_group_membership USING (group_id, user_id)
+                WHERE user_id = ? AND ? < stream_id AND stream_id <= ?
+ """ + txn.execute(sql, (user_id, from_token, to_token,)) + return [{ + "group_id": group_id, + "membership": membership, + "type": gtype, + "content": json.loads(content_json), + } for group_id, membership, gtype, content_json in txn] + return self.runInteraction( + "get_groups_changes_for_user", _get_groups_changes_for_user_txn, + ) + + def get_group_stream_token(self): + return self._group_updates_id_gen.get_current_token() + def get_attestations_need_renewals(self, valid_until_ms): """Get all attestations that need to be renewed until givent time """ diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index c223ee275..3013b89b7 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -56,6 +56,62 @@ CREATE INDEX groups_rooms_g_idx ON group_rooms(group_id, room_id); CREATE INDEX groups_rooms_r_idx ON group_rooms(room_id); +CREATE TABLE group_summary_rooms ( + group_id TEXT NOT NULL, + room_id TEXT NOT NULL, + category_id TEXT NOT NULL, + room_order BIGINT NOT NULL, + is_public BOOLEAN NOT NULL, + UNIQUE (group_id, category_id, room_id, room_order), + CHECK (room_order > 0) +); + +CREATE UNIQUE INDEX group_summary_rooms_g_idx ON group_summary_rooms(group_id, room_id, category_id); + +CREATE TABLE group_summary_room_categories ( + group_id TEXT NOT NULL, + category_id TEXT NOT NULL, + cat_order BIGINT NOT NULL, + UNIQUE (group_id, category_id, cat_order), + CHECK (cat_order > 0) +); + +CREATE TABLE group_room_categories ( + group_id TEXT NOT NULL, + category_id TEXT NOT NULL, + profile TEXT NOT NULL, + is_public BOOLEAN NOT NULL, + UNIQUE (group_id, category_id) +); + + +CREATE TABLE group_summary_users ( + group_id TEXT NOT NULL, + user_id TEXT NOT NULL, + role_id TEXT NOT NULL, + user_order BIGINT NOT NULL, + is_public BOOLEAN NOT NULL +); + +CREATE INDEX group_summary_users_g_idx ON group_summary_users(group_id); + +CREATE TABLE group_summary_roles ( + group_id TEXT NOT NULL, + role_id TEXT NOT NULL, + role_order BIGINT NOT NULL, + UNIQUE (group_id, role_id, role_order), + CHECK (role_order > 0) +); + +CREATE TABLE group_roles ( + group_id TEXT NOT NULL, + role_id TEXT NOT NULL, + profile TEXT NOT NULL, + is_public BOOLEAN NOT NULL, + UNIQUE (group_id, role_id) +); + + -- List of attestations we've given out and need to renew CREATE TABLE group_attestations_renewals ( group_id TEXT NOT NULL, From 8d55877c9eaffc8dcaf26cdc7a032c774c9a2f5b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 12 Jul 2017 11:43:39 +0100 Subject: [PATCH 014/223] Simplify checking if admin --- synapse/groups/groups_server.py | 72 +++++++++------------------------ 1 file changed, 19 insertions(+), 53 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 29a911e18..ec45da2d7 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -50,7 +50,7 @@ class GroupsServerHandler(object): hs.get_groups_attestation_renewer() @defer.inlineCallbacks - def check_group_is_ours(self, group_id, and_exists=False): + def check_group_is_ours(self, group_id, and_exists=False, and_is_admin=None): """Check that the group is ours, and optionally if it exists. If group does exist then return group. 
@@ -62,6 +62,11 @@ class GroupsServerHandler(object): if and_exists and not group: raise SynapseError(404, "Unknown group") + if and_is_admin: + is_admin = yield self.store.is_user_admin_in_group(group_id, and_is_admin) + if not is_admin: + raise SynapseError(403, "User is not admin in group") + defer.returnValue(group) @defer.inlineCallbacks @@ -128,11 +133,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def update_group_summary_room(self, group_id, user_id, room_id, category_id, content): - yield self.check_group_is_ours(group_id, and_exists=True) - - is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) - if not is_admin: - raise SynapseError(403, "User is not admin in group") + yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) order = content.get("order", None) @@ -150,11 +151,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def delete_group_summary_room(self, group_id, user_id, room_id, category_id): - yield self.check_group_is_ours(group_id, and_exists=True) - - is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) - if not is_admin: - raise SynapseError(403, "User is not admin in group") + yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) yield self.store.remove_room_from_summary( group_id=group_id, @@ -186,11 +183,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def update_group_category(self, group_id, user_id, category_id, content): - yield self.check_group_is_ours(group_id, and_exists=True) - - is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) - if not is_admin: - raise SynapseError(403, "User is not admin in group") + yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) is_public = _parse_visibility_from_contents(content) profile = content.get("profile") @@ -206,11 +199,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def delete_group_category(self, group_id, user_id, category_id): - yield self.check_group_is_ours(group_id, and_exists=True) - - is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) - if not is_admin: - raise SynapseError(403, "User is not admin in group") + yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) yield self.store.remove_group_category( group_id=group_id, @@ -240,11 +229,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def update_group_role(self, group_id, user_id, role_id, content): - yield self.check_group_is_ours(group_id, and_exists=True) - - is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) - if not is_admin: - raise SynapseError(403, "User is not admin in group") + yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) is_public = _parse_visibility_from_contents(content) @@ -261,11 +246,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def delete_group_role(self, group_id, user_id, role_id): - yield self.check_group_is_ours(group_id, and_exists=True) - - is_admin = yield self.store.is_user_admin_in_group(group_id, user_id) - if not is_admin: - raise SynapseError(403, "User is not admin in group") + yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) yield self.store.remove_group_role( group_id=group_id, @@ -277,11 +258,7 @@ class GroupsServerHandler(object): @defer.inlineCallbacks def update_group_summary_user(self, group_id, requester_user_id, user_id, role_id, content): - yield 
self.check_group_is_ours(group_id, and_exists=True)
-
-        is_admin = yield self.store.is_user_admin_in_group(group_id, requester_user_id)
-        if not is_admin:
-            raise SynapseError(403, "User is not admin in group")
+        yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=requester_user_id)
 
         order = content.get("order", None)
 
@@ -299,11 +276,7 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def delete_group_summary_user(self, group_id, requester_user_id, user_id, role_id):
-        yield self.check_group_is_ours(group_id, and_exists=True)
-
-        is_admin = yield self.store.is_user_admin_in_group(group_id, requester_user_id)
-        if not is_admin:
-            raise SynapseError(403, "User is not admin in group")
+        yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=requester_user_id)
 
         yield self.store.remove_user_from_summary(
@@ -419,12 +392,9 @@ class GroupsServerHandler(object):
     def add_room(self, group_id, requester_user_id, room_id, content):
         """Add room to group
         """
-
-        yield self.check_group_is_ours(group_id, and_exists=True)
-
-        is_admin = yield self.store.is_user_admin_in_group(group_id, requester_user_id)
-        if not is_admin:
-            raise SynapseError(403, "User is not admin in group")
+        yield self.check_group_is_ours(
+            group_id, and_exists=True, and_is_admin=requester_user_id
+        )
 
         # TODO: Check if room has already been added
 
@@ -439,13 +409,9 @@ class GroupsServerHandler(object):
         """Invite user to group
         """
 
-        group = yield self.check_group_is_ours(group_id, and_exists=True)
-
-        is_admin = yield self.store.is_user_admin_in_group(
-            group_id, requester_user_id
-        )
-        if not is_admin:
-            raise SynapseError(403, "User is not admin in group")
+        group = yield self.check_group_is_ours(
+            group_id, and_exists=True, and_is_admin=requester_user_id
+        )
 
         # TODO: Check if user knocked
         # TODO: Check if user is already invited

From 26451a09eb938e6a72be3d77ff8c9e3fd2b33539 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 12 Jul 2017 14:11:59 +0100
Subject: [PATCH 015/223] Comments

---
 synapse/groups/groups_server.py                  | 38 +++++++++++++++++++
 synapse/storage/group_server.py                  | 29 ++++++++++++++
 .../storage/schema/delta/43/group_server.sql     | 17 ++++++---
 3 files changed, 79 insertions(+), 5 deletions(-)

diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index ec45da2d7..83dfcd0fd 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -54,6 +54,12 @@ class GroupsServerHandler(object):
         """Check that the group is ours, and optionally if it exists.
 
         If group does exist then return group.
+
+        Args:
+            group_id (str)
+            and_exists (bool): whether to also check if group exists
+            and_is_admin (str): if given, a user_id to also check for admin
+                rights in the group
         """
         if not self.is_mine_id(group_id):
             raise SynapseError(400, "Group not on this server")
@@ -71,6 +77,14 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def get_group_summary(self, group_id, requester_user_id):
+        """Get the summary for a group as seen by requester_user_id.
+
+        The group summary consists of the profile of the group, and a curated
+        list of users and rooms. These lists *may* be organised by role/category.
+        The roles/categories are ordered, and so are the users/rooms within them.
+
+        A user/room may appear in multiple roles/categories.
+        """
         yield self.check_group_is_ours(group_id, and_exists=True)
 
         is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
@@ -133,6 +147,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def update_group_summary_room(self, group_id, user_id, room_id, category_id, content):
+        """Add/update a room to the group summary
+        """
         yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id)
 
         order = content.get("order", None)
@@ -151,6 +167,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def delete_group_summary_room(self, group_id, user_id, room_id, category_id):
+        """Remove a room from the summary
+        """
         yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id)
 
         yield self.store.remove_room_from_summary(
@@ -163,6 +181,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def get_group_categories(self, group_id, user_id):
+        """Get all categories in a group (as seen by user)
+        """
         yield self.check_group_is_ours(group_id, and_exists=True)
 
         categories = yield self.store.get_group_categories(
@@ -172,6 +192,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def get_group_category(self, group_id, user_id, category_id):
+        """Get a specific category in a group (as seen by user)
+        """
         yield self.check_group_is_ours(group_id, and_exists=True)
 
         res = yield self.store.get_group_category(
@@ -183,6 +205,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def update_group_category(self, group_id, user_id, category_id, content):
+        """Add/update a group category
+        """
         yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id)
 
         is_public = _parse_visibility_from_contents(content)
@@ -199,6 +223,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def delete_group_category(self, group_id, user_id, category_id):
+        """Delete a group category
+        """
         yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id)
 
         yield self.store.remove_group_category(
@@ -210,6 +236,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def get_group_roles(self, group_id, user_id):
+        """Get all roles in a group (as seen by user)
+        """
         yield self.check_group_is_ours(group_id, and_exists=True)
 
         roles = yield self.store.get_group_roles(
@@ -219,6 +247,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def get_group_role(self, group_id, user_id, role_id):
+        """Get a specific role in a group (as seen by user)
+        """
         yield self.check_group_is_ours(group_id, and_exists=True)
 
         res = yield self.store.get_group_role(
@@ -229,6 +259,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def update_group_role(self, group_id, user_id, role_id, content):
+        """Add/update a role in a group
+        """
         yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id)
 
         is_public = _parse_visibility_from_contents(content)
@@ -246,6 +278,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def delete_group_role(self, group_id, user_id, role_id):
+        """Remove role from group
+        """
         yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id)
 
         yield self.store.remove_group_role(
@@ -258,6 +292,8 @@ class GroupsServerHandler(object):
     @defer.inlineCallbacks
     def update_group_summary_user(self, group_id, requester_user_id, user_id, role_id,
                                   content):
+        """Add/update a user's entry in the group summary
+        """
         yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=requester_user_id)
 
         order = content.get("order", None)
@@ -276,6 +312,8 @@ class GroupsServerHandler(object):
 
     @defer.inlineCallbacks
     def delete_group_summary_user(self, group_id, requester_user_id, user_id, role_id):
+        """Remove a user from the group summary
+        """
         yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=requester_user_id)
 
         yield self.store.remove_user_from_summary(

diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py
index f4818ff17..18bfaeda6 100644
--- a/synapse/storage/group_server.py
+++ b/synapse/storage/group_server.py
@@ -22,6 +22,8 @@ from ._base import SQLBaseStore
 
 import ujson as json
 
+# The category ID for the "default" category. We don't store as null in the
+# database to avoid the fun of null != null
 _DEFAULT_CATEGORY_ID = "default"
 
 
@@ -70,6 +72,10 @@ class GroupServerStore(SQLBaseStore):
         )
 
     def get_rooms_for_summary_by_category(self, group_id, include_private=False):
+        """Get the rooms and categories that should be included in a summary request
+
+        Returns ([rooms], [categories])
+        """
         def _get_rooms_for_summary_txn(txn):
             keyvalues = {
                 "group_id": group_id,
@@ -134,6 +140,14 @@ class GroupServerStore(SQLBaseStore):
 
     def _add_room_to_summary_txn(self, txn, group_id, room_id, category_id, order,
                                  is_public):
+        """Add room to summary.
+
+        This automatically adds the room to the end of the list of rooms to be
+        included in the summary response. If a category is given then the room
+        will be added under that category (the category will automatically be
+        added to the summary if a room is listed under it).
+        """
+
         if category_id is None:
             category_id = _DEFAULT_CATEGORY_ID
         else:
@@ -278,6 +292,8 @@ class GroupServerStore(SQLBaseStore):
         defer.returnValue(category)
 
     def upsert_group_category(self, group_id, category_id, profile, is_public):
+        """Add/update room category for group
+        """
         insertion_values = {}
         update_values = {"category_id": category_id}  # This cannot be empty
 
@@ -348,6 +364,8 @@ class GroupServerStore(SQLBaseStore):
         defer.returnValue(role)
 
     def upsert_group_role(self, group_id, role_id, profile, is_public):
+        """Add/update a user role
+        """
         insertion_values = {}
         update_values = {"role_id": role_id}  # This cannot be empty
 
@@ -390,6 +408,13 @@ class GroupServerStore(SQLBaseStore):
 
     def _add_user_to_summary_txn(self, txn, group_id, user_id, role_id, order,
                                  is_public):
+        """Add user to summary.
+
+        This automatically adds the user to the end of the list of users to be
+        included in the summary response. If a role is given then the user will
+        be added under that role (the role will automatically be added to the
+        summary if a user is listed under it).
+        """
         if role_id is None:
             role_id = _DEFAULT_CATEGORY_ID
         else:
@@ -499,6 +524,10 @@ class GroupServerStore(SQLBaseStore):
         )
 
     def get_users_for_summary_by_role(self, group_id, include_private=False):
+        """Get the users and roles that should be included in a summary request
+
+        Returns ([users], [roles])
+        """
         def _get_users_for_summary_txn(txn):
             keyvalues = {
                 "group_id": group_id,

diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql
index 3013b89b7..472aab0a7 100644
--- a/synapse/storage/schema/delta/43/group_server.sql
+++ b/synapse/storage/schema/delta/43/group_server.sql
@@ -56,18 +56,21 @@ CREATE INDEX groups_rooms_g_idx ON group_rooms(group_id, room_id);
 CREATE INDEX groups_rooms_r_idx ON group_rooms(room_id);
 
 
+-- Rooms to include in the summary
 CREATE TABLE group_summary_rooms (
     group_id TEXT NOT NULL,
     room_id TEXT NOT NULL,
     category_id TEXT NOT NULL,
     room_order BIGINT NOT NULL,
-    is_public BOOLEAN NOT NULL,
+    is_public BOOLEAN NOT NULL, -- whether the room should be shown to everyone
     UNIQUE (group_id, category_id, room_id, room_order),
     CHECK (room_order > 0)
 );
 
 CREATE UNIQUE INDEX group_summary_rooms_g_idx ON group_summary_rooms(group_id, room_id, category_id);
 
+
+-- Categories to include in the summary
 CREATE TABLE group_summary_room_categories (
     group_id TEXT NOT NULL,
     category_id TEXT NOT NULL,
@@ -76,25 +79,27 @@ CREATE TABLE group_summary_room_categories (
     CHECK (cat_order > 0)
 );
 
+-- The categories in the group
 CREATE TABLE group_room_categories (
     group_id TEXT NOT NULL,
     category_id TEXT NOT NULL,
     profile TEXT NOT NULL,
-    is_public BOOLEAN NOT NULL,
+    is_public BOOLEAN NOT NULL, -- whether the category should be shown to everyone
     UNIQUE (group_id, category_id)
 );
 
-
+-- The users to include in the group summary
 CREATE TABLE group_summary_users (
     group_id TEXT NOT NULL,
     user_id TEXT NOT NULL,
     role_id TEXT NOT NULL,
     user_order BIGINT NOT NULL,
-    is_public BOOLEAN NOT NULL
+    is_public BOOLEAN NOT NULL -- whether the user should be shown to everyone
 );
 
 CREATE INDEX group_summary_users_g_idx ON group_summary_users(group_id);
 
+-- The roles to include in the group summary
 CREATE TABLE group_summary_roles (
     group_id TEXT NOT NULL,
     role_id TEXT NOT NULL,
@@ -103,11 +108,13 @@ CREATE TABLE group_summary_roles (
     CHECK (role_order > 0)
 );
 
+
+-- The roles in a group
 CREATE TABLE group_roles (
     group_id TEXT NOT NULL,
     role_id TEXT NOT NULL,
     profile TEXT NOT NULL,
-    is_public BOOLEAN NOT NULL,
+    is_public BOOLEAN NOT NULL, -- whether the role should be shown to everyone
     UNIQUE (group_id, role_id)
 );

From 5bbb53580a3fc732f0e4aab49f7893bd4f7e2a43 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Thu, 13 Jul 2017 10:25:29 +0100
Subject: [PATCH 016/223] raise NotImplementedError

---
 synapse/groups/groups_server.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index 83dfcd0fd..dc9d361f5 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -463,9 +463,7 @@ class GroupsServerHandler(object):
         }
 
         if self.hs.is_mine_id(user_id):
-            groups_local = self.hs.get_groups_local_handler()
-            res = yield groups_local.on_invite(group_id, user_id, content)
-            local_attestation = None
+            raise NotImplementedError()
         else:
             local_attestation = self.attestations.create_attestation(group_id, user_id)
             content.update({
@@ -593,8 +591,7 @@ class GroupsServerHandler(object):
 
         if is_kick:
             if self.hs.is_mine_id(user_id):
-                groups_local =
self.hs.get_groups_local_handler() - yield groups_local.user_removed_from_group(group_id, user_id, {}) + raise NotImplementedError() else: yield self.transport_client.remove_user_from_group_notification( get_domain_from_id(user_id), group_id, user_id, {} From 7a39da8cc6cac4014789a29d9abaf48ec13971d5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 13 Jul 2017 11:13:19 +0100 Subject: [PATCH 017/223] Add summary APIs to federation --- synapse/federation/transport/server.py | 234 +++++++++++++++++++++++-- 1 file changed, 223 insertions(+), 11 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index bbb66190e..1ea2b37ce 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -615,8 +615,8 @@ class FederationGroupsProfileServlet(BaseFederationServlet): PATH = "/groups/(?P[^/]*)/profile$" @defer.inlineCallbacks - def on_POST(self, origin, content, query, group_id): - requester_user_id = content["requester_user_id"] + def on_GET(self, origin, content, query, group_id): + requester_user_id = query["requester_user_id"] if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -631,8 +631,8 @@ class FederationGroupsSummaryServlet(BaseFederationServlet): PATH = "/groups/(?P[^/]*)/summary$" @defer.inlineCallbacks - def on_POST(self, origin, content, query, group_id): - requester_user_id = content["requester_user_id"] + def on_GET(self, origin, content, query, group_id): + requester_user_id = query["requester_user_id"] if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -649,8 +649,8 @@ class FederationGroupsRoomsServlet(BaseFederationServlet): PATH = "/groups/(?P[^/]*)/rooms$" @defer.inlineCallbacks - def on_POST(self, origin, content, query, group_id): - requester_user_id = content["requester_user_id"] + def on_GET(self, origin, content, query, group_id): + requester_user_id = query["requester_user_id"] if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -668,7 +668,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, room_id): - requester_user_id = content["requester_user_id"] + requester_user_id = query["requester_user_id"] if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -685,8 +685,8 @@ class FederationGroupsUsersServlet(BaseFederationServlet): PATH = "/groups/(?P[^/]*)/users$" @defer.inlineCallbacks - def on_POST(self, origin, content, query, group_id): - requester_user_id = content["requester_user_id"] + def on_GET(self, origin, content, query, group_id): + requester_user_id = query["requester_user_id"] if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -704,7 +704,7 @@ class FederationGroupsInviteServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, user_id): - requester_user_id = content["requester_user_id"] + requester_user_id = query["requester_user_id"] if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -739,7 +739,7 @@ class FederationGroupsRemoveUserServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, 
query, group_id, user_id): - requester_user_id = content["requester_user_id"] + requester_user_id = query["requester_user_id"] if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -766,6 +766,212 @@ class FederationGroupsRenewAttestaionServlet(BaseFederationServlet): defer.returnValue((200, new_content)) +class FederationGroupsSummaryRoomsServlet(BaseFederationServlet): + """Add/remove a room from the group summary, with optional category. + + Matches both: + - /groups/:group/summary/rooms/:room_id + - /groups/:group/summary/categories/:category/rooms/:room_id + """ + PATH = ( + "/groups/(?P[^/]*)/summary" + "(/categories/(?P[^/]+))?" + "/rooms/(?P[^/]*)$" + ) + + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id, category_id, room_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.update_group_summary_room( + group_id, requester_user_id, + room_id=room_id, + category_id=category_id, + content=content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, origin, content, query, group_id, category_id, room_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.delete_group_summary_room( + group_id, requester_user_id, + room_id=room_id, + category_id=category_id, + ) + + defer.returnValue((200, resp)) + + +class FederationGroupsCategoriesServlet(BaseFederationServlet): + PATH = ( + "/groups/(?P[^/]*)/categories/$" + ) + + @defer.inlineCallbacks + def on_GET(self, origin, content, query, group_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.get_group_categories( + group_id, requester_user_id, + ) + + defer.returnValue((200, resp)) + + +class FederationGroupsCategoryServlet(BaseFederationServlet): + PATH = ( + "/groups/(?P[^/]*)/categories/(?P[^/]+)$" + ) + + @defer.inlineCallbacks + def on_GET(self, origin, content, query, group_id, category_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.get_group_category( + group_id, requester_user_id, category_id + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id, category_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.upsert_group_category( + group_id, requester_user_id, category_id, content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, origin, content, query, group_id, category_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.delete_group_category( + group_id, requester_user_id, category_id, + ) + + defer.returnValue((200, resp)) + + +class FederationGroupsRolesServlet(BaseFederationServlet): + 
PATH = ( + "/groups/(?P[^/]*)/roles/$" + ) + + @defer.inlineCallbacks + def on_GET(self, origin, content, query, group_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.get_group_roles( + group_id, requester_user_id, + ) + + defer.returnValue((200, resp)) + + +class FederationGroupsRoleServlet(BaseFederationServlet): + PATH = ( + "/groups/(?P[^/]*)/roles/(?P[^/]+)$" + ) + + @defer.inlineCallbacks + def on_GET(self, origin, content, query, group_id, role_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.get_group_role( + group_id, requester_user_id, role_id + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id, role_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.update_group_role( + group_id, requester_user_id, role_id, content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, origin, content, query, group_id, role_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.delete_group_role( + group_id, requester_user_id, role_id, + ) + + defer.returnValue((200, resp)) + + +class FederationGroupsSummaryUsersServlet(BaseFederationServlet): + """Add/remove a user from the group summary, with optional role. + + Matches both: + - /groups/:group/summary/users/:user_id + - /groups/:group/summary/roles/:role/users/:user_id + """ + PATH = ( + "/groups/(?P[^/]*)/summary" + "(/roles/(?P[^/]+))?" 
+ "/users/(?P[^/]*)$" + ) + + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id, role_id, user_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.update_group_summary_user( + group_id, requester_user_id, + user_id=user_id, + role_id=role_id, + content=content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, origin, content, query, group_id, role_id, user_id): + requester_user_id = query["requester_user_id"] + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + resp = yield self.handler.delete_group_summary_user( + group_id, requester_user_id, + user_id=user_id, + role_id=role_id, + ) + + defer.returnValue((200, resp)) + + FEDERATION_SERVLET_CLASSES = ( FederationSendServlet, FederationPullServlet, @@ -806,6 +1012,12 @@ GROUP_SERVER_SERVLET_CLASSES = ( FederationGroupsInviteServlet, FederationGroupsAcceptInviteServlet, FederationGroupsRemoveUserServlet, + FederationGroupsSummaryRoomsServlet, + FederationGroupsCategoriesServlet, + FederationGroupsCategoryServlet, + FederationGroupsRolesServlet, + FederationGroupsRoleServlet, + FederationGroupsSummaryUsersServlet, ) From a78cda4bafd1eb33a40e8d841de311ea2dbbc086 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 13 Jul 2017 11:17:07 +0100 Subject: [PATCH 018/223] Remove TODO --- synapse/groups/groups_server.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index dc9d361f5..a00bafe3a 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -96,7 +96,6 @@ class GroupsServerHandler(object): ) # TODO: Add profiles to users - # TODO: Add assestations to users rooms, categories = yield self.store.get_rooms_for_summary_by_category( group_id, include_private=is_user_in_group, From 8575e3160f98a0b33cd0ec6080389701dcb535e8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 13 Jul 2017 13:32:40 +0100 Subject: [PATCH 019/223] Comments --- synapse/federation/transport/server.py | 8 ++++++ synapse/storage/group_server.py | 36 +++++++++++++++++--------- 2 files changed, 32 insertions(+), 12 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 1ea2b37ce..304c2a2a4 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -810,6 +810,8 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet): class FederationGroupsCategoriesServlet(BaseFederationServlet): + """Get all categories for a group + """ PATH = ( "/groups/(?P[^/]*)/categories/$" ) @@ -828,6 +830,8 @@ class FederationGroupsCategoriesServlet(BaseFederationServlet): class FederationGroupsCategoryServlet(BaseFederationServlet): + """Add/remove/get a category in a group + """ PATH = ( "/groups/(?P[^/]*)/categories/(?P[^/]+)$" ) @@ -870,6 +874,8 @@ class FederationGroupsCategoryServlet(BaseFederationServlet): class FederationGroupsRolesServlet(BaseFederationServlet): + """Get roles in a group + """ PATH = ( "/groups/(?P[^/]*)/roles/$" ) @@ -888,6 +894,8 @@ class FederationGroupsRolesServlet(BaseFederationServlet): class FederationGroupsRoleServlet(BaseFederationServlet): + """Add/remove/get a role in a group + """ PATH = ( "/groups/(?P[^/]*)/roles/(?P[^/]+)$" ) diff --git a/synapse/storage/group_server.py 
b/synapse/storage/group_server.py index 18bfaeda6..b328ef8bc 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -140,12 +140,16 @@ class GroupServerStore(SQLBaseStore): def _add_room_to_summary_txn(self, txn, group_id, room_id, category_id, order, is_public): - """Add room to summary. + """Add (or update) room's entry in summary. - This automatically adds the room to the end of the list of rooms to be - included in the summary response. If a role is given then user will - be added under that category (the category will automatically be added tothe - the summary if a user is listed under that role in the summary). + Args: + group_id (str) + room_id (str) + category_id (str): If not None then adds the category to the end of + the summary if it's not already there. [Optional] + order (int): If not None inserts the room at that position, e.g. + an order of 1 will put the room first. Otherwise, the room gets + added to the end. """ if category_id is None: @@ -164,7 +168,7 @@ class GroupServerStore(SQLBaseStore): if not cat_exists: raise SynapseError(400, "Category doesn't exist") - # TODO: Check room is part of group already + # TODO: Check category is part of summary already cat_exists = self._simple_select_one_onecol_txn( txn, table="group_summary_room_categories", @@ -176,6 +180,7 @@ class GroupServerStore(SQLBaseStore): allow_none=True, ) if not cat_exists: + # If not, add it with an order larger than all others txn.execute(""" INSERT INTO group_summary_room_categories (group_id, category_id, cat_order) @@ -197,6 +202,7 @@ class GroupServerStore(SQLBaseStore): ) if order is not None: + # Shuffle other room orders that come after the given order sql = """ UPDATE group_summary_rooms SET room_order = room_order + 1 WHERE group_id = ? AND category_id = ? AND room_order >= ? @@ -408,12 +414,16 @@ class GroupServerStore(SQLBaseStore): def _add_user_to_summary_txn(self, txn, group_id, user_id, role_id, order, is_public): - """Add user to summary. + """Add (or update) user's entry in summary. - This automatically adds the user to the end of the list of users to be - included in the summary response. If a role is given then user will - be added under that role (the role will automatically be added to the - summary if a user is listed under that role in the summary). + Args: + group_id (str) + user_id (str) + role_id (str): If not None then adds the role to the end of + the summary if it's not already there. [Optional] + order (int): If not None inserts the user at that position, e.g. + an order of 1 will put the user first. Otherwise, the user gets + added to the end. """ if role_id is None: role_id = _DEFAULT_CATEGORY_ID else: @@ -431,7 +441,7 @@ class GroupServerStore(SQLBaseStore): if not role_exists: raise SynapseError(400, "Role doesn't exist") - # TODO: Check room is part of group already + # TODO: Check role is part of the summary already role_exists = self._simple_select_one_onecol_txn( txn, table="group_summary_roles", @@ -443,6 +453,7 @@ class GroupServerStore(SQLBaseStore): allow_none=True, ) if not role_exists: + # If not, add it with an order larger than all others txn.execute(""" INSERT INTO group_summary_roles (group_id, role_id, role_order) @@ -464,6 +475,7 @@ class GroupServerStore(SQLBaseStore): ) if order is not None: + # Shuffle other users' orders that come after the given order sql = """ UPDATE group_summary_users SET user_order = user_order + 1 WHERE group_id = ? AND role_id = ? AND user_order >= ?
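The ordering semantics documented in the patch above are worth making concrete. The sketch below is an illustrative in-memory model only, not code from the series: add_to_summary is a hypothetical helper, entries stands in for a summary table as a dict of item to 1-based order, and the two branches mirror the SQL described in the comments (append after everything else, or shift later entries up before inserting). Note that the off-by-one in the SQL append path is corrected by the "Correctly increment orders" patch further down.

def add_to_summary(entries, item, order=None):
    # Illustrative model of _add_room_to_summary_txn / _add_user_to_summary_txn.
    # entries: dict mapping item -> 1-based order.
    if order is None:
        # Append: one past the current maximum, the intent of the
        # "COALESCE(MAX(<x>_order), 0) + 1" pattern in the SQL.
        order = (max(entries.values()) if entries else 0) + 1
    else:
        # Insert: bump every entry at or after the requested slot, mirroring
        # "UPDATE ... SET <x>_order = <x>_order + 1 WHERE ... <x>_order >= ?".
        for key, existing_order in entries.items():
            if existing_order >= order:
                entries[key] = existing_order + 1
    entries[item] = order  # upsert: add the item, or move an existing one
    return entries

summary = {}
add_to_summary(summary, "!a:example.com")           # appended with order 1
add_to_summary(summary, "!b:example.com")           # appended with order 2
add_to_summary(summary, "!c:example.com", order=1)  # goes first; a and b shift to 2 and 3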
From 3b0470dba59274c65e69a4eab8909eaa55393a2a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 13 Jul 2017 13:53:21 +0100 Subject: [PATCH 020/223] Remove unused functions --- synapse/storage/group_server.py | 152 -------------------------------- 1 file changed, 152 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index b328ef8bc..2e05c23fd 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -755,103 +755,6 @@ class GroupServerStore(SQLBaseStore): desc="add_room_to_group", ) - @defer.inlineCallbacks - def register_user_group_membership(self, group_id, user_id, membership, - is_admin=False, content={}, - local_attestation=None, - remote_attestation=None, - ): - def _register_user_group_membership_txn(txn, next_id): - # TODO: Upsert? - self._simple_delete_txn( - txn, - table="local_group_membership", - keyvalues={ - "group_id": group_id, - "user_id": user_id, - }, - ) - self._simple_insert_txn( - txn, - table="local_group_membership", - values={ - "group_id": group_id, - "user_id": user_id, - "is_admin": is_admin, - "membership": membership, - "content": json.dumps(content), - }, - ) - self._simple_delete_txn( - txn, - table="local_group_updates", - keyvalues={ - "group_id": group_id, - "user_id": user_id, - "type": "membership", - }, - ) - self._simple_insert_txn( - txn, - table="local_group_updates", - values={ - "stream_id": next_id, - "group_id": group_id, - "user_id": user_id, - "type": "membership", - "content": json.dumps({"membership": membership, "content": content}), - } - ) - self._group_updates_stream_cache.entity_has_changed(user_id, next_id) - - # TODO: Insert profile to ensuer it comes down stream if its a join. - - if membership == "join": - if local_attestation: - self._simple_insert_txn( - txn, - table="group_attestations_renewals", - values={ - "group_id": group_id, - "user_id": user_id, - "valid_until_ms": local_attestation["valid_until_ms"], - } - ) - if remote_attestation: - self._simple_insert_txn( - txn, - table="group_attestations_remote", - values={ - "group_id": group_id, - "user_id": user_id, - "valid_until_ms": remote_attestation["valid_until_ms"], - "attestation": json.dumps(remote_attestation), - } - ) - else: - self._simple_delete_txn( - txn, - table="group_attestations_renewals", - keyvalues={ - "group_id": group_id, - "user_id": user_id, - }, - ) - self._simple_delete_txn( - txn, - table="group_attestations_remote", - keyvalues={ - "group_id": group_id, - "user_id": user_id, - }, - ) - - with self._group_updates_id_gen.get_next() as next_id: - yield self.runInteraction( - "register_user_group_membership", - _register_user_group_membership_txn, next_id, - ) - @defer.inlineCallbacks def create_group(self, group_id, user_id, name, avatar_url, short_description, long_description,): @@ -867,61 +770,6 @@ class GroupServerStore(SQLBaseStore): desc="create_group", ) - def get_joined_groups(self, user_id): - return self._simple_select_onecol( - table="local_group_membership", - keyvalues={ - "user_id": user_id, - "membership": "join", - }, - retcol="group_id", - desc="get_joined_groups", - ) - - def get_all_groups_for_user(self, user_id, now_token): - def _get_all_groups_for_user_txn(txn): - sql = """ - SELECT group_id, type, membership, u.content - FROM local_group_updates AS u - INNER JOIN local_group_membership USING (group_id, user_id) - WHERE user_id = ? AND membership != 'leave' - AND stream_id <= ? 
- """ - txn.execute(sql, (user_id, now_token,)) - return self.cursor_to_dict(txn) - return self.runInteraction( - "get_all_groups_for_user", _get_all_groups_for_user_txn, - ) - - def get_groups_changes_for_user(self, user_id, from_token, to_token): - from_token = int(from_token) - has_changed = self._group_updates_stream_cache.has_entity_changed( - user_id, from_token, - ) - if not has_changed: - return [] - - def _get_groups_changes_for_user_txn(txn): - sql = """ - SELECT group_id, membership, type, u.content - FROM local_group_updates AS u - INNER JOIN local_group_membership USING (group_id, user_id) - WHERE user_id = ? AND ? < stream_id AND stream_id <= ? - """ - txn.execute(sql, (user_id, from_token, to_token,)) - return [{ - "group_id": group_id, - "membership": membership, - "type": gtype, - "content": json.loads(content_json), - } for group_id, membership, gtype, content_json in txn] - return self.runInteraction( - "get_groups_changes_for_user", _get_groups_changes_for_user_txn, - ) - - def get_group_stream_token(self): - return self._group_updates_id_gen.get_current_token() - def get_attestations_need_renewals(self, valid_until_ms): """Get all attestations that need to be renewed until givent time """ From 4b203bdba51a314abef56ccee4d77e1945d16735 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 14 Jul 2017 14:02:00 +0100 Subject: [PATCH 021/223] Correctly increment orders --- synapse/storage/group_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 2e05c23fd..c23dc79ca 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -184,7 +184,7 @@ class GroupServerStore(SQLBaseStore): txn.execute(""" INSERT INTO group_summary_room_categories (group_id, category_id, cat_order) - SELECT ?, ?, COALESCE(MAX(cat_order), 1) + SELECT ?, ?, COALESCE(MAX(cat_order), 0) + 1 FROM group_summary_room_categories WHERE group_id = ? AND category_id = ? """, (group_id, category_id, group_id, category_id)) @@ -457,7 +457,7 @@ class GroupServerStore(SQLBaseStore): txn.execute(""" INSERT INTO group_summary_roles (group_id, role_id, role_order) - SELECT ?, ?, COALESCE(MAX(role_order), 1) + SELECT ?, ?, COALESCE(MAX(role_order), 0) + 1 FROM group_summary_roles WHERE group_id = ? AND role_id = ? """, (group_id, role_id, group_id, role_id)) From 85fda57208bb79e54fe473fda64351f04ffe1cda Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 14 Jul 2017 14:03:54 +0100 Subject: [PATCH 022/223] Add DEFAULT_ROLE_ID --- synapse/storage/group_server.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index c23dc79ca..e8a799d8c 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -24,7 +24,8 @@ import ujson as json # The category ID for the "default" category. We don't store as null in the # database to avoid the fun of null != null -_DEFAULT_CATEGORY_ID = "default" +_DEFAULT_CATEGORY_ID = "" +_DEFAULT_ROLE_ID = "" class GroupServerStore(SQLBaseStore): @@ -426,7 +427,7 @@ class GroupServerStore(SQLBaseStore): added to the end. 
""" if role_id is None: - role_id = _DEFAULT_CATEGORY_ID + role_id = _DEFAULT_ROLE_ID else: role_exists = self._simple_select_one_onecol_txn( txn, @@ -523,7 +524,7 @@ class GroupServerStore(SQLBaseStore): def remove_user_from_summary(self, group_id, user_id, role_id): if role_id is None: - role_id = _DEFAULT_CATEGORY_ID + role_id = _DEFAULT_ROLE_ID return self._simple_delete( table="group_summary_users", @@ -563,7 +564,7 @@ class GroupServerStore(SQLBaseStore): { "user_id": row[0], "is_public": row[1], - "role_id": row[2] if row[2] != _DEFAULT_CATEGORY_ID else None, + "role_id": row[2] if row[2] != _DEFAULT_ROLE_ID else None, "order": row[3], } for row in txn From cb3aee8219b68e99fc391b96813e6588279e8d86 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 14 Jul 2017 14:06:09 +0100 Subject: [PATCH 023/223] Ensure category and role ids are non-null --- synapse/federation/transport/server.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 304c2a2a4..4f7d2546c 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -785,6 +785,9 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") + if category_id == "": + raise SynapseError(400, "category_id cannot be empty string") + resp = yield self.handler.update_group_summary_room( group_id, requester_user_id, room_id=room_id, @@ -800,6 +803,9 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") + if category_id == "": + raise SynapseError(400, "category_id cannot be empty string") + resp = yield self.handler.delete_group_summary_room( group_id, requester_user_id, room_id=room_id, @@ -854,6 +860,9 @@ class FederationGroupsCategoryServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") + if category_id == "": + raise SynapseError(400, "category_id cannot be empty string") + resp = yield self.handler.upsert_group_category( group_id, requester_user_id, category_id, content, ) @@ -866,6 +875,9 @@ class FederationGroupsCategoryServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") + if category_id == "": + raise SynapseError(400, "category_id cannot be empty string") + resp = yield self.handler.delete_group_category( group_id, requester_user_id, category_id, ) @@ -918,6 +930,9 @@ class FederationGroupsRoleServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") + if role_id == "": + raise SynapseError(400, "role_id cannot be empty string") + resp = yield self.handler.update_group_role( group_id, requester_user_id, role_id, content, ) @@ -930,6 +945,9 @@ class FederationGroupsRoleServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") + if role_id == "": + raise SynapseError(400, "role_id cannot be empty string") + resp = yield self.handler.delete_group_role( group_id, requester_user_id, role_id, ) @@ -956,6 +974,9 @@ class 
FederationGroupsSummaryUsersServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") + if role_id == "": + raise SynapseError(400, "role_id cannot be empty string") + resp = yield self.handler.update_group_summary_user( group_id, requester_user_id, user_id=user_id, @@ -971,6 +992,9 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") + if role_id == "": + raise SynapseError(400, "role_id cannot be empty string") + resp = yield self.handler.delete_group_summary_user( group_id, requester_user_id, user_id=user_id, From 2f9eafdd369796d8b7731b24ab8cf6a98ad19e29 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Jul 2017 14:52:27 +0100 Subject: [PATCH 024/223] Add local group server support --- synapse/federation/transport/client.py | 77 +++ synapse/federation/transport/server.py | 44 ++ synapse/groups/groups_server.py | 7 +- synapse/handlers/groups_local.py | 278 ++++++++ synapse/rest/__init__.py | 2 + synapse/rest/client/v2_alpha/groups.py | 642 ++++++++++++++++++ synapse/server.py | 5 + synapse/storage/__init__.py | 15 + synapse/storage/group_server.py | 152 +++++ .../storage/schema/delta/43/group_server.sql | 28 + 10 files changed, 1248 insertions(+), 2 deletions(-) create mode 100644 synapse/handlers/groups_local.py create mode 100644 synapse/rest/client/v2_alpha/groups.py diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index d0f8da751..ea340e345 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -472,6 +472,72 @@ class TransportLayerClient(object): defer.returnValue(content) + @log_function + def get_group_profile(self, destination, group_id, requester_user_id): + path = PREFIX + "/groups/%s/profile" % (group_id,) + + return self.client.post_json( + destination=destination, + path=path, + data={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def get_group_summary(self, destination, group_id, requester_user_id): + path = PREFIX + "/groups/%s/summary" % (group_id,) + + return self.client.post_json( + destination=destination, + path=path, + data={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def get_group_rooms(self, destination, group_id, requester_user_id): + path = PREFIX + "/groups/%s/rooms" % (group_id,) + + return self.client.post_json( + destination=destination, + path=path, + data={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def get_group_users(self, destination, group_id, requester_user_id): + path = PREFIX + "/groups/%s/users" % (group_id,) + + return self.client.post_json( + destination=destination, + path=path, + data={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def accept_group_invite(self, destination, group_id, user_id, content): + path = PREFIX + "/groups/%s/users/%s/accept_invite" % (group_id, user_id) + + return self.client.post_json( + destination=destination, + path=path, + data=content, + ignore_backoff=True, + ) + + @log_function + def invite_to_group(self, destination, group_id, user_id, content): + path = PREFIX + "/groups/%s/users/%s/invite" % (group_id, user_id) + + return self.client.post_json( + destination=destination, + path=path, + data=content, + ignore_backoff=True, + ) 
+ @log_function def invite_to_group_notification(self, destination, group_id, user_id, content): """Sent by group server to inform a user's server that they have been invited to a group. """ path = PREFIX + "/groups/local/%s/users/%s/invite" % (group_id, user_id) @@ -487,6 +553,17 @@ class TransportLayerClient(object): ignore_backoff=True, ) + @log_function + def remove_user_from_group(self, destination, group_id, user_id, content): + path = PREFIX + "/groups/%s/users/%s/remove" % (group_id, user_id) + + return self.client.post_json( + destination=destination, + path=path, + data=content, + ignore_backoff=True, + ) + @log_function def remove_user_from_group_notification(self, destination, group_id, user_id, content): diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 4f7d2546c..0f08334f3 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -715,6 +715,21 @@ class FederationGroupsInviteServlet(BaseFederationServlet): defer.returnValue((200, new_content)) +class FederationGroupsLocalInviteServlet(BaseFederationServlet): + PATH = "/groups/local/(?P[^/]*)/users/(?P[^/]*)/invite$" + + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id, user_id): + if get_domain_from_id(group_id) != origin: + raise SynapseError(403, "group_id doesn't match origin") + + new_content = yield self.handler.on_invite( + group_id, user_id, content, + ) + + defer.returnValue((200, new_content)) + + class FederationGroupsAcceptInviteServlet(BaseFederationServlet): """Accept an invitation from the group server """ @@ -750,6 +765,21 @@ class FederationGroupsRemoveUserServlet(BaseFederationServlet): defer.returnValue((200, new_content)) +class FederationGroupsRemoveLocalUserServlet(BaseFederationServlet): + PATH = "/groups/local/(?P[^/]*)/users/(?P[^/]*)/remove$" + + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id, user_id): + if get_domain_from_id(group_id) != origin: + raise SynapseError(403, "group_id doesn't match origin") + + new_content = yield self.handler.user_removed_from_group( + group_id, user_id, content, + ) + + defer.returnValue((200, new_content)) + + class FederationGroupsRenewAttestaionServlet(BaseFederationServlet): """A group or user's server renews their attestation """ @@ -1053,6 +1083,12 @@ GROUP_SERVER_SERVLET_CLASSES = ( ) +GROUP_LOCAL_SERVLET_CLASSES = ( + FederationGroupsLocalInviteServlet, + FederationGroupsRemoveLocalUserServlet, +) + + GROUP_ATTESTATION_SERVLET_CLASSES = ( FederationGroupsRenewAttestaionServlet, ) @@ -1083,6 +1119,14 @@ def register_servlets(hs, resource, authenticator, ratelimiter): server_name=hs.hostname, ).register(resource) + for servletclass in GROUP_LOCAL_SERVLET_CLASSES: + servletclass( + handler=hs.get_groups_local_handler(), + authenticator=authenticator, + ratelimiter=ratelimiter, + server_name=hs.hostname, + ).register(resource) + for servletclass in GROUP_ATTESTATION_SERVLET_CLASSES: servletclass( handler=hs.get_groups_attestation_renewer(), diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index a00bafe3a..c8559577f 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -462,7 +462,9 @@ class GroupsServerHandler(object): } if self.hs.is_mine_id(user_id): - raise NotImplementedError() + groups_local = self.hs.get_groups_local_handler() + res = yield groups_local.on_invite(group_id, user_id, content) + local_attestation = None else: local_attestation = self.attestations.create_attestation(group_id, user_id) content.update({ @@ -590,7 +592,8 @@ class
GroupsServerHandler(object): if is_kick: if self.hs.is_mine_id(user_id): - raise NotImplementedError() + groups_local = self.hs.get_groups_local_handler() + yield groups_local.user_removed_from_group(group_id, user_id, {}) else: yield self.transport_client.remove_user_from_group_notification( get_domain_from_id(user_id), group_id, user_id, {} diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py new file mode 100644 index 000000000..3df255b05 --- /dev/null +++ b/synapse/handlers/groups_local.py @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Vector Creations Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer + +from synapse.api.errors import SynapseError + +import logging + +logger = logging.getLogger(__name__) + + +# TODO: Validate attestations +# TODO: Allow users to "knock" or simply join depending on rules +# TODO: is_privileged flag to users and is_public to users and rooms +# TODO: Roles +# TODO: Audit log for admins (profile updates, membership changes, users who tried +# to join but were rejected, etc) +# TODO: Flairs +# TODO: Add group membership /sync + + +def _create_rerouter(name): + def f(self, group_id, *args, **kwargs): + if self.is_mine_id(group_id): + return getattr(self.groups_server_handler, name)( + group_id, *args, **kwargs + ) + + repl_layer = self.hs.get_replication_layer() + return getattr(repl_layer, name)(group_id, *args, **kwargs) + return f + + +class GroupsLocalHandler(object): + def __init__(self, hs): + self.hs = hs + self.store = hs.get_datastore() + self.room_list_handler = hs.get_room_list_handler() + self.groups_server_handler = hs.get_groups_server_handler() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.keyring = hs.get_keyring() + self.is_mine_id = hs.is_mine_id + self.signing_key = hs.config.signing_key[0] + self.server_name = hs.hostname + self.attestations = hs.get_groups_attestation_signing() + + # Ensure attestations get renewed + hs.get_groups_attestation_renewer() + + get_group_profile = _create_rerouter("get_group_profile") + get_rooms_in_group = _create_rerouter("get_rooms_in_group") + + update_group_summary_room = _create_rerouter("update_group_summary_room") + delete_group_summary_room = _create_rerouter("delete_group_summary_room") + + update_group_category = _create_rerouter("update_group_category") + delete_group_category = _create_rerouter("delete_group_category") + get_group_category = _create_rerouter("get_group_category") + get_group_categories = _create_rerouter("get_group_categories") + + update_group_summary_user = _create_rerouter("update_group_summary_user") + delete_group_summary_user = _create_rerouter("delete_group_summary_user") + + update_group_role = _create_rerouter("update_group_role") + delete_group_role = _create_rerouter("delete_group_role") + get_group_role = _create_rerouter("get_group_role") + get_group_roles = _create_rerouter("get_group_roles") + + @defer.inlineCallbacks + def get_group_summary(self, group_id,
requester_user_id): + if self.is_mine_id(group_id): + res = yield self.groups_server_handler.get_group_summary( + group_id, requester_user_id + ) + defer.returnValue(res) + + repl_layer = self.hs.get_replication_layer() + res = yield repl_layer.get_group_summary(group_id, requester_user_id) + + chunk = res["users_section"]["users"] + valid_users = [] + for entry in chunk: + g_user_id = entry["user_id"] + attestation = entry.pop("attestation") + try: + yield self.attestations.verify_attestation( + attestation, + group_id=group_id, + user_id=g_user_id, + ) + valid_users.append(entry) + except Exception as e: + logger.info("Failed to verify user is in group: %s", e) + + res["users_section"]["users"] = valid_users + + res["users_section"]["users"].sort(key=lambda e: e.get("order", 0)) + res["rooms_section"]["rooms"].sort(key=lambda e: e.get("order", 0)) + + defer.returnValue(res) + + def create_group(self, group_id, user_id, content): + logger.info("Asking to create group with ID: %r", group_id) + + if self.is_mine_id(group_id): + return self.groups_server_handler.create_group( + group_id, user_id, content + ) + + repl_layer = self.hs.get_replication_layer() + return repl_layer.create_group(group_id, user_id, content) # TODO + + def add_room(self, group_id, user_id, room_id, content): + if self.is_mine_id(group_id): + return self.groups_server_handler.add_room( + group_id, user_id, room_id, content + ) + + repl_layer = self.hs.get_replication_layer() + return repl_layer.add_room_to_group(group_id, user_id, room_id, content) # TODO + + @defer.inlineCallbacks + def get_users_in_group(self, group_id, requester_user_id): + if self.is_mine_id(group_id): + res = yield self.groups_server_handler.get_users_in_group( + group_id, requester_user_id + ) + defer.returnValue(res) + + repl_layer = self.hs.get_replication_layer() + res = yield repl_layer.get_users_in_group(group_id, requester_user_id) # TODO + + chunk = res["chunk"] + valid_entries = [] + for entry in chunk: + g_user_id = entry["user_id"] + attestation = entry.pop("attestation") + try: + yield self.attestations.verify_attestation( + attestation, + group_id=group_id, + user_id=g_user_id, + ) + valid_entries.append(entry) + except Exception as e: + logger.info("Failed to verify user is in group: %s", e) + + res["chunk"] = valid_entries + + defer.returnValue(res) + + @defer.inlineCallbacks + def join_group(self, group_id, user_id, content): + raise NotImplementedError() # TODO + + @defer.inlineCallbacks + def accept_invite(self, group_id, user_id, content): + if self.is_mine_id(group_id): + yield self.groups_server_handler.accept_invite( + group_id, user_id, content + ) + local_attestation = None + remote_attestation = None + else: + local_attestation = self.attestations.create_attestation(group_id, user_id) + content["attestation"] = local_attestation + + repl_layer = self.hs.get_replication_layer() + res = yield repl_layer.accept_group_invite(group_id, user_id, content) + + remote_attestation = res["attestation"] + + yield self.attestations.verify_attestation( + remote_attestation, + group_id=group_id, + user_id=user_id, + ) + + yield self.store.register_user_group_membership( + group_id, user_id, + membership="join", + is_admin=False, + local_attestation=local_attestation, + remote_attestation=remote_attestation, + ) + + defer.returnValue({}) + + @defer.inlineCallbacks + def invite(self, group_id, user_id, requester_user_id, config): + content = { + "requester_user_id": requester_user_id, + "config": config, + } + if 
self.is_mine_id(group_id): + res = yield self.groups_server_handler.invite_to_group( + group_id, user_id, requester_user_id, content, + ) + else: + repl_layer = self.hs.get_replication_layer() + res = yield repl_layer.invite_to_group( + group_id, user_id, content, + ) + + defer.returnValue(res) + + @defer.inlineCallbacks + def on_invite(self, group_id, user_id, content): + # TODO: Support auto join and rejection + + if not self.is_mine_id(user_id): + raise SynapseError(400, "User not on this server") + + local_profile = {} + if "profile" in content: + if "name" in content["profile"]: + local_profile["name"] = content["profile"]["name"] + if "avatar_url" in content["profile"]: + local_profile["avatar_url"] = content["profile"]["avatar_url"] + + yield self.store.register_user_group_membership( + group_id, user_id, + membership="invite", + content={"profile": local_profile, "inviter": content["inviter"]}, + ) + + defer.returnValue({"state": "invite"}) + + @defer.inlineCallbacks + def remove_user_from_group(self, group_id, user_id, requester_user_id, content): + if user_id == requester_user_id: + yield self.store.register_user_group_membership( + group_id, user_id, + membership="leave", + ) + + # TODO: Should probably remember that we tried to leave so that we can + # retry if the group server is currently down. + + if self.is_mine_id(group_id): + res = yield self.groups_server_handler.remove_user_from_group( + group_id, user_id, requester_user_id, content, + ) + else: + content["requester_user_id"] = requester_user_id + repl_layer = self.hs.get_replication_layer() + res = yield repl_layer.remove_user_from_group( + group_id, user_id, content + ) # TODO + + defer.returnValue(res) + + @defer.inlineCallbacks + def user_removed_from_group(self, group_id, user_id, content): + # TODO: Check if user in group + yield self.store.register_user_group_membership( + group_id, user_id, + membership="leave", + ) + + @defer.inlineCallbacks + def get_joined_groups(self, user_id): + group_ids = yield self.store.get_joined_groups(user_id) + defer.returnValue({"groups": group_ids}) diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index 3d809d181..16f5a73b9 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -52,6 +52,7 @@ from synapse.rest.client.v2_alpha import ( thirdparty, sendtodevice, user_directory, + groups, ) from synapse.http.server import JsonResource @@ -102,3 +103,4 @@ class ClientRestResource(JsonResource): thirdparty.register_servlets(hs, client_resource) sendtodevice.register_servlets(hs, client_resource) user_directory.register_servlets(hs, client_resource) + groups.register_servlets(hs, client_resource) diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py new file mode 100644 index 000000000..255552c36 --- /dev/null +++ b/synapse/rest/client/v2_alpha/groups.py @@ -0,0 +1,642 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Vector Creations Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from twisted.internet import defer + +from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.types import GroupID + +from ._base import client_v2_patterns + +import logging + +logger = logging.getLogger(__name__) + + +class GroupServlet(RestServlet): + PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/profile$") + + def __init__(self, hs): + super(GroupServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + group_description = yield self.groups_handler.get_group_profile(group_id, user_id) + + defer.returnValue((200, group_description)) + + +class GroupSummaryServlet(RestServlet): + PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/summary$") + + def __init__(self, hs): + super(GroupSummaryServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + group_summary = yield self.groups_handler.get_group_summary(group_id, user_id) + + defer.returnValue((200, group_summary)) + + +class GroupSummaryRoomsServlet(RestServlet): + PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/summary/rooms$") + + def __init__(self, hs): + super(GroupSummaryRoomsServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + group_summary = yield self.groups_handler.get_group_summary(group_id, user_id) + + defer.returnValue((200, group_summary)) + + +class GroupSummaryRoomsDefaultCatServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/summary/rooms/(?P[^/]*)$" + ) + + def __init__(self, hs): + super(GroupSummaryRoomsDefaultCatServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, room_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + resp = yield self.groups_handler.update_group_summary_room( + group_id, user_id, + room_id=room_id, + category_id=None, + content=content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, request, group_id, room_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + resp = yield self.groups_handler.delete_group_summary_room( + group_id, user_id, + room_id=room_id, + category_id=None, + ) + + defer.returnValue((200, resp)) + + +class GroupSummaryRoomsCatServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/summary" + "/categories/(?P[^/]+)/rooms/(?P[^/]+)$" + ) + + def __init__(self, hs): + super(GroupSummaryRoomsCatServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, category_id, room_id): + requester =
yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + resp = yield self.groups_handler.update_group_summary_room( + group_id, user_id, + room_id=room_id, + category_id=category_id, + content=content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, request, group_id, category_id, room_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + resp = yield self.groups_handler.delete_group_summary_room( + group_id, user_id, + room_id=room_id, + category_id=category_id, + ) + + defer.returnValue((200, resp)) + + +class GroupCategoryServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/categories/(?P[^/]+)$" + ) + + def __init__(self, hs): + super(GroupCategoryServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id, category_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + category = yield self.groups_handler.get_group_category( + group_id, user_id, + category_id=category_id, + ) + + defer.returnValue((200, category)) + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, category_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + resp = yield self.groups_handler.update_group_category( + group_id, user_id, + category_id=category_id, + content=content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, request, group_id, category_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + resp = yield self.groups_handler.delete_group_category( + group_id, user_id, + category_id=category_id, + ) + + defer.returnValue((200, resp)) + + +class GroupCategoriesServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/categories/$" + ) + + def __init__(self, hs): + super(GroupCategoriesServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + category = yield self.groups_handler.get_group_categories( + group_id, user_id, + ) + + defer.returnValue((200, category)) + + +class GroupRoleServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/roles/(?P[^/]+)$" + ) + + def __init__(self, hs): + super(GroupRoleServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id, role_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + category = yield self.groups_handler.get_group_role( + group_id, user_id, + role_id=role_id, + ) + + defer.returnValue((200, category)) + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, role_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + resp = yield self.groups_handler.update_group_role( + group_id, user_id, + role_id=role_id, + 
content=content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, request, group_id, role_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + resp = yield self.groups_handler.delete_group_role( + group_id, user_id, + role_id=role_id, + ) + + defer.returnValue((200, resp)) + + +class GroupRolesServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/roles/$" + ) + + def __init__(self, hs): + super(GroupRolesServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + category = yield self.groups_handler.get_group_roles( + group_id, user_id, + ) + + defer.returnValue((200, category)) + + +class GroupSummaryUsersDefaultRoleServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/summary/users/(?P[^/]*)$" + ) + + def __init__(self, hs): + super(GroupSummaryUsersDefaultRoleServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, user_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + resp = yield self.groups_handler.update_group_summary_user( + group_id, requester_user_id, + user_id=user_id, + role_id=None, + content=content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, request, group_id, user_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + resp = yield self.groups_handler.delete_group_summary_user( + group_id, requester_user_id, + user_id=user_id, + role_id=None, + ) + + defer.returnValue((200, resp)) + + +class GroupSummaryUsersRoleServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/summary" + "/roles/(?P[^/]+)/users/(?P[^/]+)$" + ) + + def __init__(self, hs): + super(GroupSummaryUsersRoleServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, role_id, user_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + resp = yield self.groups_handler.update_group_summary_user( + group_id, requester_user_id, + user_id=user_id, + role_id=role_id, + content=content, + ) + + defer.returnValue((200, resp)) + + @defer.inlineCallbacks + def on_DELETE(self, request, group_id, role_id, user_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + resp = yield self.groups_handler.delete_group_summary_user( + group_id, requester_user_id, + user_id=user_id, + role_id=role_id, + ) + + defer.returnValue((200, resp)) + + +class GroupRoomServlet(RestServlet): + PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/rooms$") + + def __init__(self, hs): + super(GroupRoomServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id): 
+ requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + result = yield self.groups_handler.get_rooms_in_group(group_id, user_id) + + defer.returnValue((200, result)) + + +class GroupUsersServlet(RestServlet): + PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/users$") + + def __init__(self, hs): + super(GroupUsersServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + result = yield self.groups_handler.get_users_in_group(group_id, user_id) + + defer.returnValue((200, result)) + + +class GroupCreateServlet(RestServlet): + PATTERNS = client_v2_patterns("/create_group$") + + def __init__(self, hs): + super(GroupCreateServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + self.server_name = hs.hostname + + @defer.inlineCallbacks + def on_POST(self, request): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + # TODO: Create group on remote server + content = parse_json_object_from_request(request) + localpart = content.pop("localpart") + group_id = GroupID.create(localpart, self.server_name).to_string() + + result = yield self.groups_handler.create_group(group_id, user_id, content) + + defer.returnValue((200, result)) + + +class GroupAdminRoomsServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/admin/rooms/(?P[^/]*)$" + ) + + def __init__(self, hs): + super(GroupAdminRoomsServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, room_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + result = yield self.groups_handler.add_room(group_id, user_id, room_id, content) + + defer.returnValue((200, result)) + + +class GroupAdminUsersInviteServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/admin/users/invite/(?P[^/]*)$" + ) + + def __init__(self, hs): + super(GroupAdminUsersInviteServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + self.store = hs.get_datastore() + self.is_mine_id = hs.is_mine_id + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, user_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + config = content.get("config", {}) + result = yield self.groups_handler.invite( + group_id, user_id, requester_user_id, config, + ) + + defer.returnValue((200, result)) + + +class GroupAdminUsersKickServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/admin/users/remove/(?P[^/]*)$" + ) + + def __init__(self, hs): + super(GroupAdminUsersKickServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id, user_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = 
parse_json_object_from_request(request) + result = yield self.groups_handler.remove_user_from_group( + group_id, user_id, requester_user_id, content, + ) + + defer.returnValue((200, result)) + + +class GroupSelfLeaveServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/self/leave$" + ) + + def __init__(self, hs): + super(GroupSelfLeaveServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + result = yield self.groups_handler.remove_user_from_group( + group_id, requester_user_id, requester_user_id, content, + ) + + defer.returnValue((200, result)) + + +class GroupSelfJoinServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/self/join$" + ) + + def __init__(self, hs): + super(GroupSelfJoinServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + result = yield self.groups_handler.join_group( + group_id, requester_user_id, content, + ) + + defer.returnValue((200, result)) + + +class GroupSelfAcceptInviteServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/self/accept_invite$" + ) + + def __init__(self, hs): + super(GroupSelfAcceptInviteServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + result = yield self.groups_handler.accept_invite( + group_id, requester_user_id, content, + ) + + defer.returnValue((200, result)) + + +class GroupsForUserServlet(RestServlet): + PATTERNS = client_v2_patterns( + "/joined_groups$" + ) + + def __init__(self, hs): + super(GroupsForUserServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + result = yield self.groups_handler.get_joined_groups(user_id) + + defer.returnValue((200, result)) + + +def register_servlets(hs, http_server): + GroupServlet(hs).register(http_server) + GroupSummaryServlet(hs).register(http_server) + GroupUsersServlet(hs).register(http_server) + GroupRoomServlet(hs).register(http_server) + GroupCreateServlet(hs).register(http_server) + GroupAdminRoomsServlet(hs).register(http_server) + GroupAdminUsersInviteServlet(hs).register(http_server) + GroupAdminUsersKickServlet(hs).register(http_server) + GroupSelfLeaveServlet(hs).register(http_server) + GroupSelfJoinServlet(hs).register(http_server) + GroupSelfAcceptInviteServlet(hs).register(http_server) + GroupsForUserServlet(hs).register(http_server) + GroupSummaryRoomsDefaultCatServlet(hs).register(http_server) + GroupCategoryServlet(hs).register(http_server) + 
GroupCategoriesServlet(hs).register(http_server) + GroupSummaryRoomsCatServlet(hs).register(http_server) + GroupRoleServlet(hs).register(http_server) + GroupRolesServlet(hs).register(http_server) + GroupSummaryUsersDefaultRoleServlet(hs).register(http_server) + GroupSummaryUsersRoleServlet(hs).register(http_server) diff --git a/synapse/server.py b/synapse/server.py index d857cca84..d0a627276 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -50,6 +50,7 @@ from synapse.handlers.initial_sync import InitialSyncHandler from synapse.handlers.receipts import ReceiptsHandler from synapse.handlers.read_marker import ReadMarkerHandler from synapse.handlers.user_directory import UserDirectoyHandler +from synapse.handlers.groups_local import GroupsLocalHandler from synapse.groups.groups_server import GroupsServerHandler from synapse.groups.attestations import GroupAttestionRenewer, GroupAttestationSigning from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory @@ -141,6 +142,7 @@ class HomeServer(object): 'read_marker_handler', 'action_generator', 'user_directory_handler', + 'groups_local_handler', 'groups_server_handler', 'groups_attestation_signing', 'groups_attestation_renewer', @@ -314,6 +316,9 @@ class HomeServer(object): def build_user_directory_handler(self): return UserDirectoyHandler(self) + def build_groups_local_handler(self): + return GroupsLocalHandler(self) + def build_groups_server_handler(self): return GroupsServerHandler(self) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index fdee9f1ad..594566eb3 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -136,6 +136,9 @@ class DataStore(RoomMemberStore, RoomStore, db_conn, "pushers", "id", extra_tables=[("deleted_pushers", "stream_id")], ) + self._group_updates_id_gen = StreamIdGenerator( + db_conn, "local_group_updates", "stream_id", + ) if isinstance(self.database_engine, PostgresEngine): self._cache_id_gen = StreamIdGenerator( @@ -236,6 +239,18 @@ class DataStore(RoomMemberStore, RoomStore, prefilled_cache=curr_state_delta_prefill, ) + _group_updates_prefill, min_group_updates_id = self._get_cache_dict( + db_conn, "local_group_updates", + entity_column="user_id", + stream_column="stream_id", + max_value=self._group_updates_id_gen.get_current_token(), + limit=1000, + ) + self._group_updates_stream_cache = StreamChangeCache( + "_group_updates_stream_cache", min_group_updates_id, + prefilled_cache=_group_updates_prefill, + ) + cur = LoggingTransaction( db_conn.cursor(), name="_find_stream_orderings_for_times_txn", diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index e8a799d8c..036549d43 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -756,6 +756,103 @@ class GroupServerStore(SQLBaseStore): desc="add_room_to_group", ) + @defer.inlineCallbacks + def register_user_group_membership(self, group_id, user_id, membership, + is_admin=False, content={}, + local_attestation=None, + remote_attestation=None, + ): + def _register_user_group_membership_txn(txn, next_id): + # TODO: Upsert? 
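+            # The delete-then-insert pair below emulates an upsert inside the
+            # transaction: clear any existing membership row for this
+            # (group_id, user_id) pair before writing the new state.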
+ self._simple_delete_txn( + txn, + table="local_group_membership", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + ) + self._simple_insert_txn( + txn, + table="local_group_membership", + values={ + "group_id": group_id, + "user_id": user_id, + "is_admin": is_admin, + "membership": membership, + "content": json.dumps(content), + }, + ) + self._simple_delete_txn( + txn, + table="local_group_updates", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + "type": "membership", + }, + ) + self._simple_insert_txn( + txn, + table="local_group_updates", + values={ + "stream_id": next_id, + "group_id": group_id, + "user_id": user_id, + "type": "membership", + "content": json.dumps({"membership": membership, "content": content}), + } + ) + self._group_updates_stream_cache.entity_has_changed(user_id, next_id) + + # TODO: Insert profile to ensuer it comes down stream if its a join. + + if membership == "join": + if local_attestation: + self._simple_insert_txn( + txn, + table="group_attestations_renewals", + values={ + "group_id": group_id, + "user_id": user_id, + "valid_until_ms": local_attestation["valid_until_ms"], + } + ) + if remote_attestation: + self._simple_insert_txn( + txn, + table="group_attestations_remote", + values={ + "group_id": group_id, + "user_id": user_id, + "valid_until_ms": remote_attestation["valid_until_ms"], + "attestation": json.dumps(remote_attestation), + } + ) + else: + self._simple_delete_txn( + txn, + table="group_attestations_renewals", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + ) + self._simple_delete_txn( + txn, + table="group_attestations_remote", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + ) + + with self._group_updates_id_gen.get_next() as next_id: + yield self.runInteraction( + "register_user_group_membership", + _register_user_group_membership_txn, next_id, + ) + @defer.inlineCallbacks def create_group(self, group_id, user_id, name, avatar_url, short_description, long_description,): @@ -771,6 +868,61 @@ class GroupServerStore(SQLBaseStore): desc="create_group", ) + def get_joined_groups(self, user_id): + return self._simple_select_onecol( + table="local_group_membership", + keyvalues={ + "user_id": user_id, + "membership": "join", + }, + retcol="group_id", + desc="get_joined_groups", + ) + + def get_all_groups_for_user(self, user_id, now_token): + def _get_all_groups_for_user_txn(txn): + sql = """ + SELECT group_id, type, membership, u.content + FROM local_group_updates AS u + INNER JOIN local_group_membership USING (group_id, user_id) + WHERE user_id = ? AND membership != 'leave' + AND stream_id <= ? + """ + txn.execute(sql, (user_id, now_token,)) + return self.cursor_to_dict(txn) + return self.runInteraction( + "get_all_groups_for_user", _get_all_groups_for_user_txn, + ) + + def get_groups_changes_for_user(self, user_id, from_token, to_token): + from_token = int(from_token) + has_changed = self._group_updates_stream_cache.has_entity_changed( + user_id, from_token, + ) + if not has_changed: + return [] + + def _get_groups_changes_for_user_txn(txn): + sql = """ + SELECT group_id, membership, type, u.content + FROM local_group_updates AS u + INNER JOIN local_group_membership USING (group_id, user_id) + WHERE user_id = ? AND ? < stream_id AND stream_id <= ? 
+ """ + txn.execute(sql, (user_id, from_token, to_token,)) + return [{ + "group_id": group_id, + "membership": membership, + "type": gtype, + "content": json.loads(content_json), + } for group_id, membership, gtype, content_json in txn] + return self.runInteraction( + "get_groups_changes_for_user", _get_groups_changes_for_user_txn, + ) + + def get_group_stream_token(self): + return self._group_updates_id_gen.get_current_token() + def get_attestations_need_renewals(self, valid_until_ms): """Get all attestations that need to be renewed until givent time """ diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index 472aab0a7..e32db8b31 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -142,3 +142,31 @@ CREATE TABLE group_attestations_remote ( CREATE INDEX group_attestations_remote_g_idx ON group_attestations_remote(group_id, user_id); CREATE INDEX group_attestations_remote_u_idx ON group_attestations_remote(user_id); CREATE INDEX group_attestations_remote_v_idx ON group_attestations_remote(valid_until_ms); + + +CREATE TABLE local_group_membership ( + group_id TEXT NOT NULL, + user_id TEXT NOT NULL, + is_admin BOOLEAN NOT NULL, + membership TEXT NOT NULL, + content TEXT NOT NULL +); + +CREATE INDEX local_group_membership_u_idx ON local_group_membership(user_id, group_id); +CREATE INDEX local_group_membership_g_idx ON local_group_membership(group_id); + + +CREATE TABLE local_group_updates ( + stream_id BIGINT NOT NULL, + group_id TEXT NOT NULL, + user_id TEXT NOT NULL, + type TEXT NOT NULL, + content TEXT NOT NULL +); + + +CREATE TABLE local_group_profiles ( + group_id TEXT NOT NULL, + name TEXT, + avatar_url TEXT +); From e96ee95a7e84e7d75ac57395bb64e1c3428596e9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 09:33:16 +0100 Subject: [PATCH 025/223] Remove sync stuff --- synapse/storage/__init__.py | 15 --------- synapse/storage/group_server.py | 55 --------------------------------- 2 files changed, 70 deletions(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 594566eb3..fdee9f1ad 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -136,9 +136,6 @@ class DataStore(RoomMemberStore, RoomStore, db_conn, "pushers", "id", extra_tables=[("deleted_pushers", "stream_id")], ) - self._group_updates_id_gen = StreamIdGenerator( - db_conn, "local_group_updates", "stream_id", - ) if isinstance(self.database_engine, PostgresEngine): self._cache_id_gen = StreamIdGenerator( @@ -239,18 +236,6 @@ class DataStore(RoomMemberStore, RoomStore, prefilled_cache=curr_state_delta_prefill, ) - _group_updates_prefill, min_group_updates_id = self._get_cache_dict( - db_conn, "local_group_updates", - entity_column="user_id", - stream_column="stream_id", - max_value=self._group_updates_id_gen.get_current_token(), - limit=1000, - ) - self._group_updates_stream_cache = StreamChangeCache( - "_group_updates_stream_cache", min_group_updates_id, - prefilled_cache=_group_updates_prefill, - ) - cur = LoggingTransaction( db_conn.cursor(), name="_find_stream_orderings_for_times_txn", diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 036549d43..2dcdcbfdf 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -868,61 +868,6 @@ class GroupServerStore(SQLBaseStore): desc="create_group", ) - def get_joined_groups(self, user_id): - return self._simple_select_onecol( - 
table="local_group_membership", - keyvalues={ - "user_id": user_id, - "membership": "join", - }, - retcol="group_id", - desc="get_joined_groups", - ) - - def get_all_groups_for_user(self, user_id, now_token): - def _get_all_groups_for_user_txn(txn): - sql = """ - SELECT group_id, type, membership, u.content - FROM local_group_updates AS u - INNER JOIN local_group_membership USING (group_id, user_id) - WHERE user_id = ? AND membership != 'leave' - AND stream_id <= ? - """ - txn.execute(sql, (user_id, now_token,)) - return self.cursor_to_dict(txn) - return self.runInteraction( - "get_all_groups_for_user", _get_all_groups_for_user_txn, - ) - - def get_groups_changes_for_user(self, user_id, from_token, to_token): - from_token = int(from_token) - has_changed = self._group_updates_stream_cache.has_entity_changed( - user_id, from_token, - ) - if not has_changed: - return [] - - def _get_groups_changes_for_user_txn(txn): - sql = """ - SELECT group_id, membership, type, u.content - FROM local_group_updates AS u - INNER JOIN local_group_membership USING (group_id, user_id) - WHERE user_id = ? AND ? < stream_id AND stream_id <= ? - """ - txn.execute(sql, (user_id, from_token, to_token,)) - return [{ - "group_id": group_id, - "membership": membership, - "type": gtype, - "content": json.loads(content_json), - } for group_id, membership, gtype, content_json in txn] - return self.runInteraction( - "get_groups_changes_for_user", _get_groups_changes_for_user_txn, - ) - - def get_group_stream_token(self): - return self._group_updates_id_gen.get_current_token() - def get_attestations_need_renewals(self, valid_until_ms): """Get all attestations that need to be renewed until givent time """ From 45407301111e55d04f46957a824d73eab69796de Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 09:36:17 +0100 Subject: [PATCH 026/223] Remove unused tables --- synapse/storage/schema/delta/43/group_server.sql | 7 ------- 1 file changed, 7 deletions(-) diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index e32db8b31..f9e11c914 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -163,10 +163,3 @@ CREATE TABLE local_group_updates ( type TEXT NOT NULL, content TEXT NOT NULL ); - - -CREATE TABLE local_group_profiles ( - group_id TEXT NOT NULL, - name TEXT, - avatar_url TEXT -); From 6e9f147faa528c7493dbaa4e12b64baef2379d83 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 09:47:25 +0100 Subject: [PATCH 027/223] Add GroupID type --- synapse/types.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/synapse/types.py b/synapse/types.py index 111948540..b32c0e360 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -156,6 +156,11 @@ class EventID(DomainSpecificString): SIGIL = "$" +class GroupID(DomainSpecificString): + """Structure representing a group ID.""" + SIGIL = "+" + + class StreamToken( namedtuple("Token", ( "room_key", From 508460f24077da74d8a3d3ce891c0b55ebbce2e8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 09:55:46 +0100 Subject: [PATCH 028/223] Remove sync stuff --- synapse/storage/group_server.py | 20 ------------------- .../storage/schema/delta/43/group_server.sql | 10 +--------- 2 files changed, 1 insertion(+), 29 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 2dcdcbfdf..bff2324cc 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ 
-783,26 +783,6 @@ class GroupServerStore(SQLBaseStore): "content": json.dumps(content), }, ) - self._simple_delete_txn( - txn, - table="local_group_updates", - keyvalues={ - "group_id": group_id, - "user_id": user_id, - "type": "membership", - }, - ) - self._simple_insert_txn( - txn, - table="local_group_updates", - values={ - "stream_id": next_id, - "group_id": group_id, - "user_id": user_id, - "type": "membership", - "content": json.dumps({"membership": membership, "content": content}), - } - ) self._group_updates_stream_cache.entity_has_changed(user_id, next_id) # TODO: Insert profile to ensuer it comes down stream if its a join. diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index f9e11c914..e1fd47aa7 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -144,6 +144,7 @@ CREATE INDEX group_attestations_remote_u_idx ON group_attestations_remote(user_i CREATE INDEX group_attestations_remote_v_idx ON group_attestations_remote(valid_until_ms); +-- The group membership for the HS's users CREATE TABLE local_group_membership ( group_id TEXT NOT NULL, user_id TEXT NOT NULL, @@ -154,12 +155,3 @@ CREATE TABLE local_group_membership ( CREATE INDEX local_group_membership_u_idx ON local_group_membership(user_id, group_id); CREATE INDEX local_group_membership_g_idx ON local_group_membership(group_id); - - -CREATE TABLE local_group_updates ( - stream_id BIGINT NOT NULL, - group_id TEXT NOT NULL, - user_id TEXT NOT NULL, - type TEXT NOT NULL, - content TEXT NOT NULL -); From 3e703eb04e1b30dc2bce03d3895ac79ac24a063d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 10:17:25 +0100 Subject: [PATCH 029/223] Comment --- synapse/storage/group_server.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index bff2324cc..3c6ee7df6 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -762,6 +762,20 @@ class GroupServerStore(SQLBaseStore): local_attestation=None, remote_attestation=None, ): + """Registers that a local user is a member of a (local or remote) group. + + Args: + group_id (str) + user_id (str) + membership (str) + is_admin (bool) + content (dict): Content of the membership, e.g. includes the inviter + if the user has been invited. + local_attestation (dict): If remote group then store the fact that we + have given out an attestation, else None. + remote_attestation (dict): If remote group then store the remote + attestation from the group, else None. + """ def _register_user_group_membership_txn(txn, next_id): # TODO: Upsert? 
            self._simple_delete_txn(

From 68f34e85cebcacef428d1f38942990c43c3cdd01 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 18 Jul 2017 10:29:57 +0100
Subject: [PATCH 030/223] Use transport client directly

---
 synapse/handlers/groups_local.py | 43 +++++++++++++++++++-------------
 1 file changed, 25 insertions(+), 18 deletions(-)

diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index 3df255b05..e0f53120b 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -32,15 +32,17 @@ logger = logging.getLogger(__name__)
 
 # TODO: Add group membership /sync
 
 
-def _create_rerouter(name):
+def _create_rerouter(func_name):
+    """Returns a function that looks at the group ID and calls the function
+    on the local group server if the group is local, else over federation.
+    """
     def f(self, group_id, *args, **kwargs):
         if self.is_mine_id(group_id):
-            return getattr(self.groups_server_handler, name)(
+            return getattr(self.groups_server_handler, func_name)(
                 group_id, *args, **kwargs
             )
-
-        repl_layer = self.hs.get_replication_layer()
-        return getattr(repl_layer, name)(group_id, *args, **kwargs)
+        return getattr(self.transport_client, func_name)(group_id, *args, **kwargs)
     return f
 
 
@@ -50,6 +52,7 @@ class GroupsLocalHandler(object):
         self.store = hs.get_datastore()
         self.room_list_handler = hs.get_room_list_handler()
         self.groups_server_handler = hs.get_groups_server_handler()
+        self.transport_client = hs.get_federation_transport_client()
         self.auth = hs.get_auth()
         self.clock = hs.get_clock()
         self.keyring = hs.get_keyring()
@@ -82,15 +85,19 @@ class GroupsLocalHandler(object):
 
     @defer.inlineCallbacks
     def get_group_summary(self, group_id, requester_user_id):
+        """Get the group summary for a group.
+
+        If the group is remote we check that the users have valid attestations.
+        """
         if self.is_mine_id(group_id):
             res = yield self.groups_server_handler.get_group_summary(
                 group_id, requester_user_id
             )
             defer.returnValue(res)
 
-        repl_layer = self.hs.get_replication_layer()
-        res = yield repl_layer.get_group_summary(group_id, requester_user_id)
+        res = yield self.transport_client.get_group_summary(group_id, requester_user_id)
 
+        # Loop through the users and validate the attestations.
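+        # A remote group server is not authoritative for users on other
+        # homeservers, so entries whose attestation fails verification are
+        # dropped from the result rather than trusted.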
chunk = res["users_section"]["users"] valid_users = [] for entry in chunk: @@ -121,8 +128,7 @@ class GroupsLocalHandler(object): group_id, user_id, content ) - repl_layer = self.hs.get_replication_layer() - return repl_layer.create_group(group_id, user_id, content) # TODO + return self.transport_client.create_group(group_id, user_id, content) # TODO def add_room(self, group_id, user_id, room_id, content): if self.is_mine_id(group_id): @@ -130,8 +136,9 @@ class GroupsLocalHandler(object): group_id, user_id, room_id, content ) - repl_layer = self.hs.get_replication_layer() - return repl_layer.add_room_to_group(group_id, user_id, room_id, content) # TODO + return self.transport_client.add_room_to_group( + group_id, user_id, room_id, content, + ) # TODO @defer.inlineCallbacks def get_users_in_group(self, group_id, requester_user_id): @@ -141,8 +148,9 @@ class GroupsLocalHandler(object): ) defer.returnValue(res) - repl_layer = self.hs.get_replication_layer() - res = yield repl_layer.get_users_in_group(group_id, requester_user_id) # TODO + res = yield self.transport_client.get_users_in_group( + group_id, requester_user_id, + ) # TODO chunk = res["chunk"] valid_entries = [] @@ -179,8 +187,9 @@ class GroupsLocalHandler(object): local_attestation = self.attestations.create_attestation(group_id, user_id) content["attestation"] = local_attestation - repl_layer = self.hs.get_replication_layer() - res = yield repl_layer.accept_group_invite(group_id, user_id, content) + res = yield self.transport_client.accept_group_invite( + group_id, user_id, content, + ) remote_attestation = res["attestation"] @@ -211,8 +220,7 @@ class GroupsLocalHandler(object): group_id, user_id, requester_user_id, content, ) else: - repl_layer = self.hs.get_replication_layer() - res = yield repl_layer.invite_to_group( + res = yield self.transport_client.invite_to_group( group_id, user_id, content, ) @@ -257,8 +265,7 @@ class GroupsLocalHandler(object): ) else: content["requester_user_id"] = requester_user_id - repl_layer = self.hs.get_replication_layer() - res = yield repl_layer.remove_user_from_group( + res = yield self.transport_client.remove_user_from_group( group_id, user_id, content ) # TODO From cccfcfa7b9e2af28bb56d6f970754fe5aa07ad56 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 10:31:59 +0100 Subject: [PATCH 031/223] Comments --- synapse/federation/transport/server.py | 34 ++++++++++++++------------ synapse/handlers/groups_local.py | 3 +++ 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 0f08334f3..d68e90d2f 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -715,21 +715,6 @@ class FederationGroupsInviteServlet(BaseFederationServlet): defer.returnValue((200, new_content)) -class FederationGroupsLocalInviteServlet(BaseFederationServlet): - PATH = "/groups/local/(?P[^/]*)/users/(?P[^/]*)/invite$" - - @defer.inlineCallbacks - def on_POST(self, origin, content, query, group_id, user_id): - if get_domain_from_id(group_id) != origin: - raise SynapseError(403, "group_id doesn't match origin") - - new_content = yield self.handler.on_invite( - group_id, user_id, content, - ) - - defer.returnValue((200, new_content)) - - class FederationGroupsAcceptInviteServlet(BaseFederationServlet): """Accept an invitation from the group server """ @@ -765,7 +750,26 @@ class FederationGroupsRemoveUserServlet(BaseFederationServlet): defer.returnValue((200, new_content)) +class 
FederationGroupsLocalInviteServlet(BaseFederationServlet): + """A group server has invited a local user + """ + PATH = "/groups/local/(?P[^/]*)/users/(?P[^/]*)/invite$" + + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id, user_id): + if get_domain_from_id(group_id) != origin: + raise SynapseError(403, "group_id doesn't match origin") + + new_content = yield self.handler.on_invite( + group_id, user_id, content, + ) + + defer.returnValue((200, new_content)) + + class FederationGroupsRemoveLocalUserServlet(BaseFederationServlet): + """A group server has removed a local user + """ PATH = "/groups/local/(?P[^/]*)/users/(?P[^/]*)/remove$" @defer.inlineCallbacks diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index e0f53120b..0857b14c7 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -64,6 +64,9 @@ class GroupsLocalHandler(object): # Ensure attestations get renewed hs.get_groups_attestation_renewer() + # The following functions merely route the query to the local groups server + # or federation depending on if the group is local or remote + get_group_profile = _create_rerouter("get_group_profile") get_rooms_in_group = _create_rerouter("get_rooms_in_group") From e5ea6dd021ea71f3b5bc9a37fb896c351ee550b1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 14:37:06 +0100 Subject: [PATCH 032/223] Add client apis --- synapse/federation/transport/client.py | 196 +++++++++++++++++++++++-- synapse/handlers/groups_local.py | 2 +- 2 files changed, 188 insertions(+), 10 deletions(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index ea340e345..500f3622a 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -476,10 +476,10 @@ class TransportLayerClient(object): def get_group_profile(self, destination, group_id, requester_user_id): path = PREFIX + "/groups/%s/profile" % (group_id,) - return self.client.post_json( + return self.client.get_json( destination=destination, path=path, - data={"requester_user_id": requester_user_id}, + args={"requester_user_id": requester_user_id}, ignore_backoff=True, ) @@ -487,10 +487,10 @@ class TransportLayerClient(object): def get_group_summary(self, destination, group_id, requester_user_id): path = PREFIX + "/groups/%s/summary" % (group_id,) - return self.client.post_json( + return self.client.get_json( destination=destination, path=path, - data={"requester_user_id": requester_user_id}, + args={"requester_user_id": requester_user_id}, ignore_backoff=True, ) @@ -498,10 +498,22 @@ class TransportLayerClient(object): def get_group_rooms(self, destination, group_id, requester_user_id): path = PREFIX + "/groups/%s/rooms" % (group_id,) + return self.client.get_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + def add_room_to_group(self, destination, group_id, requester_user_id, room_id, + content): + path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,) + return self.client.post_json( destination=destination, path=path, - data={"requester_user_id": requester_user_id}, + args={"requester_user_id": requester_user_id}, + data=content, ignore_backoff=True, ) @@ -509,10 +521,10 @@ class TransportLayerClient(object): def get_group_users(self, destination, group_id, requester_user_id): path = PREFIX + "/groups/%s/users" % (group_id,) - return self.client.post_json( + return self.client.get_json( 
destination=destination, path=path, - data={"requester_user_id": requester_user_id}, + args={"requester_user_id": requester_user_id}, ignore_backoff=True, ) @@ -528,12 +540,13 @@ class TransportLayerClient(object): ) @log_function - def invite_to_group(self, destination, group_id, user_id, content): + def invite_to_group(self, destination, group_id, user_id, requester_user_id, content): path = PREFIX + "/groups/%s/users/%s/invite" % (group_id, user_id) return self.client.post_json( destination=destination, path=path, + args=requester_user_id, data=content, ignore_backoff=True, ) @@ -554,12 +567,14 @@ class TransportLayerClient(object): ) @log_function - def remove_user_from_group(self, destination, group_id, user_id, content): + def remove_user_from_group(self, destination, group_id, requester_user_id, + user_id, content): path = PREFIX + "/groups/%s/users/%s/remove" % (group_id, user_id) return self.client.post_json( destination=destination, path=path, + args={"requester_user_id": requester_user_id}, data=content, ignore_backoff=True, ) @@ -594,3 +609,166 @@ class TransportLayerClient(object): data=content, ignore_backoff=True, ) + + @log_function + def update_group_summary_room(self, destination, group_id, user_id, room_id, + category_id, content): + if category_id: + path = PREFIX + "/groups/%s/summary/categories/%s/rooms/%s" % ( + group_id, category_id, room_id, + ) + else: + path = PREFIX + "/groups/%s/summary/rooms/%s" % (group_id, room_id,) + + return self.client.post_json( + destination=destination, + path=path, + args={"requester_user_id": user_id}, + data=content, + ignore_backoff=True, + ) + + @log_function + def delete_group_summary_room(self, destination, group_id, user_id, room_id, + category_id): + if category_id: + path = PREFIX + "/groups/%s/summary/categories/%s/rooms/%s" % ( + group_id, category_id, room_id, + ) + else: + path = PREFIX + "/groups/%s/summary/rooms/%s" % (group_id, room_id,) + + return self.client.delete_json( + destination=destination, + path=path, + args={"requester_user_id": user_id}, + ignore_backoff=True, + ) + + @log_function + def get_group_categories(self, destination, group_id, requester_user_id): + path = PREFIX + "/groups/%s/categories" % (group_id,) + + return self.client.get_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def get_group_category(self, destination, group_id, requester_user_id, category_id): + path = PREFIX + "/groups/%s/categories/%s" % (group_id, category_id,) + + return self.client.get_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def update_group_category(self, destination, group_id, requester_user_id, category_id, + content): + path = PREFIX + "/groups/%s/categories/%s" % (group_id, category_id,) + + return self.client.post_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + data=content, + ignore_backoff=True, + ) + + @log_function + def delete_group_category(self, destination, group_id, requester_user_id, + category_id): + path = PREFIX + "/groups/%s/categories/%s" % (group_id, category_id,) + + return self.client.delete_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def get_group_roles(self, destination, group_id, requester_user_id): + path = PREFIX + "/groups/%s/roles" % (group_id,) + + return 
self.client.get_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def get_group_role(self, destination, group_id, requester_user_id, role_id): + path = PREFIX + "/groups/%s/roles/%s" % (group_id, role_id,) + + return self.client.get_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def update_group_role(self, destination, group_id, requester_user_id, role_id, + content): + path = PREFIX + "/groups/%s/roles/%s" % (group_id, role_id,) + + return self.client.post_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + data=content, + ignore_backoff=True, + ) + + @log_function + def delete_group_role(self, destination, group_id, requester_user_id, role_id): + path = PREFIX + "/groups/%s/roles/%s" % (group_id, role_id,) + + return self.client.delete_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + + @log_function + def update_group_summary_user(self, destination, group_id, requester_user_id, + user_id, role_id, content): + if role_id: + path = PREFIX + "/groups/%s/summary/roles/%s/users/%s" % ( + group_id, role_id, user_id, + ) + else: + path = PREFIX + "/groups/%s/summary/users/%s" % (group_id, user_id,) + + return self.client.post_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + data=content, + ignore_backoff=True, + ) + + @log_function + def delete_group_summary_user(self, destination, group_id, requester_user_id, + user_id, role_id): + if role_id: + path = PREFIX + "/groups/%s/summary/roles/%s/users/%s" % ( + group_id, role_id, user_id, + ) + else: + path = PREFIX + "/groups/%s/summary/users/%s" % (group_id, user_id,) + + return self.client.delete_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 0857b14c7..696221052 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -224,7 +224,7 @@ class GroupsLocalHandler(object): ) else: res = yield self.transport_client.invite_to_group( - group_id, user_id, content, + group_id, user_id, requester_user_id, content, ) defer.returnValue(res) From 332839f6ea4f4ae6e89c383bfb334b6ddecd3e53 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 14:45:37 +0100 Subject: [PATCH 033/223] Update federation client pokes --- synapse/handlers/groups_local.py | 35 ++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 696221052..7d7fc5d97 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -16,6 +16,7 @@ from twisted.internet import defer from synapse.api.errors import SynapseError +from synapse.types import get_domain_from_id import logging @@ -41,8 +42,11 @@ def _create_rerouter(func_name): return getattr(self.groups_server_handler, func_name)( group_id, *args, **kwargs ) - - return getattr(self.transport_client, func_name)(group_id, *args, **kwargs) + else: + destination = get_domain_from_id(group_id) + return getattr(self.transport_client, func_name)( + destination, group_id, *args, **kwargs + ) return f @@ -98,7 +102,9 @@ class GroupsLocalHandler(object): ) 
defer.returnValue(res) - res = yield self.transport_client.get_group_summary(group_id, requester_user_id) + res = yield self.transport_client.get_group_summary( + get_domain_from_id(group_id), group_id, requester_user_id, + ) # Loop through the users and validate the attestations. chunk = res["users_section"]["users"] @@ -131,7 +137,9 @@ class GroupsLocalHandler(object): group_id, user_id, content ) - return self.transport_client.create_group(group_id, user_id, content) # TODO + return self.transport_client.create_group( + get_domain_from_id(group_id), group_id, user_id, content, + ) # TODO def add_room(self, group_id, user_id, room_id, content): if self.is_mine_id(group_id): @@ -140,8 +148,8 @@ class GroupsLocalHandler(object): ) return self.transport_client.add_room_to_group( - group_id, user_id, room_id, content, - ) # TODO + get_domain_from_id(group_id), group_id, user_id, room_id, content, + ) @defer.inlineCallbacks def get_users_in_group(self, group_id, requester_user_id): @@ -151,9 +159,9 @@ class GroupsLocalHandler(object): ) defer.returnValue(res) - res = yield self.transport_client.get_users_in_group( - group_id, requester_user_id, - ) # TODO + res = yield self.transport_client.get_group_users( + get_domain_from_id(group_id), group_id, requester_user_id, + ) chunk = res["chunk"] valid_entries = [] @@ -191,7 +199,7 @@ class GroupsLocalHandler(object): content["attestation"] = local_attestation res = yield self.transport_client.accept_group_invite( - group_id, user_id, content, + get_domain_from_id(group_id), group_id, user_id, content, ) remote_attestation = res["attestation"] @@ -224,7 +232,8 @@ class GroupsLocalHandler(object): ) else: res = yield self.transport_client.invite_to_group( - group_id, user_id, requester_user_id, content, + get_domain_from_id(group_id), group_id, user_id, requester_user_id, + content, ) defer.returnValue(res) @@ -269,8 +278,8 @@ class GroupsLocalHandler(object): else: content["requester_user_id"] = requester_user_id res = yield self.transport_client.remove_user_from_group( - group_id, user_id, content - ) # TODO + get_domain_from_id(group_id), group_id, user_id, content + ) defer.returnValue(res) From 12ed4ee48e37ea51d4addddc1b4e6e9a194199ba Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 15:33:09 +0100 Subject: [PATCH 034/223] Correctly parse query params --- synapse/federation/transport/server.py | 38 +++++++++++++------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index d68e90d2f..29e966ac2 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -616,7 +616,7 @@ class FederationGroupsProfileServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, group_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -632,7 +632,7 @@ class FederationGroupsSummaryServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, group_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -650,7 +650,7 @@ class 
FederationGroupsRoomsServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, group_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -668,7 +668,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, room_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -686,7 +686,7 @@ class FederationGroupsUsersServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, group_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -704,7 +704,7 @@ class FederationGroupsInviteServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, user_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -739,7 +739,7 @@ class FederationGroupsRemoveUserServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, user_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -815,7 +815,7 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, category_id, room_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -833,7 +833,7 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet): @defer.inlineCallbacks def on_DELETE(self, origin, content, query, group_id, category_id, room_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -858,7 +858,7 @@ class FederationGroupsCategoriesServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, group_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -878,7 +878,7 @@ class FederationGroupsCategoryServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, group_id, category_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if 
get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -890,7 +890,7 @@ class FederationGroupsCategoryServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, category_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -905,7 +905,7 @@ class FederationGroupsCategoryServlet(BaseFederationServlet): @defer.inlineCallbacks def on_DELETE(self, origin, content, query, group_id, category_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -928,7 +928,7 @@ class FederationGroupsRolesServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, group_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -948,7 +948,7 @@ class FederationGroupsRoleServlet(BaseFederationServlet): @defer.inlineCallbacks def on_GET(self, origin, content, query, group_id, role_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -960,7 +960,7 @@ class FederationGroupsRoleServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, role_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -975,7 +975,7 @@ class FederationGroupsRoleServlet(BaseFederationServlet): @defer.inlineCallbacks def on_DELETE(self, origin, content, query, group_id, role_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -1004,7 +1004,7 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet): @defer.inlineCallbacks def on_POST(self, origin, content, query, group_id, role_id, user_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") @@ -1022,7 +1022,7 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet): @defer.inlineCallbacks def on_DELETE(self, origin, content, query, group_id, role_id, user_id): - requester_user_id = query["requester_user_id"] + requester_user_id = parse_string_from_args(query, "requester_user_id") if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") From 94ecd871a047707da5998f83440c039d064de8aa Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 16:38:54 +0100 Subject: [PATCH 035/223] Fix typos 
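
The renames below matter because GroupsLocalHandler builds its proxies with
_create_rerouter, which resolves one shared method name via getattr() on
either the local groups server handler or the federation transport client; a
drifted name only fails when that attribute is looked up at request time. A
minimal sketch of the failure mode (illustrative stand-in classes, not part
of this patch):

    class LocalHandler(object):
        def get_rooms_in_group(self, group_id, requester_user_id):
            return {"chunk": []}

    class TransportClient(object):
        # Name drifted from the rerouted name (the mismatch fixed below).
        def get_group_rooms(self, destination, group_id, requester_user_id):
            return {"chunk": []}

    def reroute(func_name, target):
        # getattr() is where a mismatched name surfaces, at request time.
        return getattr(target, func_name)

    reroute("get_rooms_in_group", LocalHandler())   # resolves fine
    try:
        reroute("get_rooms_in_group", TransportClient())
    except AttributeError:
        pass  # exactly the failure these renames remove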
--- synapse/federation/transport/client.py | 4 ++-- synapse/handlers/groups_local.py | 5 +++-- synapse/storage/group_server.py | 25 +++++++++++++++++-------- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 500f3622a..e4d84c06c 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -495,7 +495,7 @@ class TransportLayerClient(object): ) @log_function - def get_group_rooms(self, destination, group_id, requester_user_id): + def get_rooms_in_group(self, destination, group_id, requester_user_id): path = PREFIX + "/groups/%s/rooms" % (group_id,) return self.client.get_json( @@ -518,7 +518,7 @@ class TransportLayerClient(object): ) @log_function - def get_group_users(self, destination, group_id, requester_user_id): + def get_users_in_group(self, destination, group_id, requester_user_id): path = PREFIX + "/groups/%s/users" % (group_id,) return self.client.get_json( diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 7d7fc5d97..50f7fce88 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -159,7 +159,7 @@ class GroupsLocalHandler(object): ) defer.returnValue(res) - res = yield self.transport_client.get_group_users( + res = yield self.transport_client.get_users_in_group( get_domain_from_id(group_id), group_id, requester_user_id, ) @@ -278,7 +278,8 @@ class GroupsLocalHandler(object): else: content["requester_user_id"] = requester_user_id res = yield self.transport_client.remove_user_from_group( - get_domain_from_id(group_id), group_id, user_id, content + get_domain_from_id(group_id), group_id, requester_user_id, + user_id, content, ) defer.returnValue(res) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 3c6ee7df6..0a69e0f50 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -776,7 +776,7 @@ class GroupServerStore(SQLBaseStore): remote_attestation (dict): If remote group then store the remote attestation from the group, else None. """ - def _register_user_group_membership_txn(txn, next_id): + def _register_user_group_membership_txn(txn): # TODO: Upsert? self._simple_delete_txn( txn, @@ -797,7 +797,6 @@ class GroupServerStore(SQLBaseStore): "content": json.dumps(content), }, ) - self._group_updates_stream_cache.entity_has_changed(user_id, next_id) # TODO: Insert profile to ensuer it comes down stream if its a join. 
@@ -820,7 +819,7 @@ class GroupServerStore(SQLBaseStore): "group_id": group_id, "user_id": user_id, "valid_until_ms": remote_attestation["valid_until_ms"], - "attestation": json.dumps(remote_attestation), + "attestation_json": json.dumps(remote_attestation), } ) else: @@ -841,11 +840,10 @@ class GroupServerStore(SQLBaseStore): }, ) - with self._group_updates_id_gen.get_next() as next_id: - yield self.runInteraction( - "register_user_group_membership", - _register_user_group_membership_txn, next_id, - ) + yield self.runInteraction( + "register_user_group_membership", + _register_user_group_membership_txn, + ) @defer.inlineCallbacks def create_group(self, group_id, user_id, name, avatar_url, short_description, @@ -928,3 +926,14 @@ class GroupServerStore(SQLBaseStore): defer.returnValue(json.loads(row["attestation_json"])) defer.returnValue(None) + + def get_joined_groups(self, user_id): + return self._simple_select_onecol( + table="local_group_membership", + keyvalues={ + "user_id": user_id, + "membership": "join", + }, + retcol="group_id", + desc="get_joined_groups", + ) From 05c13f6c221c1c034f30a76c41dbee14f2620520 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 16:40:21 +0100 Subject: [PATCH 036/223] Add 'args' param to post_json --- synapse/http/matrixfederationclient.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 747a791f8..f58bf41d5 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -347,7 +347,7 @@ class MatrixFederationHttpClient(object): @defer.inlineCallbacks def post_json(self, destination, path, data={}, long_retries=False, - timeout=None, ignore_backoff=False): + timeout=None, ignore_backoff=False, args={}): """ Sends the specifed json data using POST Args: @@ -383,6 +383,7 @@ class MatrixFederationHttpClient(object): destination, "POST", path, + query_bytes=encode_query_args(args), body_callback=body_callback, headers_dict={"Content-Type": ["application/json"]}, long_retries=long_retries, @@ -427,13 +428,6 @@ class MatrixFederationHttpClient(object): """ logger.debug("get_json args: %s", args) - encoded_args = {} - for k, vs in args.items(): - if isinstance(vs, basestring): - vs = [vs] - encoded_args[k] = [v.encode("UTF-8") for v in vs] - - query_bytes = urllib.urlencode(encoded_args, True) logger.debug("Query bytes: %s Retry DNS: %s", args, retry_on_dns_fail) def body_callback(method, url_bytes, headers_dict): @@ -444,7 +438,7 @@ class MatrixFederationHttpClient(object): destination, "GET", path, - query_bytes=query_bytes, + query_bytes=encode_query_args(args), body_callback=body_callback, retry_on_dns_fail=retry_on_dns_fail, timeout=timeout, @@ -610,3 +604,15 @@ def check_content_type_is_json(headers): raise RuntimeError( "Content-Type not application/json: was '%s'" % c_type ) + + +def encode_query_args(args): + encoded_args = {} + for k, vs in args.items(): + if isinstance(vs, basestring): + vs = [vs] + encoded_args[k] = [v.encode("UTF-8") for v in vs] + + query_bytes = urllib.urlencode(encoded_args, True) + + return query_bytes From e884ff31d8af31cc29f8a85d9bea03b806891e8b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 16:41:44 +0100 Subject: [PATCH 037/223] Add DELETE --- synapse/http/matrixfederationclient.py | 46 ++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/synapse/http/matrixfederationclient.py 
b/synapse/http/matrixfederationclient.py index f58bf41d5..8b94e6f29 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -454,6 +454,52 @@ class MatrixFederationHttpClient(object): defer.returnValue(json.loads(body)) + @defer.inlineCallbacks + def delete_json(self, destination, path, long_retries=False, + timeout=None, ignore_backoff=False, args={}): + """Send a DELETE request to the remote expecting some json response + + Args: + destination (str): The remote server to send the HTTP request + to. + path (str): The HTTP path. + long_retries (bool): A boolean that indicates whether we should + retry for a short or long time. + timeout(int): How long to try (in ms) the destination for before + giving up. None indicates no timeout. + ignore_backoff (bool): true to ignore the historical backoff data and + try the request anyway. + Returns: + Deferred: Succeeds when we get a 2xx HTTP response. The result + will be the decoded JSON body. + + Fails with ``HTTPRequestException`` if we get an HTTP response + code >= 300. + + Fails with ``NotRetryingDestination`` if we are not yet ready + to retry this server. + """ + + response = yield self._request( + destination, + "DELETE", + path, + query_bytes=encode_query_args(args), + headers_dict={"Content-Type": ["application/json"]}, + long_retries=long_retries, + timeout=timeout, + ignore_backoff=ignore_backoff, + ) + + if 200 <= response.code < 300: + # We need to update the transactions table to say it was sent? + check_content_type_is_json(response.headers) + + with logcontext.PreserveLoggingContext(): + body = yield readBody(response) + + defer.returnValue(json.loads(body)) + @defer.inlineCallbacks def get_file(self, destination, path, output_stream, args={}, retry_on_dns_fail=True, max_size=None, From 6027b1992f507cb1a11836797827d9e69e2a6020 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 16:51:25 +0100 Subject: [PATCH 038/223] Fix permissions --- synapse/groups/groups_server.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index c8559577f..b9ad9507f 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -293,7 +293,9 @@ class GroupsServerHandler(object): content): """Add/update a users entry in the group summary """ - yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + yield self.check_group_is_ours( + group_id, and_exists=True, and_is_admin=requester_user_id, + ) order = content.get("order", None) @@ -313,7 +315,9 @@ class GroupsServerHandler(object): def delete_group_summary_user(self, group_id, requester_user_id, user_id, role_id): """Remove a user from the group summary """ - yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + yield self.check_group_is_ours( + group_id, and_exists=True, and_is_admin=requester_user_id, + ) yield self.store.remove_user_from_summary( group_id=group_id, From 3431ec55dc00f9b2b58ce0cc6645d6aed8bd5c87 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 17:19:39 +0100 Subject: [PATCH 039/223] Comments --- synapse/federation/transport/client.py | 40 +++++++ synapse/rest/client/v2_alpha/groups.py | 151 ++++++++----------------- 2 files changed, 88 insertions(+), 103 deletions(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index e4d84c06c..073d3abb2 100644 --- a/synapse/federation/transport/client.py +++ 
b/synapse/federation/transport/client.py
@@ -474,6 +474,8 @@ class TransportLayerClient(object):
 
     @log_function
     def get_group_profile(self, destination, group_id, requester_user_id):
+        """Get a group profile
+        """
         path = PREFIX + "/groups/%s/profile" % (group_id,)
 
         return self.client.get_json(
@@ -485,6 +487,8 @@ class TransportLayerClient(object):
 
     @log_function
     def get_group_summary(self, destination, group_id, requester_user_id):
+        """Get a group summary
+        """
         path = PREFIX + "/groups/%s/summary" % (group_id,)
 
         return self.client.get_json(
@@ -496,6 +500,8 @@ class TransportLayerClient(object):
 
     @log_function
     def get_rooms_in_group(self, destination, group_id, requester_user_id):
+        """Get all rooms in a group
+        """
         path = PREFIX + "/groups/%s/rooms" % (group_id,)
 
         return self.client.get_json(
@@ -507,6 +513,8 @@ class TransportLayerClient(object):
 
     def add_room_to_group(self, destination, group_id, requester_user_id, room_id,
                           content):
+        """Add a room to a group
+        """
         path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,)
 
         return self.client.post_json(
@@ -519,6 +527,8 @@ class TransportLayerClient(object):
 
     @log_function
     def get_users_in_group(self, destination, group_id, requester_user_id):
+        """Get users in a group
+        """
         path = PREFIX + "/groups/%s/users" % (group_id,)
 
         return self.client.get_json(
@@ -530,6 +540,8 @@ class TransportLayerClient(object):
 
     @log_function
     def accept_group_invite(self, destination, group_id, user_id, content):
+        """Accept a group invite
+        """
         path = PREFIX + "/groups/%s/users/%s/accept_invite" % (group_id, user_id)
 
         return self.client.post_json(
@@ -541,6 +553,8 @@ class TransportLayerClient(object):
 
     @log_function
     def invite_to_group(self, destination, group_id, user_id, requester_user_id, content):
+        """Invite a user to a group
+        """
         path = PREFIX + "/groups/%s/users/%s/invite" % (group_id, user_id)
 
         return self.client.post_json(
@@ -569,6 +583,8 @@ class TransportLayerClient(object):
 
     @log_function
     def remove_user_from_group(self, destination, group_id, requester_user_id,
                                user_id, content):
+        """Remove a user from a group
+        """
         path = PREFIX + "/groups/%s/users/%s/remove" % (group_id, user_id)
 
         return self.client.post_json(
@@ -613,6 +629,8 @@ class TransportLayerClient(object):
 
     @log_function
     def update_group_summary_room(self, destination, group_id, user_id, room_id,
                                   category_id, content):
+        """Update a room entry in a group summary
+        """
         if category_id:
             path = PREFIX + "/groups/%s/summary/categories/%s/rooms/%s" % (
                 group_id, category_id, room_id,
@@ -631,6 +649,8 @@ class TransportLayerClient(object):
 
     @log_function
     def delete_group_summary_room(self, destination, group_id, user_id, room_id,
                                   category_id):
+        """Delete a room entry in a group summary
+        """
         if category_id:
             path = PREFIX + "/groups/%s/summary/categories/%s/rooms/%s" % (
                 group_id, category_id, room_id,
@@ -647,6 +667,8 @@ class TransportLayerClient(object):
 
     @log_function
     def get_group_categories(self, destination, group_id, requester_user_id):
+        """Get all categories in a group
+        """
         path = PREFIX + "/groups/%s/categories" % (group_id,)
 
         return self.client.get_json(
@@ -658,6 +680,8 @@ class TransportLayerClient(object):
 
     @log_function
     def get_group_category(self, destination, group_id, requester_user_id, category_id):
+        """Get category info in a group
+        """
         path = PREFIX + "/groups/%s/categories/%s" % (group_id, category_id,)
 
         return self.client.get_json(
@@ -670,6 +694,8 @@ class TransportLayerClient(object):
 
     @log_function
     def update_group_category(self, destination, group_id, requester_user_id,
                               category_id, content):
+        """Update a category in a group
+        """
         path = PREFIX + "/groups/%s/categories/%s" % (group_id, category_id,)
 
         return self.client.post_json(
@@ -683,6 +709,8 @@ class TransportLayerClient(object):
 
     @log_function
     def delete_group_category(self, destination, group_id, requester_user_id,
                               category_id):
+        """Delete a category in a group
+        """
         path = PREFIX + "/groups/%s/categories/%s" % (group_id, category_id,)
 
         return self.client.delete_json(
@@ -694,6 +722,8 @@ class TransportLayerClient(object):
 
     @log_function
     def get_group_roles(self, destination, group_id, requester_user_id):
+        """Get all roles in a group
+        """
         path = PREFIX + "/groups/%s/roles" % (group_id,)
 
         return self.client.get_json(
@@ -705,6 +735,8 @@ class TransportLayerClient(object):
 
     @log_function
     def get_group_role(self, destination, group_id, requester_user_id, role_id):
+        """Get a role's info
+        """
         path = PREFIX + "/groups/%s/roles/%s" % (group_id, role_id,)
 
         return self.client.get_json(
@@ -717,6 +749,8 @@ class TransportLayerClient(object):
 
     @log_function
     def update_group_role(self, destination, group_id, requester_user_id, role_id,
                           content):
+        """Update a role in a group
+        """
         path = PREFIX + "/groups/%s/roles/%s" % (group_id, role_id,)
 
         return self.client.post_json(
@@ -729,6 +763,8 @@ class TransportLayerClient(object):
 
     @log_function
     def delete_group_role(self, destination, group_id, requester_user_id, role_id):
+        """Delete a role in a group
+        """
         path = PREFIX + "/groups/%s/roles/%s" % (group_id, role_id,)
 
         return self.client.delete_json(
@@ -741,6 +777,8 @@ class TransportLayerClient(object):
 
     @log_function
     def update_group_summary_user(self, destination, group_id, requester_user_id,
                                   user_id, role_id, content):
+        """Update a user's entry in a group
+        """
         if role_id:
             path = PREFIX + "/groups/%s/summary/roles/%s/users/%s" % (
                 group_id, role_id, user_id,
@@ -759,6 +797,8 @@ class TransportLayerClient(object):
 
     @log_function
     def delete_group_summary_user(self, destination, group_id, requester_user_id,
                                   user_id, role_id):
+        """Delete a user's entry in a group
+        """
         if role_id:
             path = PREFIX + "/groups/%s/summary/roles/%s/users/%s" % (
                 group_id, role_id, user_id,
diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py
index 255552c36..787967c3a 100644
--- a/synapse/rest/client/v2_alpha/groups.py
+++ b/synapse/rest/client/v2_alpha/groups.py
@@ -26,6 +26,8 @@ logger = logging.getLogger(__name__)
 
 
 class GroupServlet(RestServlet):
+    """Get the group profile
+    """
     PATTERNS = client_v2_patterns("/groups/(?P<group_id>[^/]*)/profile$")
 
     def __init__(self, hs):
@@ -45,6 +47,8 @@ class GroupServlet(RestServlet):
 
 
 class GroupSummaryServlet(RestServlet):
+    """Get the full group summary
+    """
     PATTERNS = client_v2_patterns("/groups/(?P<group_id>[^/]*)/summary$")
 
     def __init__(self, hs):
@@ -63,69 +67,17 @@ class GroupSummaryServlet(RestServlet):
 
         defer.returnValue((200, get_group_summary))
 
 
-class GroupSummaryRoomsServlet(RestServlet):
-    PATTERNS = client_v2_patterns("/groups/(?P<group_id>[^/]*)/summary/rooms$")
-
-    def __init__(self, hs):
-        super(GroupSummaryServlet, self).__init__()
-        self.auth = hs.get_auth()
-        self.clock = hs.get_clock()
-        self.groups_handler = hs.get_groups_local_handler()
-
-    @defer.inlineCallbacks
-    def on_GET(self, request, group_id):
-        requester = yield self.auth.get_user_by_req(request)
-        user_id = requester.user.to_string()
-
-        get_group_summary = yield self.groups_handler.get_group_summary(group_id, user_id)
-
-        defer.returnValue((200, get_group_summary))
-
-
-class 
GroupSummaryRoomsDefaultCatServlet(RestServlet): - PATTERNS = client_v2_patterns( - "/groups/(?P[^/]*)/summary/rooms/(?P[^/]*)$" - ) - - def __init__(self, hs): - super(GroupSummaryRoomsDefaultCatServlet, self).__init__() - self.auth = hs.get_auth() - self.clock = hs.get_clock() - self.groups_handler = hs.get_groups_local_handler() - - @defer.inlineCallbacks - def on_PUT(self, request, group_id, room_id): - requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() - - content = parse_json_object_from_request(request) - resp = yield self.groups_handler.update_group_summary_room( - group_id, user_id, - room_id=room_id, - category_id=None, - content=content, - ) - - defer.returnValue((200, resp)) - - @defer.inlineCallbacks - def on_DELETE(self, request, group_id, room_id): - requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() - - resp = yield self.groups_handler.delete_group_summary_room( - group_id, user_id, - room_id=room_id, - category_id=None, - ) - - defer.returnValue((200, resp)) - - class GroupSummaryRoomsCatServlet(RestServlet): + """Update/delete a room's entry in the summary. + + Matches both: + - /groups/:group/summary/rooms/:room_id + - /groups/:group/summary/categories/:category/rooms/:room_id + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/summary" - "/categories/(?P[^/]+)/rooms/(?P[^/]+)$" + "(/categories/(?P[^/]+))?" + "/rooms/(?P[^/]*)$" ) def __init__(self, hs): @@ -164,6 +116,8 @@ class GroupSummaryRoomsCatServlet(RestServlet): class GroupCategoryServlet(RestServlet): + """Get/add/update/delete a group category + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/categories/(?P[^/]+)$" ) @@ -214,6 +168,8 @@ class GroupCategoryServlet(RestServlet): class GroupCategoriesServlet(RestServlet): + """Get all group categories + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/categories/$" ) @@ -237,6 +193,8 @@ class GroupCategoriesServlet(RestServlet): class GroupRoleServlet(RestServlet): + """Get/add/update/delete a group role + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/roles/(?P[^/]+)$" ) @@ -287,6 +245,8 @@ class GroupRoleServlet(RestServlet): class GroupRolesServlet(RestServlet): + """Get all group roles + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/roles/$" ) @@ -309,50 +269,17 @@ class GroupRolesServlet(RestServlet): defer.returnValue((200, category)) -class GroupSummaryUsersDefaultRoleServlet(RestServlet): - PATTERNS = client_v2_patterns( - "/groups/(?P[^/]*)/summary/users/(?P[^/]*)$" - ) - - def __init__(self, hs): - super(GroupSummaryUsersDefaultRoleServlet, self).__init__() - self.auth = hs.get_auth() - self.clock = hs.get_clock() - self.groups_handler = hs.get_groups_local_handler() - - @defer.inlineCallbacks - def on_PUT(self, request, group_id, user_id): - requester = yield self.auth.get_user_by_req(request) - requester_user_id = requester.user.to_string() - - content = parse_json_object_from_request(request) - resp = yield self.groups_handler.update_group_summary_user( - group_id, requester_user_id, - user_id=user_id, - role_id=None, - content=content, - ) - - defer.returnValue((200, resp)) - - @defer.inlineCallbacks - def on_DELETE(self, request, group_id, user_id): - requester = yield self.auth.get_user_by_req(request) - requester_user_id = requester.user.to_string() - - resp = yield self.groups_handler.delete_group_summary_user( - group_id, requester_user_id, - user_id=user_id, - role_id=None, - ) - - defer.returnValue((200, resp)) - - class
GroupSummaryUsersRoleServlet(RestServlet): + """Update/delete a user's entry in the summary. + + Matches both: + - /groups/:group/summary/users/:user_id + - /groups/:group/summary/roles/:role/users/:user_id + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/summary" - "/roles/(?P[^/]+)/users/(?P[^/]+)$" + "(/roles/(?P[^/]+))?" + "/users/(?P[^/]*)$" ) def __init__(self, hs): @@ -391,6 +318,8 @@ class GroupSummaryUsersRoleServlet(RestServlet): class GroupRoomServlet(RestServlet): + """Get all rooms in a group + """ PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/rooms$") def __init__(self, hs): @@ -410,6 +339,8 @@ class GroupRoomServlet(RestServlet): class GroupUsersServlet(RestServlet): + """Get all users in a group + """ PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/users$") def __init__(self, hs): @@ -429,6 +360,8 @@ class GroupUsersServlet(RestServlet): class GroupCreateServlet(RestServlet): + """Create a group + """ PATTERNS = client_v2_patterns("/create_group$") def __init__(self, hs): @@ -454,6 +387,8 @@ class GroupCreateServlet(RestServlet): class GroupAdminRoomsServlet(RestServlet): + """Add a room to the group + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/admin/rooms/(?P[^/]*)$" ) @@ -476,6 +411,8 @@ class GroupAdminRoomsServlet(RestServlet): class GroupAdminUsersInviteServlet(RestServlet): + """Invite a user to the group + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/admin/users/invite/(?P[^/]*)$" ) @@ -503,6 +440,8 @@ class GroupAdminUsersInviteServlet(RestServlet): class GroupAdminUsersKickServlet(RestServlet): + """Kick a user from the group + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/admin/users/remove/(?P[^/]*)$" ) @@ -527,6 +466,8 @@ class GroupAdminUsersKickServlet(RestServlet): class GroupSelfLeaveServlet(RestServlet): + """Leave a joined group + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/self/leave$" ) @@ -551,6 +492,8 @@ class GroupSelfLeaveServlet(RestServlet): class GroupSelfJoinServlet(RestServlet): + """Attempt to join a group, or knock + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/self/join$" ) @@ -575,6 +518,8 @@ class GroupSelfJoinServlet(RestServlet): class GroupSelfAcceptInviteServlet(RestServlet): + """Accept a group invite + """ PATTERNS = client_v2_patterns( "/groups/(?P[^/]*)/self/accept_invite$" ) @@ -599,6 +544,8 @@ class GroupSelfAcceptInviteServlet(RestServlet): class GroupsForUserServlet(RestServlet): + """Get all groups the logged in user is joined to + """ PATTERNS = client_v2_patterns( "/joined_groups$" ) @@ -632,11 +579,9 @@ def register_servlets(hs, http_server): GroupSelfJoinServlet(hs).register(http_server) GroupSelfAcceptInviteServlet(hs).register(http_server) GroupsForUserServlet(hs).register(http_server) - GroupSummaryRoomsDefaultCatServlet(hs).register(http_server) GroupCategoryServlet(hs).register(http_server) GroupCategoriesServlet(hs).register(http_server) GroupSummaryRoomsCatServlet(hs).register(http_server) GroupRoleServlet(hs).register(http_server) GroupRolesServlet(hs).register(http_server) - GroupSummaryUsersDefaultRoleServlet(hs).register(http_server) GroupSummaryUsersRoleServlet(hs).register(http_server) From 14a34f12d755e7516dc81348d811d47dc51f026d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 18 Jul 2017 17:28:42 +0100 Subject: [PATCH 040/223] Comments --- synapse/federation/transport/server.py | 2 +- synapse/groups/groups_server.py | 2 +- synapse/handlers/groups_local.py | 29 +++++++++++++++++--------- synapse/rest/client/v2_alpha/groups.py | 4 +++- 4 files
changed, 24 insertions(+), 13 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 29e966ac2..1332b49f3 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -672,7 +672,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") - new_content = yield self.handler.add_room( + new_content = yield self.handler.add_room_to_group( group_id, requester_user_id, room_id, content ) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index b9ad9507f..1b6e354ca 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -430,7 +430,7 @@ class GroupsServerHandler(object): }) @defer.inlineCallbacks - def add_room(self, group_id, requester_user_id, room_id, content): + def add_room_to_group(self, group_id, requester_user_id, room_id, content): """Add room to group """ yield self.check_group_is_ours( diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 50f7fce88..0b80348c8 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -74,6 +74,8 @@ class GroupsLocalHandler(object): get_group_profile = _create_rerouter("get_group_profile") get_rooms_in_group = _create_rerouter("get_rooms_in_group") + add_room_to_group = _create_rerouter("add_room_to_group") + update_group_summary_room = _create_rerouter("update_group_summary_room") delete_group_summary_room = _create_rerouter("delete_group_summary_room") @@ -130,6 +132,9 @@ class GroupsLocalHandler(object): defer.returnValue(res) def create_group(self, group_id, user_id, content): + """Create a group + """ + logger.info("Asking to create group with ID: %r", group_id) if self.is_mine_id(group_id): @@ -141,18 +146,10 @@ class GroupsLocalHandler(object): get_domain_from_id(group_id), group_id, user_id, content, ) # TODO - def add_room(self, group_id, user_id, room_id, content): - if self.is_mine_id(group_id): - return self.groups_server_handler.add_room( - group_id, user_id, room_id, content - ) - - return self.transport_client.add_room_to_group( - get_domain_from_id(group_id), group_id, user_id, room_id, content, - ) - @defer.inlineCallbacks def get_users_in_group(self, group_id, requester_user_id): + """Get users in a group + """ if self.is_mine_id(group_id): res = yield self.groups_server_handler.get_users_in_group( group_id, requester_user_id ) @@ -184,10 +181,14 @@ class GroupsLocalHandler(object): @defer.inlineCallbacks def join_group(self, group_id, user_id, content): + """Request to join a group + """ raise NotImplementedError() # TODO @defer.inlineCallbacks def accept_invite(self, group_id, user_id, content): + """Accept an invite to a group + """ if self.is_mine_id(group_id): yield self.groups_server_handler.accept_invite( group_id, user_id, content ) @@ -222,6 +223,8 @@ class GroupsLocalHandler(object): @defer.inlineCallbacks def invite(self, group_id, user_id, requester_user_id, config): + """Invite a user to a group + """ content = { "requester_user_id": requester_user_id, "config": config, } @@ -240,6 +243,8 @@ class GroupsLocalHandler(object): @defer.inlineCallbacks def on_invite(self, group_id, user_id, content): + """One of our users was invited to a group + """ # TODO: Support auto join and rejection if not self.is_mine_id(user_id): @@ -262,6 +267,8 @@ class GroupsLocalHandler(object): @defer.inlineCallbacks
def remove_user_from_group(self, group_id, user_id, requester_user_id, content): + """Remove a user from a group + """ if user_id == requester_user_id: yield self.store.register_user_group_membership( group_id, user_id, @@ -286,6 +293,8 @@ class GroupsLocalHandler(object): @defer.inlineCallbacks def user_removed_from_group(self, group_id, user_id, content): + """One of our users was removed/kicked from a group + """ # TODO: Check if user in group yield self.store.register_user_group_membership( group_id, user_id, diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 787967c3a..f937d856f 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -405,7 +405,9 @@ class GroupAdminRoomsServlet(RestServlet): user_id = requester.user.to_string() content = parse_json_object_from_request(request) - result = yield self.groups_handler.add_room(group_id, user_id, room_id, content) + result = yield self.groups_handler.add_room_to_group( + group_id, user_id, room_id, content, + ) defer.returnValue((200, result)) From 6f443a74cf6ab0bfe452289f9888580725987765 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 09:46:33 +0100 Subject: [PATCH 041/223] Add update group profile API --- synapse/federation/transport/server.py | 12 ++++++++++++ synapse/groups/groups_server.py | 16 ++++++++++++++++ synapse/handlers/groups_local.py | 1 + synapse/rest/client/v2_alpha/groups.py | 12 ++++++++++++ synapse/storage/group_server.py | 11 +++++++++++ 5 files changed, 52 insertions(+) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 1332b49f3..e04750fd2 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -642,6 +642,18 @@ class FederationGroupsSummaryServlet(BaseFederationServlet): defer.returnValue((200, new_content)) + @defer.inlineCallbacks + def on_POST(self, origin, content, query, group_id): + requester_user_id = parse_string_from_args(query, "requester_user_id") + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + new_content = yield self.handler.update_group_profile( + group_id, requester_user_id, content + ) + + defer.returnValue((200, new_content)) + class FederationGroupsRoomsServlet(BaseFederationServlet): """Get the rooms in a group on behalf of a user diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 1b6e354ca..322aad2a6 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -341,6 +341,22 @@ class GroupsServerHandler(object): else: raise SynapseError(404, "Unknown group") + @defer.inlineCallbacks + def update_group_profile(self, group_id, requester_user_id, content): + """Update the group profile + """ + yield self.check_group_is_ours( + group_id, and_exists=True, and_is_admin=requester_user_id, + ) + + profile = {} + for keyname in ("name", "avatar_url", "short_description", + "long_description"): + if keyname in content: + profile[keyname] = content[keyname] + + yield self.store.update_group_profile(group_id, profile) + @defer.inlineCallbacks def get_users_in_group(self, group_id, requester_user_id): """Get the users in group as seen by requester_user_id. 
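Note on the update_group_profile handler above: it copies only a fixed whitelist of keys out of the request body and silently drops everything else. A minimal standalone sketch of that filtering, assuming the field names shown in the hunk (filter_profile and PROFILE_KEYS are illustrative names, not part of the patch; the string type check on the values is only added by a later patch in this series):

PROFILE_KEYS = ("name", "avatar_url", "short_description", "long_description")

def filter_profile(content):
    # Keep only the whitelisted group-profile fields; ignore unknown keys.
    return {key: content[key] for key in PROFILE_KEYS if key in content}

assert filter_profile({"name": "My Group", "junk": 1}) == {"name": "My Group"}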
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 0b80348c8..b2c920da3 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -72,6 +72,7 @@ class GroupsLocalHandler(object): # or federation depending on if the group is local or remote get_group_profile = _create_rerouter("get_group_profile") + update_group_profile = _create_rerouter("update_group_profile") get_rooms_in_group = _create_rerouter("get_rooms_in_group") add_room_to_group = _create_rerouter("add_room_to_group") diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index f937d856f..64d803d48 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -45,6 +45,18 @@ class GroupServlet(RestServlet): defer.returnValue((200, group_description)) + @defer.inlineCallbacks + def on_POST(self, request, group_id, content): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + yield self.groups_handler.update_group_profile( + group_id, user_id, content, + ) + + defer.returnValue((200, {})) + class GroupSummaryServlet(RestServlet): """Get the full group summary diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 0a69e0f50..4197d22d8 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -860,6 +860,17 @@ class GroupServerStore(SQLBaseStore): desc="create_group", ) + @defer.inlineCallbacks + def update_group_profile(self, group_id, profile,): + yield self._simple_update_one( + table="groups", + keyvalues={ + "group_id": group_id, + }, + updatevalues=profile, + desc="create_group", + ) + def get_attestations_need_renewals(self, valid_until_ms): """Get all attestations that need to be renewed until givent time """ From 57826d645bd62ab534dbcab8d66a98daec145459 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 13:15:22 +0100 Subject: [PATCH 042/223] Fix typo --- synapse/storage/group_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 0a69e0f50..a2e7aa47d 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -798,7 +798,7 @@ class GroupServerStore(SQLBaseStore): }, ) - # TODO: Insert profile to ensuer it comes down stream if its a join. + # TODO: Insert profile to ensure it comes down stream if its a join. 
if membership == "join": if local_attestation: From 8209b5f033417ab018fdd1114170b89fb0b18aa9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 16:22:22 +0100 Subject: [PATCH 043/223] Fix a storage desc --- synapse/storage/group_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 4197d22d8..2331ec79b 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -868,7 +868,7 @@ class GroupServerStore(SQLBaseStore): "group_id": group_id, }, updatevalues=profile, - desc="create_group", + desc="update_group_profile", ) def get_attestations_need_renewals(self, valid_until_ms): From 0ab153d2014d871c13b02dbd1c6bf7c0cc0bcedc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 16:24:18 +0100 Subject: [PATCH 044/223] Check values are strings --- synapse/groups/groups_server.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 322aad2a6..b1ee43ef9 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -353,7 +353,10 @@ class GroupsServerHandler(object): for keyname in ("name", "avatar_url", "short_description", "long_description"): if keyname in content: - profile[keyname] = content[keyname] + value = content[keyname] + if not isinstance(value, basestring): + raise SynapseError(400, "%r value is not a string" % (keyname,)) + profile[keyname] = value yield self.store.update_group_profile(group_id, profile) From c544188ee3644c85a97a3c4e09e63ad4e3c6f0cc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Jul 2017 14:53:19 +0100 Subject: [PATCH 045/223] Add groups to sync stream --- synapse/handlers/sync.py | 64 ++++++++++++++++- synapse/rest/client/v2_alpha/sync.py | 5 ++ synapse/storage/__init__.py | 15 ++++ synapse/storage/group_server.py | 68 +++++++++++++++++-- .../storage/schema/delta/43/group_server.sql | 9 +++ synapse/streams/events.py | 2 + synapse/types.py | 2 + tests/rest/client/v1/test_rooms.py | 4 +- 8 files changed, 161 insertions(+), 8 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 91c6c6be3..c01fcd3d5 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -108,6 +108,17 @@ class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [ return True +class GroupsSyncResult(collections.namedtuple("GroupsSyncResult", [ + "join", + "invite", + "leave", +])): + __slots__ = [] + + def __nonzero__(self): + return self.join or self.invite or self.leave + + class SyncResult(collections.namedtuple("SyncResult", [ "next_batch", # Token for the next sync "presence", # List of presence events for the user. 
@@ -119,6 +130,7 @@ class SyncResult(collections.namedtuple("SyncResult", [ "device_lists", # List of user_ids whose devices have chanegd "device_one_time_keys_count", # Dict of algorithm to count for one time keys # for this device + "groups", ])): __slots__ = [] @@ -134,7 +146,8 @@ class SyncResult(collections.namedtuple("SyncResult", [ self.archived or self.account_data or self.to_device or - self.device_lists + self.device_lists or + self.groups ) @@ -560,6 +573,8 @@ class SyncHandler(object): user_id, device_id ) + yield self._generate_sync_entry_for_groups(sync_result_builder) + defer.returnValue(SyncResult( presence=sync_result_builder.presence, account_data=sync_result_builder.account_data, @@ -568,10 +583,56 @@ class SyncHandler(object): archived=sync_result_builder.archived, to_device=sync_result_builder.to_device, device_lists=device_lists, + groups=sync_result_builder.groups, device_one_time_keys_count=one_time_key_counts, next_batch=sync_result_builder.now_token, )) + @measure_func("_generate_sync_entry_for_groups") + @defer.inlineCallbacks + def _generate_sync_entry_for_groups(self, sync_result_builder): + user_id = sync_result_builder.sync_config.user.to_string() + since_token = sync_result_builder.since_token + now_token = sync_result_builder.now_token + + if since_token and since_token.groups_key: + results = yield self.store.get_groups_changes_for_user( + user_id, since_token.groups_key, now_token.groups_key, + ) + else: + results = yield self.store.get_all_groups_for_user( + user_id, now_token.groups_key, + ) + + invited = {} + joined = {} + left = {} + for result in results: + membership = result["membership"] + group_id = result["group_id"] + gtype = result["type"] + content = result["content"] + + if membership == "join": + if gtype == "membership": + content.pop("membership", None) + invited[group_id] = content["content"] + else: + joined.setdefault(group_id, {})[gtype] = content + elif membership == "invite": + if gtype == "membership": + content.pop("membership", None) + invited[group_id] = content["content"] + else: + if gtype == "membership": + left[group_id] = content["content"] + + sync_result_builder.groups = GroupsSyncResult( + join=joined, + invite=invited, + leave=left, + ) + @measure_func("_generate_sync_entry_for_device_list") @defer.inlineCallbacks def _generate_sync_entry_for_device_list(self, sync_result_builder): @@ -1260,6 +1321,7 @@ class SyncResultBuilder(object): self.invited = [] self.archived = [] self.device = [] + self.groups = None class RoomSyncResultBuilder(object): diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 6dcc40745..5f208a4c1 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -199,6 +199,11 @@ class SyncRestServlet(RestServlet): "invite": invited, "leave": archived, }, + "groups": { + "join": sync_result.groups.join, + "invite": sync_result.groups.invite, + "leave": sync_result.groups.leave, + }, "device_one_time_keys_count": sync_result.device_one_time_keys_count, "next_batch": sync_result.next_batch.to_string(), } diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index fdee9f1ad..594566eb3 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -136,6 +136,9 @@ class DataStore(RoomMemberStore, RoomStore, db_conn, "pushers", "id", extra_tables=[("deleted_pushers", "stream_id")], ) + self._group_updates_id_gen = StreamIdGenerator( + db_conn, "local_group_updates", "stream_id", + ) if 
isinstance(self.database_engine, PostgresEngine): self._cache_id_gen = StreamIdGenerator( @@ -236,6 +239,18 @@ class DataStore(RoomMemberStore, RoomStore, prefilled_cache=curr_state_delta_prefill, ) + _group_updates_prefill, min_group_updates_id = self._get_cache_dict( + db_conn, "local_group_updates", + entity_column="user_id", + stream_column="stream_id", + max_value=self._group_updates_id_gen.get_current_token(), + limit=1000, + ) + self._group_updates_stream_cache = StreamChangeCache( + "_group_updates_stream_cache", min_group_updates_id, + prefilled_cache=_group_updates_prefill, + ) + cur = LoggingTransaction( db_conn.cursor(), name="_find_stream_orderings_for_times_txn", diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index a2e7aa47d..45f0a4c59 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -776,7 +776,7 @@ class GroupServerStore(SQLBaseStore): remote_attestation (dict): If remote group then store the remote attestation from the group, else None. """ - def _register_user_group_membership_txn(txn): + def _register_user_group_membership_txn(txn, next_id): # TODO: Upsert? self._simple_delete_txn( txn, @@ -798,6 +798,19 @@ class GroupServerStore(SQLBaseStore): }, ) + self._simple_insert_txn( + txn, + table="local_group_updates", + values={ + "stream_id": next_id, + "group_id": group_id, + "user_id": user_id, + "type": "membership", + "content": json.dumps({"membership": membership, "content": content}), + } + ) + self._group_updates_stream_cache.entity_has_changed(user_id, next_id) + # TODO: Insert profile to ensure it comes down stream if its a join. if membership == "join": @@ -840,10 +853,11 @@ class GroupServerStore(SQLBaseStore): }, ) - yield self.runInteraction( - "register_user_group_membership", - _register_user_group_membership_txn, - ) + with self._group_updates_id_gen.get_next() as next_id: + yield self.runInteraction( + "register_user_group_membership", + _register_user_group_membership_txn, next_id, + ) @defer.inlineCallbacks def create_group(self, group_id, user_id, name, avatar_url, short_description, @@ -937,3 +951,47 @@ class GroupServerStore(SQLBaseStore): retcol="group_id", desc="get_joined_groups", ) + + def get_all_groups_for_user(self, user_id, now_token): + def _get_all_groups_for_user_txn(txn): + sql = """ + SELECT group_id, type, membership, u.content + FROM local_group_updates AS u + INNER JOIN local_group_membership USING (group_id, user_id) + WHERE user_id = ? AND membership != 'leave' + AND stream_id <= ? + """ + txn.execute(sql, (user_id, now_token,)) + return self.cursor_to_dict(txn) + return self.runInteraction( + "get_all_groups_for_user", _get_all_groups_for_user_txn, + ) + + def get_groups_changes_for_user(self, user_id, from_token, to_token): + from_token = int(from_token) + has_changed = self._group_updates_stream_cache.has_entity_changed( + user_id, from_token, + ) + if not has_changed: + return [] + + def _get_groups_changes_for_user_txn(txn): + sql = """ + SELECT group_id, membership, type, u.content + FROM local_group_updates AS u + INNER JOIN local_group_membership USING (group_id, user_id) + WHERE user_id = ? AND ? < stream_id AND stream_id <= ? 
+ """ + txn.execute(sql, (user_id, from_token, to_token,)) + return [{ + "group_id": group_id, + "membership": membership, + "type": gtype, + "content": json.loads(content_json), + } for group_id, membership, gtype, content_json in txn] + return self.runInteraction( + "get_groups_changes_for_user", _get_groups_changes_for_user_txn, + ) + + def get_group_stream_token(self): + return self._group_updates_id_gen.get_current_token() diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index e1fd47aa7..92f3339c9 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -155,3 +155,12 @@ CREATE TABLE local_group_membership ( CREATE INDEX local_group_membership_u_idx ON local_group_membership(user_id, group_id); CREATE INDEX local_group_membership_g_idx ON local_group_membership(group_id); + + +CREATE TABLE local_group_updates ( + stream_id BIGINT NOT NULL, + group_id TEXT NOT NULL, + user_id TEXT NOT NULL, + type TEXT NOT NULL, + content TEXT NOT NULL +); diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 91a59b0ba..e2be50081 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -45,6 +45,7 @@ class EventSources(object): push_rules_key, _ = self.store.get_push_rules_stream_token() to_device_key = self.store.get_to_device_stream_token() device_list_key = self.store.get_device_stream_token() + groups_key = self.store.get_group_stream_token() token = StreamToken( room_key=( @@ -65,6 +66,7 @@ class EventSources(object): push_rules_key=push_rules_key, to_device_key=to_device_key, device_list_key=device_list_key, + groups_key=groups_key, ) defer.returnValue(token) diff --git a/synapse/types.py b/synapse/types.py index b32c0e360..37d5fa7f9 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -171,6 +171,7 @@ class StreamToken( "push_rules_key", "to_device_key", "device_list_key", + "groups_key", )) ): _SEPARATOR = "_" @@ -209,6 +210,7 @@ class StreamToken( or (int(other.push_rules_key) < int(self.push_rules_key)) or (int(other.to_device_key) < int(self.to_device_key)) or (int(other.device_list_key) < int(self.device_list_key)) + or (int(other.groups_key) < int(self.groups_key)) ) def copy_and_advance(self, key, new_value): diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index d746ea856..de376fb51 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -1032,7 +1032,7 @@ class RoomMessageListTestCase(RestTestCase): @defer.inlineCallbacks def test_topo_token_is_accepted(self): - token = "t1-0_0_0_0_0_0_0_0" + token = "t1-0_0_0_0_0_0_0_0_0" (code, response) = yield self.mock_resource.trigger_get( "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token)) @@ -1044,7 +1044,7 @@ class RoomMessageListTestCase(RestTestCase): @defer.inlineCallbacks def test_stream_token_is_accepted_for_fwd_pagianation(self): - token = "s0_0_0_0_0_0_0_0" + token = "s0_0_0_0_0_0_0_0_0" (code, response) = yield self.mock_resource.trigger_get( "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token)) From 4d793626ffae05e3d9ac2d770bea7224a3318a56 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 16:42:44 +0100 Subject: [PATCH 046/223] Fix bug in generating current token --- synapse/streams/events.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/streams/events.py b/synapse/streams/events.py index e2be50081..f03ad9911 100644 --- 
a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -75,6 +75,7 @@ class EventSources(object): push_rules_key, _ = self.store.get_push_rules_stream_token() to_device_key = self.store.get_to_device_stream_token() device_list_key = self.store.get_device_stream_token() + groups_key = self.store.get_group_stream_token() token = StreamToken( room_key=( @@ -95,5 +96,6 @@ class EventSources(object): push_rules_key=push_rules_key, to_device_key=to_device_key, device_list_key=device_list_key, + groups_key=groups_key, ) defer.returnValue(token) From 139fe30f47aabd3ca8cce0f8d8c961348188b90b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 16:47:35 +0100 Subject: [PATCH 047/223] Remember to cast to bool --- synapse/handlers/sync.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index c01fcd3d5..600d0589f 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -116,7 +116,7 @@ class GroupsSyncResult(collections.namedtuple("GroupsSyncResult", [ __slots__ = [] def __nonzero__(self): - return self.join or self.invite or self.leave + return bool(self.join or self.invite or self.leave) class SyncResult(collections.namedtuple("SyncResult", [ From 2cc998fed879357376edb35d5088d88a078dd576 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 17:13:18 +0100 Subject: [PATCH 048/223] Fix replication. And notify --- synapse/app/synchrotron.py | 6 +++ synapse/handlers/groups_local.py | 20 ++++++-- synapse/replication/slave/storage/groups.py | 54 +++++++++++++++++++++ synapse/replication/tcp/streams.py | 20 ++++++++ synapse/storage/group_server.py | 23 +++++++++ 5 files changed, 119 insertions(+), 4 deletions(-) create mode 100644 synapse/replication/slave/storage/groups.py diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index 4bdd99a96..d06a05acd 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -41,6 +41,7 @@ from synapse.replication.slave.storage.presence import SlavedPresenceStore from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore from synapse.replication.slave.storage.devices import SlavedDeviceStore from synapse.replication.slave.storage.room import RoomStore +from synapse.replication.slave.storage.groups import SlavedGroupServerStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.server import HomeServer from synapse.storage.engines import create_engine @@ -75,6 +76,7 @@ class SynchrotronSlavedStore( SlavedRegistrationStore, SlavedFilteringStore, SlavedPresenceStore, + SlavedGroupServerStore, SlavedDeviceInboxStore, SlavedDeviceStore, SlavedClientIpStore, @@ -409,6 +411,10 @@ class SyncReplicationHandler(ReplicationClientHandler): ) elif stream_name == "presence": yield self.presence_handler.process_replication_rows(token, rows) + elif stream_name == "receipts": + self.notifier.on_new_event( + "groups_key", token, users=[row.user_id for row in rows], + ) def start(config_options): diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 0b80348c8..4182ea5af 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -211,13 +211,16 @@ class GroupsLocalHandler(object): user_id=user_id, ) - yield self.store.register_user_group_membership( + token = yield self.store.register_user_group_membership( group_id, user_id, membership="join", is_admin=False, local_attestation=local_attestation, 
remote_attestation=remote_attestation, ) + self.notifier.on_new_event( + "groups_key", token, users=[user_id], + ) defer.returnValue({}) @@ -257,11 +260,14 @@ class GroupsLocalHandler(object): if "avatar_url" in content["profile"]: local_profile["avatar_url"] = content["profile"]["avatar_url"] - yield self.store.register_user_group_membership( + token = yield self.store.register_user_group_membership( group_id, user_id, membership="invite", content={"profile": local_profile, "inviter": content["inviter"]}, ) + self.notifier.on_new_event( + "groups_key", token, users=[user_id], + ) defer.returnValue({"state": "invite"}) @@ -270,10 +276,13 @@ class GroupsLocalHandler(object): """Remove a user from a group """ if user_id == requester_user_id: - yield self.store.register_user_group_membership( + token = yield self.store.register_user_group_membership( group_id, user_id, membership="leave", ) + self.notifier.on_new_event( + "groups_key", token, users=[user_id], + ) # TODO: Should probably remember that we tried to leave so that we can # retry if the group server is currently down. @@ -296,10 +305,13 @@ class GroupsLocalHandler(object): """One of our users was removed/kicked from a group """ # TODO: Check if user in group - yield self.store.register_user_group_membership( + token = yield self.store.register_user_group_membership( group_id, user_id, membership="leave", ) + self.notifier.on_new_event( + "groups_key", token, users=[user_id], + ) @defer.inlineCallbacks def get_joined_groups(self, user_id): diff --git a/synapse/replication/slave/storage/groups.py b/synapse/replication/slave/storage/groups.py new file mode 100644 index 000000000..0bc4bce5b --- /dev/null +++ b/synapse/replication/slave/storage/groups.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ._base import BaseSlavedStore +from ._slaved_id_tracker import SlavedIdTracker +from synapse.storage import DataStore +from synapse.util.caches.stream_change_cache import StreamChangeCache + + +class SlavedGroupServerStore(BaseSlavedStore): + def __init__(self, db_conn, hs): + super(SlavedGroupServerStore, self).__init__(db_conn, hs) + + self.hs = hs + + self._group_updates_id_gen = SlavedIdTracker( + db_conn, "local_group_updates", "stream_id", + ) + self._group_updates_stream_cache = StreamChangeCache( + "_group_updates_stream_cache", self._group_updates_id_gen.get_current_token(), + ) + + get_groups_changes_for_user = DataStore.get_groups_changes_for_user.__func__ + get_group_stream_token = DataStore.get_group_stream_token.__func__ + get_all_groups_for_user = DataStore.get_all_groups_for_user.__func__ + + def stream_positions(self): + result = super(SlavedGroupServerStore, self).stream_positions() + result["groups"] = self._group_updates_id_gen.get_current_token() + return result + + def process_replication_rows(self, stream_name, token, rows): + if stream_name == "groups": + self._group_updates_id_gen.advance(token) + for row in rows: + self._group_updates_stream_cache.entity_has_changed( + row.user_id, token + ) + + return super(SlavedGroupServerStore, self).process_replication_rows( + stream_name, token, rows + ) diff --git a/synapse/replication/tcp/streams.py b/synapse/replication/tcp/streams.py index fbafe12cc..4c60bf79f 100644 --- a/synapse/replication/tcp/streams.py +++ b/synapse/replication/tcp/streams.py @@ -118,6 +118,12 @@ CurrentStateDeltaStreamRow = namedtuple("CurrentStateDeltaStream", ( "state_key", # str "event_id", # str, optional )) +GroupsStreamRow = namedtuple("GroupsStreamRow", ( + "group_id", # str + "user_id", # str + "type", # str + "content", # dict +)) class Stream(object): @@ -464,6 +470,19 @@ class CurrentStateDeltaStream(Stream): super(CurrentStateDeltaStream, self).__init__(hs) +class GroupServerStream(Stream): + NAME = "groups" + ROW_TYPE = GroupsStreamRow + + def __init__(self, hs): + store = hs.get_datastore() + + self.current_token = store.get_group_stream_token + self.update_function = store.get_all_groups_changes + + super(GroupServerStream, self).__init__(hs) + + STREAMS_MAP = { stream.NAME: stream for stream in ( @@ -482,5 +501,6 @@ STREAMS_MAP = { TagAccountDataStream, AccountDataStream, CurrentStateDeltaStream, + GroupServerStream, ) } diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 45f0a4c59..5006ac863 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -853,6 +853,8 @@ class GroupServerStore(SQLBaseStore): }, ) + return next_id + with self._group_updates_id_gen.get_next() as next_id: yield self.runInteraction( "register_user_group_membership", @@ -993,5 +995,26 @@ class GroupServerStore(SQLBaseStore): "get_groups_changes_for_user", _get_groups_changes_for_user_txn, ) + def get_all_groups_changes(self, from_token, to_token, limit): + from_token = int(from_token) + has_changed = self._group_updates_stream_cache.has_any_entity_changed( + from_token, + ) + if not has_changed: + return [] + + def _get_all_groups_changes_txn(txn): + sql = """ + SELECT stream_id, group_id, user_id, type, content + FROM local_group_updates + WHERE ? < stream_id AND stream_id <= ? + LIMIT ? 
+ """ + txn.execute(sql, (from_token, to_token, limit,)) + return txn.fetchall() + return self.runInteraction( + "get_all_groups_changes", _get_all_groups_changes_txn, + ) + def get_group_stream_token(self): return self._group_updates_id_gen.get_current_token() From 960dae3340221168e1e019f250cbc2dd430a1aee Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 17:14:44 +0100 Subject: [PATCH 049/223] Add notifier --- synapse/handlers/groups_local.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 4182ea5af..0c329e633 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -63,6 +63,7 @@ class GroupsLocalHandler(object): self.is_mine_id = hs.is_mine_id self.signing_key = hs.config.signing_key[0] self.server_name = hs.hostname + self.notifier = hs.get_notifier() self.attestations = hs.get_groups_attestation_signing() # Ensure attestations get renewed From b238cf7f6bea80eae076bd34c50d470211a78c72 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 20 Jul 2017 17:49:55 +0100 Subject: [PATCH 050/223] Remove spurious content param --- synapse/rest/client/v2_alpha/groups.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 64d803d48..009cd7073 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -46,7 +46,7 @@ class GroupServlet(RestServlet): defer.returnValue((200, group_description)) @defer.inlineCallbacks - def on_POST(self, request, group_id, content): + def on_POST(self, request, group_id): requester = yield self.auth.get_user_by_req(request) user_id = requester.user.to_string() From d5e32c843fb51f2b7c0247ca821fd38e5f2f25d3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 24 Jul 2017 13:31:26 +0100 Subject: [PATCH 051/223] Correctly add joins to correct segment --- synapse/handlers/sync.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 600d0589f..d7b90a35e 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -616,7 +616,7 @@ class SyncHandler(object): if membership == "join": if gtype == "membership": content.pop("membership", None) - invited[group_id] = content["content"] + joined[group_id] = content["content"] else: joined.setdefault(group_id, {})[gtype] = content elif membership == "invite": From 851aeae7c796b127dddf7ca6df882df6104ee5e7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 24 Jul 2017 13:40:56 +0100 Subject: [PATCH 052/223] Check users/rooms are in group before adding to summary --- synapse/storage/group_server.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index d42e215b2..258c3168a 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -152,6 +152,18 @@ class GroupServerStore(SQLBaseStore): an order of 1 will put the room first. Otherwise, the room gets added to the end. 
""" + room_in_group = self._simple_select_one_onecol_txn( + txn, + table="group_rooms", + keyvalues={ + "group_id": group_id, + "room_id": room_id, + }, + retcol="room_id", + allow_none=True, + ) + if not room_in_group: + raise SynapseError(400, "room not in group") if category_id is None: category_id = _DEFAULT_CATEGORY_ID @@ -426,6 +438,19 @@ class GroupServerStore(SQLBaseStore): an order of 1 will put the user first. Otherwise, the user gets added to the end. """ + user_in_group = self._simple_select_one_onecol_txn( + txn, + table="group_users", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + retcol="user_id", + allow_none=True, + ) + if not user_in_group: + raise SynapseError(400, "user not in group") + if role_id is None: role_id = _DEFAULT_ROLE_ID else: From b76ef6ccb8e00610c791a78b940d396da82fb1ce Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 24 Jul 2017 13:55:39 +0100 Subject: [PATCH 053/223] Include users membership in group in summary API --- synapse/groups/groups_server.py | 5 +++ synapse/storage/group_server.py | 55 +++++++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index b1ee43ef9..f25f327eb 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -130,6 +130,10 @@ class GroupsServerHandler(object): users.sort(key=lambda e: e.get("order", 0)) + membership_info = yield self.store.get_users_membership_info_in_group( + group_id, requester_user_id, + ) + defer.returnValue({ "profile": profile, "users_section": { @@ -142,6 +146,7 @@ class GroupsServerHandler(object): "categories": categories, "total_room_count_estimate": 0, # TODO }, + "user": membership_info, }) @defer.inlineCallbacks diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 258c3168a..989a10eea 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -672,6 +672,61 @@ class GroupServerStore(SQLBaseStore): allow_none=True, ) + @defer.inlineCallbacks + def get_users_membership_info_in_group(self, group_id, user_id): + """Get a dict describing the memebrship of a user in a group. + + Example if joined: + + { + "memebrship": "joined", + "is_public": True, + "is_privileged": False, + } + + Returns an empty dict if the user is not joined/invited/etc + """ + def _get_users_membership_in_group_txn(txn): + row = self._simple_select_one_txn( + table="group_users", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + retcols=("is_admin", "is_public"), + allow_none=True, + desc="is_user_adim_in_group", + ) + + if row: + return { + "memebrship": "joined", + "is_public": row["is_public"], + "is_privileged": row["is_admin"], + } + + row = self._simple_select_one_onecol_txn( + table="group_invites", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + retcol="user_id", + desc="is_user_invited_to_local_group", + allow_none=True, + ) + + if row: + return { + "memebrship": "invited", + } + + return {} + + return self.runInteraction( + "get_users_membership_info_in_group", _get_users_membership_in_group_txn, + ) + def add_user_to_group(self, group_id, user_id, is_admin=False, is_public=True, local_attestation=None, remote_attestation=None): """Add a user to the group server. 
From ed666d396985c1fa2b9acb1c69199dd55670a88f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 24 Jul 2017 14:05:09 +0100 Subject: [PATCH 054/223] Fix all the typos --- synapse/storage/group_server.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 989a10eea..357111e30 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -643,7 +643,7 @@ class GroupServerStore(SQLBaseStore): }, retcol="is_admin", allow_none=True, - desc="is_user_adim_in_group", + desc="is_user_admin_in_group", ) def add_group_invite(self, group_id, user_id): @@ -672,14 +672,13 @@ class GroupServerStore(SQLBaseStore): allow_none=True, ) - @defer.inlineCallbacks def get_users_membership_info_in_group(self, group_id, user_id): - """Get a dict describing the memebrship of a user in a group. + """Get a dict describing the membership of a user in a group. Example if joined: { - "memebrship": "joined", + "membership": "joined", "is_public": True, "is_privileged": False, } @@ -688,6 +687,7 @@ class GroupServerStore(SQLBaseStore): """ def _get_users_membership_in_group_txn(txn): row = self._simple_select_one_txn( + txn, table="group_users", keyvalues={ "group_id": group_id, @@ -695,30 +695,29 @@ class GroupServerStore(SQLBaseStore): }, retcols=("is_admin", "is_public"), allow_none=True, - desc="is_user_adim_in_group", ) if row: return { - "memebrship": "joined", + "membership": "joined", "is_public": row["is_public"], "is_privileged": row["is_admin"], } row = self._simple_select_one_onecol_txn( + txn, table="group_invites", keyvalues={ "group_id": group_id, "user_id": user_id, }, retcol="user_id", - desc="is_user_invited_to_local_group", allow_none=True, ) if row: return { - "memebrship": "invited", + "membership": "invited", } return {} From 629cdfb124c013c07b50116386d05162e40871aa Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 24 Jul 2017 14:54:05 +0100 Subject: [PATCH 055/223] Use join rather than joined, etc. 
--- synapse/storage/group_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 357111e30..9c55e10e7 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -699,7 +699,7 @@ class GroupServerStore(SQLBaseStore): if row: return { - "membership": "joined", + "membership": "join", "is_public": row["is_public"], "is_privileged": row["is_admin"], } @@ -717,7 +717,7 @@ class GroupServerStore(SQLBaseStore): if row: return { - "membership": "invited", + "membership": "invite", } return {} From 966a70f1fa74192866ff5b0dbae67ee8f490d97d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 24 Jul 2017 17:49:39 +0100 Subject: [PATCH 056/223] Update comment --- synapse/storage/group_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 9c55e10e7..f44e80b51 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -678,12 +678,12 @@ class GroupServerStore(SQLBaseStore): Example if joined: { - "membership": "joined", + "membership": "join", "is_public": True, "is_privileged": False, } - Returns an empty dict if the user is not joined/invited/etc + Returns an empty dict if the user is not join/invite/etc From a1e67bcb974e098cbbe2fbe6072bc7d7658936f9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 4 Aug 2017 10:07:10 +0100 Subject: [PATCH 057/223] Remove stale TODO comments --- synapse/handlers/groups_local.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index d0ed98822..b8b1e754c 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -23,16 +23,6 @@ import logging logger = logging.getLogger(__name__) -# TODO: Validate attestations -# TODO: Allow users to "knock" or simpkly join depending on rules -# TODO: is_priveged flag to users and is_public to users and rooms -# TODO: Roles -# TODO: Audit log for admins (profile updates, membership changes, users who tried -# to join but were rejected, etc) -# TODO: Flairs -# TODO: Add group memebership /sync From 05e21285aae4a0411a9ec1151ce006297fa3ca91 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 8 Aug 2017 11:50:09 +0100 Subject: [PATCH 058/223] Store whether the user wants to publicise their membership of a group --- synapse/handlers/groups_local.py | 4 ++++ synapse/storage/group_server.py | 2 ++ synapse/storage/schema/delta/43/group_server.sql | 1 + 3 files changed, 7 insertions(+) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index b8b1e754c..3a738ef36 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -203,12 +203,16 @@ class GroupsLocalHandler(object): user_id=user_id, ) + # TODO: Check that the group is public and we're being added publicly + is_publicised = content.get("publicise", False) + token = yield self.store.register_user_group_membership( group_id, user_id, membership="join", is_admin=False, local_attestation=local_attestation, remote_attestation=remote_attestation, + is_publicised=is_publicised, ) self.notifier.on_new_event( "groups_key",
token, users=[user_id], diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index f44e80b51..31514f3cd 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -840,6 +840,7 @@ class GroupServerStore(SQLBaseStore): is_admin=False, content={}, local_attestation=None, remote_attestation=None, + is_publicised=False, ): """Registers that a local user is a member of a (local or remote) group. @@ -873,6 +874,7 @@ class GroupServerStore(SQLBaseStore): "user_id": user_id, "is_admin": is_admin, "membership": membership, + "is_publicised": is_publicised, "content": json.dumps(content), }, ) diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index 92f3339c9..01ac0edc3 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -150,6 +150,7 @@ CREATE TABLE local_group_membership ( user_id TEXT NOT NULL, is_admin BOOLEAN NOT NULL, membership TEXT NOT NULL, + is_publicised TEXT NOT NULL, -- if the user is publicising their membership content TEXT NOT NULL ); From b880ff190a82d4f337b94115fc017d703e53878d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 8 Aug 2017 14:19:07 +0100 Subject: [PATCH 059/223] Allow update group publicity --- synapse/rest/client/v2_alpha/groups.py | 28 ++++++++++++++++++++++ synapse/storage/group_server.py | 15 ++++++++++++ 2 files changed, 43 insertions(+) diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 009cd7073..9b1116ace 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -557,6 +557,33 @@ class GroupSelfAcceptInviteServlet(RestServlet): defer.returnValue((200, result)) +class GroupSelfUpdatePublicityServlet(RestServlet): + """Update whether we publicise a user's membership of a group + """ + PATTERNS = client_v2_patterns( + "/groups/(?P[^/]*)/self/update_publicity$" + ) + + def __init__(self, hs): + super(GroupSelfUpdatePublicityServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.store = hs.get_datastore() + + @defer.inlineCallbacks + def on_PUT(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + requester_user_id = requester.user.to_string() + + content = parse_json_object_from_request(request) + publicise = content["publicise"] + yield self.store.update_group_publicity( + group_id, requester_user_id, publicise, + ) + + defer.returnValue((200, {})) + + class GroupsForUserServlet(RestServlet): """Get all groups the logged in user is joined to """ @@ -598,4 +625,5 @@ def register_servlets(hs, http_server): GroupSummaryRoomsCatServlet(hs).register(http_server) GroupRoleServlet(hs).register(http_server) GroupRolesServlet(hs).register(http_server) + GroupSelfUpdatePublicityServlet(hs).register(http_server) GroupSummaryUsersRoleServlet(hs).register(http_server) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 31514f3cd..10e757e97 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -835,6 +835,20 @@ class GroupServerStore(SQLBaseStore): desc="add_room_to_group", ) + def update_group_publicity(self, group_id, user_id, publicise): + """Update whether the user is publicising their membership of the group + """ + return self._simple_update_one( + table="local_group_membership", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + updatevalues={ + "is_publicised": publicise, + }, + desc="update_group_publicity" + ) + From ef8e5786770ff285ebdf1fce420b5aa86437673c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 9 Aug 2017 13:36:22 +0100 Subject: [PATCH 060/223] Add bulk group publicised lookup API --- synapse/federation/transport/client.py | 15 +++++++ synapse/federation/transport/server.py | 17 ++++++++ synapse/handlers/groups_local.py | 42 ++++++++++++++++++++ synapse/rest/client/v2_alpha/groups.py | 54 ++++++++++++++++++++++++++ synapse/storage/group_server.py | 14 +++++++ 5 files changed, 142 insertions(+) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 073d3abb2..ce68cc493 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -812,3 +812,18 @@ class TransportLayerClient(object): args={"requester_user_id": requester_user_id}, ignore_backoff=True, ) + + def bulk_get_publicised_groups(self, destination, user_ids): + """Get the groups a list of users are publicising + """ + + path = PREFIX + "/get_groups_publicised" + + content = {"user_ids": user_ids} + + return self.client.post_json( + destination=destination, + path=path, + data=content, + ignore_backoff=True, + ) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index e04750fd2..b5f07c50b 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -1050,6 +1050,22 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet): defer.returnValue((200, resp)) +class FederationGroupsBulkPublicisedServlet(BaseFederationServlet): + """Get the groups a list of users are publicising + """ + PATH = ( + "/get_groups_publicised$" + ) + + @defer.inlineCallbacks + def on_POST(self, origin, content, query): + resp = yield self.handler.bulk_get_publicised_groups( + content["user_ids"], proxy=False, + ) + + defer.returnValue((200, resp)) + + FEDERATION_SERVLET_CLASSES = ( FederationSendServlet, FederationPullServlet, @@ -1102,6 +1118,7 @@ GROUP_SERVER_SERVLET_CLASSES = ( GROUP_LOCAL_SERVLET_CLASSES = ( FederationGroupsLocalInviteServlet, FederationGroupsRemoveLocalUserServlet, + FederationGroupsBulkPublicisedServlet, ) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 3a738ef36..c980623bb 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -313,3 +313,45 @@ class GroupsLocalHandler(object): def get_joined_groups(self, user_id): group_ids = yield self.store.get_joined_groups(user_id) defer.returnValue({"groups": group_ids}) + + @defer.inlineCallbacks + def get_publicised_groups_for_user(self, user_id): + if self.hs.is_mine_id(user_id): + result = yield self.store.get_publicised_groups_for_user(user_id) + defer.returnValue({"groups": result}) + else: + result = yield self.transport_client.get_publicised_groups_for_user( + get_domain_from_id(user_id), user_id + ) + # TODO: Verify attestations + defer.returnValue(result) + + @defer.inlineCallbacks + def bulk_get_publicised_groups(self, user_ids, proxy=True): + destinations = {} + locals = [] + + for user_id in user_ids: + if self.hs.is_mine_id(user_id): + locals.append(user_id) + else: + destinations.setdefault( + get_domain_from_id(user_id), [] + ).append(user_id) + + if not proxy and destinations: + raise SynapseError(400, "Some user_ids are not local") + + results = {} + for destination,
From ef8e5786770ff285ebdf1fce420b5aa86437673c Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 9 Aug 2017 13:36:22 +0100
Subject: [PATCH 060/223] Add bulk group publicised lookup API

---
 synapse/federation/transport/client.py | 15 +++++++
 synapse/federation/transport/server.py | 17 ++++++++
 synapse/handlers/groups_local.py       | 42 ++++++++++++++++++++
 synapse/rest/client/v2_alpha/groups.py | 54 ++++++++++++++++++++++++++
 synapse/storage/group_server.py        | 14 +++++++
 5 files changed, 142 insertions(+)

diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 073d3abb2..ce68cc493 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -812,3 +812,18 @@ class TransportLayerClient(object):
             args={"requester_user_id": requester_user_id},
             ignore_backoff=True,
         )
+
+    def bulk_get_publicised_groups(self, destination, user_ids):
+        """Get the groups a list of users are publicising
+        """
+
+        path = PREFIX + "/get_groups_publicised"
+
+        content = {"user_ids": user_ids}
+
+        return self.client.post_json(
+            destination=destination,
+            path=path,
+            data=content,
+            ignore_backoff=True,
+        )
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index e04750fd2..b5f07c50b 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -1050,6 +1050,22 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet):
         defer.returnValue((200, resp))
 
 
+class FederationGroupsBulkPublicisedServlet(BaseFederationServlet):
+    """Bulk lookup of which groups a list of users are publicising
+    """
+    PATH = (
+        "/get_groups_publicised$"
+    )
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query):
+        resp = yield self.handler.bulk_get_publicised_groups(
+            content["user_ids"], proxy=False,
+        )
+
+        defer.returnValue((200, resp))
+
+
 FEDERATION_SERVLET_CLASSES = (
     FederationSendServlet,
     FederationPullServlet,
@@ -1102,6 +1118,7 @@ GROUP_SERVER_SERVLET_CLASSES = (
 GROUP_LOCAL_SERVLET_CLASSES = (
     FederationGroupsLocalInviteServlet,
     FederationGroupsRemoveLocalUserServlet,
+    FederationGroupsBulkPublicisedServlet,
 )
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index 3a738ef36..c980623bb 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -313,3 +313,45 @@ class GroupsLocalHandler(object):
     def get_joined_groups(self, user_id):
         group_ids = yield self.store.get_joined_groups(user_id)
         defer.returnValue({"groups": group_ids})
+
+    @defer.inlineCallbacks
+    def get_publicised_groups_for_user(self, user_id):
+        if self.hs.is_mine_id(user_id):
+            result = yield self.store.get_publicised_groups_for_user(user_id)
+            defer.returnValue({"groups": result})
+        else:
+            result = yield self.transport_client.get_publicised_groups_for_user(
+                get_domain_from_id(user_id), user_id
+            )
+            # TODO: Verify attestations
+            defer.returnValue(result)
+
+    @defer.inlineCallbacks
+    def bulk_get_publicised_groups(self, user_ids, proxy=True):
+        destinations = {}
+        locals = []
+
+        for user_id in user_ids:
+            if self.hs.is_mine_id(user_id):
+                locals.append(user_id)
+            else:
+                destinations.setdefault(
+                    get_domain_from_id(user_id), []
+                ).append(user_id)
+
+        if not proxy and destinations:
+            raise SynapseError(400, "Some user_ids are not local")
+
+        results = {}
+        for destination, dest_user_ids in destinations.iteritems():
+            r = yield self.transport_client.bulk_get_publicised_groups(
+                destination, dest_user_ids,
+            )
+            results.update(r)
+
+        for uid in locals:
+            results[uid] = yield self.store.get_publicised_groups_for_user(
+                uid
+            )
+
+        defer.returnValue({"users": results})
diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py
index 9b1116ace..97d7948bb 100644
--- a/synapse/rest/client/v2_alpha/groups.py
+++ b/synapse/rest/client/v2_alpha/groups.py
@@ -584,6 +584,59 @@ class GroupSelfUpdatePublicityServlet(RestServlet):
         defer.returnValue((200, {}))
 
 
+class PublicisedGroupsForUserServlet(RestServlet):
+    """Get the list of groups a user is advertising
+    """
+    PATTERNS = client_v2_patterns(
+        "/publicised_groups/(?P<user_id>[^/]*)$"
+    )
+
+    def __init__(self, hs):
+        super(PublicisedGroupsForUserServlet, self).__init__()
+        self.auth = hs.get_auth()
+        self.clock = hs.get_clock()
+        self.store = hs.get_datastore()
+        self.groups_handler = hs.get_groups_local_handler()
+
+    @defer.inlineCallbacks
+    def on_GET(self, request, user_id):
+        yield self.auth.get_user_by_req(request)
+
+        result = yield self.groups_handler.get_publicised_groups_for_user(
+            user_id
+        )
+
+        defer.returnValue((200, result))
+
+
+class PublicisedGroupsForUsersServlet(RestServlet):
+    """Get the list of groups a user is advertising
+    """
+    PATTERNS = client_v2_patterns(
+        "/publicised_groups$"
+    )
+
+    def __init__(self, hs):
+        super(PublicisedGroupsForUsersServlet, self).__init__()
+        self.auth = hs.get_auth()
+        self.clock = hs.get_clock()
+        self.store = hs.get_datastore()
+        self.groups_handler = hs.get_groups_local_handler()
+
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        yield self.auth.get_user_by_req(request)
+
+        content = parse_json_object_from_request(request)
+        user_ids = content["user_ids"]
+
+        result = yield self.groups_handler.bulk_get_publicised_groups(
+            user_ids
+        )
+
+        defer.returnValue((200, result))
+
+
 class GroupsForUserServlet(RestServlet):
     """Get all groups the logged in user is joined to
     """
@@ -627,3 +680,4 @@ def register_servlets(hs, http_server):
     GroupRolesServlet(hs).register(http_server)
     GroupSelfUpdatePublicityServlet(hs).register(http_server)
     GroupSummaryUsersRoleServlet(hs).register(http_server)
+    PublicisedGroupsForUserServlet(hs).register(http_server)
diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py
index 10e757e97..0c35b03d2 100644
--- a/synapse/storage/group_server.py
+++ b/synapse/storage/group_server.py
@@ -835,6 +835,20 @@ class GroupServerStore(SQLBaseStore):
             desc="add_room_to_group",
         )
 
+    def get_publicised_groups_for_user(self, user_id):
+        """Get all groups a user is publicising
+        """
+        return self._simple_select_onecol(
+            table="local_group_membership",
+            keyvalues={
+                "user_id": user_id,
+                "membership": "join",
+                "is_publicised": True,
+            },
+            retcol="group_id",
+            desc="get_publicised_groups_for_user",
+        )
+
     def update_group_publicity(self, group_id, user_id, publicise):
         """Update whether the user is publicising their membership of the group
         """

From ba3ff7918b54ae431aaaedb3d12650c93d366c04 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 11 Aug 2017 13:42:42 +0100
Subject: [PATCH 061/223] Fixup

---
 synapse/handlers/groups_local.py       | 22 +++++++++++++---------
 synapse/rest/client/v2_alpha/groups.py |  1 +
 2 files changed, 14 insertions(+), 9 deletions(-)

diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index c980623bb..274fed927 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -329,27 +329,31 @@ class GroupsLocalHandler(object):
     @defer.inlineCallbacks
     def bulk_get_publicised_groups(self, user_ids, proxy=True):
         destinations = {}
-        locals = []
+        local_users = set()
 
         for user_id in user_ids:
             if self.hs.is_mine_id(user_id):
-                locals.append(user_id)
+                local_users.add(user_id)
             else:
                 destinations.setdefault(
-                    get_domain_from_id(user_id), []
-                ).append(user_id)
+                    get_domain_from_id(user_id), set()
+                ).add(user_id)
 
         if not proxy and destinations:
             raise SynapseError(400, "Some user_ids are not local")
 
         results = {}
+        failed_results = []
         for destination, dest_user_ids in destinations.iteritems():
-            r = yield self.transport_client.bulk_get_publicised_groups(
-                destination, dest_user_ids,
-            )
-            results.update(r)
+            try:
+                r = yield self.transport_client.bulk_get_publicised_groups(
+                    destination, list(dest_user_ids),
+                )
+                results.update(r["users"])
+            except Exception:
+                failed_results.extend(dest_user_ids)
 
-        for uid in locals:
+        for uid in local_users:
             results[uid] = yield self.store.get_publicised_groups_for_user(
                 uid
             )
diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py
index 97d7948bb..b469058e9 100644
--- a/synapse/rest/client/v2_alpha/groups.py
+++ b/synapse/rest/client/v2_alpha/groups.py
@@ -681,3 +681,4 @@ def register_servlets(hs, http_server):
     GroupSelfUpdatePublicityServlet(hs).register(http_server)
     GroupSummaryUsersRoleServlet(hs).register(http_server)
     PublicisedGroupsForUserServlet(hs).register(http_server)
+    PublicisedGroupsForUsersServlet(hs).register(http_server)
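A corresponding client-side sketch for the bulk lookup (again assuming the
unstable client prefix and the requests library): POST the user IDs to
/publicised_groups and read back the per-user mapping. After the fixup above,
users on unreachable remote servers are simply left out of the result.

    import requests

    def bulk_get_publicised_groups(base_url, access_token, user_ids):
        # Returns {user_id: [group_id, ...]} for each queried user.
        resp = requests.post(
            "%s/_matrix/client/unstable/publicised_groups" % (base_url,),
            params={"access_token": access_token},
            json={"user_ids": user_ids},
        )
        resp.raise_for_status()
        return resp.json()["users"]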
""" txn.execute(sql, (from_token, to_token, limit,)) - return txn.fetchall() + return [{ + "stream_id": stream_id, + "group_id": group_id, + "user_id": user_id, + "type": gtype, + "content": json.loads(content_json), + } for stream_id, group_id, user_id, gtype, content_json in txn] return self.runInteraction( "get_all_groups_changes", _get_all_groups_changes_txn, ) From 8b50fe5330249fd24d50fa97385cd88ef6703d79 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 21 Aug 2017 13:18:23 +0100 Subject: [PATCH 063/223] Use BOOLEAN rather than TEXT type --- synapse/storage/schema/delta/43/group_server.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index 01ac0edc3..e74554381 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -150,7 +150,7 @@ CREATE TABLE local_group_membership ( user_id TEXT NOT NULL, is_admin BOOLEAN NOT NULL, membership TEXT NOT NULL, - is_publicised TEXT NOT NULL, -- if the user is publicising their membership + is_publicised BOOLEAN NOT NULL, -- if the user is publicising their membership content TEXT NOT NULL ); From 97c544f91f562f33b9655e7c8c8f980bac5ac658 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Aug 2017 11:11:37 +0100 Subject: [PATCH 064/223] Add _simple_update --- synapse/storage/_base.py | 51 +++++++++++++++++++++++++--------------- 1 file changed, 32 insertions(+), 19 deletions(-) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 6f54036d6..5124a833a 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -743,6 +743,33 @@ class SQLBaseStore(object): txn.execute(sql, values) return cls.cursor_to_dict(txn) + def _simple_update(self, table, keyvalues, updatevalues, desc): + return self.runInteraction( + desc, + self._simple_update_txn, + table, keyvalues, updatevalues, + ) + + @staticmethod + def _simple_update_txn(txn, table, keyvalues, updatevalues): + if keyvalues: + where = "WHERE %s" % " AND ".join("%s = ?" % k for k in keyvalues.iterkeys()) + else: + where = "" + + update_sql = "UPDATE %s SET %s %s" % ( + table, + ", ".join("%s = ?" % (k,) for k in updatevalues), + where, + ) + + txn.execute( + update_sql, + updatevalues.values() + keyvalues.values() + ) + + return txn.rowcount + def _simple_update_one(self, table, keyvalues, updatevalues, desc="_simple_update_one"): """Executes an UPDATE query on the named table, setting new values for @@ -768,27 +795,13 @@ class SQLBaseStore(object): table, keyvalues, updatevalues, ) - @staticmethod - def _simple_update_one_txn(txn, table, keyvalues, updatevalues): - if keyvalues: - where = "WHERE %s" % " AND ".join("%s = ?" % k for k in keyvalues.iterkeys()) - else: - where = "" + @classmethod + def _simple_update_one_txn(cls, txn, table, keyvalues, updatevalues): + rowcount = cls._simple_update_txn(txn, table, keyvalues, updatevalues) - update_sql = "UPDATE %s SET %s %s" % ( - table, - ", ".join("%s = ?" 
% (k,) for k in updatevalues), - where, - ) - - txn.execute( - update_sql, - updatevalues.values() + keyvalues.values() - ) - - if txn.rowcount == 0: + if rowcount == 0: raise StoreError(404, "No row found") - if txn.rowcount > 1: + if rowcount > 1: raise StoreError(500, "More than one row matched") @staticmethod From 27ebc5c8f299488ccc0a6f100ec3b248cd81a058 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Aug 2017 11:21:34 +0100 Subject: [PATCH 065/223] Add remote profile cache --- synapse/groups/groups_server.py | 18 ++++ synapse/handlers/groups_local.py | 17 +++- synapse/handlers/profile.py | 81 ++++++++++++++- synapse/storage/profile.py | 98 +++++++++++++++++++ .../storage/schema/delta/43/profile_cache.sql | 28 ++++++ 5 files changed, 237 insertions(+), 5 deletions(-) create mode 100644 synapse/storage/schema/delta/43/profile_cache.sql diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index f25f327eb..6bccae4bf 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -503,6 +503,13 @@ class GroupsServerHandler(object): get_domain_from_id(user_id), group_id, user_id, content ) + user_profile = res.get("user_profile", {}) + yield self.store.add_remote_profile_cache( + user_id, + displayname=user_profile.get("displayname"), + avatar_url=user_profile.get("avatar_url"), + ) + if res["state"] == "join": if not self.hs.is_mine_id(user_id): remote_attestation = res["attestation"] @@ -627,6 +634,9 @@ class GroupsServerHandler(object): get_domain_from_id(user_id), group_id, user_id, {} ) + if not self.hs.is_mine_id(user_id): + yield self.store.maybe_delete_remote_profile_cache(user_id) + defer.returnValue({}) @defer.inlineCallbacks @@ -647,6 +657,7 @@ class GroupsServerHandler(object): avatar_url = profile.get("avatar_url") short_description = profile.get("short_description") long_description = profile.get("long_description") + user_profile = content.get("user_profile", {}) yield self.store.create_group( group_id, @@ -679,6 +690,13 @@ class GroupsServerHandler(object): remote_attestation=remote_attestation, ) + if not self.hs.is_mine_id(user_id): + yield self.store.add_remote_profile_cache( + user_id, + displayname=user_profile.get("displayname"), + avatar_url=user_profile.get("avatar_url"), + ) + defer.returnValue({ "group_id": group_id, }) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 274fed927..bfa10bde5 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -56,6 +56,9 @@ class GroupsLocalHandler(object): self.notifier = hs.get_notifier() self.attestations = hs.get_groups_attestation_signing() + handlers = hs.get_handlers() + self.profile_handler = handlers.profile_handler + # Ensure attestations get renewed hs.get_groups_attestation_renewer() @@ -123,6 +126,7 @@ class GroupsLocalHandler(object): defer.returnValue(res) + @defer.inlineCallbacks def create_group(self, group_id, user_id, content): """Create a group """ @@ -130,13 +134,16 @@ class GroupsLocalHandler(object): logger.info("Asking to create group with ID: %r", group_id) if self.is_mine_id(group_id): - return self.groups_server_handler.create_group( + res = yield self.groups_server_handler.create_group( group_id, user_id, content ) + defer.returnValue(res) - return self.transport_client.create_group( + content["user_profile"] = yield self.profile_handler.get_profile(user_id) + res = yield self.transport_client.create_group( get_domain_from_id(group_id), group_id, user_id, content, - ) # TODO + 
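To make the new helper concrete, here is a worked expansion (an editorial
illustration, mirroring the logic in the hunk above). A call such as

    self._simple_update(
        table="local_group_membership",
        keyvalues={"group_id": group_id, "user_id": user_id},
        updatevalues={"is_publicised": True},
        desc="update_group_publicity",
    )

builds the parameterised statement

    UPDATE local_group_membership SET is_publicised = ? WHERE group_id = ? AND user_id = ?

(column order following the dicts) and executes it with the values
[True, group_id, user_id]. Returning txn.rowcount is what lets
_simple_update_one_txn raise a 404 when no row matched and a 500 when more
than one did.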
From 27ebc5c8f299488ccc0a6f100ec3b248cd81a058 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 25 Aug 2017 11:21:34 +0100
Subject: [PATCH 065/223] Add remote profile cache

---
 synapse/groups/groups_server.py               | 18 ++++
 synapse/handlers/groups_local.py              | 17 +++-
 synapse/handlers/profile.py                   | 81 ++++++++++++++-
 synapse/storage/profile.py                    | 98 +++++++++++++++++++
 .../storage/schema/delta/43/profile_cache.sql | 28 ++++++
 5 files changed, 237 insertions(+), 5 deletions(-)
 create mode 100644 synapse/storage/schema/delta/43/profile_cache.sql

diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index f25f327eb..6bccae4bf 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -503,6 +503,13 @@ class GroupsServerHandler(object):
                 get_domain_from_id(user_id), group_id, user_id, content
             )
 
+            user_profile = res.get("user_profile", {})
+            yield self.store.add_remote_profile_cache(
+                user_id,
+                displayname=user_profile.get("displayname"),
+                avatar_url=user_profile.get("avatar_url"),
+            )
+
         if res["state"] == "join":
             if not self.hs.is_mine_id(user_id):
                 remote_attestation = res["attestation"]
@@ -627,6 +634,9 @@ class GroupsServerHandler(object):
                 get_domain_from_id(user_id), group_id, user_id, {}
             )
 
+        if not self.hs.is_mine_id(user_id):
+            yield self.store.maybe_delete_remote_profile_cache(user_id)
+
         defer.returnValue({})
 
     @defer.inlineCallbacks
@@ -647,6 +657,7 @@ class GroupsServerHandler(object):
         avatar_url = profile.get("avatar_url")
         short_description = profile.get("short_description")
         long_description = profile.get("long_description")
+        user_profile = content.get("user_profile", {})
 
         yield self.store.create_group(
             group_id,
@@ -679,6 +690,13 @@ class GroupsServerHandler(object):
                 remote_attestation=remote_attestation,
             )
 
+            if not self.hs.is_mine_id(user_id):
+                yield self.store.add_remote_profile_cache(
+                    user_id,
+                    displayname=user_profile.get("displayname"),
+                    avatar_url=user_profile.get("avatar_url"),
+                )
+
         defer.returnValue({
             "group_id": group_id,
         })
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index 274fed927..bfa10bde5 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -56,6 +56,9 @@ class GroupsLocalHandler(object):
         self.notifier = hs.get_notifier()
         self.attestations = hs.get_groups_attestation_signing()
 
+        handlers = hs.get_handlers()
+        self.profile_handler = handlers.profile_handler
+
         # Ensure attestations get renewed
         hs.get_groups_attestation_renewer()
 
@@ -123,6 +126,7 @@ class GroupsLocalHandler(object):
 
         defer.returnValue(res)
 
+    @defer.inlineCallbacks
     def create_group(self, group_id, user_id, content):
         """Create a group
         """
@@ -130,13 +134,16 @@ class GroupsLocalHandler(object):
         logger.info("Asking to create group with ID: %r", group_id)
 
         if self.is_mine_id(group_id):
-            return self.groups_server_handler.create_group(
+            res = yield self.groups_server_handler.create_group(
                 group_id, user_id, content
             )
+            defer.returnValue(res)
 
-        return self.transport_client.create_group(
+        content["user_profile"] = yield self.profile_handler.get_profile(user_id)
+        res = yield self.transport_client.create_group(
             get_domain_from_id(group_id), group_id, user_id, content,
-        )  # TODO
+        )
+        defer.returnValue(res)
 
     @defer.inlineCallbacks
     def get_users_in_group(self, group_id, requester_user_id):
@@ -265,7 +272,9 @@ class GroupsLocalHandler(object):
             "groups_key", token, users=[user_id],
         )
 
-        defer.returnValue({"state": "invite"})
+        user_profile = yield self.profile_handler.get_profile(user_id)
+
+        defer.returnValue({"state": "invite", "user_profile": user_profile})
 
     @defer.inlineCallbacks
     def remove_user_from_group(self, group_id, user_id, requester_user_id, content):
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 7abee98de..57e22edb0 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -19,7 +19,7 @@ from twisted.internet import defer
 
 import synapse.types
 from synapse.api.errors import SynapseError, AuthError, CodeMessageException
-from synapse.types import UserID
+from synapse.types import UserID, get_domain_from_id
 from ._base import BaseHandler
 
 
@@ -27,15 +27,53 @@ logger = logging.getLogger(__name__)
 
 
 class ProfileHandler(BaseHandler):
+    PROFILE_UPDATE_MS = 60 * 1000
+    PROFILE_UPDATE_EVERY_MS = 24 * 60 * 60 * 1000
 
     def __init__(self, hs):
         super(ProfileHandler, self).__init__(hs)
 
+        self.clock = hs.get_clock()
+
         self.federation = hs.get_replication_layer()
         self.federation.register_query_handler(
             "profile", self.on_profile_query
         )
 
+        self.clock.looping_call(self._update_remote_profile_cache, self.PROFILE_UPDATE_MS)
+
+    @defer.inlineCallbacks
+    def get_profile(self, user_id):
+        target_user = UserID.from_string(user_id)
+        if self.hs.is_mine(target_user):
+            displayname = yield self.store.get_profile_displayname(
+                target_user.localpart
+            )
+            avatar_url = yield self.store.get_profile_avatar_url(
+                target_user.localpart
+            )
+
+            defer.returnValue({
+                "displayname": displayname,
+                "avatar_url": avatar_url,
+            })
+        else:
+            try:
+                result = yield self.federation.make_query(
+                    destination=target_user.domain,
+                    query_type="profile",
+                    args={
+                        "user_id": user_id,
+                    },
+                    ignore_backoff=True,
+                )
+                defer.returnValue(result)
+            except CodeMessageException as e:
+                if e.code != 404:
+                    logger.exception("Failed to get displayname")
+
+                raise
+
     @defer.inlineCallbacks
     def get_displayname(self, target_user):
         if self.hs.is_mine(target_user):
@@ -182,3 +220,44 @@ class ProfileHandler(BaseHandler):
                     "Failed to update join event for room %s - %s",
                     room_id, str(e.message)
                 )
+
+    def _update_remote_profile_cache(self):
+        """Called periodically to check profiles of remote users we havent'
+        checked in a while.
+        """
+        entries = yield self.store.get_remote_profile_cache_entries_that_expire(
+            last_checked=self.clock.time_msec() - self.PROFILE_UPDATE_EVERY_MS
+        )
+
+        for user_id, displayname, avatar_url in entries:
+            is_subcscribed = yield self.store.is_subscribed_remote_profile_for_user(
+                user_id,
+            )
+            if not is_subcscribed:
+                yield self.store.maybe_delete_remote_profile_cache(user_id)
+                continue
+
+            try:
+                profile = yield self.federation.make_query(
+                    destination=get_domain_from_id(user_id),
+                    query_type="profile",
+                    args={
+                        "user_id": user_id,
+                    },
+                    ignore_backoff=True,
+                )
+            except:
+                logger.exception("Failed to get avatar_url")
+
+                yield self.store.update_remote_profile_cache(
+                    user_id, displayname, avatar_url
+                )
+                continue
+
+            new_name = profile.get("displayname")
+            new_avatar = profile.get("avatar_url")
+
+            # We always hit update to update the last_check timestamp
+            yield self.store.update_remote_profile_cache(
+                user_id, new_name, new_avatar
+            )
diff --git a/synapse/storage/profile.py b/synapse/storage/profile.py
index 26a40905a..dca6af8a7 100644
--- a/synapse/storage/profile.py
+++ b/synapse/storage/profile.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from twisted.internet import defer
+
 from ._base import SQLBaseStore
 
 
@@ -55,3 +57,99 @@ class ProfileStore(SQLBaseStore):
             updatevalues={"avatar_url": new_avatar_url},
             desc="set_profile_avatar_url",
         )
+
+    def get_from_remote_profile_cache(self, user_id):
+        return self._simple_select_one(
+            table="remote_profile_cache",
+            keyvalues={"user_id": user_id},
+            retcols=("displayname", "avatar_url", "last_check"),
+            allow_none=True,
+            desc="get_from_remote_profile_cache",
+        )
+
+    def add_remote_profile_cache(self, user_id, displayname, avatar_url):
+        """Ensure we are caching the remote user's profiles.
+
+        This should only be called when `is_subscribed_remote_profile_for_user`
+        would return true for the user.
+        """
+        return self._simple_upsert(
+            table="remote_profile_cache",
+            keyvalues={"user_id": user_id},
+            values={
+                "displayname": displayname,
+                "avatar_url": avatar_url,
+                "last_check": self._clock.time_msec(),
+            },
+            desc="add_remote_profile_cache",
+        )
+
+    def update_remote_profile_cache(self, user_id, displayname, avatar_url):
+        return self._simple_update(
+            table="remote_profile_cache",
+            keyvalues={"user_id": user_id},
+            updatevalues={
+                "displayname": displayname,
+                "avatar_url": avatar_url,
+                "last_check": self._clock.time_msec(),
+            },
+            desc="update_remote_profile_cache",
+        )
+
+    @defer.inlineCallbacks
+    def maybe_delete_remote_profile_cache(self, user_id):
+        """Check if we still care about the remote user's profile, and if we
+        don't then remove their profile from the cache
+        """
+        subscribed = yield self.is_subscribed_remote_profile_for_user(user_id)
+        if not subscribed:
+            yield self._simple_delete(
+                table="remote_profile_cache",
+                keyvalues={"user_id": user_id},
+                desc="delete_remote_profile_cache",
+            )
+
+    def get_remote_profile_cache_entries_that_expire(self, last_checked):
+        """Get all users who haven't been checked since `last_checked`
+        """
+        def _get_remote_profile_cache_entries_that_expire_txn(txn):
+            sql = """
+                SELECT user_id, displayname, avatar_url
+                FROM remote_profile_cache
+                WHERE last_check < ?
+            """
+
+            txn.execute(sql, (last_checked,))
+
+            return self.cursor_to_dict(txn)
+
+        return self.runInteraction(
+            "get_remote_profile_cache_entries_that_expire",
+            _get_remote_profile_cache_entries_that_expire_txn,
+        )
+
+    @defer.inlineCallbacks
+    def is_subscribed_remote_profile_for_user(self, user_id):
+        """Check whether we are interested in a remote user's profile.
+        """
+        res = yield self._simple_select_one_onecol(
+            table="group_users",
+            keyvalues={"user_id": user_id},
+            retcol="user_id",
+            allow_none=True,
+            desc="should_update_remote_profile_cache_for_user",
+        )
+
+        if res:
+            defer.returnValue(True)
+
+        res = yield self._simple_select_one_onecol(
+            table="group_invites",
+            keyvalues={"user_id": user_id},
+            retcol="user_id",
+            allow_none=True,
+            desc="should_update_remote_profile_cache_for_user",
+        )
+
+        if res:
+            defer.returnValue(True)
diff --git a/synapse/storage/schema/delta/43/profile_cache.sql b/synapse/storage/schema/delta/43/profile_cache.sql
new file mode 100644
index 000000000..e5ddc84df
--- /dev/null
+++ b/synapse/storage/schema/delta/43/profile_cache.sql
@@ -0,0 +1,28 @@
+/* Copyright 2017 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+-- A subset of remote users whose profiles we have cached.
+-- Whether a user is in this table or not is defined by the storage function
+-- `is_subscribed_remote_profile_for_user`
+CREATE TABLE remote_profile_cache (
+    user_id TEXT NOT NULL,
+    displayname TEXT,
+    avatar_url TEXT,
+    last_check BIGINT NOT NULL
+);
+
+CREATE UNIQUE INDEX remote_profile_cache_user_id ON remote_profile_cache(user_id);
+CREATE INDEX remote_profile_cache_time ON remote_profile_cache(last_check);
From bf81f3cf2c3e5b1d96953f3116c22aee05fb79b3 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 25 Aug 2017 14:34:56 +0100
Subject: [PATCH 066/223] Split out profile handler to fix tests

---
 synapse/handlers/__init__.py         |  2 --
 synapse/handlers/groups_local.py     |  3 +--
 synapse/handlers/message.py          |  3 ++-
 synapse/handlers/profile.py          | 13 ++++++++-----
 synapse/handlers/register.py         |  4 ++--
 synapse/handlers/room_member.py      |  4 +++-
 synapse/rest/client/v1/profile.py    | 18 +++++++++---------
 synapse/server.py                    |  5 +++++
 tests/handlers/test_profile.py       |  4 +---
 tests/handlers/test_register.py      |  5 +++--
 tests/rest/client/v1/test_profile.py |  3 +--
 11 files changed, 35 insertions(+), 29 deletions(-)

diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py
index 5ad408f54..53213cdcc 100644
--- a/synapse/handlers/__init__.py
+++ b/synapse/handlers/__init__.py
@@ -20,7 +20,6 @@ from .room import (
 from .room_member import RoomMemberHandler
 from .message import MessageHandler
 from .federation import FederationHandler
-from .profile import ProfileHandler
 from .directory import DirectoryHandler
 from .admin import AdminHandler
 from .identity import IdentityHandler
@@ -52,7 +51,6 @@ class Handlers(object):
         self.room_creation_handler = RoomCreationHandler(hs)
         self.room_member_handler = RoomMemberHandler(hs)
         self.federation_handler = FederationHandler(hs)
-        self.profile_handler = ProfileHandler(hs)
         self.directory_handler = DirectoryHandler(hs)
         self.admin_handler = AdminHandler(hs)
         self.identity_handler = IdentityHandler(hs)
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index bfa10bde5..1950c12f1 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -56,8 +56,7 @@ class GroupsLocalHandler(object):
         self.notifier = hs.get_notifier()
         self.attestations = hs.get_groups_attestation_signing()
 
-        handlers = hs.get_handlers()
-        self.profile_handler = handlers.profile_handler
+        self.profile_handler = hs.get_profile_handler()
 
         # Ensure attestations get renewed
         hs.get_groups_attestation_renewer()
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index be4f123c5..5b8f20b73 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -47,6 +47,7 @@ class MessageHandler(BaseHandler):
         self.state = hs.get_state_handler()
         self.clock = hs.get_clock()
         self.validator = EventValidator()
+        self.profile_handler = hs.get_profile_handler()
 
         self.pagination_lock = ReadWriteLock()
 
@@ -210,7 +211,7 @@ class MessageHandler(BaseHandler):
 
         if membership in {Membership.JOIN, Membership.INVITE}:
             # If event doesn't include a display name, add one.
-            profile = self.hs.get_handlers().profile_handler
+            profile = self.profile_handler
             content = builder.content
 
             try:
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 57e22edb0..5e34501c7 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -22,18 +22,21 @@ from synapse.api.errors import SynapseError, AuthError, CodeMessageException
 from synapse.types import UserID, get_domain_from_id
 from ._base import BaseHandler
 
-
 logger = logging.getLogger(__name__)
 
 
-class ProfileHandler(BaseHandler):
+class ProfileHandler(object):
     PROFILE_UPDATE_MS = 60 * 1000
     PROFILE_UPDATE_EVERY_MS = 24 * 60 * 60 * 1000
 
     def __init__(self, hs):
-        super(ProfileHandler, self).__init__(hs)
-
+        self.hs = hs
+        self.store = hs.get_datastore()
         self.clock = hs.get_clock()
+        self.ratelimiter = hs.get_ratelimiter()
+
+        # AWFUL hack to get at BaseHandler.ratelimit
+        self.base_handler = BaseHandler(hs)
 
         self.federation = hs.get_replication_layer()
         self.federation.register_query_handler(
@@ -194,7 +197,7 @@ class ProfileHandler(BaseHandler):
         if not self.hs.is_mine(user):
             return
 
-        yield self.ratelimit(requester)
+        yield self.base_handler.ratelimit(requester)
 
         room_ids = yield self.store.get_rooms_for_user(
             user.to_string(),
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index ee3a2269a..560fb3625 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -36,6 +36,7 @@ class RegistrationHandler(BaseHandler):
         super(RegistrationHandler, self).__init__(hs)
 
         self.auth = hs.get_auth()
+        self.profile_handler = hs.get_profile_handler()
         self.captcha_client = CaptchaServerHttpClient(hs)
 
         self._next_generated_user_id = None
@@ -423,8 +424,7 @@ class RegistrationHandler(BaseHandler):
         if displayname is not None:
             logger.info("setting user display name: %s -> %s", user_id, displayname)
-            profile_handler = self.hs.get_handlers().profile_handler
-            yield profile_handler.set_displayname(
+            yield self.profile_handler.set_displayname(
                 user, requester, displayname, by_admin=True,
             )
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index b3f979b24..dadc19d45 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -45,6 +45,8 @@ class RoomMemberHandler(BaseHandler):
     def __init__(self, hs):
         super(RoomMemberHandler, self).__init__(hs)
 
+        self.profile_handler = hs.get_profile_handler()
+
         self.member_linearizer = Linearizer(name="member")
 
         self.clock = hs.get_clock()
@@ -255,7 +257,7 @@ class RoomMemberHandler(BaseHandler):
 
             content["membership"] = Membership.JOIN
 
-            profile = self.hs.get_handlers().profile_handler
+            profile = self.profile_handler
             if not content_specified:
                 content["displayname"] = yield profile.get_displayname(target)
                 content["avatar_url"] = yield profile.get_avatar_url(target)
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index 1a5045c9e..d7edc3424 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -26,13 +26,13 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
 
     def __init__(self, hs):
         super(ProfileDisplaynameRestServlet, self).__init__(hs)
-        self.handlers = hs.get_handlers()
+        self.profile_handler = hs.get_profile_handler()
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
         user = UserID.from_string(user_id)
 
-        displayname = yield self.handlers.profile_handler.get_displayname(
+        displayname = yield self.profile_handler.get_displayname(
             user,
         )
 
@@ -55,7 +55,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
         except:
             defer.returnValue((400, "Unable to parse name"))
 
-        yield self.handlers.profile_handler.set_displayname(
+        yield self.profile_handler.set_displayname(
             user, requester, new_name, is_admin)
 
         defer.returnValue((200, {}))
@@ -69,13 +69,13 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
 
     def __init__(self, hs):
         super(ProfileAvatarURLRestServlet, self).__init__(hs)
-        self.handlers = hs.get_handlers()
+        self.profile_handler = hs.get_profile_handler()
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
         user = UserID.from_string(user_id)
 
-        avatar_url = yield self.handlers.profile_handler.get_avatar_url(
+        avatar_url = yield self.profile_handler.get_avatar_url(
             user,
         )
 
@@ -97,7 +97,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
         except:
             defer.returnValue((400, "Unable to parse name"))
 
-        yield self.handlers.profile_handler.set_avatar_url(
+        yield self.profile_handler.set_avatar_url(
             user, requester, new_name, is_admin)
 
         defer.returnValue((200, {}))
@@ -111,16 +111,16 @@ class ProfileRestServlet(ClientV1RestServlet):
 
     def __init__(self, hs):
         super(ProfileRestServlet, self).__init__(hs)
-        self.handlers = hs.get_handlers()
+        self.profile_handler = hs.get_profile_handler()
 
     @defer.inlineCallbacks
     def on_GET(self, request, user_id):
         user = UserID.from_string(user_id)
 
-        displayname = yield self.handlers.profile_handler.get_displayname(
+        displayname = yield self.profile_handler.get_displayname(
             user,
         )
-        avatar_url = yield self.handlers.profile_handler.get_avatar_url(
+        avatar_url = yield self.profile_handler.get_avatar_url(
             user,
         )
diff --git a/synapse/server.py b/synapse/server.py
index d0a627276..5b892cc39 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -51,6 +51,7 @@ from synapse.handlers.receipts import ReceiptsHandler
 from synapse.handlers.read_marker import ReadMarkerHandler
 from synapse.handlers.user_directory import UserDirectoyHandler
 from synapse.handlers.groups_local import GroupsLocalHandler
+from synapse.handlers.profile import ProfileHandler
 from synapse.groups.groups_server import GroupsServerHandler
 from synapse.groups.attestations import GroupAttestionRenewer, GroupAttestationSigning
 from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory
@@ -114,6 +115,7 @@ class HomeServer(object):
         'application_service_scheduler',
         'application_service_handler',
         'device_message_handler',
+        'profile_handler',
         'notifier',
         'distributor',
         'client_resource',
@@ -258,6 +260,9 @@ class HomeServer(object):
     def build_initial_sync_handler(self):
         return InitialSyncHandler(self)
 
+    def build_profile_handler(self):
+        return ProfileHandler(self)
+
     def build_event_sources(self):
         return EventSources(self)
 
diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py
index 2a203129c..a5f47181d 100644
--- a/tests/handlers/test_profile.py
+++ b/tests/handlers/test_profile.py
@@ -62,8 +62,6 @@ class ProfileTestCase(unittest.TestCase):
         self.ratelimiter = hs.get_ratelimiter()
         self.ratelimiter.send_message.return_value = (True, 0)
 
-        hs.handlers = ProfileHandlers(hs)
-
         self.store = hs.get_datastore()
 
         self.frank = UserID.from_string("@1234ABCD:test")
@@ -72,7 +70,7 @@ class ProfileTestCase(unittest.TestCase):
 
         yield self.store.create_profile(self.frank.localpart)
 
-        self.handler = hs.get_handlers().profile_handler
+        self.handler = hs.get_profile_handler()
 
     @defer.inlineCallbacks
     def test_get_my_name(self):
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index c8cf9a63e..e990e4522 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -40,13 +40,14 @@ class RegistrationTestCase(unittest.TestCase):
         self.hs = yield setup_test_homeserver(
             handlers=None,
             http_client=None,
-            expire_access_token=True)
+            expire_access_token=True,
+            profile_handler=Mock(),
+        )
         self.macaroon_generator = Mock(
             generate_access_token=Mock(return_value='secret'))
         self.hs.get_macaroon_generator = Mock(return_value=self.macaroon_generator)
         self.hs.handlers = RegistrationHandlers(self.hs)
         self.handler = self.hs.get_handlers().registration_handler
-        self.hs.get_handlers().profile_handler = Mock()
 
     @defer.inlineCallbacks
     def test_user_is_created_and_logged_in_if_doesnt_exist(self):
diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py
index 1e95e9753..dddcf51b6 100644
--- a/tests/rest/client/v1/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -46,6 +46,7 @@ class ProfileTestCase(unittest.TestCase):
             resource_for_client=self.mock_resource,
             federation=Mock(),
             replication_layer=Mock(),
+            profile_handler=self.mock_handler
         )
 
         def _get_user_by_req(request=None, allow_guest=False):
@@ -53,8 +54,6 @@ class ProfileTestCase(unittest.TestCase):
 
         hs.get_v1auth().get_user_by_req = _get_user_by_req
 
-        hs.get_handlers().profile_handler = self.mock_handler
-
         profile.register_servlets(hs, self.mock_resource)
 
     @defer.inlineCallbacks

From 258409ef6156782d0c15cd5d1c1620b5734f379c Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 25 Aug 2017 14:45:20 +0100
Subject: [PATCH 067/223] Fix typos and reinherit

---
 synapse/handlers/profile.py | 18 ++++++------------
 1 file changed, 6 insertions(+), 12 deletions(-)

diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 5e34501c7..c3cee38a4 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -25,18 +25,12 @@ from ._base import BaseHandler
 logger = logging.getLogger(__name__)
 
 
-class ProfileHandler(object):
+class ProfileHandler(BaseHandler):
     PROFILE_UPDATE_MS = 60 * 1000
     PROFILE_UPDATE_EVERY_MS = 24 * 60 * 60 * 1000
 
     def __init__(self, hs):
-        self.hs = hs
-        self.store = hs.get_datastore()
-        self.clock = hs.get_clock()
-        self.ratelimiter = hs.get_ratelimiter()
-
-        # AWFUL hack to get at BaseHandler.ratelimit
-        self.base_handler = BaseHandler(hs)
+        super(ProfileHandler, self).__init__(hs)
 
         self.federation = hs.get_replication_layer()
         self.federation.register_query_handler(
@@ -197,7 +191,7 @@ class ProfileHandler(object):
         if not self.hs.is_mine(user):
             return
 
-        yield self.base_handler.ratelimit(requester)
+        yield self.ratelimit(requester)
 
         room_ids = yield self.store.get_rooms_for_user(
             user.to_string(),
@@ -225,7 +219,7 @@ class ProfileHandler(object):
             )
 
     def _update_remote_profile_cache(self):
-        """Called periodically to check profiles of remote users we havent'
+        """Called periodically to check profiles of remote users we haven't
         checked in a while.
         """
         entries = yield self.store.get_remote_profile_cache_entries_that_expire(
@@ -233,10 +227,10 @@ class ProfileHandler(object):
         )
 
         for user_id, displayname, avatar_url in entries:
-            is_subcscribed = yield self.store.is_subscribed_remote_profile_for_user(
+            is_subscribed = yield self.store.is_subscribed_remote_profile_for_user(
                 user_id,
            )
-            if not is_subcscribed:
+            if not is_subscribed:
                 yield self.store.maybe_delete_remote_profile_cache(user_id)
                 continue
+ """ + target_user = UserID.from_string(user_id) + if self.hs.is_mine(target_user): + displayname = yield self.store.get_profile_displayname( + target_user.localpart + ) + avatar_url = yield self.store.get_profile_avatar_url( + target_user.localpart + ) + + defer.returnValue({ + "displayname": displayname, + "avatar_url": avatar_url, + }) + else: + profile = yield self.store.get_from_remote_profile_cache(user_id) + defer.returnValue(profile or {}) + @defer.inlineCallbacks def get_displayname(self, target_user): if self.hs.is_mine(target_user): diff --git a/synapse/storage/profile.py b/synapse/storage/profile.py index dca6af8a7..beea3102f 100644 --- a/synapse/storage/profile.py +++ b/synapse/storage/profile.py @@ -62,7 +62,7 @@ class ProfileStore(SQLBaseStore): return self._simple_select_one( table="remote_profile_cache", keyvalues={"user_id": user_id}, - retcols=("displayname", "avatar_url", "last_check"), + retcols=("displayname", "avatar_url",), allow_none=True, desc="get_from_remote_profile_cache", ) From 93e504d04e5e92ec1d54b3c7c860adc2cfb0e9f4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 19 Sep 2017 10:35:35 +0100 Subject: [PATCH 069/223] Ensure that creator of group sees group down /sync --- synapse/handlers/groups_local.py | 34 ++++++++++++++++++++++++++++---- synapse/handlers/sync.py | 1 + 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 1950c12f1..b4833f8ef 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -136,12 +136,38 @@ class GroupsLocalHandler(object): res = yield self.groups_server_handler.create_group( group_id, user_id, content ) - defer.returnValue(res) + local_attestation = None + remote_attestation = None + else: + local_attestation = self.attestations.create_attestation(group_id, user_id) + content["attestation"] = local_attestation - content["user_profile"] = yield self.profile_handler.get_profile(user_id) - res = yield self.transport_client.create_group( - get_domain_from_id(group_id), group_id, user_id, content, + content["user_profile"] = yield self.profile_handler.get_profile(user_id) + + res = yield self.transport_client.create_group( + get_domain_from_id(group_id), group_id, user_id, content, + ) + + remote_attestation = res["attestation"] + yield self.attestations.verify_attestation( + remote_attestation, + group_id=group_id, + user_id=user_id, + ) + + is_publicised = content.get("publicise", False) + token = yield self.store.register_user_group_membership( + group_id, user_id, + membership="join", + is_admin=True, + local_attestation=local_attestation, + remote_attestation=remote_attestation, + is_publicised=is_publicised, ) + self.notifier.on_new_event( + "groups_key", token, users=[user_id], + ) + defer.returnValue(res) @defer.inlineCallbacks diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index f2e4ffcec..69c1bc189 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -637,6 +637,7 @@ class SyncHandler(object): if membership == "join": if gtype == "membership": + # TODO: Add profile content.pop("membership", None) joined[group_id] = content["content"] else: From 069ae2df126418b5be1c96727a578cfd1dd4e506 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 20 Sep 2017 10:52:12 +0100 Subject: [PATCH 070/223] Fix initial sync --- synapse/storage/group_server.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py 
b/synapse/storage/group_server.py index 543306350..b0399f813 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -1085,7 +1085,15 @@ class GroupServerStore(SQLBaseStore): AND stream_id <= ? """ txn.execute(sql, (user_id, now_token,)) - return self.cursor_to_dict(txn) + return [ + { + "group_id": row[0], + "type": row[1], + "membership": row[2], + "content": json.loads(row[3]), + } + for row in txn + ] return self.runInteraction( "get_all_groups_for_user", _get_all_groups_for_user_txn, ) From 197d82dc070447b4a89a82816996f38f01ca7a04 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 20 Sep 2017 11:12:11 +0100 Subject: [PATCH 071/223] Correctly return next token --- synapse/storage/group_server.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index b0399f813..2afd689d8 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -966,10 +966,11 @@ class GroupServerStore(SQLBaseStore): return next_id with self._group_updates_id_gen.get_next() as next_id: - yield self.runInteraction( + res = yield self.runInteraction( "register_user_group_membership", _register_user_group_membership_txn, next_id, ) + defer.returnValue(res) @defer.inlineCallbacks def create_group(self, group_id, user_id, name, avatar_url, short_description, From ae8d4bb0f0e39fc10275bdcc69ba08d98497624f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 21 Sep 2017 15:55:18 +0100 Subject: [PATCH 072/223] Keep room_id's in group summary --- synapse/groups/groups_server.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 94cf9788b..25f48a11a 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -109,7 +109,6 @@ class GroupsServerHandler(object): room_id, len(joined_users), with_alias=False, allow_private=True, ) - entry.pop("room_id", None) room_entry["profile"] = entry From bb746a9de109a6c8643cbf21bbd876a67b9f5c9d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 21 Sep 2017 15:57:22 +0100 Subject: [PATCH 073/223] Revert: Keep room_id's in group summary --- synapse/groups/groups_server.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 25f48a11a..94cf9788b 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -109,6 +109,7 @@ class GroupsServerHandler(object): room_id, len(joined_users), with_alias=False, allow_private=True, ) + entry.pop("room_id", None) room_entry["profile"] = entry From e1dec2f1a797122b4d72ba883e09b2d1b9eafcc9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 21 Sep 2017 16:09:57 +0100 Subject: [PATCH 074/223] Remove user from group summary when the leave the group --- synapse/storage/group_server.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 2afd689d8..d0b5ad231 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -822,6 +822,14 @@ class GroupServerStore(SQLBaseStore): "user_id": user_id, }, ) + self._simple_delete_txn( + txn, + table="group_summary_users", + keyvalues={ + "group_id": group_id, + "user_id": user_id, + }, + ) return self.runInteraction("remove_user_from_group", _remove_user_from_group_txn) def add_room_to_group(self, group_id, room_id, is_public): From 95298783bb86e13da56e07fa3f73b6e2de053c08 Mon Sep 17 
00:00:00 2001 From: Erik Johnston Date: Tue, 26 Sep 2017 11:04:37 +0100 Subject: [PATCH 075/223] Add is_publicised to group summary --- synapse/handlers/groups_local.py | 50 ++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index b4833f8ef..14fdf06b5 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -96,32 +96,38 @@ class GroupsLocalHandler(object): res = yield self.groups_server_handler.get_group_summary( group_id, requester_user_id ) - defer.returnValue(res) + else: + res = yield self.transport_client.get_group_summary( + get_domain_from_id(group_id), group_id, requester_user_id, + ) - res = yield self.transport_client.get_group_summary( - get_domain_from_id(group_id), group_id, requester_user_id, - ) + # Loop through the users and validate the attestations. + chunk = res["users_section"]["users"] + valid_users = [] + for entry in chunk: + g_user_id = entry["user_id"] + attestation = entry.pop("attestation") + try: + yield self.attestations.verify_attestation( + attestation, + group_id=group_id, + user_id=g_user_id, + ) + valid_users.append(entry) + except Exception as e: + logger.info("Failed to verify user is in group: %s", e) - # Loop through the users and validate the attestations. - chunk = res["users_section"]["users"] - valid_users = [] - for entry in chunk: - g_user_id = entry["user_id"] - attestation = entry.pop("attestation") - try: - yield self.attestations.verify_attestation( - attestation, - group_id=group_id, - user_id=g_user_id, - ) - valid_users.append(entry) - except Exception as e: - logger.info("Failed to verify user is in group: %s", e) + res["users_section"]["users"] = valid_users - res["users_section"]["users"] = valid_users + res["users_section"]["users"].sort(key=lambda e: e.get("order", 0)) + res["rooms_section"]["rooms"].sort(key=lambda e: e.get("order", 0)) - res["users_section"]["users"].sort(key=lambda e: e.get("order", 0)) - res["rooms_section"]["rooms"].sort(key=lambda e: e.get("order", 0)) + # Add `is_publicised` flag to indicate whether the user has publicised their + # membership of the group on their profile + result = yield self.store.get_publicised_groups_for_user(requester_user_id) + is_publicised = group_id in result + + res.setdefault("user", {})["is_publicised"] = is_publicised defer.returnValue(res) From a8e2a3df32f3584d728021d5feafecf78b0f37d1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 26 Sep 2017 15:39:13 +0100 Subject: [PATCH 076/223] Add unique index to group_rooms table --- synapse/groups/groups_server.py | 2 -- synapse/storage/schema/delta/43/group_server.sql | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 94cf9788b..699d8a526 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -466,8 +466,6 @@ class GroupsServerHandler(object): group_id, and_exists=True, and_is_admin=requester_user_id ) - # TODO: Check if room has already been added - is_public = _parse_visibility_from_contents(content) yield self.store.add_room_to_group(group_id, room_id, is_public=is_public) diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/43/group_server.sql index e74554381..b2333848a 100644 --- a/synapse/storage/schema/delta/43/group_server.sql +++ b/synapse/storage/schema/delta/43/group_server.sql @@ -52,7 +52,7 @@ CREATE TABLE group_rooms 
( is_public BOOLEAN NOT NULL -- whether the room can be seen by everyone ); -CREATE INDEX groups_rooms_g_idx ON group_rooms(group_id, room_id); +CREATE UNIQUE INDEX groups_rooms_g_idx ON group_rooms(group_id, room_id); CREATE INDEX groups_rooms_r_idx ON group_rooms(room_id); From 17b8e2bd02ad0abbd25103b637eb8490f3a53507 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 26 Sep 2017 15:52:41 +0100 Subject: [PATCH 077/223] Add remove room API --- synapse/federation/transport/client.py | 12 ++++++++++++ synapse/federation/transport/server.py | 14 +++++++++++++- synapse/groups/groups_server.py | 12 ++++++++++++ synapse/handlers/groups_local.py | 1 + synapse/rest/client/v2_alpha/groups.py | 11 +++++++++++ synapse/storage/group_server.py | 23 +++++++++++++++++++++++ 6 files changed, 72 insertions(+), 1 deletion(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index ce68cc493..36f6eb75e 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -525,6 +525,18 @@ class TransportLayerClient(object): ignore_backoff=True, ) + def remove_room_from_group(self, destination, group_id, requester_user_id, room_id): + """Remove a room from a group + """ + path = PREFIX + "/groups/%s/room/%s" % (group_id, room_id,) + + return self.client.delete_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + @log_function def get_users_in_group(self, destination, group_id, requester_user_id): """Get users in a group diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index b5f07c50b..c7565e073 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -674,7 +674,7 @@ class FederationGroupsRoomsServlet(BaseFederationServlet): class FederationGroupsAddRoomsServlet(BaseFederationServlet): - """Add room to group + """Add/remove room from group """ PATH = "/groups/(?P[^/]*)/room/(?)$" @@ -690,6 +690,18 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet): defer.returnValue((200, new_content)) + @defer.inlineCallbacks + def on_DELETE(self, origin, content, query, group_id, room_id): + requester_user_id = parse_string_from_args(query, "requester_user_id") + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + new_content = yield self.handler.remove_room_from_group( + group_id, requester_user_id, room_id, + ) + + defer.returnValue((200, new_content)) + class FederationGroupsUsersServlet(BaseFederationServlet): """Get the users in a group on behalf of a user diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 699d8a526..10bf61d17 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -472,6 +472,18 @@ class GroupsServerHandler(object): defer.returnValue({}) + @defer.inlineCallbacks + def remove_room_from_group(self, group_id, requester_user_id, room_id): + """Remove room from group + """ + yield self.check_group_is_ours( + group_id, and_exists=True, and_is_admin=requester_user_id + ) + + yield self.store.remove_room_from_group(group_id, room_id) + + defer.returnValue({}) + @defer.inlineCallbacks def invite_to_group(self, group_id, user_id, requester_user_id, content): """Invite user to group diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 14fdf06b5..a2bacbfc3 100644 --- a/synapse/handlers/groups_local.py 
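A client-side sketch of the new removal path (an editorial illustration): the
admin/rooms path is an assumption inferred from GroupAdminRoomsServlet, whose
PATTERNS are defined outside this hunk, as is the unstable prefix.

    import requests

    def remove_room_from_group(base_url, access_token, group_id, room_id):
        # DELETE undoes the earlier PUT that added the room to the group.
        resp = requests.delete(
            "%s/_matrix/client/unstable/groups/%s/admin/rooms/%s"
            % (base_url, group_id, room_id),
            params={"access_token": access_token},
        )
        resp.raise_for_status()
        return resp.json()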
From 4824a33c31c32a055fc5b8ff4d1197c0bd3933c5 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Tue, 26 Sep 2017 17:51:26 +0100
Subject: [PATCH 078/223] Factor out module loading to a separate place

So it can be reused

---
 synapse/config/password_auth_providers.py | 22 ++++++++++------------
 1 file changed, 10 insertions(+), 12 deletions(-)

diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py
index 83762d089..90824cab7 100644
--- a/synapse/config/password_auth_providers.py
+++ b/synapse/config/password_auth_providers.py
@@ -15,13 +15,15 @@
 
 from ._base import Config, ConfigError
 
-import importlib
+from synapse.util.module_loader import load_module
 
 
 class PasswordAuthProviderConfig(Config):
     def read_config(self, config):
         self.password_providers = []
 
+        provider_config = None
+
         # We want to be backwards compatible with the old `ldap_config`
         # param.
         ldap_config = config.get("ldap_config", {})
@@ -38,19 +40,15 @@ class PasswordAuthProviderConfig(Config):
             if provider['module'] == "synapse.util.ldap_auth_provider.LdapAuthProvider":
                 from ldap_auth_provider import LdapAuthProvider
                 provider_class = LdapAuthProvider
+                try:
+                    provider_config = provider_class.parse_config(provider["config"])
+                except Exception as e:
+                    raise ConfigError(
+                        "Failed to parse config for %r: %r" % (provider['module'], e)
+                    )
             else:
-                # We need to import the module, and then pick the class out of
-                # that, so we split based on the last dot.
-                module, clz = provider['module'].rsplit(".", 1)
-                module = importlib.import_module(module)
-                provider_class = getattr(module, clz)
+                (provider_class, provider_config) = load_module(provider)
 
-            try:
-                provider_config = provider_class.parse_config(provider["config"])
-            except Exception as e:
-                raise ConfigError(
-                    "Failed to parse config for %r: %r" % (provider['module'], e)
-                )
             self.password_providers.append((provider_class, provider_config))
 
     def default_config(self, **kwargs):

From 0b03a9770829247055fe8eaf66c24bb1892a3c50 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Tue, 26 Sep 2017 17:56:41 +0100
Subject: [PATCH 079/223] Add module_loader.py

---
 synapse/util/module_loader.py | 41 +++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)
 create mode 100644 synapse/util/module_loader.py

diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py
new file mode 100644
index 000000000..b4464790e
--- /dev/null
+++ b/synapse/util/module_loader.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import importlib
+
+from synapse.config._base import ConfigError
+
+
+def load_module(provider):
+    """ Loads a module with its config
+    Takes a dict with keys 'module' (the module name) and 'config'
+    (the config dict).
+
+    Returns
+        Tuple of (provider class, parsed config object)
+    """
+    # We need to import the module, and then pick the class out of
+    # that, so we split based on the last dot.
+    module, clz = provider['module'].rsplit(".", 1)
+    module = importlib.import_module(module)
+    provider_class = getattr(module, clz)
+
+    try:
+        provider_config = provider_class.parse_config(provider["config"])
+    except Exception as e:
+        raise ConfigError(
+            "Failed to parse config for %r: %r" % (provider['module'], e)
+        )
+
+    return (provider_class, provider_config)

From 9fd086e506ae3cb3db7f1b1c7317c7602a4d71e3 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Tue, 26 Sep 2017 17:59:46 +0100
Subject: [PATCH 080/223] unnecessary parens

---
 synapse/util/module_loader.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py
index b4464790e..4b51d7a77 100644
--- a/synapse/util/module_loader.py
+++ b/synapse/util/module_loader.py
@@ -38,4 +38,4 @@ def load_module(provider):
             "Failed to parse config for %r: %r" % (provider['module'], e)
         )
 
-    return (provider_class, provider_config)
+    return provider_class, provider_config
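A usage sketch for the new helper (the provider module and its config below
are hypothetical, for illustration only):

    from synapse.util.module_loader import load_module

    provider = {
        "module": "my_package.auth.ExampleAuthProvider",  # hypothetical
        "config": {"endpoint": "https://auth.example.com"},
    }

    # Imports my_package.auth, pulls ExampleAuthProvider out of it, and
    # returns the class together with ExampleAuthProvider.parse_config(...);
    # a bad module path or config surfaces as a ConfigError.
    provider_class, provider_config = load_module(provider)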
- """ - if not hasattr(event, "content") or "body" not in event.content: - return False + Args: + event (synapse.events.EventBase): the event to be checked - # for example: - # - # if "the third flower is green" in event.content["body"]: - # return True + Returns: + bool: True if the event is spammy. + """ + if self.spam_checker is None: + return False - return False + return self.spam_checker.check_event_for_spam(event) diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index babd9ea07..a0f5d40eb 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -16,7 +16,6 @@ import logging from synapse.api.errors import SynapseError from synapse.crypto.event_signing import check_event_content_hash -from synapse.events import spamcheck from synapse.events.utils import prune_event from synapse.util import unwrapFirstError, logcontext from twisted.internet import defer @@ -26,7 +25,7 @@ logger = logging.getLogger(__name__) class FederationBase(object): def __init__(self, hs): - pass + self.spam_checker = hs.get_spam_checker() @defer.inlineCallbacks def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False, @@ -144,7 +143,7 @@ class FederationBase(object): ) return redacted - if spamcheck.check_event_for_spam(pdu): + if self.spam_checker.check_event_for_spam(pdu): logger.warn( "Event contains spam, redacting %s: %s", pdu.event_id, pdu.get_pdu_json() diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index da18bf23d..37f0a2772 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -12,7 +12,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.events import spamcheck from twisted.internet import defer from synapse.api.constants import EventTypes, Membership @@ -58,6 +57,8 @@ class MessageHandler(BaseHandler): self.action_generator = hs.get_action_generator() + self.spam_checker = hs.get_spam_checker() + @defer.inlineCallbacks def purge_history(self, room_id, event_id): event = yield self.store.get_event(event_id) @@ -322,7 +323,7 @@ class MessageHandler(BaseHandler): txn_id=txn_id ) - if spamcheck.check_event_for_spam(event): + if self.spam_checker.check_event_for_spam(event): raise SynapseError( 403, "Spam is not permitted here", Codes.FORBIDDEN ) diff --git a/synapse/server.py b/synapse/server.py index a38e5179e..4d44af745 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -31,6 +31,7 @@ from synapse.appservice.api import ApplicationServiceApi from synapse.appservice.scheduler import ApplicationServiceScheduler from synapse.crypto.keyring import Keyring from synapse.events.builder import EventBuilderFactory +from synapse.events.spamcheck import SpamChecker from synapse.federation import initialize_http_replication from synapse.federation.send_queue import FederationRemoteSendQueue from synapse.federation.transport.client import TransportLayerClient @@ -139,6 +140,7 @@ class HomeServer(object): 'read_marker_handler', 'action_generator', 'user_directory_handler', + 'spam_checker', ] def __init__(self, hostname, **kwargs): @@ -309,6 +311,9 @@ class HomeServer(object): def build_user_directory_handler(self): return UserDirectoyHandler(self) + def build_spam_checker(self): + return SpamChecker(self) + def remove_pusher(self, app_id, push_key, user_id): return self.get_pusherpool().remove_pusher(app_id, push_key, user_id) From 8ad5f34908df99804b27bd045fde5b9d5625d784 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 26 Sep 2017 19:21:41 +0100 Subject: [PATCH 082/223] pep8 --- synapse/util/module_loader.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py index 4b51d7a77..4288312b8 100644 --- a/synapse/util/module_loader.py +++ b/synapse/util/module_loader.py @@ -17,6 +17,7 @@ import importlib from synapse.config._base import ConfigError + def load_module(provider): """ Loads a module with its config Take a dict with keys 'module' (the module name) and 'config' From 1786b0e768877a608a6f44a6a37cc36e598eda4e Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 27 Sep 2017 10:22:54 +0100 Subject: [PATCH 083/223] Forgot the new file again :( --- synapse/config/spam_checker.py | 35 ++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 synapse/config/spam_checker.py diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py new file mode 100644 index 000000000..3fec42bdb --- /dev/null +++ b/synapse/config/spam_checker.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
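+# The module named by the "spam_checker" option is resolved with
+# load_module: the class's parse_config() is applied to the "config"
+# dict, and the server then instantiates the class as cls(config=<parsed>).
+# A minimal sketch of such a module, with hypothetical names:
+#
+#     class SuperSpamChecker(object):
+#         def __init__(self, config):
+#             self.config = config
+#
+#         @staticmethod
+#         def parse_config(config):
+#             return config
+#
+#         def check_event_for_spam(self, event):
+#             # Return True to have the event treated as spam.
+#             return (
+#                 "body" in event.content
+#                 and "the third flower is green" in event.content["body"]
+#             )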
+ +from synapse.util.module_loader import load_module + +from ._base import Config + + +class SpamCheckerConfig(Config): + def read_config(self, config): + self.spam_checker = None + + provider = config.get("spam_checker", None) + if provider is not None: + self.spam_checker = load_module(provider) + + def default_config(self, **kwargs): + return """\ + # spam_checker: + # module: "my_custom_project.SuperSpamChecker" + # config: + # example_option: 'things' + """ From 60c78666abbf82c3adfaa3bb4faf86f867eb18ea Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 27 Sep 2017 10:26:13 +0100 Subject: [PATCH 084/223] pep8 --- synapse/events/spamcheck.py | 1 + synapse/util/module_loader.py | 1 + 2 files changed, 2 insertions(+) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index 7b22b3413..a876bcb81 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + class SpamChecker(object): def __init__(self, hs): self.spam_checker = None diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py index 4b51d7a77..4288312b8 100644 --- a/synapse/util/module_loader.py +++ b/synapse/util/module_loader.py @@ -17,6 +17,7 @@ import importlib from synapse.config._base import ConfigError + def load_module(provider): """ Loads a module with its config Take a dict with keys 'module' (the module name) and 'config' From 8c06dd607165c109de23aa41098a8b45e02dcbd8 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 27 Sep 2017 10:31:14 +0100 Subject: [PATCH 085/223] Remove unintentional debugging --- synapse/events/spamcheck.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index a876bcb81..8ddbf2ca3 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -20,7 +20,6 @@ class SpamChecker(object): if hs.config.spam_checker is not None: module, config = hs.config.spam_checker - print("cfg %r", config) self.spam_checker = module(config=config) def check_event_for_spam(self, event): From ef3a5ae787e2fa25cc753b7c5dc9f31ba3bf4316 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 27 Sep 2017 11:24:19 +0100 Subject: [PATCH 086/223] Don't test is spam_checker not None Sometimes it's a Mock object which is not none but is still not what we're after --- synapse/events/spamcheck.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index 8ddbf2ca3..e739f105b 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -18,8 +18,14 @@ class SpamChecker(object): def __init__(self, hs): self.spam_checker = None - if hs.config.spam_checker is not None: + module = None + config = None + try: module, config = hs.config.spam_checker + except: + pass + + if module is not None: self.spam_checker = module(config=config) def check_event_for_spam(self, event): From adec03395d1c9a8e237a74ea420966bae8ea0002 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 27 Sep 2017 15:01:25 +0100 Subject: [PATCH 087/223] Fix bug where /joined_members didn't check user was in room --- synapse/handlers/message.py | 31 +++++++++++++++++++++++++++++++ synapse/rest/client/v1/room.py | 17 +++++++---------- 2 files changed, 38 insertions(+), 10 deletions(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 37f0a2772..f6740544c 100644 --- a/synapse/handlers/message.py +++ 
b/synapse/handlers/message.py @@ -419,6 +419,37 @@ class MessageHandler(BaseHandler): [serialize_event(c, now) for c in room_state.values()] ) + @defer.inlineCallbacks + def get_joined_members(self, user_id, room_id): + """Get all the joined members in the room and their profile information. + + If the user has left the room return the state events from when they left. + + Args: + user_id(str): The user requesting state events. + room_id(str): The room ID to get all state events from. + Returns: + A dict of user_id to profile info + """ + membership, membership_event_id = yield self._check_in_room_or_world_readable( + room_id, user_id + ) + + if membership == Membership.JOIN: + users_with_profile = yield self.state.get_current_user_in_room(room_id) + else: + raise NotImplementedError( + "Getting joined members after leaving is not implemented" + ) + + defer.returnValue({ + user_id: { + "avatar_url": profile.avatar_url, + "display_name": profile.display_name, + } + for user_id, profile in users_with_profile.iteritems() + }) + @measure_func("_create_new_client_event") @defer.inlineCallbacks def _create_new_client_event(self, builder, requester=None, prev_event_ids=None): diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index cd388770c..4be0fee38 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -398,22 +398,19 @@ class JoinedRoomMemberListRestServlet(ClientV1RestServlet): def __init__(self, hs): super(JoinedRoomMemberListRestServlet, self).__init__(hs) - self.state = hs.get_state_handler() + self.message_handler = hs.get_handlers().message_handler @defer.inlineCallbacks def on_GET(self, request, room_id): - yield self.auth.get_user_by_req(request) + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() - users_with_profile = yield self.state.get_current_user_in_room(room_id) + users_with_profile = yield self.message_handler.get_joined_members( + user_id, room_id, + ) defer.returnValue((200, { - "joined": { - user_id: { - "avatar_url": profile.avatar_url, - "display_name": profile.display_name, - } - for user_id, profile in users_with_profile.iteritems() - } + "joined": users_with_profile, })) From 8090fd4664de87bad636ace6774dad8c33bd5276 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 10:09:32 +0100 Subject: [PATCH 088/223] Fix /joined_members to work with AS users --- synapse/handlers/message.py | 36 +++++++++++++++++++++++----------- synapse/rest/client/v1/room.py | 3 +-- 2 files changed, 26 insertions(+), 13 deletions(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index f6740544c..ca8c6c55b 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -420,27 +420,41 @@ class MessageHandler(BaseHandler): ) @defer.inlineCallbacks - def get_joined_members(self, user_id, room_id): + def get_joined_members(self, requester, room_id): """Get all the joined members in the room and their profile information. If the user has left the room return the state events from when they left. Args: - user_id(str): The user requesting state events. + requester(Requester): The user requesting state events. room_id(str): The room ID to get all state events from. 
Returns: A dict of user_id to profile info """ - membership, membership_event_id = yield self._check_in_room_or_world_readable( - room_id, user_id - ) - - if membership == Membership.JOIN: - users_with_profile = yield self.state.get_current_user_in_room(room_id) - else: - raise NotImplementedError( - "Getting joined members after leaving is not implemented" + user_id = requester.user.to_string() + if not requester.app_service: + # We check AS auth after fetching the room membership, as it + # requires us to pull out all joined members anyway. + membership, _ = yield self._check_in_room_or_world_readable( + room_id, user_id ) + if membership != Membership.JOIN: + raise NotImplementedError( + "Getting joined members after leaving is not implemented" + ) + + users_with_profile = yield self.state.get_current_user_in_room(room_id) + + # If this is an AS, double check that they are allowed to see the members. + # This can either be because the AS user is in the room or becuase there + # is a user in the room that the AS is "interested in" + if requester.app_service and user_id not in users_with_profile: + for uid in users_with_profile: + if requester.app_service.is_interested_in_user(uid): + break + else: + # Loop fell through, AS has no interested users in room + raise AuthError(403, "Appservice not in room") defer.returnValue({ user_id: { diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 4be0fee38..6c379d53a 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -403,10 +403,9 @@ class JoinedRoomMemberListRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, room_id): requester = yield self.auth.get_user_by_req(request) - user_id = requester.user.to_string() users_with_profile = yield self.message_handler.get_joined_members( - user_id, room_id, + requester, room_id, ) defer.returnValue((200, { From 9ccb4226ba0824a1468c4e8f0abe91aca3381862 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 12:18:06 +0100 Subject: [PATCH 089/223] Delete expired url cache data --- synapse/rest/media/v1/filepath.py | 43 ++++++++- synapse/rest/media/v1/preview_url_resource.py | 90 ++++++++++++++++++- synapse/storage/media_repository.py | 61 +++++++++++++ synapse/storage/prepare_database.py | 2 +- .../schema/delta/44/expire_url_cache.sql | 17 ++++ 5 files changed, 208 insertions(+), 5 deletions(-) create mode 100644 synapse/storage/schema/delta/44/expire_url_cache.sql diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index d92b7ff33..c5d43209f 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -73,19 +73,58 @@ class MediaFilePaths(object): ) def url_cache_filepath(self, media_id): + # Media id is of the form + # E.g.: 2017-09-28-fsdRDt24DS234dsf return os.path.join( self.base_path, "url_cache", - media_id[0:2], media_id[2:4], media_id[4:] + media_id[:10], media_id[11:] ) + def url_cache_filepath_dirs_to_delete(self, media_id): + "The dirs to try and remove if we delete the media_id file" + return [ + os.path.join( + self.base_path, "url_cache", + media_id[:10], + ), + ] + def url_cache_thumbnail(self, media_id, width, height, content_type, method): + # Media id is of the form + # E.g.: 2017-09-28-fsdRDt24DS234dsf + top_level_type, sub_type = content_type.split("/") file_name = "%i-%i-%s-%s-%s" % ( width, height, top_level_type, sub_type, method ) + return os.path.join( self.base_path, "url_cache_thumbnails", - media_id[0:2], media_id[2:4], 
media_id[4:], + media_id[:10], media_id[11:], file_name ) + + def url_cache_thumbnail_directory(self, media_id): + # Media id is of the form + # E.g.: 2017-09-28-fsdRDt24DS234dsf + + return os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[:10], media_id[11:], + ) + + def url_cache_thumbnail_dirs_to_delete(self, media_id): + "The dirs to try and remove if we delete the media_id thumbnails" + # Media id is of the form + # E.g.: 2017-09-28-fsdRDt24DS234dsf + return [ + os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[:10], media_id[11:], + ), + os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[:10], + ), + ] diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index b81a336c5..c5ba83ddf 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -36,6 +36,9 @@ import cgi import ujson as json import urlparse import itertools +import datetime +import errno +import shutil import logging logger = logging.getLogger(__name__) @@ -70,6 +73,10 @@ class PreviewUrlResource(Resource): self.downloads = {} + self._cleaner_loop = self.clock.looping_call( + self._expire_url_cache_data, 30 * 10000 + ) + def render_GET(self, request): self._async_render_GET(request) return NOT_DONE_YET @@ -253,8 +260,7 @@ class PreviewUrlResource(Resource): # we're most likely being explicitly triggered by a human rather than a # bot, so are we really a robot? - # XXX: horrible duplication with base_resource's _download_remote_file() - file_id = random_string(24) + file_id = datetime.date.today().isoformat() + '_' + random_string(16) fname = self.filepaths.url_cache_filepath(file_id) self.media_repo._makedirs(fname) @@ -328,6 +334,86 @@ class PreviewUrlResource(Resource): "etag": headers["ETag"][0] if "ETag" in headers else None, }) + @defer.inlineCallbacks + def _expire_url_cache_data(self): + """Clean up expired url cache content, media and thumbnails. + """ + now = self.clock.time_msec() + + # First we delete expired url cache entries + media_ids = yield self.store.get_expired_url_cache(now) + + removed_media = [] + for media_id in media_ids: + fname = self.filepaths.url_cache_filepath(media_id) + try: + os.remove(fname) + except OSError as e: + # If the path doesn't exist, meh + if e.errno != errno.ENOENT: + logger.warn("Failed to remove media: %r: %s", media_id, e) + continue + + removed_media.append(media_id) + + try: + dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id) + for dir in dirs: + os.rmdir(dir) + except: + pass + + yield self.store.delete_url_cache(removed_media) + + logger.info("Deleted %d entries from url cache", len(removed_media)) + + # Now we delete old images associated with the url cache. + # These may be cached for a bit on the client (i.e., they + # may have a room open with a preview url thing open). + # So we wait a couple of days before deleting, just in case. 
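+        # ("now" comes from clock.time_msec(), so the offset below is
+        # two days expressed in milliseconds.)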
+ expire_before = now - 2 * 24 * 60 * 60 * 1000 + yield self.store.get_url_cache_media_before(expire_before) + + removed_media = [] + for media_id in media_ids: + fname = self.filepaths.url_cache_filepath(media_id) + try: + os.remove(fname) + except OSError as e: + # If the path doesn't exist, meh + if e.errno != errno.ENOENT: + logger.warn("Failed to remove media: %r: %s", media_id, e) + continue + + try: + dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id) + for dir in dirs: + os.rmdir(dir) + except: + pass + + thumbnail_dir = self.filepaths.url_cache_thumbnail_directory(media_id) + try: + shutil.rmtree(thumbnail_dir) + except OSError as e: + # If the path doesn't exist, meh + if e.errno != errno.ENOENT: + logger.warn("Failed to remove media: %r: %s", media_id, e) + continue + + removed_media.append(media_id) + + try: + dirs = self.filepaths.url_cache_thumbnail_dirs_to_delete(media_id) + for dir in dirs: + os.rmdir(dir) + except: + pass + + yield self.store.delete_url_cache_media(removed_media) + + logger.info("Deleted %d media from url cache", len(removed_media)) + def decode_and_calc_og(body, media_uri, request_encoding=None): from lxml import etree diff --git a/synapse/storage/media_repository.py b/synapse/storage/media_repository.py index 82bb61b81..5cca14ccb 100644 --- a/synapse/storage/media_repository.py +++ b/synapse/storage/media_repository.py @@ -238,3 +238,64 @@ class MediaRepositoryStore(SQLBaseStore): }, ) return self.runInteraction("delete_remote_media", delete_remote_media_txn) + + def get_expired_url_cache(self, now_ts): + sql = ( + "SELECT media_id FROM local_media_repository_url_cache" + " WHERE download_ts + expires < ?" + " ORDER BY download_ts + expires ASC" + " LIMIT 100" + ) + + def _get_expired_url_cache_txn(txn): + txn.execute(sql, (now_ts,)) + return [row[0] for row in txn] + + return self.runInteraction("get_expired_url_cache", _get_expired_url_cache_txn) + + def delete_url_cache(self, media_ids): + sql = ( + "DELETE FROM local_media_repository_url_cache" + " WHERE media_id = ?" + ) + + def _delete_url_cache_txn(txn): + txn.executemany(sql, [(media_id) for media_id in media_ids]) + + return self.runInteraction("delete_url_cache", _delete_url_cache_txn) + + def get_url_cache_media_before(self, before_ts): + sql = ( + "SELECT media_id FROM local_media_repository" + " WHERE created_ts < ?" + " ORDER BY created_ts ASC" + " LIMIT 100" + ) + + def _get_url_cache_media_before_txn(txn): + txn.execute(sql, (before_ts,)) + return [row[0] for row in txn] + + return self.runInteraction( + "get_url_cache_media_before", _get_url_cache_media_before_txn, + ) + + def delete_url_cache_media(self, media_ids): + def _delete_url_cache_media_txn(txn): + sql = ( + "DELETE FROM local_media_repository" + " WHERE media_id = ?" + ) + + txn.executemany(sql, [(media_id) for media_id in media_ids]) + + sql = ( + "DELETE FROM local_media_repository_thumbnails" + " WHERE media_id = ?" + ) + + txn.executemany(sql, [(media_id) for media_id in media_ids]) + + return self.runInteraction( + "delete_url_cache_media", _delete_url_cache_media_txn, + ) diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index 72b670b83..a0af8456f 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) # Remember to update this number every time a change is made to database # schema files, so the users will be informed on server restarts. 
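# (Patch 089 above creates synapse/storage/schema/delta/44/expire_url_cache.sql,
# hence the bump from 43 to 44 below.)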
-SCHEMA_VERSION = 43 +SCHEMA_VERSION = 44 dir_path = os.path.abspath(os.path.dirname(__file__)) diff --git a/synapse/storage/schema/delta/44/expire_url_cache.sql b/synapse/storage/schema/delta/44/expire_url_cache.sql new file mode 100644 index 000000000..96202bd2a --- /dev/null +++ b/synapse/storage/schema/delta/44/expire_url_cache.sql @@ -0,0 +1,17 @@ +/* Copyright 2017 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +CREATE INDEX local_media_repository_url_idx ON local_media_repository(created_ts) WHERE url_cache IS NOT NULL; +CREATE INDEX local_media_repository_url_cache_expires_idx ON local_media_repository_url_cache(download_ts + expires); From 77f1d24de3c696f52bc1ba6d0f61e82f03a9de7a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 12:23:15 +0100 Subject: [PATCH 090/223] More brackets --- synapse/storage/schema/delta/44/expire_url_cache.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/44/expire_url_cache.sql b/synapse/storage/schema/delta/44/expire_url_cache.sql index 96202bd2a..997e790b6 100644 --- a/synapse/storage/schema/delta/44/expire_url_cache.sql +++ b/synapse/storage/schema/delta/44/expire_url_cache.sql @@ -14,4 +14,4 @@ */ CREATE INDEX local_media_repository_url_idx ON local_media_repository(created_ts) WHERE url_cache IS NOT NULL; -CREATE INDEX local_media_repository_url_cache_expires_idx ON local_media_repository_url_cache(download_ts + expires); +CREATE INDEX local_media_repository_url_cache_expires_idx ON local_media_repository_url_cache((download_ts + expires)); From ae79764fe55ab15156b4f28658326bd2c9c0b937 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 12:37:53 +0100 Subject: [PATCH 091/223] Change expires column to expires_ts --- synapse/rest/media/v1/preview_url_resource.py | 4 ++-- synapse/storage/media_repository.py | 14 ++++++------- .../schema/delta/44/expire_url_cache.sql | 21 ++++++++++++++++++- 3 files changed, 29 insertions(+), 10 deletions(-) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index c5ba83ddf..6f896ffb5 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -137,7 +137,7 @@ class PreviewUrlResource(Resource): cache_result = yield self.store.get_url_cache(url, ts) if ( cache_result and - cache_result["download_ts"] + cache_result["expires"] > ts and + cache_result["expires_ts"] > ts and cache_result["response_code"] / 100 == 2 ): respond_with_json_bytes( @@ -246,7 +246,7 @@ class PreviewUrlResource(Resource): url, media_info["response_code"], media_info["etag"], - media_info["expires"], + media_info["expires"] + media_info["created_ts"], json.dumps(og), media_info["filesystem_id"], media_info["created_ts"], diff --git a/synapse/storage/media_repository.py b/synapse/storage/media_repository.py index 5cca14ccb..b8a0dd076 100644 --- a/synapse/storage/media_repository.py +++ b/synapse/storage/media_repository.py @@ 
-62,7 +62,7 @@ class MediaRepositoryStore(SQLBaseStore): def get_url_cache_txn(txn): # get the most recently cached result (relative to the given ts) sql = ( - "SELECT response_code, etag, expires, og, media_id, download_ts" + "SELECT response_code, etag, expires_ts, og, media_id, download_ts" " FROM local_media_repository_url_cache" " WHERE url = ? AND download_ts <= ?" " ORDER BY download_ts DESC LIMIT 1" @@ -74,7 +74,7 @@ class MediaRepositoryStore(SQLBaseStore): # ...or if we've requested a timestamp older than the oldest # copy in the cache, return the oldest copy (if any) sql = ( - "SELECT response_code, etag, expires, og, media_id, download_ts" + "SELECT response_code, etag, expires_ts, og, media_id, download_ts" " FROM local_media_repository_url_cache" " WHERE url = ? AND download_ts > ?" " ORDER BY download_ts ASC LIMIT 1" @@ -86,14 +86,14 @@ class MediaRepositoryStore(SQLBaseStore): return None return dict(zip(( - 'response_code', 'etag', 'expires', 'og', 'media_id', 'download_ts' + 'response_code', 'etag', 'expires_ts', 'og', 'media_id', 'download_ts' ), row)) return self.runInteraction( "get_url_cache", get_url_cache_txn ) - def store_url_cache(self, url, response_code, etag, expires, og, media_id, + def store_url_cache(self, url, response_code, etag, expires_ts, og, media_id, download_ts): return self._simple_insert( "local_media_repository_url_cache", @@ -101,7 +101,7 @@ class MediaRepositoryStore(SQLBaseStore): "url": url, "response_code": response_code, "etag": etag, - "expires": expires, + "expires_ts": expires_ts, "og": og, "media_id": media_id, "download_ts": download_ts, @@ -242,8 +242,8 @@ class MediaRepositoryStore(SQLBaseStore): def get_expired_url_cache(self, now_ts): sql = ( "SELECT media_id FROM local_media_repository_url_cache" - " WHERE download_ts + expires < ?" - " ORDER BY download_ts + expires ASC" + " WHERE expires_ts < ?" + " ORDER BY expires_ts ASC" " LIMIT 100" ) diff --git a/synapse/storage/schema/delta/44/expire_url_cache.sql b/synapse/storage/schema/delta/44/expire_url_cache.sql index 997e790b6..9475d53e8 100644 --- a/synapse/storage/schema/delta/44/expire_url_cache.sql +++ b/synapse/storage/schema/delta/44/expire_url_cache.sql @@ -14,4 +14,23 @@ */ CREATE INDEX local_media_repository_url_idx ON local_media_repository(created_ts) WHERE url_cache IS NOT NULL; -CREATE INDEX local_media_repository_url_cache_expires_idx ON local_media_repository_url_cache((download_ts + expires)); + +-- we need to change `expires` to `expires_ts` so that we can index on it. SQLite doesn't support +-- indices on expressions until 3.9. 
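+-- The new column stores the absolute expiry time (expires_ts =
+-- download_ts + expires), so the cleanup query can be a plain indexed
+-- comparison, e.g.:
+--     SELECT media_id FROM local_media_repository_url_cache
+--     WHERE expires_ts < ?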
+CREATE TABLE local_media_repository_url_cache_new( + url TEXT, + response_code INTEGER, + etag TEXT, + expires_ts BIGINT, + og TEXT, + media_id TEXT, + download_ts BIGINT +); + +INSERT INTO local_media_repository_url_cache_new + SELECT url, response_code, etag, expires + download_ts, og, media_id, download_ts FROM local_media_repository_url_cache; + +DROP TABLE local_media_repository_url_cache; +ALTER TABLE local_media_repository_url_cache_new RENAME TO local_media_repository_url_cache; + +CREATE INDEX local_media_repository_url_cache_expires_idx ON local_media_repository_url_cache(expires_ts); From 7a44c01d894d85a0eb829b4a82d1aeaff9a39ec9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 12:46:04 +0100 Subject: [PATCH 092/223] Fix typo --- synapse/storage/media_repository.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/storage/media_repository.py b/synapse/storage/media_repository.py index b8a0dd076..5e39daa21 100644 --- a/synapse/storage/media_repository.py +++ b/synapse/storage/media_repository.py @@ -260,7 +260,7 @@ class MediaRepositoryStore(SQLBaseStore): ) def _delete_url_cache_txn(txn): - txn.executemany(sql, [(media_id) for media_id in media_ids]) + txn.executemany(sql, [(media_id,) for media_id in media_ids]) return self.runInteraction("delete_url_cache", _delete_url_cache_txn) @@ -287,14 +287,14 @@ class MediaRepositoryStore(SQLBaseStore): " WHERE media_id = ?" ) - txn.executemany(sql, [(media_id) for media_id in media_ids]) + txn.executemany(sql, [(media_id,) for media_id in media_ids]) sql = ( "DELETE FROM local_media_repository_thumbnails" " WHERE media_id = ?" ) - txn.executemany(sql, [(media_id) for media_id in media_ids]) + txn.executemany(sql, [(media_id,) for media_id in media_ids]) return self.runInteraction( "delete_url_cache_media", _delete_url_cache_media_txn, From ace807908602cb955fc7a2cae63dc6e64bf90cc5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 12:52:51 +0100 Subject: [PATCH 093/223] Support new and old style media id formats --- synapse/rest/media/v1/filepath.py | 112 +++++++++++++++++++++--------- 1 file changed, 81 insertions(+), 31 deletions(-) diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index c5d43209f..d5cec1012 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -14,6 +14,9 @@ # limitations under the License. 
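+# URL cache media ids now come in two shapes: the legacy random string
+# ("fsdRDt24DS234dsf", sharded as fs/dR/Dt24DS234dsf) and the newer
+# date-prefixed form ("2017-09-28-fsdRDt24DS234dsf", stored under
+# 2017-09-28/fsdRDt24DS234dsf). NEW_FORMAT_ID_RE below tells them apart.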
import os +import re + +NEW_FORMAT_ID_RE = re.compile(r"^\d\d\d\d-\d\d-\d\d") class MediaFilePaths(object): @@ -73,21 +76,39 @@ class MediaFilePaths(object): ) def url_cache_filepath(self, media_id): - # Media id is of the form - # E.g.: 2017-09-28-fsdRDt24DS234dsf - return os.path.join( - self.base_path, "url_cache", - media_id[:10], media_id[11:] - ) + if NEW_FORMAT_ID_RE.match(media_id): + # Media id is of the form + # E.g.: 2017-09-28-fsdRDt24DS234dsf + return os.path.join( + self.base_path, "url_cache", + media_id[:10], media_id[11:] + ) + else: + return os.path.join( + self.base_path, "url_cache", + media_id[0:2], media_id[2:4], media_id[4:], + ) def url_cache_filepath_dirs_to_delete(self, media_id): "The dirs to try and remove if we delete the media_id file" - return [ - os.path.join( - self.base_path, "url_cache", - media_id[:10], - ), - ] + if NEW_FORMAT_ID_RE.match(media_id): + return [ + os.path.join( + self.base_path, "url_cache", + media_id[:10], + ), + ] + else: + return [ + os.path.join( + self.base_path, "url_cache", + media_id[0:2], media_id[2:4], + ), + os.path.join( + self.base_path, "url_cache", + media_id[0:2], + ), + ] def url_cache_thumbnail(self, media_id, width, height, content_type, method): @@ -99,32 +120,61 @@ class MediaFilePaths(object): width, height, top_level_type, sub_type, method ) - return os.path.join( - self.base_path, "url_cache_thumbnails", - media_id[:10], media_id[11:], - file_name - ) + if NEW_FORMAT_ID_RE.match(media_id): + return os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[:10], media_id[11:], + file_name + ) + else: + return os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[0:2], media_id[2:4], media_id[4:], + file_name + ) def url_cache_thumbnail_directory(self, media_id): # Media id is of the form # E.g.: 2017-09-28-fsdRDt24DS234dsf - return os.path.join( - self.base_path, "url_cache_thumbnails", - media_id[:10], media_id[11:], - ) + if NEW_FORMAT_ID_RE.match(media_id): + return os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[:10], media_id[11:], + ) + else: + return os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[0:2], media_id[2:4], media_id[4:], + ) def url_cache_thumbnail_dirs_to_delete(self, media_id): "The dirs to try and remove if we delete the media_id thumbnails" # Media id is of the form # E.g.: 2017-09-28-fsdRDt24DS234dsf - return [ - os.path.join( - self.base_path, "url_cache_thumbnails", - media_id[:10], media_id[11:], - ), - os.path.join( - self.base_path, "url_cache_thumbnails", - media_id[:10], - ), - ] + if NEW_FORMAT_ID_RE.match(media_id): + return [ + os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[:10], media_id[11:], + ), + os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[:10], + ), + ] + else: + return [ + os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[0:2], media_id[2:4], media_id[4:], + ), + os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[0:2], media_id[2:4], + ), + os.path.join( + self.base_path, "url_cache_thumbnails", + media_id[0:2], + ), + ] From 5f501ec7e2645abe232bd6bab407ac863e3250c2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 12:59:01 +0100 Subject: [PATCH 094/223] Fix typo in url cache expiry timer --- synapse/rest/media/v1/preview_url_resource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 6f896ffb5..1616809e8 
100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -74,7 +74,7 @@ class PreviewUrlResource(Resource): self.downloads = {} self._cleaner_loop = self.clock.looping_call( - self._expire_url_cache_data, 30 * 10000 + self._expire_url_cache_data, 30 * 1000 ) def render_GET(self, request): From 93247a424a5068b088567fa98b6990e47608b7cb Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 13:48:14 +0100 Subject: [PATCH 095/223] Only pull out local media that were for url cache --- synapse/storage/media_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/media_repository.py b/synapse/storage/media_repository.py index 5e39daa21..1f2eab98e 100644 --- a/synapse/storage/media_repository.py +++ b/synapse/storage/media_repository.py @@ -267,7 +267,7 @@ class MediaRepositoryStore(SQLBaseStore): def get_url_cache_media_before(self, before_ts): sql = ( "SELECT media_id FROM local_media_repository" - " WHERE created_ts < ?" + " WHERE created_ts < ? AND url_cache IS NOT NULL" " ORDER BY created_ts ASC" " LIMIT 100" ) From e1e7d76cf16858d998884f19b141f90a0415d297 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 13:55:29 +0100 Subject: [PATCH 096/223] Actually assign result to variable --- synapse/rest/media/v1/preview_url_resource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 1616809e8..0123369a7 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -372,7 +372,7 @@ class PreviewUrlResource(Resource): # may have a room open with a preview url thing open). # So we wait a couple of days before deleting, just in case. 
expire_before = now - 2 * 24 * 60 * 60 * 1000 - yield self.store.get_url_cache_media_before(expire_before) + media_ids = yield self.store.get_url_cache_media_before(expire_before) removed_media = [] for media_id in media_ids: From 7cc483aa0ef9e51bd3839768e44b449cf6d24136 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 13:56:53 +0100 Subject: [PATCH 097/223] Clear up expired url cache every 10s --- synapse/rest/media/v1/preview_url_resource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 0123369a7..2300c263e 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -74,7 +74,7 @@ class PreviewUrlResource(Resource): self.downloads = {} self._cleaner_loop = self.clock.looping_call( - self._expire_url_cache_data, 30 * 1000 + self._expire_url_cache_data, 10 * 1000 ) def render_GET(self, request): From 4dc07e93a85f0f6e09a6763a7833ef935be1c417 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 14:10:33 +0100 Subject: [PATCH 098/223] Add old indices --- synapse/storage/schema/delta/44/expire_url_cache.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/storage/schema/delta/44/expire_url_cache.sql b/synapse/storage/schema/delta/44/expire_url_cache.sql index 9475d53e8..e2b775f03 100644 --- a/synapse/storage/schema/delta/44/expire_url_cache.sql +++ b/synapse/storage/schema/delta/44/expire_url_cache.sql @@ -34,3 +34,5 @@ DROP TABLE local_media_repository_url_cache; ALTER TABLE local_media_repository_url_cache_new RENAME TO local_media_repository_url_cache; CREATE INDEX local_media_repository_url_cache_expires_idx ON local_media_repository_url_cache(expires_ts); +CREATE INDEX local_media_repository_url_cache_by_url_download_ts ON local_media_repository_url_cache(url, download_ts); +CREATE INDEX local_media_repository_url_cache_media_idx ON local_media_repository_url_cache(media_id); From 768f00dedbee83dd6bfb7c37bfadc511f7aeb10e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 14:27:27 +0100 Subject: [PATCH 099/223] Up the limits on number of url cache entries to delete at one time --- synapse/storage/media_repository.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/storage/media_repository.py b/synapse/storage/media_repository.py index 1f2eab98e..7110a7127 100644 --- a/synapse/storage/media_repository.py +++ b/synapse/storage/media_repository.py @@ -244,7 +244,7 @@ class MediaRepositoryStore(SQLBaseStore): "SELECT media_id FROM local_media_repository_url_cache" " WHERE expires_ts < ?" " ORDER BY expires_ts ASC" - " LIMIT 100" + " LIMIT 500" ) def _get_expired_url_cache_txn(txn): @@ -269,7 +269,7 @@ class MediaRepositoryStore(SQLBaseStore): "SELECT media_id FROM local_media_repository" " WHERE created_ts < ? AND url_cache IS NOT NULL" " ORDER BY created_ts ASC" - " LIMIT 100" + " LIMIT 500" ) def _get_url_cache_media_before_txn(txn): From 75e67b9ee4526bc8e5ffd9251ad0370604db13cb Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 28 Sep 2017 15:24:00 +0100 Subject: [PATCH 100/223] Handle SERVFAILs when doing AAAA lookups for federation (#2477) ... 
to cope with people with broken dnssec setups, mostly --- synapse/http/endpoint.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index 241b17f2c..a97532162 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -354,16 +354,28 @@ def _get_hosts_for_srv_record(dns_client, host): return res[0] - def eb(res): - res.trap(DNSNameError) - return [] + def eb(res, record_type): + if res.check(DNSNameError): + return [] + logger.warn("Error looking up %s for %s: %s", + record_type, host, res, res.value) + return res # no logcontexts here, so we can safely fire these off and gatherResults d1 = dns_client.lookupAddress(host).addCallbacks(cb, eb) d2 = dns_client.lookupIPV6Address(host).addCallbacks(cb, eb) - results = yield defer.gatherResults([d1, d2], consumeErrors=True) + results = yield defer.DeferredList( + [d1, d2], consumeErrors=True) + + # if all of the lookups failed, raise an exception rather than blowing out + # the cache with an empty result. + if results and all(s == defer.FAILURE for (s, _) in results): + defer.returnValue(results[0][1]) + + for (success, result) in results: + if success == defer.FAILURE: + continue - for result in results: for answer in result: if not answer.payload: continue From e43de3ae4b33fb2fad7a4db042f413ecd7448545 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 28 Sep 2017 13:44:47 +0100 Subject: [PATCH 101/223] Improve logging of failures in matrixfederationclient * don't log exception types twice * not all exceptions have a meaningful 'message'. Use the repr rather than attempting to build a string ourselves. --- synapse/http/matrixfederationclient.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 747a791f8..6fc3a41c2 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -204,18 +204,15 @@ class MatrixFederationHttpClient(object): raise logger.warn( - "{%s} Sending request failed to %s: %s %s: %s - %s", + "{%s} Sending request failed to %s: %s %s: %s", txn_id, destination, method, url_bytes, - type(e).__name__, _flatten_response_never_received(e), ) - log_result = "%s - %s" % ( - type(e).__name__, _flatten_response_never_received(e), - ) + log_result = _flatten_response_never_received(e) if retries_left and not timeout: if long_retries: @@ -578,12 +575,14 @@ class _JsonProducer(object): def _flatten_response_never_received(e): if hasattr(e, "reasons"): - return ", ".join( + reasons = ", ".join( _flatten_response_never_received(f.value) for f in e.reasons ) + + return "%s:[%s]" % (type(e).__name__, reasons) else: - return "%s: %s" % (type(e).__name__, e.message,) + return repr(e) def check_content_type_is_json(headers): From d5694ac5fa3266a777fa171f33bebc0d7477c12a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 28 Sep 2017 16:08:08 +0100 Subject: [PATCH 102/223] Only log if we've removed media --- synapse/rest/media/v1/preview_url_resource.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 2300c263e..895b480d5 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -365,7 +365,8 @@ class PreviewUrlResource(Resource): yield self.store.delete_url_cache(removed_media) - logger.info("Deleted 
%d entries from url cache", len(removed_media)) + if removed_media: + logger.info("Deleted %d entries from url cache", len(removed_media)) # Now we delete old images associated with the url cache. # These may be cached for a bit on the client (i.e., they @@ -412,7 +413,8 @@ class PreviewUrlResource(Resource): yield self.store.delete_url_cache_media(removed_media) - logger.info("Deleted %d media from url cache", len(removed_media)) + if removed_media: + logger.info("Deleted %d media from url cache", len(removed_media)) def decode_and_calc_og(body, media_uri, request_encoding=None): From cafb8de132999507e9b05c751fbb32d199e7de50 Mon Sep 17 00:00:00 2001 From: Jeremy Cline Date: Sat, 30 Sep 2017 11:22:37 -0400 Subject: [PATCH 103/223] Unfreeze event before serializing with ujson In newer versions of https://github.com/esnme/ultrajson, ujson does not serialize frozendicts (introduced in esnme/ultrajson@53f85b1). Although the PyPI version is still 1.35, Fedora ships with a build from commit esnme/ultrajson@2f1d487. This causes the serialization to fail if the distribution-provided package is used. This runs the event through the unfreeze utility before serializing it. Thanks to @ignatenkobrain for tracking down the root cause. fixes #2351 Signed-off-by: Jeremy Cline --- synapse/handlers/message.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index be4f123c5..fe9d8848b 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -26,6 +26,7 @@ from synapse.types import ( from synapse.util.async import run_on_reactor, ReadWriteLock, Limiter from synapse.util.logcontext import preserve_fn from synapse.util.metrics import measure_func +from synapse.util.frozenutils import unfreeze from synapse.visibility import filter_events_for_client from ._base import BaseHandler @@ -503,7 +504,7 @@ class MessageHandler(BaseHandler): # Ensure that we can round trip before trying to persist in db try: - dump = ujson.dumps(event.content) + dump = ujson.dumps(unfreeze(event.content)) ujson.loads(dump) except: logger.exception("Failed to encode content: %r", event.content) From 7fc1aad195b01c4ffe990fc705ff61d128dc0190 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 2 Oct 2017 00:53:32 +0100 Subject: [PATCH 104/223] Drop search values with nul characters https://github.com/matrix-org/synapse/issues/2187 contains a report of a port failing due to nul characters somewhere in the search table. Let's try dropping the offending rows. --- scripts/synapse_port_db | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db index bc167b59a..dc7fe940e 100755 --- a/scripts/synapse_port_db +++ b/scripts/synapse_port_db @@ -376,10 +376,13 @@ class Porter(object): " VALUES (?,?,?,?,to_tsvector('english', ?),?,?)" ) - rows_dict = [ - dict(zip(headers, row)) - for row in rows - ] + rows_dict = [] + for row in rows: + d = dict(zip(headers, row)) + if "\0" in d['value']: + logger.warn('dropping search row %s', d) + else: + rows_dict.append(d) txn.executemany(sql, [ ( From 30848c0fcd34aaf0b2db7b65c91648ae49c480a2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Oct 2017 11:09:51 +0100 Subject: [PATCH 105/223] Ignore incoming events for rooms that we have left When synapse receives an event for a room its not in over federation, it double checks with the remote server to see if it is in fact in the room. 
This is done so that if the server has forgotten about the room (usually as a result of the database being dropped) it can recover from it. However, in the presence of state resets in large rooms, this can cause a lot of work for servers that have legitimately left. As a hacky solution that supports both cases we drop incoming events for rooms that we have explicitly left. This means that we no longer support the case of servers having forgotten that they've rejoined a room, but that is sufficiently rare that we're not going to support it for now. --- synapse/handlers/federation.py | 23 +++++++++++++++++++++++ synapse/storage/roommember.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 18f87cad6..b160ff168 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -125,6 +125,29 @@ class FederationHandler(BaseHandler): self.room_queues[pdu.room_id].append((pdu, origin)) return + # If we're no longer in the room just ditch the event entirely. This + # is probably an old server that has come back and thinks we're still + # in the room. + # + # If we were never in the room then maybe our database got vaped and + # we should check if we *are* in fact in the room. If we are then we + # can magically rejoin the room. + is_in_room = yield self.auth.check_host_in_room( + pdu.room_id, + self.server_name + ) + if not is_in_room: + was_in_room = yield self.store.was_host_joined( + pdu.room_id, self.server_name, + + ) + if was_in_room: + logger.info( + "Ignoring PDU %s for room %s from %s as we've left the room!", + pdu.event_id, pdu.room_id, origin, + ) + return + state = None auth_chain = [] diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 457ca288d..cb0791e59 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -533,6 +533,38 @@ class RoomMemberStore(SQLBaseStore): defer.returnValue(True) + @cachedInlineCallbacks() + def was_host_joined(self, room_id, host): + """Check whether the server is or ever was in the room. + """ + if '%' in host or '_' in host: + raise Exception("Invalid host name") + + sql = """ + SELECT user_id FROM room_memberships + WHERE room_id = ? + AND user_id LIKE ? + AND membership = 'join' + LIMIT 1 + """ + + # We do need to be careful to ensure that host doesn't have any wild cards + # in it, but we checked above for known ones and we'll check below that + # the returned user actually has the correct domain. 
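+        # E.g. for host "example.com" the clause is "%:example.com",
+        # which matches any user id of the form "@localpart:example.com".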
+ like_clause = "%:" + host + + rows = yield self._execute("was_host_joined", None, sql, room_id, like_clause) + + if not rows: + defer.returnValue(False) + + user_id = rows[0][0] + if get_domain_from_id(user_id) != host: + # This can only happen if the host name has something funky in it + raise Exception("Invalid host name") + + defer.returnValue(True) + def get_joined_hosts(self, room_id, state_entry): state_group = state_entry.state_group if not state_group: From f2da6df568178aa542209f2b9413f4fcff5484fd Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Oct 2017 11:31:06 +0100 Subject: [PATCH 106/223] Remove spurious line feed --- synapse/handlers/federation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index b160ff168..7456b2300 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -139,7 +139,6 @@ class FederationHandler(BaseHandler): if not is_in_room: was_in_room = yield self.store.was_host_joined( pdu.room_id, self.server_name, - ) if was_in_room: logger.info( From 84716d267c6d93cfe759e8da336efb3136dc1560 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 13:53:09 +0100 Subject: [PATCH 107/223] Allow spam checker to reject invites too --- synapse/handlers/federation.py | 4 ++++ synapse/handlers/room_member.py | 20 ++++++++++++++------ 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 18f87cad6..32078fde3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -77,6 +77,7 @@ class FederationHandler(BaseHandler): self.action_generator = hs.get_action_generator() self.is_mine_id = hs.is_mine_id self.pusher_pool = hs.get_pusherpool() + self.spam_checker = hs.get_spam_checker() self.replication_layer.set_handler(self) @@ -1077,6 +1078,9 @@ class FederationHandler(BaseHandler): if self.hs.config.block_non_admin_invites: raise SynapseError(403, "This server does not accept room invites") + if not self.spam_checker.user_may_invite(requester.user): + raise SynapseError(403, "This user is not permitted to send invites to this server") + membership = event.content.get("membership") if event.type != EventTypes.Member or membership != Membership.INVITE: raise SynapseError(400, "The event was not an m.room.member invite event") diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 9a498c2d3..61b0140e6 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -48,6 +48,7 @@ class RoomMemberHandler(BaseHandler): self.member_linearizer = Linearizer(name="member") self.clock = hs.get_clock() + self.spam_checker = hs.get_spam_checker() self.distributor = hs.get_distributor() self.distributor.declare("user_joined_room") @@ -210,12 +211,19 @@ class RoomMemberHandler(BaseHandler): if is_blocked: raise SynapseError(403, "This room has been blocked on this server") - if (effective_membership_state == "invite" and - self.hs.config.block_non_admin_invites): - is_requester_admin = yield self.auth.is_server_admin( - requester.user, - ) - if not is_requester_admin: + if effective_membership_state == "invite": + block_invite = False + if self.hs.config.block_non_admin_invites: + is_requester_admin = yield self.auth.is_server_admin( + requester.user, + ) + if not is_requester_admin: + block_invite = True + + if not self.spam_checker.user_may_invite(requester.user): + block_invite = True + + if block_invite: raise 
SynapseError( 403, "Invites have been disabled on this server", ) From 2a7ed700d51f0a81f563298c78cd4566994ddbab Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 14:04:10 +0100 Subject: [PATCH 108/223] Fix param name & lint --- synapse/handlers/federation.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 32078fde3..8571350cc 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1078,8 +1078,10 @@ class FederationHandler(BaseHandler): if self.hs.config.block_non_admin_invites: raise SynapseError(403, "This server does not accept room invites") - if not self.spam_checker.user_may_invite(requester.user): - raise SynapseError(403, "This user is not permitted to send invites to this server") + if not self.spam_checker.user_may_invite(event.sender): + raise SynapseError( + 403, "This user is not permitted to send invites to this server" + ) membership = event.content.get("membership") if event.type != EventTypes.Member or membership != Membership.INVITE: From e4ab96021e84ad9cccb2c3e0dea6347cce4e6149 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Oct 2017 14:10:21 +0100 Subject: [PATCH 109/223] Update comments --- synapse/handlers/federation.py | 2 +- synapse/storage/roommember.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 7456b2300..77dd0ae1e 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -127,7 +127,7 @@ class FederationHandler(BaseHandler): # If we're no longer in the room just ditch the event entirely. This # is probably an old server that has come back and thinks we're still - # in the room. + # in the room (or we've been rejoined to the room by a state reset). # # If we were never in the room then maybe our database got vaped and # we should check if we *are* in fact in the room. If we are then we diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index cb0791e59..63f6115ba 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -536,6 +536,13 @@ class RoomMemberStore(SQLBaseStore): @cachedInlineCallbacks() def was_host_joined(self, room_id, host): """Check whether the server is or ever was in the room. 
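+        (A server that has since left the room still counts, unlike the
+        plain is_host_joined check.)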
+ + Args: + room_id (str) + host (str) + + Returns: + bool: whether the host is/was in the room or not """ if '%' in host or '_' in host: raise Exception("Invalid host name") From 11d62f43c9a28de7efd00a534cfbf05f254bfc3e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Oct 2017 14:12:28 +0100 Subject: [PATCH 110/223] Invalidate cache --- synapse/storage/events.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 7002b3752..4f0b43c36 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -784,6 +784,9 @@ class EventsStore(SQLBaseStore): self._invalidate_cache_and_stream( txn, self.is_host_joined, (room_id, host) ) + self._invalidate_cache_and_stream( + txn, self.was_host_joined, (room_id, host) + ) self._invalidate_cache_and_stream( txn, self.get_users_in_room, (room_id,) From 41fd9989a28cfd6cc0b401677be61270f3959cfa Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 14:17:44 +0100 Subject: [PATCH 111/223] Skip spam check for admin users --- synapse/handlers/room_member.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 61b0140e6..e88ba0e3a 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -213,16 +213,16 @@ class RoomMemberHandler(BaseHandler): if effective_membership_state == "invite": block_invite = False - if self.hs.config.block_non_admin_invites: - is_requester_admin = yield self.auth.is_server_admin( - requester.user, - ) - if not is_requester_admin: + is_requester_admin = yield self.auth.is_server_admin( + requester.user, + ) + if not is_requester_admin: + if ( + self.hs.config.block_non_admin_invites or + not self.spam_checker.user_may_invite(requester.user) + ): block_invite = True - if not self.spam_checker.user_may_invite(requester.user): - block_invite = True - if block_invite: raise SynapseError( 403, "Invites have been disabled on this server", From 537088e7dceff8af4b283e11e46d7df7e2f38065 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 14:28:12 +0100 Subject: [PATCH 112/223] Actually write warpper function --- synapse/events/spamcheck.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index e739f105b..605261f4b 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -45,3 +45,19 @@ class SpamChecker(object): return False return self.spam_checker.check_event_for_spam(event) + + def user_may_invite(self, userid): + """Checks if a given user may send an invite + + If this method returns false, the invite will be rejected. 
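+        (Called both when a local user sends an invite and, via the
+        federation handler, when an invite arrives from a remote server.)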
+ + Args: + userid (string): The sender's user ID + + Returns: + bool: True if the user may send an invite, otherwise False + """ + if self.spam_checker is None: + return True + + return self.spam_checker.user_may_invite(userid) From bd769a81e12ea710accdebbaa296db1c1a625f75 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 15:16:40 +0100 Subject: [PATCH 113/223] better logging --- synapse/handlers/room_member.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index e88ba0e3a..76e46d93f 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -217,10 +217,15 @@ class RoomMemberHandler(BaseHandler): requester.user, ) if not is_requester_admin: - if ( - self.hs.config.block_non_admin_invites or - not self.spam_checker.user_may_invite(requester.user) - ): + if self.hs.config.block_non_admin_invites: + logger.debug( + "Blocking invite: user is not admin and non-admin " + "invites disabled" + ) + block_invite = True + + if not self.spam_checker.user_may_invite(requester.user): + logger.debug("Blocking invite due to spam checker") block_invite = True if block_invite: From c46a0d7eb4704c6532a611040a591633dac02b1a Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 15:20:14 +0100 Subject: [PATCH 114/223] this shouldn't be debug --- synapse/handlers/room_member.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 76e46d93f..77e5b95e8 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -218,14 +218,14 @@ class RoomMemberHandler(BaseHandler): ) if not is_requester_admin: if self.hs.config.block_non_admin_invites: - logger.debug( + logger.info( "Blocking invite: user is not admin and non-admin " "invites disabled" ) block_invite = True if not self.spam_checker.user_may_invite(requester.user): - logger.debug("Blocking invite due to spam checker") + logger.info("Blocking invite due to spam checker") block_invite = True if block_invite: From c2c188b699e555376912dfea49c42b02c4168270 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 15:46:19 +0100 Subject: [PATCH 115/223] Federation was passing strings anyway so pass string everywhere --- synapse/handlers/room_member.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 77e5b95e8..a33a8ad42 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -224,7 +224,7 @@ class RoomMemberHandler(BaseHandler): ) block_invite = True - if not self.spam_checker.user_may_invite(requester.user): + if not self.spam_checker.user_may_invite(requester.user.to_string()): logger.info("Blocking invite due to spam checker") block_invite = True From 1e375468de914fdefc7c0b4b65217c4ec95784a4 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 17:13:14 +0100 Subject: [PATCH 116/223] pass room id too --- synapse/events/spamcheck.py | 4 ++-- synapse/handlers/federation.py | 2 +- synapse/handlers/room_member.py | 4 +++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index 605261f4b..fe2d22a6f 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -46,7 +46,7 @@ class SpamChecker(object): return self.spam_checker.check_event_for_spam(event) - def user_may_invite(self, userid): 
+ def user_may_invite(self, userid, roomid): """Checks if a given user may send an invite If this method returns false, the invite will be rejected. @@ -60,4 +60,4 @@ class SpamChecker(object): if self.spam_checker is None: return True - return self.spam_checker.user_may_invite(userid) + return self.spam_checker.user_may_invite(userid, roomid) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 8571350cc..737fe518e 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1078,7 +1078,7 @@ class FederationHandler(BaseHandler): if self.hs.config.block_non_admin_invites: raise SynapseError(403, "This server does not accept room invites") - if not self.spam_checker.user_may_invite(event.sender): + if not self.spam_checker.user_may_invite(event.sender, event.room_id): raise SynapseError( 403, "This user is not permitted to send invites to this server" ) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index a33a8ad42..37985fa1f 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -224,7 +224,9 @@ class RoomMemberHandler(BaseHandler): ) block_invite = True - if not self.spam_checker.user_may_invite(requester.user.to_string()): + if not self.spam_checker.user_may_invite( + requester.user.to_string(), room_id, + ): logger.info("Blocking invite due to spam checker") block_invite = True From 1e2ac543516baaec06d4e0ebdd2dbbe003e1f73b Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Oct 2017 17:41:38 +0100 Subject: [PATCH 117/223] s/roomid/room_id/ --- synapse/events/spamcheck.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index fe2d22a6f..8b01c091e 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -46,7 +46,7 @@ class SpamChecker(object): return self.spam_checker.check_event_for_spam(event) - def user_may_invite(self, userid, roomid): + def user_may_invite(self, userid, room_id): """Checks if a given user may send an invite If this method returns false, the invite will be rejected. @@ -60,4 +60,4 @@ class SpamChecker(object): if self.spam_checker is None: return True - return self.spam_checker.user_may_invite(userid, roomid) + return self.spam_checker.user_may_invite(userid, room_id) From 197c14dbcfa9bc5bb281833a91ee035cb154216d Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 4 Oct 2017 10:47:54 +0100 Subject: [PATCH 118/223] Add room creation checks to spam checker Lets the spam checker deny attempts to create rooms and add aliases to them. --- synapse/events/spamcheck.py | 32 ++++++++++++++++++++++++++++++++ synapse/handlers/directory.py | 7 +++++++ synapse/handlers/room.py | 8 ++++++++ 3 files changed, 47 insertions(+) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index 8b01c091e..7cb3468df 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -61,3 +61,35 @@ class SpamChecker(object): return True return self.spam_checker.user_may_invite(userid, room_id) + + def user_may_create_room(self, userid): + """Checks if a given user may create a room + + If this method returns false, the creation request will be rejected. 
+ + Args: + userid (string): The sender's user ID + + Returns: + bool: True if the user may create a room, otherwise False + """ + if self.spam_checker is None: + return True + + return self.spam_checker.user_may_create_room(userid) + + def user_may_create_room_alias(self, userid, room_alias): + """Checks if a given user may create a room alias + + If this method returns false, the association request will be rejected. + + Args: + userid (string): The sender's user ID + + Returns: + bool: True if the user may create a room alias, otherwise False + """ + if self.spam_checker is None: + return True + + return self.spam_checker.user_may_create_room_alias(userid, room_alias) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 943554ce9..ed18bb20b 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -40,6 +40,8 @@ class DirectoryHandler(BaseHandler): "directory", self.on_directory_query ) + self.spam_checker = hs.get_spam_checker() + @defer.inlineCallbacks def _create_association(self, room_alias, room_id, servers=None, creator=None): # general association creation for both human users and app services @@ -73,6 +75,11 @@ class DirectoryHandler(BaseHandler): # association creation for human users # TODO(erikj): Do user auth. + if not self.spam_checker.user_may_create_room_alias(user_id, room_alias): + raise SynapseError( + 403, "This user is not permitted to create this alias", + ) + can_create = yield self.can_modify_alias( room_alias, user_id=user_id diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 5698d2808..f909ea04f 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -60,6 +60,11 @@ class RoomCreationHandler(BaseHandler): }, } + def __init__(self, hs): + super(RoomCreationHandler, self).__init__(hs) + + self.spam_checker = hs.get_spam_checker() + @defer.inlineCallbacks def create_room(self, requester, config, ratelimit=True): """ Creates a new room. 
@@ -75,6 +80,9 @@ class RoomCreationHandler(BaseHandler): """ user_id = requester.user.to_string() + if not self.spam_checker.user_may_create_room(user_id): + raise SynapseError(403, "You are not permitted to create rooms") + if ratelimit: yield self.ratelimit(requester) From 78d4ced82941ba249b7b16ea72684ade69c6a0d2 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 4 Oct 2017 12:44:27 +0100 Subject: [PATCH 119/223] un-double indent --- synapse/handlers/room.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index f909ea04f..535ba9517 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -81,7 +81,7 @@ class RoomCreationHandler(BaseHandler): user_id = requester.user.to_string() if not self.spam_checker.user_may_create_room(user_id): - raise SynapseError(403, "You are not permitted to create rooms") + raise SynapseError(403, "You are not permitted to create rooms") if ratelimit: yield self.ratelimit(requester) From d8ce68b09b0966330b4da720eeb41719c7c61be6 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 4 Oct 2017 14:29:33 +0100 Subject: [PATCH 120/223] spam check room publishing --- synapse/events/spamcheck.py | 18 ++++++++++++++++ synapse/handlers/directory.py | 8 ++++++++ 2 files changed, 26 insertions(+) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index 7cb3468df..595b1760f 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -85,6 +85,7 @@ class SpamChecker(object): Args: userid (string): The sender's user ID + room_alias (string): The alias to be created Returns: bool: True if the user may create a room alias, otherwise False @@ -93,3 +94,20 @@ class SpamChecker(object): return True return self.spam_checker.user_may_create_room_alias(userid, room_alias) + + def user_may_publish_room(self, userid, room_id): + """Checks if a given user may publish a room to the directory + + If this method returns false, the publish request will be rejected. + + Args: + userid (string): The sender's user ID + room_id (string): The ID of the room that would be published + + Returns: + bool: True if the user may publish the room, otherwise False + """ + if self.spam_checker is None: + return True + + return self.spam_checker.user_may_publish_room(userid, room_id) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index ed18bb20b..a0464ae5c 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -334,6 +334,14 @@ class DirectoryHandler(BaseHandler): room_id (str) visibility (str): "public" or "private" """ + if not self.spam_checker.user_may_publish_room( + requester.user.to_string(), room_id + ): + raise AuthError( + 403, + "This user is not permitted to publish rooms to the room list" + ) + if requester.is_guest: raise AuthError(403, "Guests cannot edit the published room list") From 6748f0a57962fb9657cab60083d94b4c97a0526c Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 11:33:30 +0100 Subject: [PATCH 121/223] Fix notif kws that start/end with non-word chars Only prepend / append word boundary characters if the search expression starts or ends with a word character, otherwise they don't work because there's no word boundary between whitespace and a non-word char.
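To see why plain \b anchors fail here: a keyword such as "@room" can never match r"\b@room\b", because there is no word boundary between whitespace and "@" (both are non-word characters). A minimal standalone sketch of the approach in the diff below, using only the standard re module (add_word_boundaries is a hypothetical stand-in for the helper the patch adds):

import re

# With unconditional \b anchors, a pattern that starts with a non-word
# character never matches: there is no \w/\W transition before the '@'.
assert re.search(r"\b@room\b", "ping @room now") is None

def add_word_boundaries(pattern):
    # Only add \b where the pattern actually starts/ends with a word
    # character, mirroring the fix in the patch.
    if re.search(r"^\w", pattern):
        pattern = r"\b" + pattern
    if re.search(r"\w$", pattern):
        pattern = pattern + r"\b"
    return pattern

assert re.search(add_word_boundaries(re.escape("@room")), "ping @room now")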
--- synapse/push/push_rule_evaluator.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 172c27c13..5a34d60ab 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -26,6 +26,8 @@ logger = logging.getLogger(__name__) GLOB_REGEX = re.compile(r'\\\[(\\\!|)(.*)\\\]') IS_GLOB = re.compile(r'[\?\*\[\]]') INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") +STARTS_WITH_WORD_CHAR_REGEX = re.compile(r"^\w") +ENDS_WITH_WORD_CHAR_REGEX = re.compile(r"\w$") def _room_member_count(ev, condition, room_member_count): @@ -183,7 +185,7 @@ def _glob_to_re(glob, word_boundary): r, ) if word_boundary: - r = r"\b%s\b" % (r,) + r = _re_word_boundary(r) return re.compile(r, flags=re.IGNORECASE) else: @@ -192,13 +194,30 @@ def _glob_to_re(glob, word_boundary): return re.compile(r, flags=re.IGNORECASE) elif word_boundary: r = re.escape(glob) - r = r"\b%s\b" % (r,) + r = _re_word_boundary(r) return re.compile(r, flags=re.IGNORECASE) else: r = "^" + re.escape(glob) + "$" return re.compile(r, flags=re.IGNORECASE) +def _re_word_boundary(r): + """ + Adds word boundary characters to the start and end of an + expression to require that the match occur as a whole word, + but do so respecting the fact that strings starting or ending + with non-word characters will change word boundaries. + """ + # Matching a regex string against a regex, since by definition + # \b is the boundary between a \w and a \W, so match \w at the + # start or end of the expression (although this will miss, eg. + # "[dl]og") + if STARTS_WITH_WORD_CHAR_REGEX.search(r): + r = r"\b%s" % (r,) + if ENDS_WITH_WORD_CHAR_REGEX.search(r): + r = r"%s\b" % (r,) + return r + def _flatten_dict(d, prefix=[], result=None): if result is None: From cbe3c3fdd49b87a452a9a9a229abfdf8dbe45922 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 11:43:10 +0100 Subject: [PATCH 122/223] pep8 --- synapse/push/push_rule_evaluator.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 5a34d60ab..b78f2d90d 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -201,6 +201,7 @@ def _glob_to_re(glob, word_boundary): r = "^" + re.escape(glob) + "$" return re.compile(r, flags=re.IGNORECASE) + def _re_word_boundary(r): """ Adds word boundary characters to the start and end of an From eaaa837e002fa068f23b04b140d538e91ccc2eab Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 5 Oct 2017 11:43:22 +0100 Subject: [PATCH 123/223] Don't corrupt cache --- synapse/groups/groups_server.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 10bf61d17..991cc12cc 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -109,6 +109,7 @@ class GroupsServerHandler(object): room_id, len(joined_users), with_alias=False, allow_private=True, ) + entry = dict(entry) # so we don't change what's cached entry.pop("room_id", None) room_entry["profile"] = entry From 0c8da8b519fbd8bca984117e354fe57c3a76e154 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 11:57:43 +0100 Subject: [PATCH 124/223] Use better method for word boundary searching From https://github.com/matrix-org/matrix-js-sdk/commit/ebc95667b8a5777d13e5d3c679972bedae022fd5 --- synapse/push/push_rule_evaluator.py | 14 +++----------- 1 file
changed, 3 insertions(+), 11 deletions(-) diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index b78f2d90d..65f9a63fd 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -26,8 +26,6 @@ logger = logging.getLogger(__name__) GLOB_REGEX = re.compile(r'\\\[(\\\!|)(.*)\\\]') IS_GLOB = re.compile(r'[\?\*\[\]]') INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") -STARTS_WITH_WORD_CHAR_REGEX = re.compile(r"^\w") -ENDS_WITH_WORD_CHAR_REGEX = re.compile(r"\w$") def _room_member_count(ev, condition, room_member_count): @@ -209,15 +207,9 @@ def _re_word_boundary(r): but do so respecting the fact that strings starting or ending with non-word characters will change word boundaries. """ - # Matching a regex string against a regex, since by definition - # \b is the boundary between a \w and a \W, so match \w at the - # start or end of the expression (although this will miss, eg. - # "[dl]og") - if STARTS_WITH_WORD_CHAR_REGEX.search(r): - r = r"\b%s" % (r,) - if ENDS_WITH_WORD_CHAR_REGEX.search(r): - r = r"%s\b" % (r,) - return r + # we can't use \b as it chokes on unicode. however \W seems to be okay + # as shorthand for [^0-9A-Za-z_]. + return r"(^|\W)%s(\W|$)" % (r,) def _flatten_dict(d, prefix=[], result=None): From fa969cfdde72a2d136eba08eb99e00d47ddb5cdf Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 12:39:18 +0100 Subject: [PATCH 125/223] Support for channel notifications Add condition type to check the sender's power level and add a base rule using it for @channel notifications. --- synapse/push/baserules.py | 23 ++++++++++++++++++++++ synapse/push/bulk_push_rule_evaluator.py | 19 +++++++++++++++++- synapse/push/push_rule_evaluator.py | 25 +++++++++++++++++------- 3 files changed, 59 insertions(+), 8 deletions(-) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 85effdfa4..354b1f449 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -1,4 +1,5 @@ # Copyright 2015, 2016 OpenMarket Ltd +# Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
@@ -238,6 +239,28 @@ BASE_APPEND_OVERRIDE_RULES = [ } ] }, + { + 'rule_id': 'global/underride/.m.rule.channelnotif', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'content.body', + 'pattern': '*@channel*', + '_id': '_channelnotif_content', + }, + { + 'kind': 'sender_power_level', + 'is': '>=50', + '_id': '_channelnotif_pl', + }, + ], + 'actions': [ + 'notify', { + 'set_tweak': 'highlight', + 'value': True, + } + ] + } ] diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index b0d64aa6c..6459eec22 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -19,6 +19,7 @@ from twisted.internet import defer from .push_rule_evaluator import PushRuleEvaluatorForEvent +from synapse.event_auth import get_user_power_level from synapse.api.constants import EventTypes, Membership from synapse.metrics import get_metrics_for from synapse.util.caches import metrics as cache_metrics @@ -59,6 +60,7 @@ class BulkPushRuleEvaluator(object): def __init__(self, hs): self.hs = hs self.store = hs.get_datastore() + self.auth = hs.get_auth() self.room_push_rule_cache_metrics = cache_metrics.register_cache( "cache", @@ -108,6 +110,17 @@ class BulkPushRuleEvaluator(object): self.room_push_rule_cache_metrics, ) + @defer.inlineCallbacks + def _get_sender_power_level(self, event, context): + auth_events_ids = yield self.auth.compute_auth_events( + event, context.prev_state_ids, for_verification=False, + ) + auth_events = yield self.store.get_events(auth_events_ids) + auth_events = { + (e.type, e.state_key): e for e in auth_events.values() + } + defer.returnValue(get_user_power_level(event.sender, auth_events)) + @defer.inlineCallbacks def action_for_event_by_user(self, event, context): """Given an event and context, evaluate the push rules and return @@ -123,7 +136,11 @@ class BulkPushRuleEvaluator(object): event, context ) - evaluator = PushRuleEvaluatorForEvent(event, len(room_members)) + sender_power_level = yield self._get_sender_power_level(event, context) + + evaluator = PushRuleEvaluatorForEvent( + event, len(room_members), sender_power_level + ) condition_cache = {} diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 65f9a63fd..9cf3f9c63 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd +# Copyright 2015 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -29,6 +30,12 @@ INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") def _room_member_count(ev, condition, room_member_count): + return _test_ineq_condition(condition, room_member_count) + +def _sender_power_level(ev, condition, power_level): + return _test_ineq_condition(condition, power_level) + +def _test_ineq_condition(condition, number): if 'is' not in condition: return False m = INEQUALITY_EXPR.match(condition['is']) @@ -41,19 +48,18 @@ def _room_member_count(ev, condition, room_member_count): rhs = int(rhs) if ineq == '' or ineq == '==': - return room_member_count == rhs + return number == rhs elif ineq == '<': - return room_member_count < rhs + return number < rhs elif ineq == '>': - return room_member_count > rhs + return number > rhs elif ineq == '>=': - return room_member_count >= rhs + return number >= rhs elif ineq == '<=': - return room_member_count <= rhs + return number <= rhs else: return False - def tweaks_for_actions(actions): tweaks = {} for a in actions: @@ -65,9 +71,10 @@ def tweaks_for_actions(actions): class PushRuleEvaluatorForEvent(object): - def __init__(self, event, room_member_count): + def __init__(self, event, room_member_count, sender_power_level): self._event = event self._room_member_count = room_member_count + self._sender_power_level = sender_power_level # Maps strings of e.g. 'content.body' -> event["content"]["body"] self._value_cache = _flatten_dict(event) @@ -81,6 +88,10 @@ class PushRuleEvaluatorForEvent(object): return _room_member_count( self._event, condition, self._room_member_count ) + elif condition['kind'] == 'sender_power_level': + return _sender_power_level( + self._event, condition, self._sender_power_level + ) else: return True From b9b9714fd5c3fa4c209d3ca1f6ddde365373ec98 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 13:02:19 +0100 Subject: [PATCH 126/223] Get rule type right --- synapse/push/baserules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 354b1f449..3b290a22a 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -240,7 +240,7 @@ BASE_APPEND_OVERRIDE_RULES = [ ] }, { - 'rule_id': 'global/underride/.m.rule.channelnotif', + 'rule_id': 'global/override/.m.rule.channelnotif', 'conditions': [ { 'kind': 'event_match', From 985ce80375b601d500ae58faa032874a794e942a Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 13:03:44 +0100 Subject: [PATCH 127/223] They're called rooms --- synapse/push/baserules.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 3b290a22a..71f9ab6b2 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -240,18 +240,18 @@ BASE_APPEND_OVERRIDE_RULES = [ ] }, { - 'rule_id': 'global/override/.m.rule.channelnotif', + 'rule_id': 'global/override/.m.rule.roomnotif', 'conditions': [ { 'kind': 'event_match', 'key': 'content.body', - 'pattern': '*@channel*', - '_id': '_channelnotif_content', + 'pattern': '*@room*', + '_id': '_roomnotif_content', }, { 'kind': 'sender_power_level', 'is': '>=50', - '_id': '_channelnotif_pl', + '_id': '_roomnotif_pl', }, ], 'actions': [ From e433393c4fded875cc38b09b02b453b7a014a9af Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 13:08:02 +0100 Subject: [PATCH 128/223] pep8 --- synapse/push/push_rule_evaluator.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py 
index 9cf3f9c63..a91dc0ee0 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -32,9 +32,11 @@ INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") def _room_member_count(ev, condition, room_member_count): return _test_ineq_condition(condition, room_member_count) + def _sender_power_level(ev, condition, power_level): return _test_ineq_condition(condition, power_level) + def _test_ineq_condition(condition, number): if 'is' not in condition: return False @@ -60,6 +62,7 @@ def _test_ineq_condition(condition, number): else: return False + def tweaks_for_actions(actions): tweaks = {} for a in actions: From ed80c6b6cc6da27849038a1b83bec7fa1ac54b3e Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 13:20:22 +0100 Subject: [PATCH 129/223] Add fastpath optimisation --- synapse/push/bulk_push_rule_evaluator.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 6459eec22..ca3b5af80 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -112,9 +112,15 @@ class BulkPushRuleEvaluator(object): @defer.inlineCallbacks def _get_sender_power_level(self, event, context): - auth_events_ids = yield self.auth.compute_auth_events( - event, context.prev_state_ids, for_verification=False, - ) + pl_event_key = (EventTypes.PowerLevels, "", ) + if pl_event_key in context.prev_state_ids: + # fastpath: if there's a power level event, that's all we need, and + # not having a power level event is an extreme edge case + auth_events_ids = [context.prev_state_ids[pl_event_key]] + else: + auth_events_ids = yield self.auth.compute_auth_events( + event, context.prev_state_ids, for_verification=False, + ) auth_events = yield self.store.get_events(auth_events_ids) auth_events = { (e.type, e.state_key): e for e in auth_events.values() } From 269af961e9910e254f5abc979f0bd293687414f3 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 13:27:12 +0100 Subject: [PATCH 130/223] Make it faster --- synapse/push/bulk_push_rule_evaluator.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index ca3b5af80..df16d5ce9 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -112,11 +112,11 @@ class BulkPushRuleEvaluator(object): @defer.inlineCallbacks def _get_sender_power_level(self, event, context): - pl_event_key = (EventTypes.PowerLevels, "", ) - if pl_event_key in context.prev_state_ids: + pl_event_id = context.prev_state_ids.get((EventTypes.PowerLevels, "",)) + if pl_event_id: # fastpath: if there's a power level event, that's all we need, and # not having a power level event is an extreme edge case - auth_events_ids = [context.prev_state_ids[pl_event_key]] + auth_events_ids = [pl_event_id] else: auth_events_ids = yield self.auth.compute_auth_events( event, context.prev_state_ids, for_verification=False, ) From f878e6f8af9e80cfa4be717c03cc4f9853a93794 Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 5 Oct 2017 14:02:28 +0100 Subject: [PATCH 131/223] Spam checking: add the invitee to user_may_invite --- synapse/events/spamcheck.py | 4 ++-- synapse/handlers/federation.py | 12 +++++++----- synapse/handlers/room_member.py | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py index
595b1760f..dccc579ea 100644 --- a/synapse/events/spamcheck.py +++ b/synapse/events/spamcheck.py @@ -46,7 +46,7 @@ class SpamChecker(object): return self.spam_checker.check_event_for_spam(event) - def user_may_invite(self, userid, room_id): + def user_may_invite(self, inviter_userid, invitee_userid, room_id): """Checks if a given user may send an invite If this method returns false, the invite will be rejected. @@ -60,7 +60,7 @@ class SpamChecker(object): if self.spam_checker is None: return True - return self.spam_checker.user_may_invite(userid, room_id) + return self.spam_checker.user_may_invite(inviter_userid, invitee_userid, room_id) def user_may_create_room(self, userid): """Checks if a given user may create a room diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 737fe518e..8fccf8bab 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1071,6 +1071,9 @@ class FederationHandler(BaseHandler): """ event = pdu + if event.state_key is None: + raise SynapseError(400, "The invite event did not have a state key") + is_blocked = yield self.store.is_room_blocked(event.room_id) if is_blocked: raise SynapseError(403, "This room has been blocked on this server") @@ -1078,9 +1081,11 @@ class FederationHandler(BaseHandler): if self.hs.config.block_non_admin_invites: raise SynapseError(403, "This server does not accept room invites") - if not self.spam_checker.user_may_invite(event.sender, event.room_id): + if not self.spam_checker.user_may_invite( + event.sender, event.state_key, event.room_id, + ): raise SynapseError( - 403, "This user is not permitted to send invites to this server" + 403, "This user is not permitted to send invites to this server/user" ) membership = event.content.get("membership") @@ -1091,9 +1096,6 @@ class FederationHandler(BaseHandler): if sender_domain != origin: raise SynapseError(400, "The invite event was not from the server sending it") - if event.state_key is None: - raise SynapseError(400, "The invite event did not have a state key") - if not self.is_mine_id(event.state_key): raise SynapseError(400, "The invite event must be for this server") diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 37985fa1f..36a8ef8ce 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -225,7 +225,7 @@ class RoomMemberHandler(BaseHandler): block_invite = True if not self.spam_checker.user_may_invite( - requester.user.to_string(), room_id, + requester.user.to_string(), target.to_string(), room_id, ): logger.info("Blocking invite due to spam checker") block_invite = True From 3ddda939d35951896faa48631a3fe023e89e13e1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 5 Oct 2017 14:58:17 +0100 Subject: [PATCH 132/223] some comments in the state res code --- synapse/state.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/synapse/state.py b/synapse/state.py index 390799fbd..dcdcdef65 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -288,6 +288,9 @@ class StateHandler(object): """ logger.debug("resolve_state_groups event_ids %s", event_ids) + # map from state group id to the state in that state group (where + # 'state' is a map from state key to event id) + # dict[int, dict[(str, str), str]] state_groups_ids = yield self.store.get_state_groups_ids( room_id, event_ids ) @@ -320,11 +323,15 @@ class StateHandler(object): "Resolving state for %s with %d groups", room_id, len(state_groups_ids) ) + # build a map from state key to the 
event_ids which set that state. + # dict[(str, str), set[str]] state = {} for st in state_groups_ids.values(): for key, e_id in st.items(): state.setdefault(key, set()).add(e_id) + # build a map from state key to the event_ids which set that state, + # including only those where there are state keys in conflict. conflicted_state = { k: list(v) for k, v in state.items() @@ -494,8 +501,14 @@ def _resolve_with_state_fac(unconflicted_state, conflicted_state, logger.info("Asking for %d conflicted events", len(needed_events)) + # dict[str, FrozenEvent]: a map from state event id to event. Only includes + # the state events which are in conflict. state_map = yield state_map_factory(needed_events) + # get the ids of the auth events which allow us to authenticate the + # conflicted state, picking only from the unconflicting state. + # + # dict[(str, str), str]: a map from state key to event id auth_events = _create_auth_events_from_maps( unconflicted_state, conflicted_state, state_map ) From c8f568ddf9e8827d8971e14137663fa8df5b57d2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Oct 2017 22:14:24 +0100 Subject: [PATCH 133/223] Fix up deferred handling in federation.py * Avoid preserve_context_over_deferred, which is broken * set consumeErrors=True on defer.gatherResults, to avoid spurious "unhandled failure" errors --- synapse/handlers/federation.py | 45 ++++++++++++++++------------------ 1 file changed, 21 insertions(+), 24 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 8fccf8bab..63c56a4a3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -14,7 +14,6 @@ # limitations under the License. """Contains handlers for federation events.""" -import synapse.util.logcontext from signedjson.key import decode_verify_key_bytes from signedjson.sign import verify_signed_json from unpaddedbase64 import decode_base64 @@ -26,10 +25,7 @@ from synapse.api.errors import ( ) from synapse.api.constants import EventTypes, Membership, RejectedReason from synapse.events.validator import EventValidator -from synapse.util import unwrapFirstError -from synapse.util.logcontext import ( - preserve_fn, preserve_context_over_deferred -) +from synapse.util import unwrapFirstError, logcontext from synapse.util.metrics import measure_func from synapse.util.logutils import log_function from synapse.util.async import run_on_reactor, Linearizer @@ -592,9 +588,9 @@ class FederationHandler(BaseHandler): missing_auth - failed_to_fetch ) - results = yield preserve_context_over_deferred(defer.gatherResults( + results = yield logcontext.make_deferred_yieldable(defer.gatherResults( [ - preserve_fn(self.replication_layer.get_pdu)( + logcontext.preserve_fn(self.replication_layer.get_pdu)( [dest], event_id, outlier=True, @@ -786,10 +782,14 @@ class FederationHandler(BaseHandler): event_ids = list(extremities.keys()) logger.debug("calling resolve_state_groups in _maybe_backfill") - states = yield preserve_context_over_deferred(defer.gatherResults([ - preserve_fn(self.state_handler.resolve_state_groups)(room_id, [e]) - for e in event_ids - ])) + states = yield logcontext.make_deferred_yieldable(defer.gatherResults( + [ + logcontext.preserve_fn(self.state_handler.resolve_state_groups)( + room_id, [e] + ) + for e in event_ids + ], consumeErrors=True, + )) states = dict(zip(event_ids, [s.state for s in states])) state_map = yield self.store.get_events( @@ -942,9 +942,7 @@ class FederationHandler(BaseHandler): # lots of requests for missing prev_events
which we do actually # have. Hence we fire off the deferred, but don't wait for it. - synapse.util.logcontext.preserve_fn(self._handle_queued_pdus)( - room_queue - ) + logcontext.preserve_fn(self._handle_queued_pdus)(room_queue) defer.returnValue(True) @@ -1438,7 +1436,7 @@ class FederationHandler(BaseHandler): if not backfilled: # this intentionally does not yield: we don't care about the result # and don't need to wait for it. - preserve_fn(self.pusher_pool.on_new_notifications)( + logcontext.preserve_fn(self.pusher_pool.on_new_notifications)( event_stream_id, max_stream_id ) @@ -1451,16 +1449,16 @@ class FederationHandler(BaseHandler): a bunch of outliers, but not a chunk of individual events that depend on each other for state calculations. """ - contexts = yield preserve_context_over_deferred(defer.gatherResults( + contexts = yield logcontext.make_deferred_yieldable(defer.gatherResults( [ - preserve_fn(self._prep_event)( + logcontext.preserve_fn(self._prep_event)( origin, ev_info["event"], state=ev_info.get("state"), auth_events=ev_info.get("auth_events"), ) for ev_info in event_infos - ] + ], consumeErrors=True, )) yield self.store.persist_events( @@ -1768,18 +1766,17 @@ class FederationHandler(BaseHandler): # Do auth conflict res. logger.info("Different auth: %s", different_auth) - different_events = yield preserve_context_over_deferred(defer.gatherResults( - [ - preserve_fn(self.store.get_event)( + different_events = yield logcontext.make_deferred_yieldable( + defer.gatherResults([ + logcontext.preserve_fn(self.store.get_event)( d, allow_none=True, allow_rejected=False, ) for d in different_auth if d in have_events and not have_events[d] - ], - consumeErrors=True - )).addErrback(unwrapFirstError) + ], consumeErrors=True) + ).addErrback(unwrapFirstError) if different_events: local_view = dict(auth_events) From 148428ce763978583da2b1d3c435ec321df45855 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Oct 2017 22:24:28 +0100 Subject: [PATCH 134/223] Fix logcontext handling for concurrently_execute Avoid preserve_context_over_deferred, which is broken. --- synapse/util/async.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/util/async.py b/synapse/util/async.py index 1453faf0e..bb252f75d 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -19,7 +19,7 @@ from twisted.internet import defer, reactor from .logcontext import ( PreserveLoggingContext, preserve_fn, preserve_context_over_deferred, ) -from synapse.util import unwrapFirstError +from synapse.util import logcontext, unwrapFirstError from contextlib import contextmanager @@ -155,7 +155,7 @@ def concurrently_execute(func, args, limit): except StopIteration: pass - return preserve_context_over_deferred(defer.gatherResults([ + return logcontext.make_deferred_yieldable(defer.gatherResults([ preserve_fn(_concurrently_execute_inner)() for _ in xrange(limit) ], consumeErrors=True)).addErrback(unwrapFirstError) From 01bbacf3c49f4311fddb61ef1ff98ee4f55fc44b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Oct 2017 15:12:43 +0100 Subject: [PATCH 135/223] Fix up logcontext handling in (federation) TransactionQueue Avoid using preserve_context_over_function, which has problems with respect to logcontexts. 
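The fire-and-forget idiom this patch moves to can be sketched in isolation as follows (a minimal sketch, assuming the background function does its own error handling; kick_off is a hypothetical name, while PreserveLoggingContext is the real helper used in the diff):

from synapse.util import logcontext

def kick_off(background_fn, *args):
    # Drop back to the sentinel logcontext before starting a long-lived
    # background operation, so that its work is not logged against (or
    # charged to) whichever request happened to trigger it.
    with logcontext.PreserveLoggingContext():
        background_fn(*args)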
--- synapse/federation/transaction_queue.py | 48 ++++++++++++++++--------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 003eaba89..7a3c9cbb7 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -20,8 +20,8 @@ from .persistence import TransactionActions from .units import Transaction, Edu from synapse.api.errors import HttpResponseException +from synapse.util import logcontext from synapse.util.async import run_on_reactor -from synapse.util.logcontext import preserve_context_over_fn, preserve_fn from synapse.util.retryutils import NotRetryingDestination, get_retry_limiter from synapse.util.metrics import measure_func from synapse.handlers.presence import format_user_presence_state, get_interested_remotes @@ -231,11 +231,9 @@ class TransactionQueue(object): (pdu, order) ) - preserve_context_over_fn( - self._attempt_new_transaction, destination - ) + self._attempt_new_transaction(destination) - @preserve_fn # the caller should not yield on this + @logcontext.preserve_fn # the caller should not yield on this @defer.inlineCallbacks def send_presence(self, states): """Send the new presence states to the appropriate destinations. @@ -299,7 +297,7 @@ class TransactionQueue(object): state.user_id: state for state in states }) - preserve_fn(self._attempt_new_transaction)(destination) + self._attempt_new_transaction(destination) def send_edu(self, destination, edu_type, content, key=None): edu = Edu( @@ -321,9 +319,7 @@ class TransactionQueue(object): else: self.pending_edus_by_dest.setdefault(destination, []).append(edu) - preserve_context_over_fn( - self._attempt_new_transaction, destination - ) + self._attempt_new_transaction(destination) def send_failure(self, failure, destination): if destination == self.server_name or destination == "localhost": @@ -336,9 +332,7 @@ class TransactionQueue(object): destination, [] ).append(failure) - preserve_context_over_fn( - self._attempt_new_transaction, destination - ) + self._attempt_new_transaction(destination) def send_device_messages(self, destination): if destination == self.server_name or destination == "localhost": @@ -347,15 +341,24 @@ class TransactionQueue(object): if not self.can_send_to(destination): return - preserve_context_over_fn( - self._attempt_new_transaction, destination - ) + self._attempt_new_transaction(destination) def get_current_token(self): return 0 - @defer.inlineCallbacks def _attempt_new_transaction(self, destination): + """Try to start a new transaction to this destination + + If there is already a transaction in progress to this destination, + returns immediately. Otherwise kicks off the process of sending a + transaction in the background. + + Args: + destination (str): + + Returns: + None + """ # list of (pending_pdu, deferred, order) if destination in self.pending_transactions: # XXX: pending_transactions can get stuck on by a never-ending @@ -368,6 +371,19 @@ class TransactionQueue(object): ) return + logger.debug("TX [%s] Starting transaction loop", destination) + + # Drop the logcontext before starting the transaction. It doesn't + # really make sense to log all the outbound transactions against + # whatever path led us to this point: that's pretty arbitrary really. 
+ # + # (this also means we can fire off _transaction_transmission_loop without + # yielding) + with logcontext.PreserveLoggingContext(): + self._transaction_transmission_loop(destination) + + @defer.inlineCallbacks + def _transaction_transmission_loop(self, destination): pending_pdus = [] try: self.pending_transactions[destination] = 1 From e8496efe8467568f488c6f53056be4bf69fd56e1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 9 Oct 2017 15:17:34 +0100 Subject: [PATCH 136/223] Fix up comment --- synapse/storage/roommember.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 63f6115ba..a0fc9a686 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -542,7 +542,8 @@ class RoomMemberStore(SQLBaseStore): host (str) Returns: - bool: whether the host is/was in the room or not + Deferred: Resolves to True if the host is/was in the room, otherwise + False. """ if '%' in host or '_' in host: raise Exception("Invalid host name") From 3cc852d339ad1cdcb0a435c76b44182bdb81dfe9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 9 Oct 2017 17:44:42 +0100 Subject: [PATCH 137/223] Fancy logformatter to format exceptions better This is a bit of an experimental change at this point; the idea is to see if it helps us track down where our stack overflows are coming from by logging the stack when the exception was caught and turned into a Failure. (We'll also need https://github.com/richvdh/twisted/commit/edf27044200e74680ea67c525768e36dc9d9af2b). If we deploy this, we'll be able to enable it via the log config yaml. --- synapse/util/logformatter.py | 43 ++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 synapse/util/logformatter.py diff --git a/synapse/util/logformatter.py b/synapse/util/logformatter.py new file mode 100644 index 000000000..60504162e --- /dev/null +++ b/synapse/util/logformatter.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import StringIO +import logging +import traceback + + +class LogFormatter(logging.Formatter): + """Log formatter which gives more detail for exceptions + + This is the same as the standard log formatter, except that when logging + exceptions [typically via log.foo("msg", exc_info=1)], it prints the + sequence that led up to the point at which the exception was caught. + (Normally only stack frames between the point the exception was raised and + where it was caught are logged).
+ """ + def __init__(self, *args, **kwargs): + super(LogFormatter, self).__init__(*args, **kwargs) + + def formatException(self, ei): + sio = StringIO.StringIO() + sio.write("Capture point (most recent call last):\n") + traceback.print_stack(ei[2].tb_frame.f_back, None, sio) + traceback.print_exception(ei[0], ei[1], ei[2], None, sio) + s = sio.getvalue() + sio.close() + if s[-1:] == "\n": + s = s[:-1] + return s From a6e3222fe5abf5b65b53678d1208c4c58f97b391 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Oct 2017 14:24:06 +0100 Subject: [PATCH 138/223] Fed server: Move origin-check code to _handle_received_pdu The response-building code expects there to be an entry in the `results` list for each entry in the pdu_list, so the early `continue` was messing this up. That doesn't really matter, because all that the federation client does is log any errors, but it's pretty poor form. --- synapse/federation/federation_server.py | 48 ++++++++++++------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 51e3fdea0..e791a1266 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -143,30 +143,6 @@ class FederationServer(FederationBase): results = [] for pdu in pdu_list: - # check that it's actually being sent from a valid destination to - # workaround bug #1753 in 0.18.5 and 0.18.6 - if transaction.origin != get_domain_from_id(pdu.event_id): - # We continue to accept join events from any server; this is - # necessary for the federation join dance to work correctly. - # (When we join over federation, the "helper" server is - # responsible for sending out the join event, rather than the - # origin. See bug #1893). - if not ( - pdu.type == 'm.room.member' and - pdu.content and - pdu.content.get("membership", None) == 'join' - ): - logger.info( - "Discarding PDU %s from invalid origin %s", - pdu.event_id, transaction.origin - ) - continue - else: - logger.info( - "Accepting join PDU %s from %s", - pdu.event_id, transaction.origin - ) - try: yield self._handle_received_pdu(transaction.origin, pdu) results.append({}) @@ -520,6 +496,30 @@ class FederationServer(FederationBase): Returns (Deferred): completes with None Raises: FederationError if the signatures / hash do not match """ + # check that it's actually being sent from a valid destination to + # workaround bug #1753 in 0.18.5 and 0.18.6 + if origin != get_domain_from_id(pdu.event_id): + # We continue to accept join events from any server; this is + # necessary for the federation join dance to work correctly. + # (When we join over federation, the "helper" server is + # responsible for sending out the join event, rather than the + # origin. See bug #1893). + if not ( + pdu.type == 'm.room.member' and + pdu.content and + pdu.content.get("membership", None) == 'join' + ): + logger.info( + "Discarding PDU %s from invalid origin %s", + pdu.event_id, origin + ) + return + else: + logger.info( + "Accepting join PDU %s from %s", + pdu.event_id, origin + ) + # Check signature. try: pdu = yield self._check_sigs_and_hash(pdu) From ba5b9b80a56a449ffab44afaf4661d5b44277898 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Oct 2017 15:18:58 +0100 Subject: [PATCH 139/223] fed server: refactor on_incoming_transaction Move as much as possible to after the have_responded check, and reduce the number of times we iterate over the pdu list. 
--- synapse/federation/federation_server.py | 53 ++++++++++++++----------- 1 file changed, 29 insertions(+), 24 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index e791a1266..fa4ec2ad3 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -109,23 +109,12 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function def on_incoming_transaction(self, transaction_data): + # keep this as early as possible to make the calculated origin ts as + # accurate as possible. + request_time = int(self._clock.time_msec()) + transaction = Transaction(**transaction_data) - received_pdus_counter.inc_by(len(transaction.pdus)) - - for p in transaction.pdus: - if "unsigned" in p: - unsigned = p["unsigned"] - if "age" in unsigned: - p["age"] = unsigned["age"] - if "age" in p: - p["age_ts"] = int(self._clock.time_msec()) - int(p["age"]) - del p["age"] - - pdu_list = [ - self.event_from_pdu_json(p) for p in transaction.pdus - ] - logger.debug("[%s] Got transaction", transaction.transaction_id) response = yield self.transaction_actions.have_responded(transaction) @@ -140,17 +129,35 @@ class FederationServer(FederationBase): logger.debug("[%s] Transaction is new", transaction.transaction_id) - results = [] + received_pdus_counter.inc_by(len(transaction.pdus)) + + pdu_list = [] + + for p in transaction.pdus: + if "unsigned" in p: + unsigned = p["unsigned"] + if "age" in unsigned: + p["age"] = unsigned["age"] + if "age" in p: + p["age_ts"] = request_time - int(p["age"]) + del p["age"] + + event = self.event_from_pdu_json(p) + pdu_list.append(event) + + pdu_results = {} for pdu in pdu_list: + event_id = pdu.event_id try: yield self._handle_received_pdu(transaction.origin, pdu) - results.append({}) + pdu_results[event_id] = {} except FederationError as e: + logger.warn("Error handling PDU %s: %s", event_id, e) self.send_failure(e, transaction.origin) - results.append({"error": str(e)}) + pdu_results[event_id] = {"error": str(e)} except Exception as e: - results.append({"error": str(e)}) + pdu_results[event_id] = {"error": str(e)} logger.exception("Failed to handle PDU") if hasattr(transaction, "edus"): @@ -164,14 +171,12 @@ class FederationServer(FederationBase): for failure in getattr(transaction, "pdu_failures", []): logger.info("Got failure %r", failure) - logger.debug("Returning: %s", str(results)) - response = { - "pdus": dict(zip( - (p.event_id for p in pdu_list), results - )), + "pdus": pdu_results, } + logger.debug("Returning: %s", str(response)) + yield self.transaction_actions.set_response( transaction, 200, response From 4c7c4d4061ae298ce6df445c888b91d3e5791164 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Oct 2017 15:31:58 +0100 Subject: [PATCH 140/223] Fed server: use a linearizer for ongoing transactions We don't want to process the same transaction multiple times concurrently, so use a linearizer. 
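In isolation, the Linearizer idiom used in the diff below looks roughly like this (a sketch; handle and do_work are placeholder names):

from twisted.internet import defer
from synapse.util.async import Linearizer

linearizer = Linearizer("example")

@defer.inlineCallbacks
def handle(key, do_work):
    # Calls queued on the same key run strictly one at a time; calls
    # with different keys proceed in parallel.
    with (yield linearizer.queue(key)):
        result = yield do_work()
    defer.returnValue(result)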
--- synapse/federation/federation_server.py | 30 ++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index fa4ec2ad3..b2dffa2c3 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -53,6 +53,7 @@ class FederationServer(FederationBase): self.auth = hs.get_auth() self._server_linearizer = Linearizer("fed_server") + self._transaction_linearizer = Linearizer("fed_txn_handler") # We cache responses to state queries, as they take a while and often # come in waves. @@ -111,12 +112,39 @@ class FederationServer(FederationBase): def on_incoming_transaction(self, transaction_data): # keep this as early as possible to make the calculated origin ts as # accurate as possible. - request_time = int(self._clock.time_msec()) + request_time = self._clock.time_msec() transaction = Transaction(**transaction_data) + if not transaction.transaction_id: + raise Exception("Transaction missing transaction_id") + if not transaction.origin: + raise Exception("Transaction missing origin") + logger.debug("[%s] Got transaction", transaction.transaction_id) + # use a linearizer to ensure that we don't process the same transaction + # multiple times in parallel. + with (yield self._transaction_linearizer.queue( + (transaction.origin, transaction.transaction_id), + )): + result = yield self._handle_incoming_transaction( + transaction, request_time, + ) + + defer.returnValue(result) + + @defer.inlineCallbacks + def _handle_incoming_transaction(self, transaction, request_time): + """ Process an incoming transaction and return the HTTP response + + Args: + transaction (Transaction): incoming transaction + request_time (int): timestamp that the HTTP request arrived at + + Returns: + Deferred[(int, object)]: http response code and body + """ response = yield self.transaction_actions.have_responded(transaction) if response: From 6a6cc27aee16ee045b6909d2c401a9d4f6e54324 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 6 Oct 2017 16:07:20 +0100 Subject: [PATCH 141/223] fed server: process PDUs for different rooms in parallel With luck, this will give a real-time improvement when there are many rooms and the server ends up calling out to fetch missing events. --- synapse/federation/federation_server.py | 53 ++++++++++++++++--------- 1 file changed, 34 insertions(+), 19 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index b2dffa2c3..f00d59e70 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -12,14 +12,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - - from twisted.internet import defer from .federation_base import FederationBase from .units import Transaction, Edu -from synapse.util.async import Linearizer +from synapse.util import async from synapse.util.logutils import log_function from synapse.util.caches.response_cache import ResponseCache from synapse.events import FrozenEvent @@ -33,6 +31,9 @@ from synapse.crypto.event_signing import compute_event_signature import simplejson as json import logging +# when processing incoming transactions, we try to handle multiple rooms in +# parallel, up to this limit. 
+TRANSACTION_CONCURRENCY_LIMIT = 10 logger = logging.getLogger(__name__) @@ -52,8 +53,8 @@ class FederationServer(FederationBase): self.auth = hs.get_auth() - self._server_linearizer = Linearizer("fed_server") - self._transaction_linearizer = Linearizer("fed_txn_handler") + self._server_linearizer = async.Linearizer("fed_server") + self._transaction_linearizer = async.Linearizer("fed_txn_handler") # We cache responses to state queries, as they take a while and often # come in waves. @@ -159,7 +160,7 @@ class FederationServer(FederationBase): received_pdus_counter.inc_by(len(transaction.pdus)) - pdu_list = [] + pdus_by_room = {} for p in transaction.pdus: if "unsigned" in p: @@ -171,22 +172,36 @@ class FederationServer(FederationBase): del p["age"] event = self.event_from_pdu_json(p) - pdu_list.append(event) + room_id = event.room_id + pdus_by_room.setdefault(room_id, []).append(event) pdu_results = {} - for pdu in pdu_list: - event_id = pdu.event_id - try: - yield self._handle_received_pdu(transaction.origin, pdu) - pdu_results[event_id] = {} - except FederationError as e: - logger.warn("Error handling PDU %s: %s", event_id, e) - self.send_failure(e, transaction.origin) - pdu_results[event_id] = {"error": str(e)} - except Exception as e: - pdu_results[event_id] = {"error": str(e)} - logger.exception("Failed to handle PDU") + # we can process different rooms in parallel (which is useful if they + # require callouts to other servers to fetch missing events), but + # impose a limit to avoid going too crazy with ram/cpu. + @defer.inlineCallbacks + def process_pdus_for_room(room_id): + logger.debug("Processing PDUs for %s", room_id) + for pdu in pdus_by_room[room_id]: + event_id = pdu.event_id + try: + yield self._handle_received_pdu( + transaction.origin, pdu + ) + pdu_results[event_id] = {} + except FederationError as e: + logger.warn("Error handling PDU %s: %s", event_id, e) + self.send_failure(e, transaction.origin) + pdu_results[event_id] = {"error": str(e)} + except Exception as e: + pdu_results[event_id] = {"error": str(e)} + logger.exception("Failed to handle PDU %s", event_id) + + yield async.concurrently_execute( + process_pdus_for_room, pdus_by_room.keys(), + TRANSACTION_CONCURRENCY_LIMIT, + ) if hasattr(transaction, "edus"): for edu in (Edu(**x) for x in transaction.edus): From 707374d5dc8ed0ed077ed525262678ebcd583090 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 10 Oct 2017 11:21:41 +0100 Subject: [PATCH 142/223] What year is it!? Who's the president!? --- synapse/push/push_rule_evaluator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index a91dc0ee0..7cf777f16 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd -# Copyright 2015 New Vector Ltd +# Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
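The room-level parallelism introduced in [PATCH 141/223] above can be sketched on its own like this (illustrative only; handle_pdu stands in for the real per-PDU handler):

from twisted.internet import defer
from synapse.util import async

@defer.inlineCallbacks
def process_transaction(pdus_by_room, handle_pdu):
    @defer.inlineCallbacks
    def process_one_room(room_id):
        # PDUs within a single room are still handled strictly in order...
        for pdu in pdus_by_room[room_id]:
            yield handle_pdu(pdu)

    # ...but up to ten rooms are worked on concurrently, matching
    # TRANSACTION_CONCURRENCY_LIMIT.
    yield async.concurrently_execute(
        process_one_room, pdus_by_room.keys(), 10,
    )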
From a9f9d686316da9efa3e165275fb20066c0367649 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 10 Oct 2017 11:38:31 +0100 Subject: [PATCH 143/223] More optimisation --- synapse/push/bulk_push_rule_evaluator.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index df16d5ce9..66e8a68a0 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -112,19 +112,22 @@ class BulkPushRuleEvaluator(object): @defer.inlineCallbacks def _get_sender_power_level(self, event, context): - pl_event_id = context.prev_state_ids.get((EventTypes.PowerLevels, "",)) + pl_event_key = (EventTypes.PowerLevels, "", ) + pl_event_id = context.prev_state_ids.get(pl_event_key) if pl_event_id: # fastpath: if there's a power level event, that's all we need, and # not having a power level event is an extreme edge case - auth_events_ids = [pl_event_id] + pl_event = yield self.store.get_event(pl_event_id) + auth_events = { pl_event_key: pl_event } else: auth_events_ids = yield self.auth.compute_auth_events( event, context.prev_state_ids, for_verification=False, ) - auth_events = yield self.store.get_events(auth_events_ids) - auth_events = { - (e.type, e.state_key): e for e in auth_events.values() - } + auth_events = yield self.store.get_events(auth_events_ids) + auth_events = { + (e.type, e.state_key): e for e in auth_events.itervalues() + } + defer.returnValue(get_user_power_level(event.sender, auth_events)) @defer.inlineCallbacks From c9f034b4acb0a48915d1680e310b692816eed713 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 10 Oct 2017 11:47:10 +0100 Subject: [PATCH 144/223] There was already a constant for this also update copyright --- synapse/push/bulk_push_rule_evaluator.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 66e8a68a0..adc99bd4f 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2015 OpenMarket Ltd +# Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
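The fastpath added in the previous patch, which the hunks below switch over to the shared POWER_KEY constant, turns on the shape of auth_events: a dict keyed by (event type, state key) tuples, from which get_user_power_level reads the sender's level. A toy, runnable illustration of that lookup follows; this get_user_power_level is a simplified re-implementation over plain dicts, not Synapse's actual function.

POWER_KEY = ("m.room.power_levels", "")  # the (type, state_key) pair for power levels

def get_user_power_level(user_id, auth_events):
    # Simplified rule: an explicit per-user entry wins, otherwise fall
    # back to users_default, otherwise 0.
    pl_event = auth_events.get(POWER_KEY)
    if pl_event is None:
        return 0
    content = pl_event["content"]
    return content.get("users", {}).get(user_id, content.get("users_default", 0))

auth_events = {
    POWER_KEY: {
        "content": {"users": {"@alice:example.org": 100}, "users_default": 0},
    },
}
assert get_user_power_level("@alice:example.org", auth_events) == 100
assert get_user_power_level("@bob:example.org", auth_events) == 0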
@@ -25,6 +26,7 @@
 from synapse.metrics import get_metrics_for
 from synapse.util.caches import metrics as cache_metrics
 from synapse.util.caches.descriptors import cached
 from synapse.util.async import Linearizer
+from synapse.state import POWER_KEY

 from collections import namedtuple
@@ -112,13 +114,12 @@ class BulkPushRuleEvaluator(object):

     @defer.inlineCallbacks
     def _get_sender_power_level(self, event, context):
-        pl_event_key = (EventTypes.PowerLevels, "", )
-        pl_event_id = context.prev_state_ids.get(pl_event_key)
+        pl_event_id = context.prev_state_ids.get(POWER_KEY)
         if pl_event_id:
             # fastpath: if there's a power level event, that's all we need, and
             # not having a power level event is an extreme edge case
             pl_event = yield self.store.get_event(pl_event_id)
-            auth_events = { pl_event_key: pl_event }
+            auth_events = { POWER_KEY: pl_event }
         else:
             auth_events_ids = yield self.auth.compute_auth_events(
                 event, context.prev_state_ids, for_verification=False,

From 0f1eb3e914a1e47e441bd8bfb7d523882646fb6e Mon Sep 17 00:00:00 2001
From: David Baker
Date: Tue, 10 Oct 2017 15:23:00 +0100
Subject: [PATCH 145/223] Use notification levels in power_levels

Rather than making the condition directly require a specific power
level. This way the level required to notify a room can be configured
per room.
---
 synapse/push/baserules.py                |  4 ++--
 synapse/push/bulk_push_rule_evaluator.py | 10 ++++++----
 synapse/push/push_rule_evaluator.py      | 20 ++++++++++++++------
 3 files changed, 22 insertions(+), 12 deletions(-)

diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
index 71f9ab6b2..9dce99ebe 100644
--- a/synapse/push/baserules.py
+++ b/synapse/push/baserules.py
@@ -249,8 +249,8 @@ BASE_APPEND_OVERRIDE_RULES = [
                 '_id': '_roomnotif_content',
             },
             {
-                'kind': 'sender_power_level',
-                'is': '>=50',
+                'kind': 'sender_notification_permission',
+                'key': 'room',
                 '_id': '_roomnotif_pl',
             },
         ],
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index adc99bd4f..db07a97a9 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -113,7 +113,7 @@ class BulkPushRuleEvaluator(object):
         )

     @defer.inlineCallbacks
-    def _get_sender_power_level(self, event, context):
+    def _get_power_levels_and_sender_level(self, event, context):
         pl_event_id = context.prev_state_ids.get(POWER_KEY)
         if pl_event_id:
             # fastpath: if there's a power level event, that's all we need, and
@@ -129,7 +129,9 @@ class BulkPushRuleEvaluator(object):
             (e.type, e.state_key): e for e in auth_events.itervalues()
         }

-        defer.returnValue(get_user_power_level(event.sender, auth_events))
+        sender_level = get_user_power_level(event.sender, auth_events)
+
+        defer.returnValue((auth_events[POWER_KEY].content, sender_level))

     @defer.inlineCallbacks
     def action_for_event_by_user(self, event, context):
@@ -146,10 +148,10 @@ class BulkPushRuleEvaluator(object):
             event, context
         )

-        sender_power_level = yield self._get_sender_power_level(event, context)
+        (power_levels, sender_power_level) = yield self._get_power_levels_and_sender_level(event, context)

         evaluator = PushRuleEvaluatorForEvent(
-            event, len(room_members), sender_power_level
+            event, len(room_members), sender_power_level, power_levels,
         )

         condition_cache = {}
diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py
index 7cf777f16..5011bef4f 100644
--- a/synapse/push/push_rule_evaluator.py
+++ b/synapse/push/push_rule_evaluator.py
@@ -33,8 +33,15 @@ def _room_member_count(ev, condition,
room_member_count): return _test_ineq_condition(condition, room_member_count) -def _sender_power_level(ev, condition, power_level): - return _test_ineq_condition(condition, power_level) +def _sender_notification_permission(ev, condition, sender_power_level, power_levels): + notif_level_key = condition.get('key') + if notif_level_key is None: + return False + + notif_levels = power_levels.get('notifications', {}) + room_notif_level = notif_levels.get(notif_level_key, 50) + + return sender_power_level >= room_notif_level; def _test_ineq_condition(condition, number): @@ -74,10 +81,11 @@ def tweaks_for_actions(actions): class PushRuleEvaluatorForEvent(object): - def __init__(self, event, room_member_count, sender_power_level): + def __init__(self, event, room_member_count, sender_power_level, power_levels): self._event = event self._room_member_count = room_member_count self._sender_power_level = sender_power_level + self._power_levels = power_levels # Maps strings of e.g. 'content.body' -> event["content"]["body"] self._value_cache = _flatten_dict(event) @@ -91,9 +99,9 @@ class PushRuleEvaluatorForEvent(object): return _room_member_count( self._event, condition, self._room_member_count ) - elif condition['kind'] == 'sender_power_level': - return _sender_power_level( - self._event, condition, self._sender_power_level + elif condition['kind'] == 'sender_notification_permission': + return _sender_notification_permission( + self._event, condition, self._sender_power_level, self._power_levels, ) else: return True From ab1bc9bf5fcc8875c61e4ec7357d6e43abc76a55 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 10 Oct 2017 15:34:05 +0100 Subject: [PATCH 146/223] Don't KeyError if no power_levels event --- synapse/push/bulk_push_rule_evaluator.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index db07a97a9..05c1c5165 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -131,7 +131,9 @@ class BulkPushRuleEvaluator(object): sender_level = get_user_power_level(event.sender, auth_events) - defer.returnValue((auth_events[POWER_KEY].content, sender_level)) + pl_event = auth_events.get(POWER_KEY) + + defer.returnValue((pl_event.content if pl_event else {}, sender_level)) @defer.inlineCallbacks def action_for_event_by_user(self, event, context): From 81a5e0073cbca1ed469fdae94150de3059e5c3e3 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 10 Oct 2017 15:53:34 +0100 Subject: [PATCH 147/223] pep8 --- synapse/push/bulk_push_rule_evaluator.py | 6 ++++-- synapse/push/push_rule_evaluator.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 05c1c5165..425a017bd 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -119,7 +119,7 @@ class BulkPushRuleEvaluator(object): # fastpath: if there's a power level event, that's all we need, and # not having a power level event is an extreme edge case pl_event = yield self.store.get_event(pl_event_id) - auth_events = { POWER_KEY: pl_event } + auth_events = {POWER_KEY: pl_event} else: auth_events_ids = yield self.auth.compute_auth_events( event, context.prev_state_ids, for_verification=False, @@ -150,7 +150,9 @@ class BulkPushRuleEvaluator(object): event, context ) - (power_levels, sender_power_level) = yield self._get_power_levels_and_sender_level(event, 
context) + (power_levels, sender_power_level) = ( + yield self._get_power_levels_and_sender_level(event, context) + ) evaluator = PushRuleEvaluatorForEvent( event, len(room_members), sender_power_level, power_levels, diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py index 5011bef4f..3601f2d36 100644 --- a/synapse/push/push_rule_evaluator.py +++ b/synapse/push/push_rule_evaluator.py @@ -41,7 +41,7 @@ def _sender_notification_permission(ev, condition, sender_power_level, power_lev notif_levels = power_levels.get('notifications', {}) room_notif_level = notif_levels.get(notif_level_key, 50) - return sender_power_level >= room_notif_level; + return sender_power_level >= room_notif_level def _test_ineq_condition(condition, number): From ec954f47fb7a1aaa176a7fbf7ca8e683cf428af8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 11 Oct 2017 13:15:44 +0100 Subject: [PATCH 148/223] Validate room ids --- synapse/groups/groups_server.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 991cc12cc..6a85908dd 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -16,7 +16,7 @@ from twisted.internet import defer from synapse.api.errors import SynapseError -from synapse.types import UserID, get_domain_from_id +from synapse.types import UserID, get_domain_from_id, RoomID import logging @@ -160,6 +160,8 @@ class GroupsServerHandler(object): """ yield self.check_group_is_ours(group_id, and_exists=True, and_is_admin=user_id) + RoomID.from_string(room_id) # Ensure valid room id + order = content.get("order", None) is_public = _parse_visibility_from_contents(content) @@ -463,6 +465,8 @@ class GroupsServerHandler(object): def add_room_to_group(self, group_id, requester_user_id, room_id, content): """Add room to group """ + RoomID.from_string(room_id) # Ensure valid room id + yield self.check_group_is_ours( group_id, and_exists=True, and_is_admin=requester_user_id ) From c2c47550f9b85fda1a24964f053d03e459bb8436 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 11 Oct 2017 13:23:15 +0100 Subject: [PATCH 149/223] Fix schema delta versions --- synapse/storage/prepare_database.py | 2 +- synapse/storage/schema/delta/{43 => 45}/group_server.sql | 0 synapse/storage/schema/delta/{43 => 45}/profile_cache.sql | 0 3 files changed, 1 insertion(+), 1 deletion(-) rename synapse/storage/schema/delta/{43 => 45}/group_server.sql (100%) rename synapse/storage/schema/delta/{43 => 45}/profile_cache.sql (100%) diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index a0af8456f..ccaaabcfa 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) # Remember to update this number every time a change is made to database # schema files, so the users will be informed on server restarts. 
-SCHEMA_VERSION = 44 +SCHEMA_VERSION = 45 dir_path = os.path.abspath(os.path.dirname(__file__)) diff --git a/synapse/storage/schema/delta/43/group_server.sql b/synapse/storage/schema/delta/45/group_server.sql similarity index 100% rename from synapse/storage/schema/delta/43/group_server.sql rename to synapse/storage/schema/delta/45/group_server.sql diff --git a/synapse/storage/schema/delta/43/profile_cache.sql b/synapse/storage/schema/delta/45/profile_cache.sql similarity index 100% rename from synapse/storage/schema/delta/43/profile_cache.sql rename to synapse/storage/schema/delta/45/profile_cache.sql From 4ce43792350f0df432df25006c1bdd78c08647e0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 11 Oct 2017 14:11:43 +0100 Subject: [PATCH 150/223] Fix attestations to check correct server name --- synapse/handlers/groups_local.py | 30 ++++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index a2bacbfc3..50e40548c 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -102,6 +102,8 @@ class GroupsLocalHandler(object): get_domain_from_id(group_id), group_id, requester_user_id, ) + group_server_name = get_domain_from_id(group_id) + # Loop through the users and validate the attestations. chunk = res["users_section"]["users"] valid_users = [] @@ -109,11 +111,13 @@ class GroupsLocalHandler(object): g_user_id = entry["user_id"] attestation = entry.pop("attestation") try: - yield self.attestations.verify_attestation( - attestation, - group_id=group_id, - user_id=g_user_id, - ) + if get_domain_from_id(g_user_id) != group_server_name: + yield self.attestations.verify_attestation( + attestation, + group_id=group_id, + user_id=g_user_id, + server_name=get_domain_from_id(g_user_id), + ) valid_users.append(entry) except Exception as e: logger.info("Failed to verify user is in group: %s", e) @@ -160,6 +164,7 @@ class GroupsLocalHandler(object): remote_attestation, group_id=group_id, user_id=user_id, + server_name=get_domain_from_id(group_id), ) is_publicised = content.get("publicise", False) @@ -187,6 +192,8 @@ class GroupsLocalHandler(object): ) defer.returnValue(res) + group_server_name = get_domain_from_id(group_id) + res = yield self.transport_client.get_users_in_group( get_domain_from_id(group_id), group_id, requester_user_id, ) @@ -197,11 +204,13 @@ class GroupsLocalHandler(object): g_user_id = entry["user_id"] attestation = entry.pop("attestation") try: - yield self.attestations.verify_attestation( - attestation, - group_id=group_id, - user_id=g_user_id, - ) + if get_domain_from_id(g_user_id) != group_server_name: + yield self.attestations.verify_attestation( + attestation, + group_id=group_id, + user_id=g_user_id, + server_name=get_domain_from_id(g_user_id), + ) valid_entries.append(entry) except Exception as e: logger.info("Failed to verify user is in group: %s", e) @@ -240,6 +249,7 @@ class GroupsLocalHandler(object): remote_attestation, group_id=group_id, user_id=user_id, + server_name=get_domain_from_id(group_id), ) # TODO: Check that the group is public and we're being added publically From 27e727a1469f336c3decc82288172923f2d46ddf Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 11 Oct 2017 14:32:40 +0100 Subject: [PATCH 151/223] Fix typo --- synapse/groups/groups_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 6a85908dd..1083bc299 100644 --- 
a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -400,7 +400,7 @@ class GroupsServerHandler(object): if not is_public: entry["is_public"] = False - if not self.is_mine_id(requester_user_id): + if not self.is_mine_id(g_user_id): attestation = yield self.store.get_remote_attestation(group_id, g_user_id) if not attestation: continue From b75d443caff63a736e914236ef7448c9502e6495 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 11 Oct 2017 14:25:16 +0100 Subject: [PATCH 152/223] log pdu_failures from incoming transactions ... even if we have no EDUs. This appears to have been introduced in 476899295f5fd6cff64799bcbc84cd4bf9005e33. --- synapse/federation/federation_server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index fa4ec2ad3..b4b587c86 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -168,8 +168,9 @@ class FederationServer(FederationBase): edu.content ) - for failure in getattr(transaction, "pdu_failures", []): - logger.info("Got failure %r", failure) + pdu_failures = getattr(transaction, "pdu_failures", []) + for failure in pdu_failures: + logger.info("Got failure %r", failure) response = { "pdus": pdu_results, From c3e190ce671ed37a31a1543f42e26277fea582ce Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 11 Oct 2017 14:37:20 +0100 Subject: [PATCH 153/223] fix a logcontext leak in read receipt handling --- synapse/handlers/receipts.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index e1cd3a48e..052576527 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from synapse.util import logcontext from ._base import BaseHandler @@ -59,6 +60,8 @@ class ReceiptsHandler(BaseHandler): is_new = yield self._handle_new_receipts([receipt]) if is_new: + # fire off a process in the background to send the receipt to + # remote servers self._push_remotes([receipt]) @defer.inlineCallbacks @@ -126,6 +129,7 @@ class ReceiptsHandler(BaseHandler): defer.returnValue(True) + @logcontext.preserve_fn # caller should not yield on this @defer.inlineCallbacks def _push_remotes(self, receipts): """Given a list of receipts, works out which remote servers should be From c3b7a45e84f0aaf45e671eef295993e3d09d6908 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 11 Oct 2017 14:39:22 +0100 Subject: [PATCH 154/223] Allow error strings from spam checker --- synapse/handlers/message.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index fbf88b46e..06672df3b 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd +# Copyright 2017 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
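The hunk that follows loosens the spam-checker contract: check_event_for_spam may now return a string, which becomes the rejection message, rather than just a boolean. A runnable modern-Python sketch of that convention; the checker body and the use of PermissionError in place of SynapseError(403, ...) are illustrative only.

def check_event_for_spam(event):
    # Hypothetical checker: return False/None to allow the event, True
    # to reject with a generic message, or a string to reject with a
    # custom, human-readable message.
    body = event.get("content", {}).get("body", "")
    if "buy cheap meds" in body:
        return "Advertising is not permitted here"
    return False

def enforce_spam_check(event):
    spam_error = check_event_for_spam(event)
    if spam_error:
        if not isinstance(spam_error, str):  # e.g. a bare True
            spam_error = "Spam is not permitted here"
        raise PermissionError(spam_error)

enforce_spam_check({"content": {"body": "hello"}})  # allowed: no exception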
@@ -325,9 +326,12 @@ class MessageHandler(BaseHandler): txn_id=txn_id ) - if self.spam_checker.check_event_for_spam(event): + spam_error = self.spam_checker.check_event_for_spam(event) + if spam_error: + if not isinstance(spam_error, (str, basestring)): + spam_error = "Spam is not permitted here" raise SynapseError( - 403, "Spam is not permitted here", Codes.FORBIDDEN + 403, spam_error, Codes.FORBIDDEN ) yield self.send_nonmember_event( From 271f5601f3eb28a16e4b0d58017b4845856ab19c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 11 Oct 2017 14:44:51 +0100 Subject: [PATCH 155/223] Fix typo in invite to group --- synapse/federation/transport/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 36f6eb75e..f96561c1f 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -572,7 +572,7 @@ class TransportLayerClient(object): return self.client.post_json( destination=destination, path=path, - args=requester_user_id, + args={"requester_user_id": requester_user_id}, data=content, ignore_backoff=True, ) From b78bae2d51bac7e1f75365d85cd67402adb5f408 Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 11 Oct 2017 14:49:09 +0100 Subject: [PATCH 156/223] fix isinstance --- synapse/handlers/message.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 06672df3b..28792788d 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -328,7 +328,7 @@ class MessageHandler(BaseHandler): spam_error = self.spam_checker.check_event_for_spam(event) if spam_error: - if not isinstance(spam_error, (str, basestring)): + if not isinstance(spam_error, basestring): spam_error = "Spam is not permitted here" raise SynapseError( 403, spam_error, Codes.FORBIDDEN From 4fad8efbfb1726c72bdd7cbbacc894b8701efec3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 11 Oct 2017 15:05:05 +0100 Subject: [PATCH 157/223] Fix stackoverflow and logcontexts from linearizer 1. make it not blow out the stack when there are more than 50 things waiting for a lock. Fixes https://github.com/matrix-org/synapse/issues/2505. 2. Make it not mess up the log contexts. --- synapse/util/async.py | 24 ++++++++++++++++++++++-- tests/util/test_linearizer.py | 28 ++++++++++++++++++++++++---- 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/synapse/util/async.py b/synapse/util/async.py index bb252f75d..0fd5b4252 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -203,7 +203,26 @@ class Linearizer(object): except: logger.exception("Unexpected exception in Linearizer") - logger.info("Acquired linearizer lock %r for key %r", self.name, key) + logger.info("Acquired linearizer lock %r for key %r", self.name, + key) + + # if the code holding the lock completes synchronously, then it + # will recursively run the next claimant on the list. That can + # relatively rapidly lead to stack exhaustion. This is essentially + # the same problem as http://twistedmatrix.com/trac/ticket/9304. + # + # In order to break the cycle, we add a cheeky sleep(0) here to + # ensure that we fall back to the reactor between each iteration. + # + # (There's no particular need for it to happen before we return + # the context manager, but it needs to happen while we hold the + # lock, and the context manager's exit code must be synchronous, + # so actually this is the only sensible place. 
+ yield run_on_reactor() + + else: + logger.info("Acquired uncontended linearizer lock %r for key %r", + self.name, key) @contextmanager def _ctx_manager(): @@ -211,7 +230,8 @@ class Linearizer(object): yield finally: logger.info("Releasing linearizer lock %r for key %r", self.name, key) - new_defer.callback(None) + with PreserveLoggingContext(): + new_defer.callback(None) current_d = self.key_to_defer.get(key) if current_d is new_defer: self.key_to_defer.pop(key, None) diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index afcba482f..793a88e46 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -12,8 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - - +from synapse.util import async, logcontext from tests import unittest from twisted.internet import defer @@ -38,7 +37,28 @@ class LinearizerTestCase(unittest.TestCase): with cm1: self.assertFalse(d2.called) - self.assertTrue(d2.called) - with (yield d2): pass + + def test_lots_of_queued_things(self): + # we have one slow thing, and lots of fast things queued up behind it. + # it should *not* explode the stack. + linearizer = Linearizer() + + @defer.inlineCallbacks + def func(i, sleep=False): + with logcontext.LoggingContext("func(%s)" % i) as lc: + with (yield linearizer.queue("")): + self.assertEqual( + logcontext.LoggingContext.current_context(), lc) + if sleep: + yield async.sleep(0) + + self.assertEqual( + logcontext.LoggingContext.current_context(), lc) + + func(0, sleep=True) + for i in xrange(1, 100): + func(i) + + return func(1000) From ea18996f54194f920dc506201a65eb3d36bb161d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 11 Oct 2017 15:44:37 +0100 Subject: [PATCH 158/223] Fix group stream replication The stream update functions expect the storage function to return a list of tuples. --- synapse/storage/group_server.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 4fe9172ad..22a6bc626 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -1172,13 +1172,13 @@ class GroupServerStore(SQLBaseStore): LIMIT ? 
""" txn.execute(sql, (from_token, to_token, limit,)) - return [{ - "stream_id": stream_id, - "group_id": group_id, - "user_id": user_id, - "type": gtype, - "content": json.loads(content_json), - } for stream_id, group_id, user_id, gtype, content_json in txn] + return [( + stream_id, + group_id, + user_id, + gtype, + json.loads(content_json), + ) for stream_id, group_id, user_id, gtype, content_json in txn] return self.runInteraction( "get_all_groups_changes", _get_all_groups_changes_txn, ) From 818b08d0e4be7571008af4590542fd652f028dcd Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 11 Oct 2017 15:54:00 +0100 Subject: [PATCH 159/223] peeeeeeeeep8888888888888888888888888888 --- synapse/storage/group_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 22a6bc626..3af372de5 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -1178,7 +1178,7 @@ class GroupServerStore(SQLBaseStore): user_id, gtype, json.loads(content_json), - ) for stream_id, group_id, user_id, gtype, content_json in txn] + ) for stream_id, group_id, user_id, gtype, content_json in txn] return self.runInteraction( "get_all_groups_changes", _get_all_groups_changes_txn, ) From b752507b488d223a0ab01ec3dbc7b30fee64c203 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 11 Oct 2017 16:54:36 +0100 Subject: [PATCH 160/223] Fix fetching remote summaries --- synapse/handlers/groups_local.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 50e40548c..3b676d46b 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -109,7 +109,7 @@ class GroupsLocalHandler(object): valid_users = [] for entry in chunk: g_user_id = entry["user_id"] - attestation = entry.pop("attestation") + attestation = entry.pop("attestation", {}) try: if get_domain_from_id(g_user_id) != group_server_name: yield self.attestations.verify_attestation( @@ -202,7 +202,7 @@ class GroupsLocalHandler(object): valid_entries = [] for entry in chunk: g_user_id = entry["user_id"] - attestation = entry.pop("attestation") + attestation = entry.pop("attestation", {}) try: if get_domain_from_id(g_user_id) != group_server_name: yield self.attestations.verify_attestation( From f30c4ed2bc2255dc7182bd026fb6437afec735a5 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 11 Oct 2017 17:26:17 +0100 Subject: [PATCH 161/223] logformatter: fix AttributeError make sure we have the relevant fields before we try to log them. 
--- synapse/util/logformatter.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/synapse/util/logformatter.py b/synapse/util/logformatter.py index 60504162e..cdbc4bffd 100644 --- a/synapse/util/logformatter.py +++ b/synapse/util/logformatter.py @@ -33,9 +33,17 @@ class LogFormatter(logging.Formatter): def formatException(self, ei): sio = StringIO.StringIO() - sio.write("Capture point (most recent call last):\n") - traceback.print_stack(ei[2].tb_frame.f_back, None, sio) - traceback.print_exception(ei[0], ei[1], ei[2], None, sio) + (typ, val, tb) = ei + + # log the stack above the exception capture point if possible, but + # check that we actually have an f_back attribute to work around + # https://twistedmatrix.com/trac/ticket/9305 + + if tb and hasattr(tb.tb_frame, 'f_back'): + sio.write("Capture point (most recent call last):\n") + traceback.print_stack(tb.tb_frame.f_back, None, sio) + + traceback.print_exception(typ, val, tb, None, sio) s = sio.getvalue() sio.close() if s[-1:] == "\n": From f807f7f80442aec48a5e1f6b6b6f1a88e707b1e2 Mon Sep 17 00:00:00 2001 From: hera Date: Thu, 12 Oct 2017 10:50:44 +0000 Subject: [PATCH 162/223] log when we get an exception handling replication updates --- synapse/replication/tcp/resource.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index 3ea3ca5a6..6c1beca4e 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -160,7 +160,11 @@ class ReplicationStreamer(object): "Getting stream: %s: %s -> %s", stream.NAME, stream.last_token, stream.upto_token ) - updates, current_token = yield stream.get_updates() + try: + updates, current_token = yield stream.get_updates() + except: + logger.info("Failed to handle stream %s", stream.NAME) + raise logger.debug( "Sending %d updates to %d connections", From bf4fb1fb400daad23702bc0b3231ec069d68e87e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 15:20:59 +0100 Subject: [PATCH 163/223] Basic implementation of backup media store --- synapse/config/repository.py | 18 ++ synapse/rest/media/v1/media_repository.py | 219 +++++++++++----------- synapse/rest/media/v1/thumbnailer.py | 16 +- synapse/rest/media/v1/upload_resource.py | 2 +- 4 files changed, 130 insertions(+), 125 deletions(-) diff --git a/synapse/config/repository.py b/synapse/config/repository.py index 2c6f57168..e3c83d56f 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -70,7 +70,17 @@ class ContentRepositoryConfig(Config): self.max_upload_size = self.parse_size(config["max_upload_size"]) self.max_image_pixels = self.parse_size(config["max_image_pixels"]) self.max_spider_size = self.parse_size(config["max_spider_size"]) + self.media_store_path = self.ensure_directory(config["media_store_path"]) + + self.backup_media_store_path = config.get("backup_media_store_path") + if self.backup_media_store_path: + self.ensure_directory(self.backup_media_store_path) + + self.synchronous_backup_media_store = config.get( + "synchronous_backup_media_store", False + ) + self.uploads_path = self.ensure_directory(config["uploads_path"]) self.dynamic_thumbnails = config["dynamic_thumbnails"] self.thumbnail_requirements = parse_thumbnail_requirements( @@ -115,6 +125,14 @@ class ContentRepositoryConfig(Config): # Directory where uploaded images and attachments are stored. 
media_store_path: "%(media_store)s" + # A secondary directory where uploaded images and attachments are + # stored as a backup. + # backup_media_store_path: "%(media_store)s" + + # Whether to wait for successful write to backup media store before + # returning successfully. + # synchronous_backup_media_store: false + # Directory where in-progress uploads are stored. uploads_path: "%(uploads_path)s" diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 0ea1248ce..3b442cc16 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -33,7 +33,7 @@ from synapse.api.errors import SynapseError, HttpResponseException, \ from synapse.util.async import Linearizer from synapse.util.stringutils import is_ascii -from synapse.util.logcontext import preserve_context_over_fn +from synapse.util.logcontext import preserve_context_over_fn, preserve_fn from synapse.util.retryutils import NotRetryingDestination import os @@ -59,7 +59,12 @@ class MediaRepository(object): self.store = hs.get_datastore() self.max_upload_size = hs.config.max_upload_size self.max_image_pixels = hs.config.max_image_pixels + self.filepaths = MediaFilePaths(hs.config.media_store_path) + self.backup_filepaths = None + if hs.config.backup_media_store_path: + self.backup_filepaths = MediaFilePaths(hs.config.backup_media_store_path) + self.dynamic_thumbnails = hs.config.dynamic_thumbnails self.thumbnail_requirements = hs.config.thumbnail_requirements @@ -87,18 +92,43 @@ class MediaRepository(object): if not os.path.exists(dirname): os.makedirs(dirname) + @defer.inlineCallbacks + def _write_to_file(self, source, file_name_func): + def write_file_thread(file_name): + source.seek(0) # Ensure we read from the start of the file + with open(file_name, "wb") as f: + shutil.copyfileobj(source, f) + + fname = file_name_func(self.filepaths) + self._makedirs(fname) + + # Write to the main repository + yield preserve_context_over_fn(threads.deferToThread, write_file_thread, fname) + + # Write to backup repository + if self.backup_filepaths: + backup_fname = file_name_func(backup_filepaths) + self._makedirs(backup_fname) + + # We can either wait for successful writing to the backup repository + # or write in the background and immediately return + if hs.config.synchronous_backup_media_store: + yield preserve_context_over_fn( + threads.deferToThread, write_file_thread, backup_fname, + ) + else: + preserve_fn(threads.deferToThread)(write_file, backup_fname) + + defer.returnValue(fname) + @defer.inlineCallbacks def create_content(self, media_type, upload_name, content, content_length, auth_user): media_id = random_string(24) - fname = self.filepaths.local_media_filepath(media_id) - self._makedirs(fname) - - # This shouldn't block for very long because the content will have - # already been uploaded at this point. 
- with open(fname, "wb") as f: - f.write(content) + fname = yield self._write_to_file( + content, lambda f: f.local_media_filepath(media_id) + ) logger.info("Stored local media in file %r", fname) @@ -253,9 +283,8 @@ class MediaRepository(object): def _get_thumbnail_requirements(self, media_type): return self.thumbnail_requirements.get(media_type, ()) - def _generate_thumbnail(self, input_path, t_path, t_width, t_height, + def _generate_thumbnail(self, thumbnailer, t_width, t_height, t_method, t_type): - thumbnailer = Thumbnailer(input_path) m_width = thumbnailer.width m_height = thumbnailer.height @@ -267,36 +296,40 @@ class MediaRepository(object): return if t_method == "crop": - t_len = thumbnailer.crop(t_path, t_width, t_height, t_type) + t_byte_source = thumbnailer.crop(t_width, t_height, t_type) elif t_method == "scale": t_width, t_height = thumbnailer.aspect(t_width, t_height) t_width = min(m_width, t_width) t_height = min(m_height, t_height) - t_len = thumbnailer.scale(t_path, t_width, t_height, t_type) + t_byte_source = thumbnailer.scale(t_width, t_height, t_type) else: - t_len = None + t_byte_source = None - return t_len + return t_byte_source @defer.inlineCallbacks def generate_local_exact_thumbnail(self, media_id, t_width, t_height, t_method, t_type): input_path = self.filepaths.local_media_filepath(media_id) - t_path = self.filepaths.local_media_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) - self._makedirs(t_path) - - t_len = yield preserve_context_over_fn( + thumbnailer = Thumbnailer(input_path) + t_byte_source = yield preserve_context_over_fn( threads.deferToThread, self._generate_thumbnail, - input_path, t_path, t_width, t_height, t_method, t_type + thumbnailer, t_width, t_height, t_method, t_type ) - if t_len: + if t_byte_source: + output_path = yield self._write_to_file( + content, + lambda f: f.local_media_thumbnail( + media_id, t_width, t_height, t_type, t_method + ) + ) + logger.info("Stored thumbnail in file %r", output_path) + yield self.store.store_local_thumbnail( - media_id, t_width, t_height, t_type, t_method, t_len + media_id, t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) ) defer.returnValue(t_path) @@ -306,21 +339,25 @@ class MediaRepository(object): t_width, t_height, t_method, t_type): input_path = self.filepaths.remote_media_filepath(server_name, file_id) - t_path = self.filepaths.remote_media_thumbnail( - server_name, file_id, t_width, t_height, t_type, t_method - ) - self._makedirs(t_path) - - t_len = yield preserve_context_over_fn( + thumbnailer = Thumbnailer(input_path) + t_byte_source = yield preserve_context_over_fn( threads.deferToThread, self._generate_thumbnail, - input_path, t_path, t_width, t_height, t_method, t_type + thumbnailer, t_width, t_height, t_method, t_type ) - if t_len: + if t_byte_source: + output_path = yield self._write_to_file( + content, + lambda f: f.remote_media_thumbnail( + server_name, file_id, t_width, t_height, t_type, t_method + ) + ) + logger.info("Stored thumbnail in file %r", output_path) + yield self.store.store_remote_media_thumbnail( server_name, media_id, file_id, - t_width, t_height, t_type, t_method, t_len + t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) ) defer.returnValue(t_path) @@ -351,59 +388,32 @@ class MediaRepository(object): local_thumbnails = [] def generate_thumbnails(): - scales = set() - crops = set() for r_width, r_height, r_method, r_type in requirements: - if r_method == "scale": - t_width, t_height = thumbnailer.aspect(r_width, r_height) - 
scales.add(( - min(m_width, t_width), min(m_height, t_height), r_type, - )) - elif r_method == "crop": - crops.add((r_width, r_height, r_type)) - - for t_width, t_height, t_type in scales: - t_method = "scale" - if url_cache: - t_path = self.filepaths.url_cache_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) - else: - t_path = self.filepaths.local_media_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) - self._makedirs(t_path) - t_len = thumbnailer.scale(t_path, t_width, t_height, t_type) + t_byte_source = self._generate_thumbnail( + thumbnailer, r_width, r_height, r_method, r_type, + ) local_thumbnails.append(( - media_id, t_width, t_height, t_type, t_method, t_len - )) - - for t_width, t_height, t_type in crops: - if (t_width, t_height, t_type) in scales: - # If the aspect ratio of the cropped thumbnail matches a purely - # scaled one then there is no point in calculating a separate - # thumbnail. - continue - t_method = "crop" - if url_cache: - t_path = self.filepaths.url_cache_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) - else: - t_path = self.filepaths.local_media_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) - self._makedirs(t_path) - t_len = thumbnailer.crop(t_path, t_width, t_height, t_type) - local_thumbnails.append(( - media_id, t_width, t_height, t_type, t_method, t_len + r_width, r_height, r_method, r_type, t_byte_source )) yield preserve_context_over_fn(threads.deferToThread, generate_thumbnails) - for l in local_thumbnails: - yield self.store.store_local_thumbnail(*l) + for t_width, t_height, t_method, t_type, t_byte_source in local_thumbnails: + if url_cache: + path_name_func = lambda f: f.url_cache_thumbnail( + media_id, t_width, t_height, t_type, t_method + ) + else: + path_name_func = lambda f: f.local_media_thumbnail( + media_id, t_width, t_height, t_type, t_method + ) + + yield self._write_to_file(t_byte_source, path_name_func) + + yield self.store.store_local_thumbnail( + media_id, t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) + ) defer.returnValue({ "width": m_width, @@ -433,51 +443,32 @@ class MediaRepository(object): ) return - scales = set() - crops = set() for r_width, r_height, r_method, r_type in requirements: - if r_method == "scale": - t_width, t_height = thumbnailer.aspect(r_width, r_height) - scales.add(( - min(m_width, t_width), min(m_height, t_height), r_type, - )) - elif r_method == "crop": - crops.add((r_width, r_height, r_type)) - - for t_width, t_height, t_type in scales: - t_method = "scale" - t_path = self.filepaths.remote_media_thumbnail( - server_name, file_id, t_width, t_height, t_type, t_method + t_byte_source = self._generate_thumbnail( + thumbnailer, r_width, r_height, r_method, r_type, ) - self._makedirs(t_path) - t_len = thumbnailer.scale(t_path, t_width, t_height, t_type) - remote_thumbnails.append([ - server_name, media_id, file_id, - t_width, t_height, t_type, t_method, t_len - ]) - for t_width, t_height, t_type in crops: - if (t_width, t_height, t_type) in scales: - # If the aspect ratio of the cropped thumbnail matches a purely - # scaled one then there is no point in calculating a separate - # thumbnail. 
- continue - t_method = "crop" - t_path = self.filepaths.remote_media_thumbnail( - server_name, file_id, t_width, t_height, t_type, t_method - ) - self._makedirs(t_path) - t_len = thumbnailer.crop(t_path, t_width, t_height, t_type) - remote_thumbnails.append([ - server_name, media_id, file_id, - t_width, t_height, t_type, t_method, t_len - ]) + remote_thumbnails.append(( + r_width, r_height, r_method, r_type, t_byte_source + )) yield preserve_context_over_fn(threads.deferToThread, generate_thumbnails) for r in remote_thumbnails: yield self.store.store_remote_media_thumbnail(*r) + for t_width, t_height, t_method, t_type, t_byte_source in local_thumbnails: + path_name_func = lambda f: f.remote_media_thumbnail( + server_name, media_id, file_id, t_width, t_height, t_type, t_method + ) + + yield self._write_to_file(t_byte_source, path_name_func) + + yield self.store.store_remote_media_thumbnail( + server_name, media_id, file_id, + t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) + ) + defer.returnValue({ "width": m_width, "height": m_height, diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py index 3868d4f65..60498b08a 100644 --- a/synapse/rest/media/v1/thumbnailer.py +++ b/synapse/rest/media/v1/thumbnailer.py @@ -50,12 +50,12 @@ class Thumbnailer(object): else: return ((max_height * self.width) // self.height, max_height) - def scale(self, output_path, width, height, output_type): + def scale(self, width, height, output_type): """Rescales the image to the given dimensions""" scaled = self.image.resize((width, height), Image.ANTIALIAS) - return self.save_image(scaled, output_type, output_path) + return self._encode_image(scaled, output_type) - def crop(self, output_path, width, height, output_type): + def crop(self, width, height, output_type): """Rescales and crops the image to the given dimensions preserving aspect:: (w_in / h_in) = (w_scaled / h_scaled) @@ -82,13 +82,9 @@ class Thumbnailer(object): crop_left = (scaled_width - width) // 2 crop_right = width + crop_left cropped = scaled_image.crop((crop_left, 0, crop_right, height)) - return self.save_image(cropped, output_type, output_path) + return self._encode_image(cropped, output_type) - def save_image(self, output_image, output_type, output_path): + def _encode_image(self, output_image, output_type): output_bytes_io = BytesIO() output_image.save(output_bytes_io, self.FORMATS[output_type], quality=80) - output_bytes = output_bytes_io.getvalue() - with open(output_path, "wb") as output_file: - output_file.write(output_bytes) - logger.info("Stored thumbnail in file %r", output_path) - return len(output_bytes) + return output_bytes_io diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index 4ab33f73b..f6f498cdc 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -93,7 +93,7 @@ class UploadResource(Resource): # TODO(markjh): parse content-dispostion content_uri = yield self.media_repo.create_content( - media_type, upload_name, request.content.read(), + media_type, upload_name, request.content, content_length, requester.user ) From 67cb89fbdf62dfb2ff65f6f7f0ca23445cdac0ac Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 15:23:41 +0100 Subject: [PATCH 164/223] Fix typo --- synapse/rest/media/v1/media_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 
3b442cc16..f26f793be 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -107,7 +107,7 @@ class MediaRepository(object): # Write to backup repository if self.backup_filepaths: - backup_fname = file_name_func(backup_filepaths) + backup_fname = file_name_func(self.backup_filepaths) self._makedirs(backup_fname) # We can either wait for successful writing to the backup repository From c8eeef6947af762c3eabef6ecca0f69833fbf8ab Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 15:28:24 +0100 Subject: [PATCH 165/223] Fix typos --- synapse/rest/media/v1/media_repository.py | 46 +++++++++++++---------- 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index f26f793be..a16034fd6 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -65,6 +65,8 @@ class MediaRepository(object): if hs.config.backup_media_store_path: self.backup_filepaths = MediaFilePaths(hs.config.backup_media_store_path) + self.synchronous_backup_media_store = hs.config.synchronous_backup_media_store + self.dynamic_thumbnails = hs.config.dynamic_thumbnails self.thumbnail_requirements = hs.config.thumbnail_requirements @@ -112,12 +114,12 @@ class MediaRepository(object): # We can either wait for successful writing to the backup repository # or write in the background and immediately return - if hs.config.synchronous_backup_media_store: + if self.synchronous_backup_media_store: yield preserve_context_over_fn( threads.deferToThread, write_file_thread, backup_fname, ) else: - preserve_fn(threads.deferToThread)(write_file, backup_fname) + preserve_fn(threads.deferToThread)(write_file_thread, backup_fname) defer.returnValue(fname) @@ -321,7 +323,7 @@ class MediaRepository(object): if t_byte_source: output_path = yield self._write_to_file( - content, + t_byte_source, lambda f: f.local_media_thumbnail( media_id, t_width, t_height, t_type, t_method ) @@ -329,10 +331,11 @@ class MediaRepository(object): logger.info("Stored thumbnail in file %r", output_path) yield self.store.store_local_thumbnail( - media_id, t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) + media_id, t_width, t_height, t_type, t_method, + len(t_byte_source.getvalue()) ) - defer.returnValue(t_path) + defer.returnValue(output_path) @defer.inlineCallbacks def generate_remote_exact_thumbnail(self, server_name, file_id, media_id, @@ -348,7 +351,7 @@ class MediaRepository(object): if t_byte_source: output_path = yield self._write_to_file( - content, + t_byte_source, lambda f: f.remote_media_thumbnail( server_name, file_id, t_width, t_height, t_type, t_method ) @@ -360,7 +363,7 @@ class MediaRepository(object): t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) ) - defer.returnValue(t_path) + defer.returnValue(output_path) @defer.inlineCallbacks def _generate_local_thumbnails(self, media_id, media_info, url_cache=False): @@ -400,19 +403,21 @@ class MediaRepository(object): yield preserve_context_over_fn(threads.deferToThread, generate_thumbnails) for t_width, t_height, t_method, t_type, t_byte_source in local_thumbnails: - if url_cache: - path_name_func = lambda f: f.url_cache_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) - else: - path_name_func = lambda f: f.local_media_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) + def path_name_func(f): + if url_cache: + return f.url_cache_thumbnail( + 
media_id, t_width, t_height, t_type, t_method + ) + else: + return f.local_media_thumbnail( + media_id, t_width, t_height, t_type, t_method + ) yield self._write_to_file(t_byte_source, path_name_func) yield self.store.store_local_thumbnail( - media_id, t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) + media_id, t_width, t_height, t_type, t_method, + len(t_byte_source.getvalue()) ) defer.returnValue({ @@ -457,10 +462,11 @@ class MediaRepository(object): for r in remote_thumbnails: yield self.store.store_remote_media_thumbnail(*r) - for t_width, t_height, t_method, t_type, t_byte_source in local_thumbnails: - path_name_func = lambda f: f.remote_media_thumbnail( - server_name, media_id, file_id, t_width, t_height, t_type, t_method - ) + for t_width, t_height, t_method, t_type, t_byte_source in remote_thumbnails: + def path_name_func(f): + return f.remote_media_thumbnail( + server_name, media_id, file_id, t_width, t_height, t_type, t_method + ) yield self._write_to_file(t_byte_source, path_name_func) From 6dfde6d4856695890271232f8a2e4c5f32615dd1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 15:30:26 +0100 Subject: [PATCH 166/223] Remove dead code --- synapse/rest/media/v1/media_repository.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index a16034fd6..1eeb128d2 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -459,9 +459,6 @@ class MediaRepository(object): yield preserve_context_over_fn(threads.deferToThread, generate_thumbnails) - for r in remote_thumbnails: - yield self.store.store_remote_media_thumbnail(*r) - for t_width, t_height, t_method, t_type, t_byte_source in remote_thumbnails: def path_name_func(f): return f.remote_media_thumbnail( From b77a13812c38b2e79b2ebfddb52ce88a2ac8e9b9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 15:32:32 +0100 Subject: [PATCH 167/223] Typo --- synapse/rest/media/v1/media_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 1eeb128d2..93b35af9c 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -462,7 +462,7 @@ class MediaRepository(object): for t_width, t_height, t_method, t_type, t_byte_source in remote_thumbnails: def path_name_func(f): return f.remote_media_thumbnail( - server_name, media_id, file_id, t_width, t_height, t_type, t_method + server_name, file_id, t_width, t_height, t_type, t_method ) yield self._write_to_file(t_byte_source, path_name_func) From e283b555b1f20de4fd393fd947e82eb3c635b7e9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 17:31:24 +0100 Subject: [PATCH 168/223] Copy everything to backup --- synapse/config/repository.py | 4 +- synapse/rest/media/v1/filepath.py | 99 ++++++++++------ synapse/rest/media/v1/media_repository.py | 109 +++++++++++------- synapse/rest/media/v1/preview_url_resource.py | 7 +- synapse/rest/media/v1/thumbnailer.py | 9 +- 5 files changed, 151 insertions(+), 77 deletions(-) diff --git a/synapse/config/repository.py b/synapse/config/repository.py index e3c83d56f..6baa47493 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -75,7 +75,9 @@ class ContentRepositoryConfig(Config): self.backup_media_store_path = config.get("backup_media_store_path") if self.backup_media_store_path: - 
self.ensure_directory(self.backup_media_store_path) + self.backup_media_store_path = self.ensure_directory( + self.backup_media_store_path + ) self.synchronous_backup_media_store = config.get( "synchronous_backup_media_store", False diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index d5cec1012..43d0eea00 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -15,103 +15,134 @@ import os import re +import functools NEW_FORMAT_ID_RE = re.compile(r"^\d\d\d\d-\d\d-\d\d") +def _wrap_in_base_path(func): + """Takes a function that returns a relative path and turns it into an + absolute path based on the location of the primary media store + """ + @functools.wraps(func) + def _wrapped(self, *args, **kwargs): + path = func(self, *args, **kwargs) + return os.path.join(self.primary_base_path, path) + + return _wrapped + + class MediaFilePaths(object): + """Describes where files are stored on disk. - def __init__(self, base_path): - self.base_path = base_path + Most of the function have a `*_rel` variant which returns a file path that + is relative to the base media store path. This is mainly used when we want + to write to the backup media store (when one is configured) + """ - def default_thumbnail(self, default_top_level, default_sub_type, width, - height, content_type, method): + def __init__(self, primary_base_path): + self.primary_base_path = primary_base_path + + def default_thumbnail_rel(self, default_top_level, default_sub_type, width, + height, content_type, method): top_level_type, sub_type = content_type.split("/") file_name = "%i-%i-%s-%s-%s" % ( width, height, top_level_type, sub_type, method ) return os.path.join( - self.base_path, "default_thumbnails", default_top_level, + "default_thumbnails", default_top_level, default_sub_type, file_name ) - def local_media_filepath(self, media_id): + default_thumbnail = _wrap_in_base_path(default_thumbnail_rel) + + def local_media_filepath_rel(self, media_id): return os.path.join( - self.base_path, "local_content", + "local_content", media_id[0:2], media_id[2:4], media_id[4:] ) - def local_media_thumbnail(self, media_id, width, height, content_type, - method): + local_media_filepath = _wrap_in_base_path(local_media_filepath_rel) + + def local_media_thumbnail_rel(self, media_id, width, height, content_type, + method): top_level_type, sub_type = content_type.split("/") file_name = "%i-%i-%s-%s-%s" % ( width, height, top_level_type, sub_type, method ) return os.path.join( - self.base_path, "local_thumbnails", + "local_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], file_name ) - def remote_media_filepath(self, server_name, file_id): + local_media_thumbnail = _wrap_in_base_path(local_media_thumbnail_rel) + + def remote_media_filepath_rel(self, server_name, file_id): return os.path.join( - self.base_path, "remote_content", server_name, + "remote_content", server_name, file_id[0:2], file_id[2:4], file_id[4:] ) - def remote_media_thumbnail(self, server_name, file_id, width, height, - content_type, method): + remote_media_filepath = _wrap_in_base_path(remote_media_filepath_rel) + + def remote_media_thumbnail_rel(self, server_name, file_id, width, height, + content_type, method): top_level_type, sub_type = content_type.split("/") file_name = "%i-%i-%s-%s" % (width, height, top_level_type, sub_type) return os.path.join( - self.base_path, "remote_thumbnail", server_name, + "remote_thumbnail", server_name, file_id[0:2], file_id[2:4], file_id[4:], file_name ) + 
remote_media_thumbnail = _wrap_in_base_path(remote_media_thumbnail_rel) + def remote_media_thumbnail_dir(self, server_name, file_id): return os.path.join( - self.base_path, "remote_thumbnail", server_name, + "remote_thumbnail", server_name, file_id[0:2], file_id[2:4], file_id[4:], ) - def url_cache_filepath(self, media_id): + def url_cache_filepath_rel(self, media_id): if NEW_FORMAT_ID_RE.match(media_id): # Media id is of the form # E.g.: 2017-09-28-fsdRDt24DS234dsf return os.path.join( - self.base_path, "url_cache", + "url_cache", media_id[:10], media_id[11:] ) else: return os.path.join( - self.base_path, "url_cache", + "url_cache", media_id[0:2], media_id[2:4], media_id[4:], ) + url_cache_filepath = _wrap_in_base_path(url_cache_filepath_rel) + def url_cache_filepath_dirs_to_delete(self, media_id): "The dirs to try and remove if we delete the media_id file" if NEW_FORMAT_ID_RE.match(media_id): return [ os.path.join( - self.base_path, "url_cache", + "url_cache", media_id[:10], ), ] else: return [ os.path.join( - self.base_path, "url_cache", + "url_cache", media_id[0:2], media_id[2:4], ), os.path.join( - self.base_path, "url_cache", + "url_cache", media_id[0:2], ), ] - def url_cache_thumbnail(self, media_id, width, height, content_type, - method): + def url_cache_thumbnail_rel(self, media_id, width, height, content_type, + method): # Media id is of the form # E.g.: 2017-09-28-fsdRDt24DS234dsf @@ -122,29 +153,31 @@ class MediaFilePaths(object): if NEW_FORMAT_ID_RE.match(media_id): return os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[:10], media_id[11:], file_name ) else: return os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], file_name ) + url_cache_thumbnail = _wrap_in_base_path(url_cache_thumbnail_rel) + def url_cache_thumbnail_directory(self, media_id): # Media id is of the form # E.g.: 2017-09-28-fsdRDt24DS234dsf if NEW_FORMAT_ID_RE.match(media_id): return os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[:10], media_id[11:], ) else: return os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], ) @@ -155,26 +188,26 @@ class MediaFilePaths(object): if NEW_FORMAT_ID_RE.match(media_id): return [ os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[:10], media_id[11:], ), os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[:10], ), ] else: return [ os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], ), os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[0:2], media_id[2:4], ), os.path.join( - self.base_path, "url_cache_thumbnails", + "url_cache_thumbnails", media_id[0:2], ), ] diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 93b35af9c..398e973ca 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -60,10 +60,12 @@ class MediaRepository(object): self.max_upload_size = hs.config.max_upload_size self.max_image_pixels = hs.config.max_image_pixels - self.filepaths = MediaFilePaths(hs.config.media_store_path) - self.backup_filepaths = None + self.primary_base_path = hs.config.media_store_path + self.filepaths = MediaFilePaths(self.primary_base_path) + + self.backup_base_path 
= None if hs.config.backup_media_store_path: - self.backup_filepaths = MediaFilePaths(hs.config.backup_media_store_path) + self.backup_base_path = hs.config.backup_media_store_path self.synchronous_backup_media_store = hs.config.synchronous_backup_media_store @@ -94,42 +96,63 @@ class MediaRepository(object): if not os.path.exists(dirname): os.makedirs(dirname) - @defer.inlineCallbacks - def _write_to_file(self, source, file_name_func): - def write_file_thread(file_name): - source.seek(0) # Ensure we read from the start of the file - with open(file_name, "wb") as f: - shutil.copyfileobj(source, f) + @staticmethod + def write_file_synchronously(source, fname): + source.seek(0) # Ensure we read from the start of the file + with open(fname, "wb") as f: + shutil.copyfileobj(source, f) - fname = file_name_func(self.filepaths) + @defer.inlineCallbacks + def write_to_file(self, source, path): + """Write `source` to the on disk media store, and also the backup store + if configured. + + Args: + source: A file like object that should be written + path: Relative path to write file to + + Returns: + string: the file path written to in the primary media store + """ + fname = os.path.join(self.primary_base_path, path) self._makedirs(fname) # Write to the main repository - yield preserve_context_over_fn(threads.deferToThread, write_file_thread, fname) + yield preserve_context_over_fn( + threads.deferToThread, + self.write_file_synchronously, source, fname, + ) # Write to backup repository - if self.backup_filepaths: - backup_fname = file_name_func(self.backup_filepaths) + yield self.copy_to_backup(source, path) + + defer.returnValue(fname) + + @defer.inlineCallbacks + def copy_to_backup(self, source, path): + if self.backup_base_path: + backup_fname = os.path.join(self.backup_base_path, path) self._makedirs(backup_fname) # We can either wait for successful writing to the backup repository # or write in the background and immediately return if self.synchronous_backup_media_store: yield preserve_context_over_fn( - threads.deferToThread, write_file_thread, backup_fname, + threads.deferToThread, + self.write_file_synchronously, source, backup_fname, ) else: - preserve_fn(threads.deferToThread)(write_file_thread, backup_fname) - - defer.returnValue(fname) + preserve_fn(threads.deferToThread)( + self.write_file_synchronously, source, backup_fname, + ) @defer.inlineCallbacks def create_content(self, media_type, upload_name, content, content_length, auth_user): media_id = random_string(24) - fname = yield self._write_to_file( - content, lambda f: f.local_media_filepath(media_id) + fname = yield self.write_to_file( + content, self.filepaths.local_media_filepath_rel(media_id) ) logger.info("Stored local media in file %r", fname) @@ -180,9 +203,10 @@ class MediaRepository(object): def _download_remote_file(self, server_name, media_id): file_id = random_string(24) - fname = self.filepaths.remote_media_filepath( + fpath = self.filepaths.remote_media_filepath_rel( server_name, file_id ) + fname = os.path.join(self.primary_base_path, fpath) self._makedirs(fname) try: @@ -224,6 +248,9 @@ class MediaRepository(object): server_name, media_id) raise SynapseError(502, "Failed to fetch remote media") + with open(fname) as f: + yield self.copy_to_backup(f, fpath) + media_type = headers["Content-Type"][0] time_now_ms = self.clock.time_msec() @@ -322,15 +349,15 @@ class MediaRepository(object): ) if t_byte_source: - output_path = yield self._write_to_file( + output_path = yield self.write_to_file( t_byte_source, - lambda f: 
f.local_media_thumbnail( + self.filepaths.local_media_thumbnail_rel( media_id, t_width, t_height, t_type, t_method ) ) logger.info("Stored thumbnail in file %r", output_path) - yield self.store.store_local_thumbnail( + yield self.store.store_local_thumbnail_rel( media_id, t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) ) @@ -350,15 +377,15 @@ class MediaRepository(object): ) if t_byte_source: - output_path = yield self._write_to_file( + output_path = yield self.write_to_file( t_byte_source, - lambda f: f.remote_media_thumbnail( + self.filepaths.remote_media_thumbnail_rel( server_name, file_id, t_width, t_height, t_type, t_method ) ) logger.info("Stored thumbnail in file %r", output_path) - yield self.store.store_remote_media_thumbnail( + yield self.store.store_remote_media_thumbnail_rel( server_name, media_id, file_id, t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) ) @@ -403,17 +430,16 @@ class MediaRepository(object): yield preserve_context_over_fn(threads.deferToThread, generate_thumbnails) for t_width, t_height, t_method, t_type, t_byte_source in local_thumbnails: - def path_name_func(f): - if url_cache: - return f.url_cache_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) - else: - return f.local_media_thumbnail( - media_id, t_width, t_height, t_type, t_method - ) + if url_cache: + file_path = self.filepaths.url_cache_thumbnail_rel( + media_id, t_width, t_height, t_type, t_method + ) + else: + file_path = self.filepaths.local_media_thumbnail_rel( + media_id, t_width, t_height, t_type, t_method + ) - yield self._write_to_file(t_byte_source, path_name_func) + yield self.write_to_file(t_byte_source, file_path) yield self.store.store_local_thumbnail( media_id, t_width, t_height, t_type, t_method, @@ -460,12 +486,11 @@ class MediaRepository(object): yield preserve_context_over_fn(threads.deferToThread, generate_thumbnails) for t_width, t_height, t_method, t_type, t_byte_source in remote_thumbnails: - def path_name_func(f): - return f.remote_media_thumbnail( - server_name, file_id, t_width, t_height, t_type, t_method - ) + file_path = self.filepaths.remote_media_thumbnail_rel( + server_name, file_id, t_width, t_height, t_type, t_method + ) - yield self._write_to_file(t_byte_source, path_name_func) + yield self.write_to_file(t_byte_source, file_path) yield self.store.store_remote_media_thumbnail( server_name, media_id, file_id, @@ -491,6 +516,8 @@ class MediaRepository(object): logger.info("Deleting: %r", key) + # TODO: Should we delete from the backup store + with (yield self.remote_media_linearizer.queue(key)): full_path = self.filepaths.remote_media_filepath(origin, file_id) try: diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 895b480d5..f82b8fbc5 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -59,6 +59,7 @@ class PreviewUrlResource(Resource): self.store = hs.get_datastore() self.client = SpiderHttpClient(hs) self.media_repo = media_repo + self.primary_base_path = media_repo.primary_base_path self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist @@ -262,7 +263,8 @@ class PreviewUrlResource(Resource): file_id = datetime.date.today().isoformat() + '_' + random_string(16) - fname = self.filepaths.url_cache_filepath(file_id) + fpath = self.filepaths.url_cache_filepath_rel(file_id) + fname = os.path.join(self.primary_base_path, fpath) self.media_repo._makedirs(fname) try: @@ -273,6 +275,9 @@ class 
PreviewUrlResource(Resource): ) # FIXME: pass through 404s and other error messages nicely + with open(fname) as f: + yield self.media_repo.copy_to_backup(f, fpath) + media_type = headers["Content-Type"][0] time_now_ms = self.clock.time_msec() diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py index 60498b08a..e1ee535b9 100644 --- a/synapse/rest/media/v1/thumbnailer.py +++ b/synapse/rest/media/v1/thumbnailer.py @@ -51,7 +51,11 @@ class Thumbnailer(object): return ((max_height * self.width) // self.height, max_height) def scale(self, width, height, output_type): - """Rescales the image to the given dimensions""" + """Rescales the image to the given dimensions. + + Returns: + BytesIO: the bytes of the encoded image ready to be written to disk + """ scaled = self.image.resize((width, height), Image.ANTIALIAS) return self._encode_image(scaled, output_type) @@ -65,6 +69,9 @@ Args: max_width: The largest possible width. max_height: The largest possible height. + + Returns: + BytesIO: the bytes of the encoded image ready to be written to disk """ if width * self.height > height * self.width: scaled_height = (width * self.height) // self.width From 802ca12d0551ff761e01d9af8348df1dc96fc372 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 17:37:21 +0100 Subject: [PATCH 169/223] Don't close file prematurely --- synapse/rest/media/v1/media_repository.py | 22 ++++++++++++++----- synapse/rest/media/v1/preview_url_resource.py | 4 ++-- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 398e973ca..63ed1c426 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -97,16 +97,20 @@ class MediaRepository(object): os.makedirs(dirname) @staticmethod - def write_file_synchronously(source, fname): + def _write_file_synchronously(source, fname): source.seek(0) # Ensure we read from the start of the file with open(fname, "wb") as f: shutil.copyfileobj(source, f) + source.close() + @defer.inlineCallbacks def write_to_file(self, source, path): """Write `source` to the on disk media store, and also the backup store if configured. + Will close source once finished. + Args: source: A file like object that should be written path: Relative path to write file to @@ -120,7 +124,7 @@ class MediaRepository(object): # Write to the main repository yield preserve_context_over_fn( threads.deferToThread, - self.write_file_synchronously, source, fname, + self._write_file_synchronously, source, fname, ) # Write to backup repository @@ -130,6 +134,10 @@ class MediaRepository(object): @defer.inlineCallbacks def copy_to_backup(self, source, path): + """Copy file like object source to the backup media store, if configured. + + Will close source after its done.
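+ If no backup store is configured, source is simply closed.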
+ """ if self.backup_base_path: backup_fname = os.path.join(self.backup_base_path, path) self._makedirs(backup_fname) @@ -139,12 +147,14 @@ class MediaRepository(object): if self.synchronous_backup_media_store: yield preserve_context_over_fn( threads.deferToThread, - self.write_file_synchronously, source, backup_fname, + self._write_file_synchronously, source, backup_fname, ) else: preserve_fn(threads.deferToThread)( - self.write_file_synchronously, source, backup_fname, + self._write_file_synchronously, source, backup_fname, ) + else: + source.close() @defer.inlineCallbacks def create_content(self, media_type, upload_name, content, content_length, @@ -248,8 +258,8 @@ class MediaRepository(object): server_name, media_id) raise SynapseError(502, "Failed to fetch remote media") - with open(fname) as f: - yield self.copy_to_backup(f, fpath) + # Will close the file after its done + yield self.copy_to_backup(open(fname), fpath) media_type = headers["Content-Type"][0] time_now_ms = self.clock.time_msec() diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index f82b8fbc5..a3288c9cc 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -275,8 +275,8 @@ class PreviewUrlResource(Resource): ) # FIXME: pass through 404s and other error messages nicely - with open(fname) as f: - yield self.media_repo.copy_to_backup(f, fpath) + # Will close the file after its done + yield self.media_repo.copy_to_backup(open(fname), fpath) media_type = headers["Content-Type"][0] time_now_ms = self.clock.time_msec() From 1259a76047a0a718ce0c9fb26513c9127f8ea919 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 17:39:23 +0100 Subject: [PATCH 170/223] Get len before close --- synapse/rest/media/v1/media_repository.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 63ed1c426..d25b98db4 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -359,6 +359,8 @@ class MediaRepository(object): ) if t_byte_source: + t_len = len(t_byte_source.getvalue()) + output_path = yield self.write_to_file( t_byte_source, self.filepaths.local_media_thumbnail_rel( @@ -368,8 +370,7 @@ class MediaRepository(object): logger.info("Stored thumbnail in file %r", output_path) yield self.store.store_local_thumbnail_rel( - media_id, t_width, t_height, t_type, t_method, - len(t_byte_source.getvalue()) + media_id, t_width, t_height, t_type, t_method, t_len ) defer.returnValue(output_path) @@ -387,6 +388,7 @@ class MediaRepository(object): ) if t_byte_source: + t_len = len(t_byte_source.getvalue()) output_path = yield self.write_to_file( t_byte_source, self.filepaths.remote_media_thumbnail_rel( @@ -397,7 +399,7 @@ class MediaRepository(object): yield self.store.store_remote_media_thumbnail_rel( server_name, media_id, file_id, - t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) + t_width, t_height, t_type, t_method, t_len ) defer.returnValue(output_path) @@ -449,11 +451,12 @@ class MediaRepository(object): media_id, t_width, t_height, t_type, t_method ) + t_len = len(t_byte_source.getvalue()) + yield self.write_to_file(t_byte_source, file_path) yield self.store.store_local_thumbnail( - media_id, t_width, t_height, t_type, t_method, - len(t_byte_source.getvalue()) + media_id, t_width, t_height, t_type, t_method, t_len ) 
defer.returnValue({ @@ -500,11 +503,13 @@ class MediaRepository(object): server_name, file_id, t_width, t_height, t_type, t_method ) + t_len = len(t_byte_source.getvalue()) + yield self.write_to_file(t_byte_source, file_path) yield self.store.store_remote_media_thumbnail( server_name, media_id, file_id, - t_width, t_height, t_type, t_method, len(t_byte_source.getvalue()) + t_width, t_height, t_type, t_method, t_len ) defer.returnValue({ From cc505b4b5e98ba70d8576a562fc36b03d6aa5150 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 17:52:30 +0100 Subject: [PATCH 171/223] getvalue closes buffer --- synapse/rest/media/v1/media_repository.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index d25b98db4..ff2ddd2f1 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -258,7 +258,7 @@ class MediaRepository(object): server_name, media_id) raise SynapseError(502, "Failed to fetch remote media") - # Will close the file after its done + # Will close the file after its done yield self.copy_to_backup(open(fname), fpath) media_type = headers["Content-Type"][0] @@ -359,8 +359,6 @@ class MediaRepository(object): ) if t_byte_source: - t_len = len(t_byte_source.getvalue()) - output_path = yield self.write_to_file( t_byte_source, self.filepaths.local_media_thumbnail_rel( @@ -369,6 +367,8 @@ class MediaRepository(object): ) logger.info("Stored thumbnail in file %r", output_path) + t_len = os.path.getsize(output_path) + yield self.store.store_local_thumbnail_rel( media_id, t_width, t_height, t_type, t_method, t_len ) @@ -388,7 +388,6 @@ class MediaRepository(object): ) if t_byte_source: - t_len = len(t_byte_source.getvalue()) output_path = yield self.write_to_file( t_byte_source, self.filepaths.remote_media_thumbnail_rel( @@ -397,7 +396,9 @@ class MediaRepository(object): ) logger.info("Stored thumbnail in file %r", output_path) - yield self.store.store_remote_media_thumbnail_rel( + t_len = os.path.getsize(output_path) + + yield self.store.store_remote_media_thumbnail( server_name, media_id, file_id, t_width, t_height, t_type, t_method, t_len ) @@ -451,9 +452,8 @@ class MediaRepository(object): media_id, t_width, t_height, t_type, t_method ) - t_len = len(t_byte_source.getvalue()) - - yield self.write_to_file(t_byte_source, file_path) + output_path = yield self.write_to_file(t_byte_source, file_path) + t_len = os.path.getsize(output_path) yield self.store.store_local_thumbnail( media_id, t_width, t_height, t_type, t_method, t_len @@ -503,9 +503,8 @@ class MediaRepository(object): server_name, file_id, t_width, t_height, t_type, t_method ) - t_len = len(t_byte_source.getvalue()) - - yield self.write_to_file(t_byte_source, file_path) + output_path = yield self.write_to_file(t_byte_source, file_path) + t_len = os.path.getsize(output_path) yield self.store.store_remote_media_thumbnail( server_name, media_id, file_id, From 4ae85ae12190015595f979f1a302ee608de6fd65 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 17:57:31 +0100 Subject: [PATCH 172/223] Don't close prematurely.. 
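The backup write may still need to read from `source` after the primary write has finished (and may be running on a background thread), so the primary write must no longer close it; only the final consumer should, via the new close_source flag. A rough sketch of the intended lifecycle (simplified, with error handling omitted and illustrative paths):

    import io
    import os
    import shutil
    import tempfile

    def _write_file_synchronously(source, fname, close_source=False):
        source.seek(0)  # always read from the start of the file
        with open(fname, "wb") as f:
            shutil.copyfileobj(source, f)
        if close_source:
            source.close()  # only the last writer closes the source

    store = tempfile.mkdtemp()
    source = io.BytesIO(b"some media bytes")
    # Write to the primary media store first, leaving the source open ...
    _write_file_synchronously(source, os.path.join(store, "primary"))
    # ... then to the backup store, the last consumer, which closes it.
    _write_file_synchronously(source, os.path.join(store, "backup"), close_source=True)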
--- synapse/rest/media/v1/media_repository.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index ff2ddd2f1..80b14a673 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -97,12 +97,13 @@ class MediaRepository(object): os.makedirs(dirname) @staticmethod - def _write_file_synchronously(source, fname): + def _write_file_synchronously(source, fname, close_source=False): source.seek(0) # Ensure we read from the start of the file with open(fname, "wb") as f: shutil.copyfileobj(source, f) - source.close() + if close_source: + source.close() @defer.inlineCallbacks def write_to_file(self, source, path): @@ -148,10 +149,12 @@ class MediaRepository(object): yield preserve_context_over_fn( threads.deferToThread, self._write_file_synchronously, source, backup_fname, + close_source=True, ) else: preserve_fn(threads.deferToThread)( self._write_file_synchronously, source, backup_fname, + close_source=True, ) else: source.close() From d76621a47b7b4b778055760d43df9d02614dac19 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 12 Oct 2017 18:16:25 +0100 Subject: [PATCH 173/223] Fix comments --- synapse/rest/media/v1/filepath.py | 2 +- synapse/rest/media/v1/preview_url_resource.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index 43d0eea00..6923a3fbd 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -35,7 +35,7 @@ def _wrap_in_base_path(func): class MediaFilePaths(object): """Describes where files are stored on disk. - Most of the function have a `*_rel` variant which returns a file path that + Most of the functions have a `*_rel` variant which returns a file path that is relative to the base media store path. This is mainly used when we want to write to the backup media store (when one is configured) """ diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index a3288c9cc..e986e855a 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -343,6 +343,9 @@ class PreviewUrlResource(Resource): def _expire_url_cache_data(self): """Clean up expired url cache content, media and thumbnails. 
""" + + # TODO: Delete from backup media store + now = self.clock.time_msec() # First we delete expired url cache entries From b60859d6cc429ea1934b94a8749caadd9a96ee21 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 10:24:19 +0100 Subject: [PATCH 174/223] Use make_deferred_yieldable --- synapse/rest/media/v1/media_repository.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 80b14a673..5c5020fe9 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -33,7 +33,7 @@ from synapse.api.errors import SynapseError, HttpResponseException, \ from synapse.util.async import Linearizer from synapse.util.stringutils import is_ascii -from synapse.util.logcontext import preserve_context_over_fn, preserve_fn +from synapse.util.logcontext import make_deferred_yieldable, preserve_fn from synapse.util.retryutils import NotRetryingDestination import os @@ -123,7 +123,7 @@ class MediaRepository(object): self._makedirs(fname) # Write to the main repository - yield preserve_context_over_fn( + yield make_deferred_yieldable( threads.deferToThread, self._write_file_synchronously, source, fname, ) @@ -146,7 +146,7 @@ class MediaRepository(object): # We can either wait for successful writing to the backup repository # or write in the background and immediately return if self.synchronous_backup_media_store: - yield preserve_context_over_fn( + yield make_deferred_yieldable( threads.deferToThread, self._write_file_synchronously, source, backup_fname, close_source=True, @@ -355,7 +355,7 @@ class MediaRepository(object): input_path = self.filepaths.local_media_filepath(media_id) thumbnailer = Thumbnailer(input_path) - t_byte_source = yield preserve_context_over_fn( + t_byte_source = yield make_deferred_yieldable( threads.deferToThread, self._generate_thumbnail, thumbnailer, t_width, t_height, t_method, t_type @@ -384,7 +384,7 @@ class MediaRepository(object): input_path = self.filepaths.remote_media_filepath(server_name, file_id) thumbnailer = Thumbnailer(input_path) - t_byte_source = yield preserve_context_over_fn( + t_byte_source = yield make_deferred_yieldable( threads.deferToThread, self._generate_thumbnail, thumbnailer, t_width, t_height, t_method, t_type @@ -443,7 +443,7 @@ class MediaRepository(object): r_width, r_height, r_method, r_type, t_byte_source )) - yield preserve_context_over_fn(threads.deferToThread, generate_thumbnails) + yield make_deferred_yieldable(threads.deferToThread, generate_thumbnails) for t_width, t_height, t_method, t_type, t_byte_source in local_thumbnails: if url_cache: @@ -499,7 +499,7 @@ class MediaRepository(object): r_width, r_height, r_method, r_type, t_byte_source )) - yield preserve_context_over_fn(threads.deferToThread, generate_thumbnails) + yield make_deferred_yieldable(threads.deferToThread, generate_thumbnails) for t_width, t_height, t_method, t_type, t_byte_source in remote_thumbnails: file_path = self.filepaths.remote_media_thumbnail_rel( From 64db043a71238db3f65f575c40f29260b83145be Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 10:25:01 +0100 Subject: [PATCH 175/223] Move makedirs to thread --- synapse/rest/media/v1/media_repository.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 5c5020fe9..72aad221a 100644 --- 
a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -98,6 +98,7 @@ class MediaRepository(object): @staticmethod def _write_file_synchronously(source, fname, close_source=False): + MediaRepository._makedirs(fname) source.seek(0) # Ensure we read from the start of the file with open(fname, "wb") as f: shutil.copyfileobj(source, f) @@ -120,7 +121,6 @@ class MediaRepository(object): string: the file path written to in the primary media store """ fname = os.path.join(self.primary_base_path, path) - self._makedirs(fname) # Write to the main repository yield make_deferred_yieldable( @@ -141,7 +141,6 @@ class MediaRepository(object): """ if self.backup_base_path: backup_fname = os.path.join(self.backup_base_path, path) - self._makedirs(backup_fname) # We can either wait for successful writing to the backup repository # or write in the background and immediately return From 35332298ef6a9828aa1fdb10f59230f47763084e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 10:39:32 +0100 Subject: [PATCH 176/223] Fix up comments --- synapse/rest/media/v1/media_repository.py | 28 +++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 72aad221a..f3a5b19a8 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -98,6 +98,14 @@ class MediaRepository(object): @staticmethod def _write_file_synchronously(source, fname, close_source=False): + """Write `source` to the path `fname` synchronously. Should be called + from a thread. + + Args: + source: A file like object to be written + fname (str): Path to write to + close_source (bool): Whether to close source after writing + """ MediaRepository._makedirs(fname) source.seek(0) # Ensure we read from the start of the file with open(fname, "wb") as f: @@ -115,10 +123,10 @@ class MediaRepository(object): Args: source: A file like object that should be written - path: Relative path to write file to + path(str): Relative path to write file to Returns: - string: the file path written to in the primary media store + Deferred[str]: the file path written to in the primary media store """ fname = os.path.join(self.primary_base_path, path) @@ -138,6 +146,10 @@ class MediaRepository(object): """Copy file like object source to the backup media store, if configured. Will close source after its done. 
+ + Args: + source: A file like object that should be written + path(str): Relative path to write file to """ if self.backup_base_path: backup_fname = os.path.join(self.backup_base_path, path) @@ -161,6 +173,18 @@ class MediaRepository(object): @defer.inlineCallbacks def create_content(self, media_type, upload_name, content, content_length, auth_user): + """Store uploaded content for a local user and return the mxc URL + + Args: + media_type(str): The content type of the file + upload_name(str): The name of the file + content: A file like object that is the content to store + content_length(int): The length of the content + auth_user(str): The user_id of the uploader + + Returns: + Deferred[str]: The mxc url of the stored content + """ media_id = random_string(24) fname = yield self.write_to_file( From e3428d26ca5a23a3dac6d106aff8ac19f9839f32 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 10:39:59 +0100 Subject: [PATCH 177/223] Fix typo --- synapse/rest/media/v1/media_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index f3a5b19a8..76220a553 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -395,7 +395,7 @@ class MediaRepository(object): t_len = os.path.getsize(output_path) - yield self.store.store_local_thumbnail_rel( + yield self.store.store_local_thumbnail( media_id, t_width, t_height, t_type, t_method, t_len ) From 505371414f6ba9aeaa95eb8d34f7893c4cc2b07e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 11:23:53 +0100 Subject: [PATCH 178/223] Fix up thumbnailing function --- synapse/rest/media/v1/media_repository.py | 127 ++++++++---------- synapse/rest/media/v1/preview_url_resource.py | 8 +- synapse/rest/media/v1/thumbnailer.py | 13 +- 3 files changed, 73 insertions(+), 75 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 76220a553..36f42c73b 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -206,7 +206,7 @@ class MediaRepository(object): "media_length": content_length, } - yield self._generate_local_thumbnails(media_id, media_info) + yield self._generate_thumbnails(None, media_id, media_info) defer.returnValue("mxc://%s/%s" % (self.server_name, media_id)) @@ -339,7 +339,7 @@ class MediaRepository(object): "filesystem_id": file_id, } - yield self._generate_remote_thumbnails( + yield self._generate_thumbnails( server_name, media_id, media_info ) @@ -385,6 +385,8 @@ class MediaRepository(object): ) if t_byte_source: + t_width, t_height = t_byte_source.dimensions + output_path = yield self.write_to_file( t_byte_source, self.filepaths.local_media_thumbnail_rel( @@ -414,6 +416,8 @@ class MediaRepository(object): ) if t_byte_source: + t_width, t_height = t_byte_source.dimensions + output_path = yield self.write_to_file( t_byte_source, self.filepaths.remote_media_thumbnail_rel( @@ -432,13 +436,28 @@ class MediaRepository(object): defer.returnValue(output_path) @defer.inlineCallbacks - def _generate_local_thumbnails(self, media_id, media_info, url_cache=False): + def _generate_thumbnails(self, server_name, media_id, media_info, url_cache=False): + """Generate and store thumbnails for an image. 
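+ + Requested sizes are deduplicated before generation: a cropped thumbnail is skipped when a scaled one already has the same dimensions.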
+ + Args: + server_name(str|None): The server name if remote media, else None if local + media_id(str) + media_info(dict) + url_cache(bool): If we are thumbnailing images downloaded for the URL cache, + used exclusively by the url previewer + + Returns: + Deferred[dict]: Dict with "width" and "height" keys of original image + """ media_type = media_info["media_type"] + file_id = media_info.get("filesystem_id") requirements = self._get_thumbnail_requirements(media_type) if not requirements: return - if url_cache: + if server_name: + input_path = self.filepaths.remote_media_filepath(server_name, file_id) + elif url_cache: input_path = self.filepaths.url_cache_filepath(media_id) else: input_path = self.filepaths.local_media_filepath(media_id) @@ -454,22 +473,40 @@ class MediaRepository(object): ) return - local_thumbnails = [] + # We deduplicate the thumbnail sizes by ignoring the cropped versions if + # they have the same dimensions of a scaled one. + thumbnails = {} + for r_width, r_height, r_method, r_type in requirements: + if r_method == "crop": + thumbnails.setdefault[(r_width, r_height)] = (r_method, r_type) + elif r_method == "scale": + t_width, t_height = thumbnailer.aspect(t_width, t_height) + t_width = min(m_width, t_width) + t_height = min(m_height, t_height) + thumbnails[(t_width, t_height)] = (r_method, r_type) - def generate_thumbnails(): - for r_width, r_height, r_method, r_type in requirements: - t_byte_source = self._generate_thumbnail( - thumbnailer, r_width, r_height, r_method, r_type, + # Now we generate the thumbnails for each dimension, store it + for (r_width, r_height), (r_method, r_type) in thumbnails.iteritems(): + t_byte_source = thumbnailer.crop(t_width, t_height, t_type) + + if r_type == "crop": + t_byte_source = yield make_deferred_yieldable( + threads.deferToThread, thumbnailer.crop, + r_width, r_height, r_type, + ) + else: + t_byte_source = yield make_deferred_yieldable( + threads.deferToThread, thumbnailer.scale, + r_width, r_height, r_type, ) - local_thumbnails.append(( - r_width, r_height, r_method, r_type, t_byte_source - )) + t_width, t_height = t_byte_source.dimensions - yield make_deferred_yieldable(threads.deferToThread, generate_thumbnails) - - for t_width, t_height, t_method, t_type, t_byte_source in local_thumbnails: - if url_cache: + if server_name: + file_path = self.filepaths.remote_media_thumbnail_rel( + server_name, file_id, t_width, t_height, t_type, t_method + ) + elif url_cache: file_path = self.filepaths.url_cache_thumbnail_rel( media_id, t_width, t_height, t_type, t_method ) @@ -481,61 +518,15 @@ class MediaRepository(object): output_path = yield self.write_to_file(t_byte_source, file_path) t_len = os.path.getsize(output_path) - yield self.store.store_local_thumbnail( - media_id, t_width, t_height, t_type, t_method, t_len - ) - - defer.returnValue({ - "width": m_width, - "height": m_height, - }) - - @defer.inlineCallbacks - def _generate_remote_thumbnails(self, server_name, media_id, media_info): - media_type = media_info["media_type"] - file_id = media_info["filesystem_id"] - requirements = self._get_thumbnail_requirements(media_type) - if not requirements: - return - - remote_thumbnails = [] - - input_path = self.filepaths.remote_media_filepath(server_name, file_id) - thumbnailer = Thumbnailer(input_path) - m_width = thumbnailer.width - m_height = thumbnailer.height - - def generate_thumbnails(): - if m_width * m_height >= self.max_image_pixels: - logger.info( - "Image too large to thumbnail %r x %r > %r", - m_width, m_height, 
self.max_image_pixels - ) - return - - for r_width, r_height, r_method, r_type in requirements: - t_byte_source = self._generate_thumbnail( - thumbnailer, r_width, r_height, r_method, r_type, - ) - - remote_thumbnails.append(( - r_width, r_height, r_method, r_type, t_byte_source - )) - - yield make_deferred_yieldable(threads.deferToThread, generate_thumbnails) - - for t_width, t_height, t_method, t_type, t_byte_source in remote_thumbnails: - file_path = self.filepaths.remote_media_thumbnail_rel( - server_name, file_id, t_width, t_height, t_type, t_method - ) - - output_path = yield self.write_to_file(t_byte_source, file_path) - t_len = os.path.getsize(output_path) - - yield self.store.store_remote_media_thumbnail( + if server_name: + yield self.store.store_remote_media_thumbnail( server_name, media_id, file_id, t_width, t_height, t_type, t_method, t_len ) + else: + yield self.store.store_local_thumbnail( + media_id, t_width, t_height, t_type, t_method, t_len + ) defer.returnValue({ "width": m_width, diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index e986e855a..c734f6b7c 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -171,8 +171,8 @@ class PreviewUrlResource(Resource): logger.debug("got media_info of '%s'" % media_info) if _is_media(media_info['media_type']): - dims = yield self.media_repo._generate_local_thumbnails( - media_info['filesystem_id'], media_info, url_cache=True, + dims = yield self.media_repo._generate_thumbnails( + None, media_info['filesystem_id'], media_info, url_cache=True, ) og = { @@ -217,8 +217,8 @@ class PreviewUrlResource(Resource): if _is_media(image_info['media_type']): # TODO: make sure we don't choke on white-on-transparent images - dims = yield self.media_repo._generate_local_thumbnails( - image_info['filesystem_id'], image_info, url_cache=True, + dims = yield self.media_repo._generate_thumbnails( + None, image_info['filesystem_id'], image_info, url_cache=True, ) if dims: og["og:image:width"] = dims['width'] diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py index e1ee535b9..09650bd52 100644 --- a/synapse/rest/media/v1/thumbnailer.py +++ b/synapse/rest/media/v1/thumbnailer.py @@ -54,7 +54,7 @@ class Thumbnailer(object): """Rescales the image to the given dimensions. Returns: - BytesIO: the bytes of the encoded image ready to be written to disk + ImageIO: the bytes of the encoded image ready to be written to disk """ scaled = self.image.resize((width, height), Image.ANTIALIAS) return self._encode_image(scaled, output_type) @@ -71,7 +71,7 @@ max_height: The largest possible height.
Returns: - BytesIO: the bytes of the encoded image ready to be written to disk + ImageIO: the bytes of the encoded image ready to be written to disk """ if width * self.height > height * self.width: scaled_height = (width * self.height) // self.width @@ -92,6 +92,13 @@ class Thumbnailer(object): return self._encode_image(cropped, output_type) def _encode_image(self, output_image, output_type): - output_bytes_io = BytesIO() + output_bytes_io = ImageIO(output_image.size) output_image.save(output_bytes_io, self.FORMATS[output_type], quality=80) + output_image.close() return output_bytes_io + + +class ImageIO(BytesIO): + def __init__(self, dimensions): + super(ImageIO, self).__init__() + self.dimensions = dimensions From 0e28281a021101ac199cbf2d0d130190110921bb Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 11:33:49 +0100 Subject: [PATCH 179/223] Fix up --- synapse/rest/media/v1/media_repository.py | 62 +++++++++++------------ synapse/rest/media/v1/thumbnailer.py | 13 ++--- 2 files changed, 32 insertions(+), 43 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 36f42c73b..a310d08f5 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -131,10 +131,9 @@ class MediaRepository(object): fname = os.path.join(self.primary_base_path, path) # Write to the main repository - yield make_deferred_yieldable( - threads.deferToThread, + yield make_deferred_yieldable(threads.deferToThread( self._write_file_synchronously, source, fname, - ) + )) # Write to backup repository yield self.copy_to_backup(source, path) @@ -157,11 +156,10 @@ class MediaRepository(object): # We can either wait for successful writing to the backup repository # or write in the background and immediately return if self.synchronous_backup_media_store: - yield make_deferred_yieldable( - threads.deferToThread, + yield make_deferred_yieldable(threads.deferToThread( self._write_file_synchronously, source, backup_fname, close_source=True, - ) + )) else: preserve_fn(threads.deferToThread)( self._write_file_synchronously, source, backup_fname, @@ -378,11 +376,10 @@ class MediaRepository(object): input_path = self.filepaths.local_media_filepath(media_id) thumbnailer = Thumbnailer(input_path) - t_byte_source = yield make_deferred_yieldable( - threads.deferToThread, + t_byte_source = yield make_deferred_yieldable(threads.deferToThread( self._generate_thumbnail, thumbnailer, t_width, t_height, t_method, t_type - ) + )) if t_byte_source: t_width, t_height = t_byte_source.dimensions @@ -409,11 +406,10 @@ class MediaRepository(object): input_path = self.filepaths.remote_media_filepath(server_name, file_id) thumbnailer = Thumbnailer(input_path) - t_byte_source = yield make_deferred_yieldable( - threads.deferToThread, + t_byte_source = yield make_deferred_yieldable(threads.deferToThread( self._generate_thumbnail, thumbnailer, t_width, t_height, t_method, t_type - ) + )) if t_byte_source: t_width, t_height = t_byte_source.dimensions @@ -478,34 +474,32 @@ class MediaRepository(object): thumbnails = {} for r_width, r_height, r_method, r_type in requirements: if r_method == "crop": - thumbnails.setdefault[(r_width, r_height)] = (r_method, r_type) + thumbnails.setdefault((r_width, r_height), (r_method, r_type)) elif r_method == "scale": - t_width, t_height = thumbnailer.aspect(t_width, t_height) + t_width, t_height = thumbnailer.aspect(r_width, r_height) t_width = min(m_width, t_width) t_height = min(m_height, t_height) 
thumbnails[(t_width, t_height)] = (r_method, r_type) # Now we generate the thumbnails for each dimension, store it - for (t_width, t_height), (t_method, t_type) in thumbnails.iteritems(): - t_byte_source = thumbnailer.crop(t_width, t_height, t_type) - - if r_type == "crop": - t_byte_source = yield make_deferred_yieldable( - threads.deferToThread, thumbnailer.crop, - r_width, r_height, r_type, - ) + for (t_width, t_height), (t_method, t_type) in thumbnails.iteritems(): + # Generate the thumbnail + if t_type == "crop": + t_byte_source = yield make_deferred_yieldable(threads.deferToThread( + thumbnailer.crop, + r_width, r_height, t_type, + )) else: - t_byte_source = yield make_deferred_yieldable( - threads.deferToThread, thumbnailer.scale, - r_width, r_height, r_type, - ) - - t_width, t_height = t_byte_source.dimensions + t_byte_source = yield make_deferred_yieldable(threads.deferToThread( + thumbnailer.scale, + r_width, r_height, t_type, + )) + # Work out the correct file name for thumbnail if server_name: file_path = self.filepaths.remote_media_thumbnail_rel( - server_name, file_id, t_width, t_height, t_type, t_method - ) + server_name, file_id, t_width, t_height, t_type, t_method + ) elif url_cache: file_path = self.filepaths.url_cache_thumbnail_rel( media_id, t_width, t_height, t_type, t_method @@ -515,14 +509,16 @@ class MediaRepository(object): media_id, t_width, t_height, t_type, t_method ) + # Write to disk output_path = yield self.write_to_file(t_byte_source, file_path) t_len = os.path.getsize(output_path) + # Write to database if server_name: yield self.store.store_remote_media_thumbnail( - server_name, media_id, file_id, - t_width, t_height, t_type, t_method, t_len - ) + server_name, media_id, file_id, + t_width, t_height, t_type, t_method, t_len + ) else: yield self.store.store_local_thumbnail( media_id, t_width, t_height, t_type, t_method, t_len diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py index 09650bd52..e1ee535b9 100644 --- a/synapse/rest/media/v1/thumbnailer.py +++ b/synapse/rest/media/v1/thumbnailer.py @@ -54,7 +54,7 @@ class Thumbnailer(object): """Rescales the image to the given dimensions. Returns: - ImageIO: the bytes of the encoded image ready to be written to disk + BytesIO: the bytes of the encoded image ready to be written to disk """ scaled = self.image.resize((width, height), Image.ANTIALIAS) return self._encode_image(scaled, output_type) @@ -71,7 +71,7 @@ max_height: The largest possible height.
Returns: - ImageIO: the bytes of the encoded image ready to be written to disk + BytesIO: the bytes of the encoded image ready to be written to disk """ if width * self.height > height * self.width: scaled_height = (width * self.height) // self.width @@ -92,13 +92,6 @@ class Thumbnailer(object): return self._encode_image(cropped, output_type) def _encode_image(self, output_image, output_type): - output_bytes_io = ImageIO(output_image.size) + output_bytes_io = BytesIO() output_image.save(output_bytes_io, self.FORMATS[output_type], quality=80) - output_image.close() return output_bytes_io - - -class ImageIO(BytesIO): - def __init__(self, dimensions): - super(ImageIO, self).__init__() - self.dimensions = dimensions From 9732ec6797339dd33bc472cef5081a858ddccb30 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 11:34:41 +0100 Subject: [PATCH 180/223] s/write_to_file/write_to_file_and_backup/ --- synapse/rest/media/v1/media_repository.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index a310d08f5..c9753ebb5 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -115,7 +115,7 @@ class MediaRepository(object): source.close() @defer.inlineCallbacks - def write_to_file(self, source, path): + def write_to_file_and_backup(self, source, path): """Write `source` to the on disk media store, and also the backup store if configured. @@ -185,7 +185,7 @@ class MediaRepository(object): """ media_id = random_string(24) - fname = yield self.write_to_file( + fname = yield self.write_to_file_and_backup( content, self.filepaths.local_media_filepath_rel(media_id) ) @@ -384,7 +384,7 @@ class MediaRepository(object): if t_byte_source: t_width, t_height = t_byte_source.dimensions - output_path = yield self.write_to_file( + output_path = yield self.write_to_file_and_backup( t_byte_source, self.filepaths.local_media_thumbnail_rel( media_id, t_width, t_height, t_type, t_method @@ -414,7 +414,7 @@ class MediaRepository(object): if t_byte_source: t_width, t_height = t_byte_source.dimensions - output_path = yield self.write_to_file( + output_path = yield self.write_to_file_and_backup( t_byte_source, self.filepaths.remote_media_thumbnail_rel( server_name, file_id, t_width, t_height, t_type, t_method @@ -510,7 +510,7 @@ class MediaRepository(object): ) # Write to disk - output_path = yield self.write_to_file(t_byte_source, file_path) + output_path = yield self.write_to_file_and_backup(t_byte_source, file_path) t_len = os.path.getsize(output_path) # Write to database From ae5d18617afd98bb5d51b43c2a12a99e9d96da39 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 11:35:44 +0100 Subject: [PATCH 181/223] Make things be absolute paths again --- synapse/rest/media/v1/filepath.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index 6923a3fbd..a3a15ac30 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -172,12 +172,12 @@ class MediaFilePaths(object): if NEW_FORMAT_ID_RE.match(media_id): return os.path.join( - "url_cache_thumbnails", + self.primary_base_path, "url_cache_thumbnails", media_id[:10], media_id[11:], ) else: return os.path.join( - "url_cache_thumbnails", + self.primary_base_path, "url_cache_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], ) @@ -188,26 +188,26 @@ class 
MediaFilePaths(object): if NEW_FORMAT_ID_RE.match(media_id): return [ os.path.join( - "url_cache_thumbnails", + self.primary_base_path, "url_cache_thumbnails", media_id[:10], media_id[11:], ), os.path.join( - "url_cache_thumbnails", + self.primary_base_path, "url_cache_thumbnails", media_id[:10], ), ] else: return [ os.path.join( - "url_cache_thumbnails", + self.primary_base_path, "url_cache_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], ), os.path.join( - "url_cache_thumbnails", + self.primary_base_path, "url_cache_thumbnails", media_id[0:2], media_id[2:4], ), os.path.join( - "url_cache_thumbnails", + self.primary_base_path, "url_cache_thumbnails", media_id[0:2], ), ] From 4d7e1dde70e6f2300ab83fb3208152f3d73bde71 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 11:36:32 +0100 Subject: [PATCH 182/223] Remove unnecessary diff --- synapse/rest/media/v1/media_repository.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index c9753ebb5..f06813c48 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -63,9 +63,7 @@ class MediaRepository(object): self.primary_base_path = hs.config.media_store_path self.filepaths = MediaFilePaths(self.primary_base_path) - self.backup_base_path = None - if hs.config.backup_media_store_path: - self.backup_base_path = hs.config.backup_media_store_path + self.backup_base_path = hs.config.backup_media_store_path self.synchronous_backup_media_store = hs.config.synchronous_backup_media_store From a675bd08bd1a016a16bd0e10547e8c26be391ee0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 11:41:06 +0100 Subject: [PATCH 183/223] Add paths back in... 
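The *_dir and *_dirs_to_delete helpers are not wrapped by _wrap_in_base_path; their return values are consumed directly when pruning empty url cache directories, so they have to return absolute paths themselves. A rough sketch of the consuming side (a hypothetical simplification of the expiry loop in preview_url_resource.py, with filepaths and media_id assumed in scope):

    import os

    for dir in filepaths.url_cache_filepath_dirs_to_delete(media_id):
        try:
            os.rmdir(dir)  # needs the absolute path; raises OSError if non-empty
        except OSError:
            pass  # directory not empty yet, leave it for a later expiry run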
--- synapse/rest/media/v1/filepath.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index a3a15ac30..fec0bbf57 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -100,7 +100,7 @@ class MediaFilePaths(object): def remote_media_thumbnail_dir(self, server_name, file_id): return os.path.join( - "remote_thumbnail", server_name, + self.primary_base_path, "remote_thumbnail", server_name, file_id[0:2], file_id[2:4], file_id[4:], ) @@ -125,18 +125,18 @@ class MediaFilePaths(object): if NEW_FORMAT_ID_RE.match(media_id): return [ os.path.join( - "url_cache", + self.primary_base_path, "url_cache", media_id[:10], ), ] else: return [ os.path.join( - "url_cache", + self.primary_base_path, "url_cache", media_id[0:2], media_id[2:4], ), os.path.join( - "url_cache", + self.primary_base_path, "url_cache", media_id[0:2], ), ] From 1f43d2239757db0b376a3582066190221942cddc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 11:42:07 +0100 Subject: [PATCH 184/223] Don't needlessly rename variable --- synapse/rest/media/v1/filepath.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py index fec0bbf57..d5164e47e 100644 --- a/synapse/rest/media/v1/filepath.py +++ b/synapse/rest/media/v1/filepath.py @@ -27,7 +27,7 @@ def _wrap_in_base_path(func): @functools.wraps(func) def _wrapped(self, *args, **kwargs): path = func(self, *args, **kwargs) - return os.path.join(self.primary_base_path, path) + return os.path.join(self.base_path, path) return _wrapped @@ -41,7 +41,7 @@ class MediaFilePaths(object): """ def __init__(self, primary_base_path): - self.primary_base_path = primary_base_path + self.base_path = primary_base_path def default_thumbnail_rel(self, default_top_level, default_sub_type, width, height, content_type, method): @@ -100,7 +100,7 @@ class MediaFilePaths(object): def remote_media_thumbnail_dir(self, server_name, file_id): return os.path.join( - self.primary_base_path, "remote_thumbnail", server_name, + self.base_path, "remote_thumbnail", server_name, file_id[0:2], file_id[2:4], file_id[4:], ) @@ -125,18 +125,18 @@ class MediaFilePaths(object): if NEW_FORMAT_ID_RE.match(media_id): return [ os.path.join( - self.primary_base_path, "url_cache", + self.base_path, "url_cache", media_id[:10], ), ] else: return [ os.path.join( - self.primary_base_path, "url_cache", + self.base_path, "url_cache", media_id[0:2], media_id[2:4], ), os.path.join( - self.primary_base_path, "url_cache", + self.base_path, "url_cache", media_id[0:2], ), ] @@ -172,12 +172,12 @@ class MediaFilePaths(object): if NEW_FORMAT_ID_RE.match(media_id): return os.path.join( - self.primary_base_path, "url_cache_thumbnails", + self.base_path, "url_cache_thumbnails", media_id[:10], media_id[11:], ) else: return os.path.join( - self.primary_base_path, "url_cache_thumbnails", + self.base_path, "url_cache_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], ) @@ -188,26 +188,26 @@ class MediaFilePaths(object): if NEW_FORMAT_ID_RE.match(media_id): return [ os.path.join( - self.primary_base_path, "url_cache_thumbnails", + self.base_path, "url_cache_thumbnails", media_id[:10], media_id[11:], ), os.path.join( - self.primary_base_path, "url_cache_thumbnails", + self.base_path, "url_cache_thumbnails", media_id[:10], ), ] else: return [ os.path.join( - self.primary_base_path, 
"url_cache_thumbnails", + self.base_path, "url_cache_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], ), os.path.join( - self.primary_base_path, "url_cache_thumbnails", + self.base_path, "url_cache_thumbnails", media_id[0:2], media_id[2:4], ), os.path.join( - self.primary_base_path, "url_cache_thumbnails", + self.base_path, "url_cache_thumbnails", media_id[0:2], ), ] From c021c39cbd0bf1f0a85c9699275600ac35aa9ec4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 13:46:53 +0100 Subject: [PATCH 185/223] Remove spurious addition --- synapse/rest/media/v1/media_repository.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index f06813c48..3b8fe5ddb 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -380,8 +380,6 @@ class MediaRepository(object): )) if t_byte_source: - t_width, t_height = t_byte_source.dimensions - output_path = yield self.write_to_file_and_backup( t_byte_source, self.filepaths.local_media_thumbnail_rel( @@ -410,8 +408,6 @@ class MediaRepository(object): )) if t_byte_source: - t_width, t_height = t_byte_source.dimensions - output_path = yield self.write_to_file_and_backup( t_byte_source, self.filepaths.remote_media_thumbnail_rel( From ad1911bbf46f658ee343ee26e3011b3f1bcbd572 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 13:47:05 +0100 Subject: [PATCH 186/223] Comment --- synapse/rest/media/v1/media_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 3b8fe5ddb..700fd0dd2 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -121,7 +121,7 @@ class MediaRepository(object): Args: source: A file like object that should be written - path(str): Relative path to write file to + path (str): Relative path to write file to Returns: Deferred[str]: the file path written to in the primary media store From 31aa7bd8d1748548b2523f58b348bb6787dcc019 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 13:47:38 +0100 Subject: [PATCH 187/223] Move type into key --- synapse/rest/media/v1/media_repository.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 700fd0dd2..dee834389 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -468,15 +468,15 @@ class MediaRepository(object): thumbnails = {} for r_width, r_height, r_method, r_type in requirements: if r_method == "crop": - thumbnails.setdefault((r_width, r_height), (r_method, r_type)) + thumbnails.setdefault((r_width, r_height,r_type), r_method) elif r_method == "scale": t_width, t_height = thumbnailer.aspect(r_width, r_height) t_width = min(m_width, t_width) t_height = min(m_height, t_height) - thumbnails[(t_width, t_height)] = (r_method, r_type) + thumbnails[(t_width, t_height, r_type)] = r_method # Now we generate the thumbnails for each dimension, store it - for (t_width, t_height), (t_method, t_type) in thumbnails.iteritems(): + for (t_width, t_height, t_type), t_method in thumbnails.iteritems(): # Generate the thumbnail if t_type == "crop": t_byte_source = yield make_deferred_yieldable(threads.deferToThread( From 931fc43cc840f40402c78e9478cf3adac6559b27 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: 
Fri, 13 Oct 2017 13:54:19 +0100 Subject: [PATCH 188/223] fix copyright to companies which actually exist(ed) --- docs/sphinx/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sphinx/conf.py b/docs/sphinx/conf.py index 15c19834f..06e1b3c33 100644 --- a/docs/sphinx/conf.py +++ b/docs/sphinx/conf.py @@ -50,7 +50,7 @@ master_doc = 'index' # General information about the project. project = u'Synapse' -copyright = u'2014, TNG' +copyright = u'Copyright 2014-2017 OpenMarket, 2017 Vector Creations Ltd, 2017 New Vector Ltd' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the From b92a8e6e4aa2283daaa4e6050f1dbd503ddc9434 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 13:58:57 +0100 Subject: [PATCH 189/223] PEP8 --- synapse/rest/media/v1/media_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index dee834389..d2ac0175d 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -468,7 +468,7 @@ class MediaRepository(object): thumbnails = {} for r_width, r_height, r_method, r_type in requirements: if r_method == "crop": - thumbnails.setdefault((r_width, r_height,r_type), r_method) + thumbnails.setdefault((r_width, r_height, r_type), r_method) elif r_method == "scale": t_width, t_height = thumbnailer.aspect(r_width, r_height) t_width = min(m_width, t_width) From 2b24416e90b0bf1ee6d29cfc384670f4eeca0ced Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 14:11:34 +0100 Subject: [PATCH 190/223] Don't reuse source but instead copy from primary media store to backup --- synapse/rest/media/v1/media_repository.py | 28 ++++++------------- synapse/rest/media/v1/preview_url_resource.py | 3 +- 2 files changed, 9 insertions(+), 22 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index d2ac0175d..e32a67e16 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -95,7 +95,7 @@ class MediaRepository(object): os.makedirs(dirname) @staticmethod - def _write_file_synchronously(source, fname, close_source=False): + def _write_file_synchronously(source, fname): """Write `source` to the path `fname` synchronously. Should be called from a thread. @@ -109,16 +109,11 @@ class MediaRepository(object): with open(fname, "wb") as f: shutil.copyfileobj(source, f) - if close_source: - source.close() - @defer.inlineCallbacks def write_to_file_and_backup(self, source, path): """Write `source` to the on disk media store, and also the backup store if configured. - Will close source once finished. - Args: source: A file like object that should be written path (str): Relative path to write file to @@ -134,37 +129,31 @@ class MediaRepository(object): )) # Write to backup repository - yield self.copy_to_backup(source, path) + yield self.copy_to_backup(path) defer.returnValue(fname) @defer.inlineCallbacks - def copy_to_backup(self, source, path): - """Copy file like object source to the backup media store, if configured. - - Will close source after its done. + def copy_to_backup(self, path): + """Copy a file from the primary to backup media store, if configured. 
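+ + The copy runs on a background thread unless synchronous_backup_media_store is set, in which case we wait for it to complete.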
Args: - source: A file like object that should be written path(str): Relative path to write file to """ if self.backup_base_path: + primary_fname = os.path.join(self.primary_base_path, path) backup_fname = os.path.join(self.backup_base_path, path) # We can either wait for successful writing to the backup repository # or write in the background and immediately return if self.synchronous_backup_media_store: yield make_deferred_yieldable(threads.deferToThread( - self._write_file_synchronously, source, backup_fname, - close_source=True, + shutil.copyfile, primary_fname, backup_fname, )) else: preserve_fn(threads.deferToThread)( - self._write_file_synchronously, source, backup_fname, - close_source=True, + shutil.copyfile, primary_fname, backup_fname, ) - else: - source.close() @defer.inlineCallbacks def create_content(self, media_type, upload_name, content, content_length, @@ -280,8 +269,7 @@ class MediaRepository(object): server_name, media_id) raise SynapseError(502, "Failed to fetch remote media") - # Will close the file after its done - yield self.copy_to_backup(open(fname), fpath) + yield self.copy_to_backup(fpath) media_type = headers["Content-Type"][0] time_now_ms = self.clock.time_msec() diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index c734f6b7c..2a3e37fdf 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -275,8 +275,7 @@ class PreviewUrlResource(Resource): ) # FIXME: pass through 404s and other error messages nicely - # Will close the file after its done - yield self.media_repo.copy_to_backup(open(fname), fpath) + yield self.media_repo.copy_to_backup(fpath) media_type = headers["Content-Type"][0] time_now_ms = self.clock.time_msec() From 64665b57d0902c44235994d9cbf16ae61a784ab1 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Fri, 13 Oct 2017 14:26:07 +0100 Subject: [PATCH 191/223] oops --- docs/sphinx/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sphinx/conf.py b/docs/sphinx/conf.py index 06e1b3c33..0b15bd891 100644 --- a/docs/sphinx/conf.py +++ b/docs/sphinx/conf.py @@ -50,7 +50,7 @@ master_doc = 'index' # General information about the project. 
project = u'Synapse' -copyright = u'Copyright 2014-2017 OpenMarket, 2017 Vector Creations Ltd, 2017 New Vector Ltd' +copyright = u'Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the From 6b725cf56aaf090f9cc9d5409dec7912feae8869 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 15:23:41 +0100 Subject: [PATCH 192/223] Remove old comment --- synapse/rest/media/v1/media_repository.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index e32a67e16..cc267d0c1 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -102,7 +102,6 @@ class MediaRepository(object): Args: source: A file like object to be written fname (str): Path to write to - close_source (bool): Whether to close source after writing """ MediaRepository._makedirs(fname) source.seek(0) # Ensure we read from the start of the file From 1b6b0b1e66ab1cd5682ad1fa99020474afd6db7b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 13 Oct 2017 15:34:08 +0100 Subject: [PATCH 193/223] Add try/finally block to close t_byte_source --- synapse/rest/media/v1/media_repository.py | 65 ++++++++++++++--------- 1 file changed, 41 insertions(+), 24 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index cc267d0c1..515b3d3e7 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -367,12 +367,16 @@ class MediaRepository(object): )) if t_byte_source: - output_path = yield self.write_to_file_and_backup( - t_byte_source, - self.filepaths.local_media_thumbnail_rel( - media_id, t_width, t_height, t_type, t_method + try: + output_path = yield self.write_to_file_and_backup( + t_byte_source, + self.filepaths.local_media_thumbnail_rel( + media_id, t_width, t_height, t_type, t_method + ) ) - ) + finally: + t_byte_source.close() + logger.info("Stored thumbnail in file %r", output_path) t_len = os.path.getsize(output_path) @@ -395,12 +399,16 @@ class MediaRepository(object): )) if t_byte_source: - output_path = yield self.write_to_file_and_backup( - t_byte_source, - self.filepaths.remote_media_thumbnail_rel( - server_name, file_id, t_width, t_height, t_type, t_method + try: + output_path = yield self.write_to_file_and_backup( + t_byte_source, + self.filepaths.remote_media_thumbnail_rel( + server_name, file_id, t_width, t_height, t_type, t_method + ) ) - ) + finally: + t_byte_source.close() + logger.info("Stored thumbnail in file %r", output_path) t_len = os.path.getsize(output_path) @@ -464,18 +472,6 @@ class MediaRepository(object): # Now we generate the thumbnails for each dimension, store it for (t_width, t_height, t_type), t_method in thumbnails.iteritems(): - # Generate the thumbnail - if t_type == "crop": - t_byte_source = yield make_deferred_yieldable(threads.deferToThread( - thumbnailer.crop, - r_width, r_height, t_type, - )) - else: - t_byte_source = yield make_deferred_yieldable(threads.deferToThread( - thumbnailer.scale, - r_width, r_height, t_type, - )) - # Work out the correct file name for thumbnail if server_name: file_path = self.filepaths.remote_media_thumbnail_rel( @@ -490,8 +486,29 @@ class MediaRepository(object): media_id, t_width, t_height, t_type, t_method ) - # Write to disk - output_path = 
yield self.write_to_file_and_backup(t_byte_source, file_path) + # Generate the thumbnail + if t_type == "crop": + t_byte_source = yield make_deferred_yieldable(threads.deferToThread( + thumbnailer.crop, + r_width, r_height, t_type, + )) + else: + t_byte_source = yield make_deferred_yieldable(threads.deferToThread( + thumbnailer.scale, + r_width, r_height, t_type, + )) + + if not t_byte_source: + continue + + try: + # Write to disk + output_path = yield self.write_to_file_and_backup( + t_byte_source, file_path, + ) + finally: + t_byte_source.close() + t_len = os.path.getsize(output_path) # Write to database From 9342bcfce08c862ea5027e865b23bf2bd6ad3a8c Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 16 Oct 2017 13:38:10 +0100 Subject: [PATCH 194/223] Omit the *s for @room notifications They're just redundant --- synapse/push/baserules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 9dce99ebe..7a18afe5f 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -245,7 +245,7 @@ BASE_APPEND_OVERRIDE_RULES = [ { 'kind': 'event_match', 'key': 'content.body', - 'pattern': '*@room*', + 'pattern': '@room', '_id': '_roomnotif_content', }, { From 6079d0027aa79a6e2e41254ceda42206e307add7 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Mon, 16 Oct 2017 14:20:45 +0100 Subject: [PATCH 195/223] Log a warning when no profile for invited member And return empty profile --- synapse/handlers/groups_local.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 3b676d46b..97a20f2b0 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -313,8 +313,11 @@ class GroupsLocalHandler(object): self.notifier.on_new_event( "groups_key", token, users=[user_id], ) - - user_profile = yield self.profile_handler.get_profile(user_id) + try: + user_profile = yield self.profile_handler.get_profile(user_id) + except Exception as e: + logger.warn("No profile for user %s: %s", user_id, e) + user_profile = {} defer.returnValue({"state": "invite", "user_profile": user_profile}) From 2c5972f87f0541aaeff43846f7050ab91d11cf0e Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Mon, 16 Oct 2017 15:31:11 +0100 Subject: [PATCH 196/223] Implement GET /groups/$groupId/invited_users --- synapse/federation/transport/client.py | 13 ++++++++++ synapse/federation/transport/server.py | 18 ++++++++++++- synapse/groups/groups_server.py | 35 ++++++++++++++++++++++++++ synapse/handlers/groups_local.py | 17 +++++++++++++ synapse/rest/client/v2_alpha/groups.py | 21 ++++++++++++++++ synapse/storage/group_server.py | 12 +++++++++ 6 files changed, 115 insertions(+), 1 deletion(-) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index f96561c1f..125d8f359 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -550,6 +550,19 @@ class TransportLayerClient(object): ignore_backoff=True, ) + @log_function + def get_invited_users_in_group(self, destination, group_id, requester_user_id): + """Get users that have been invited to a group + """ + path = PREFIX + "/groups/%s/invited_users" % (group_id,) + + return self.client.get_json( + destination=destination, + path=path, + args={"requester_user_id": requester_user_id}, + ignore_backoff=True, + ) + @log_function def accept_group_invite(self, destination, group_id, user_id, content): """Accept a group 
invite diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index c7565e073..625a2fe27 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -720,6 +720,22 @@ class FederationGroupsUsersServlet(BaseFederationServlet): defer.returnValue((200, new_content)) +class FederationGroupsInvitedUsersServlet(BaseFederationServlet): + """Get the users that have been invited to a group + """ + PATH = "/groups/(?P[^/]*)/invited_users$" + + @defer.inlineCallbacks + def on_GET(self, origin, content, query, group_id): + requester_user_id = parse_string_from_args(query, "requester_user_id") + if get_domain_from_id(requester_user_id) != origin: + raise SynapseError(403, "requester_user_id doesn't match origin") + + new_content = yield self.handler.get_invited_users_in_group( + group_id, requester_user_id + ) + + defer.returnValue((200, new_content)) class FederationGroupsInviteServlet(BaseFederationServlet): """Ask a group server to invite someone to the group @@ -1109,12 +1125,12 @@ ROOM_LIST_CLASSES = ( PublicRoomList, ) - GROUP_SERVER_SERVLET_CLASSES = ( FederationGroupsProfileServlet, FederationGroupsSummaryServlet, FederationGroupsRoomsServlet, FederationGroupsUsersServlet, + FederationGroupsInvitedUsersServlet, FederationGroupsInviteServlet, FederationGroupsAcceptInviteServlet, FederationGroupsRemoveUserServlet, diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 1083bc299..bfa46b7cb 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -420,6 +420,41 @@ class GroupsServerHandler(object): "total_user_count_estimate": len(user_results), }) + @defer.inlineCallbacks + def get_invited_users_in_group(self, group_id, requester_user_id): + """Get the users that have been invited to a group as seen by requester_user_id. 
+ + The ordering is arbitrary at the moment + """ + + yield self.check_group_is_ours(group_id, and_exists=True) + + is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id) + + if not is_user_in_group: + raise SynapseError(403, "User not in group") + + invited_users = yield self.store.get_invited_users_in_group(group_id) + + user_profiles = [] + + for user_id in invited_users: + user_profile = { + "user_id": user_id + } + try: + profile = yield self.profile_handler.get_profile_from_cache(user) + user_profile.update(profile) + except Exception as e: + pass + user_profiles.append(user_profile) + + defer.returnValue({ + "chunk": user_profiles, + "total_user_count_estimate": len(invited_users), + }) + + @defer.inlineCallbacks def get_rooms_in_group(self, group_id, requester_user_id): """Get the rooms in group as seen by requester_user_id diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 97a20f2b0..5263e769b 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -219,6 +219,23 @@ class GroupsLocalHandler(object): defer.returnValue(res) + @defer.inlineCallbacks + def get_invited_users_in_group(self, group_id, requester_user_id): + """Get users invited to a group + """ + if self.is_mine_id(group_id): + res = yield self.groups_server_handler.get_invited_users_in_group( + group_id, requester_user_id + ) + defer.returnValue(res) + + group_server_name = get_domain_from_id(group_id) + + res = yield self.transport_client.get_users_in_group( + get_domain_from_id(group_id), group_id, requester_user_id, + ) + defer.returnValue(res) + @defer.inlineCallbacks def join_group(self, group_id, user_id, content): """Request to join a group diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 8f3ce15b0..4532112cf 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -370,6 +370,26 @@ class GroupUsersServlet(RestServlet): defer.returnValue((200, result)) +class GroupInvitedUsersServlet(RestServlet): + """Get users invited to a group + """ + PATTERNS = client_v2_patterns("/groups/(?P[^/]*)/invited_users$") + + def __init__(self, hs): + super(GroupInvitedUsersServlet, self).__init__() + self.auth = hs.get_auth() + self.clock = hs.get_clock() + self.groups_handler = hs.get_groups_local_handler() + + @defer.inlineCallbacks + def on_GET(self, request, group_id): + requester = yield self.auth.get_user_by_req(request) + user_id = requester.user.to_string() + + result = yield self.groups_handler.get_invited_users_in_group(group_id, user_id) + + defer.returnValue((200, result)) + class GroupCreateServlet(RestServlet): """Create a group @@ -674,6 +694,7 @@ class GroupsForUserServlet(RestServlet): def register_servlets(hs, http_server): GroupServlet(hs).register(http_server) GroupSummaryServlet(hs).register(http_server) + GroupInvitedUsersServlet(hs).register(http_server) GroupUsersServlet(hs).register(http_server) GroupRoomServlet(hs).register(http_server) GroupCreateServlet(hs).register(http_server) diff --git a/synapse/storage/group_server.py b/synapse/storage/group_server.py index 3af372de5..9e63db5c6 100644 --- a/synapse/storage/group_server.py +++ b/synapse/storage/group_server.py @@ -56,6 +56,18 @@ class GroupServerStore(SQLBaseStore): desc="get_users_in_group", ) + def get_invited_users_in_group(self, group_id): + # TODO: Pagination + + return self._simple_select_onecol( + table="group_invites", + keyvalues={ + "group_id": group_id, + 
}, + retcol="user_id", + desc="get_invited_users_in_group", + ) + def get_rooms_in_group(self, group_id, include_private=False): # TODO: Pagination From a3ac4f6b0ad9cce172678fa87c10a1100d16236f Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Mon, 16 Oct 2017 15:41:03 +0100 Subject: [PATCH 197/223] _create_rererouter for get_invited_users_in_group --- synapse/handlers/groups_local.py | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py index 5263e769b..6699d0888 100644 --- a/synapse/handlers/groups_local.py +++ b/synapse/handlers/groups_local.py @@ -68,6 +68,8 @@ class GroupsLocalHandler(object): update_group_profile = _create_rerouter("update_group_profile") get_rooms_in_group = _create_rerouter("get_rooms_in_group") + get_invited_users_in_group = _create_rerouter("get_invited_users_in_group") + add_room_to_group = _create_rerouter("add_room_to_group") remove_room_from_group = _create_rerouter("remove_room_from_group") @@ -219,23 +221,6 @@ class GroupsLocalHandler(object): defer.returnValue(res) - @defer.inlineCallbacks - def get_invited_users_in_group(self, group_id, requester_user_id): - """Get users invited to a group - """ - if self.is_mine_id(group_id): - res = yield self.groups_server_handler.get_invited_users_in_group( - group_id, requester_user_id - ) - defer.returnValue(res) - - group_server_name = get_domain_from_id(group_id) - - res = yield self.transport_client.get_users_in_group( - get_domain_from_id(group_id), group_id, requester_user_id, - ) - defer.returnValue(res) - @defer.inlineCallbacks def join_group(self, group_id, user_id, content): """Request to join a group From c43e8a97367e939cd1e0b7819095950a156c9f49 Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Mon, 16 Oct 2017 15:50:39 +0100 Subject: [PATCH 198/223] Make it work. 
Warn about lack of user profile --- synapse/groups/groups_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index bfa46b7cb..31fc71187 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -443,10 +443,10 @@ class GroupsServerHandler(object): "user_id": user_id } try: - profile = yield self.profile_handler.get_profile_from_cache(user) + profile = yield self.profile_handler.get_profile_from_cache(user_id) user_profile.update(profile) except Exception as e: - pass + logger.warn("Error getting profile for %s: %s", user_id, e) user_profiles.append(user_profile) defer.returnValue({ From 85f5674e44d695177cbff74e11b4ce6dac85d53a Mon Sep 17 00:00:00 2001 From: Luke Barnard Date: Mon, 16 Oct 2017 15:52:12 +0100 Subject: [PATCH 199/223] Delint --- synapse/federation/transport/server.py | 2 ++ synapse/groups/groups_server.py | 1 - synapse/rest/client/v2_alpha/groups.py | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 625a2fe27..6a0bd8d22 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -720,6 +720,7 @@ class FederationGroupsUsersServlet(BaseFederationServlet): defer.returnValue((200, new_content)) + class FederationGroupsInvitedUsersServlet(BaseFederationServlet): """Get the users that have been invited to a group """ @@ -737,6 +738,7 @@ class FederationGroupsInvitedUsersServlet(BaseFederationServlet): defer.returnValue((200, new_content)) + class FederationGroupsInviteServlet(BaseFederationServlet): """Ask a group server to invite someone to the group """ diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index 31fc71187..a3a500b9d 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -454,7 +454,6 @@ class GroupsServerHandler(object): "total_user_count_estimate": len(invited_users), }) - @defer.inlineCallbacks def get_rooms_in_group(self, group_id, requester_user_id): """Get the rooms in group as seen by requester_user_id diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py index 4532112cf..d11bccc1d 100644 --- a/synapse/rest/client/v2_alpha/groups.py +++ b/synapse/rest/client/v2_alpha/groups.py @@ -370,6 +370,7 @@ class GroupUsersServlet(RestServlet): defer.returnValue((200, result)) + class GroupInvitedUsersServlet(RestServlet): """Get users invited to a group """ From c05e6015cc522b838ab2db6bffd494b017cf8ec6 Mon Sep 17 00:00:00 2001 From: David Baker Date: Mon, 16 Oct 2017 17:57:27 +0100 Subject: [PATCH 200/223] Add config option to auto-join new users to rooms New users who register on the server will be dumped into all rooms in auto_join_rooms in the config. 
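As a rough illustration of the join path this adds: auto_join_rooms may hold either bare room IDs or room aliases, and aliases must be resolved through the directory before joining. A minimal sketch using the same RoomID/RoomAlias validators the patch relies on (classify_room_identifier is a hypothetical helper for illustration, not code from the patch):

    from synapse.types import RoomID, RoomAlias

    # Hypothetical helper, for illustration only.
    def classify_room_identifier(room_identifier):
        # "!abc123:example.com" is already a room ID: join it directly
        if RoomID.is_valid(room_identifier):
            return "room_id", room_identifier
        # "#example:example.com" is an alias: it must first be resolved to a
        # room ID (plus candidate remote hosts) via the directory
        elif RoomAlias.is_valid(room_identifier):
            return "room_alias", RoomAlias.from_string(room_identifier)
        else:
            raise ValueError(
                "%s was not legal room ID or room alias" % (room_identifier,)
            )

The join itself is then performed with a faked-up requester for the new user, and any failure to join one room is only logged, so registration itself still succeeds.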
--- synapse/config/registration.py | 6 +++++ synapse/rest/client/v2_alpha/register.py | 34 ++++++++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/synapse/config/registration.py b/synapse/config/registration.py index f7e03c4cd..9e2a6d1ae 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -41,6 +41,8 @@ class RegistrationConfig(Config): self.allow_guest_access and config.get("invite_3pid_guest", False) ) + self.auto_join_rooms = config.get("auto_join_rooms", []) + def default_config(self, **kwargs): registration_shared_secret = random_string_with_symbols(50) @@ -70,6 +72,10 @@ class RegistrationConfig(Config): - matrix.org - vector.im - riot.im + + # Users who register on this homeserver will automatically be joined to these rooms + #auto_join_rooms: + # - "#example:example.com" """ % locals() def add_arguments(self, parser): diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 1421c1815..d9a8cdbbb 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -17,8 +17,10 @@ from twisted.internet import defer import synapse +import synapse.types from synapse.api.auth import get_access_token_from_request, has_access_token from synapse.api.constants import LoginType +from synapse.types import RoomID, RoomAlias from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError from synapse.http.servlet import ( RestServlet, parse_json_object_from_request, assert_params_in_request, parse_string @@ -170,6 +172,7 @@ class RegisterRestServlet(RestServlet): self.auth_handler = hs.get_auth_handler() self.registration_handler = hs.get_handlers().registration_handler self.identity_handler = hs.get_handlers().identity_handler + self.room_member_handler = hs.get_handlers().room_member_handler self.device_handler = hs.get_device_handler() self.macaroon_gen = hs.get_macaroon_generator() @@ -340,6 +343,14 @@ class RegisterRestServlet(RestServlet): generate_token=False, ) + # auto-join the user to any rooms we're supposed to dump them into + fake_requester = synapse.types.create_requester(registered_user_id) + for r in self.hs.config.auto_join_rooms: + try: + yield self._join_user_to_room(fake_requester, r) + except Exception as e: + logger.error("Failed to join new user to %r: %r", r, e) + # remember that we've now registered that user account, and with # what user ID (since the user may not have specified) self.auth_handler.set_session_data( @@ -372,6 +383,29 @@ class RegisterRestServlet(RestServlet): def on_OPTIONS(self, _): return 200, {} + @defer.inlineCallbacks + def _join_user_to_room(self, requester, room_identifier): + room_id = None + if RoomID.is_valid(room_identifier): + room_id = room_identifier + elif RoomAlias.is_valid(room_identifier): + room_alias = RoomAlias.from_string(room_identifier) + room_id, remote_room_hosts = ( + yield self.room_member_handler.lookup_room_alias(room_alias) + ) + room_id = room_id.to_string() + else: + raise SynapseError(400, "%s was not legal room ID or room alias" % ( + room_identifier, + )) + + yield self.room_member_handler.update_membership( + requester=requester, + target=requester.user, + room_id=room_id, + action="join", + ) + @defer.inlineCallbacks def _do_appservice_registration(self, username, as_token, body): user_id = yield self.registration_handler.appservice_register( From a9c2e930ac455db13ce37a90b3ff0d93c0d1ea43 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 17 Oct 2017 10:13:13 +0100 Subject: 
[PATCH 201/223] pep8 --- synapse/config/registration.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 9e2a6d1ae..ef917fc9f 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -73,7 +73,8 @@ class RegistrationConfig(Config): - vector.im - riot.im - # Users who register on this homeserver will automatically be joined to these rooms + # Users who register on this homeserver will automatically be joined + # to these rooms #auto_join_rooms: # - "#example:example.com" """ % locals() From 33122c5a1b3642712546bc290d591077ce4cc847 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 17 Oct 2017 10:39:50 +0100 Subject: [PATCH 202/223] Fix test --- tests/rest/client/v2_alpha/test_register.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index b6173ab2e..821c73552 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -47,6 +47,7 @@ class RegisterRestServletTestCase(unittest.TestCase): self.hs.get_auth_handler = Mock(return_value=self.auth_handler) self.hs.get_device_handler = Mock(return_value=self.device_handler) self.hs.config.enable_registration = True + self.hs.config.auto_join_rooms = [] # init the thing we're testing self.servlet = RegisterRestServlet(self.hs) From 9b714abf357fbe16482a729933aa8e139ed278ad Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 17 Oct 2017 10:43:36 +0100 Subject: [PATCH 203/223] Remove dead class This isn't used anywhere. --- tests/storage/event_injector.py | 76 --------------------------------- 1 file changed, 76 deletions(-) delete mode 100644 tests/storage/event_injector.py diff --git a/tests/storage/event_injector.py b/tests/storage/event_injector.py deleted file mode 100644 index 024ac1506..000000000 --- a/tests/storage/event_injector.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2015, 2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from twisted.internet import defer - -from synapse.api.constants import EventTypes - - -class EventInjector: - def __init__(self, hs): - self.hs = hs - self.store = hs.get_datastore() - self.message_handler = hs.get_handlers().message_handler - self.event_builder_factory = hs.get_event_builder_factory() - - @defer.inlineCallbacks - def create_room(self, room, user): - builder = self.event_builder_factory.new({ - "type": EventTypes.Create, - "sender": user.to_string(), - "room_id": room.to_string(), - "content": {}, - }) - - event, context = yield self.message_handler._create_new_client_event( - builder - ) - - yield self.store.persist_event(event, context) - - @defer.inlineCallbacks - def inject_room_member(self, room, user, membership): - builder = self.event_builder_factory.new({ - "type": EventTypes.Member, - "sender": user.to_string(), - "state_key": user.to_string(), - "room_id": room.to_string(), - "content": {"membership": membership}, - }) - - event, context = yield self.message_handler._create_new_client_event( - builder - ) - - yield self.store.persist_event(event, context) - - defer.returnValue(event) - - @defer.inlineCallbacks - def inject_message(self, room, user, body): - builder = self.event_builder_factory.new({ - "type": EventTypes.Message, - "sender": user.to_string(), - "state_key": user.to_string(), - "room_id": room.to_string(), - "content": {"body": body, "msgtype": u"message"}, - }) - - event, context = yield self.message_handler._create_new_client_event( - builder - ) - - yield self.store.persist_event(event, context) From 5b5f35ccc0cdd74fe8c8902988da222a7b8e2a67 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 17 Oct 2017 10:52:31 +0100 Subject: [PATCH 204/223] Add some tests for make_deferred_yieldable --- tests/util/test_log_context.py | 38 ++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/tests/util/test_log_context.py b/tests/util/test_log_context.py index 9ffe209c4..e2f7765f4 100644 --- a/tests/util/test_log_context.py +++ b/tests/util/test_log_context.py @@ -94,3 +94,41 @@ class LoggingContextTestCase(unittest.TestCase): yield defer.succeed(None) return self._test_preserve_fn(nonblocking_function) + + @defer.inlineCallbacks + def test_make_deferred_yieldable(self): + # a function which retuns an incomplete deferred, but doesn't follow + # the synapse rules. 
+ def blocking_function(): + d = defer.Deferred() + reactor.callLater(0, d.callback, None) + return d + + sentinel_context = LoggingContext.current_context() + + with LoggingContext() as context_one: + context_one.test_key = "one" + + d1 = logcontext.make_deferred_yieldable(blocking_function()) + # make sure that the context was reset by make_deferred_yieldable + self.assertIs(LoggingContext.current_context(), sentinel_context) + + yield d1 + + # now it should be restored + self._check_test_key("one") + + @defer.inlineCallbacks + def test_make_deferred_yieldable_on_non_deferred(self): + """Check that make_deferred_yieldable does the right thing when its + argument isn't actually a deferred""" + + with LoggingContext() as context_one: + context_one.test_key = "one" + + d1 = logcontext.make_deferred_yieldable("bum") + self._check_test_key("one") + + r = yield d1 + self.assertEqual(r, "bum") + self._check_test_key("one") From a6ad8148b9d3058e316589a952744ecb15aa2057 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 17 Oct 2017 10:53:34 +0100 Subject: [PATCH 205/223] Fix name of test_logcontext The file under test is logcontext.py, not log_context.py --- tests/util/{test_log_context.py => test_logcontext.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/util/{test_log_context.py => test_logcontext.py} (100%) diff --git a/tests/util/test_log_context.py b/tests/util/test_logcontext.py similarity index 100% rename from tests/util/test_log_context.py rename to tests/util/test_logcontext.py From 2e9f5ea31a9c66eceb6276c5241cc6537cb0ae4c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 17 Oct 2017 10:59:30 +0100 Subject: [PATCH 206/223] Fix logcontext handling for persist_events * don't use preserve_context_over_deferred, which is known broken. * remove a redundant preserve_fn. * add/improve some comments --- synapse/storage/events.py | 24 +++++++++++++++++------- synapse/util/async.py | 5 +++++ 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 4f0b43c36..637640ec2 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -21,7 +21,7 @@ from synapse.events.utils import prune_event from synapse.util.async import ObservableDeferred from synapse.util.logcontext import ( - preserve_fn, PreserveLoggingContext, preserve_context_over_deferred + preserve_fn, PreserveLoggingContext, make_deferred_yieldable ) from synapse.util.logutils import log_function from synapse.util.metrics import Measure @@ -88,13 +88,23 @@ class _EventPeristenceQueue(object): def add_to_queue(self, room_id, events_and_contexts, backfilled): """Add events to the queue, with the given persist_event options. + NB: due to the normal usage pattern of this method, it does *not* + follow the synapse logcontext rules, and leaves the logcontext in + place whether or not the returned deferred is ready. + Args: room_id (str): events_and_contexts (list[(EventBase, EventContext)]): backfilled (bool): + + Returns: + defer.Deferred: a deferred which will resolve once the events are + persisted. Runs its callbacks *without* a logcontext. """ queue = self._event_persist_queues.setdefault(room_id, deque()) if queue: + # if the last item in the queue has the same `backfilled` setting, + # we can just add these new events to that item. 
end_item = queue[-1] if end_item.backfilled == backfilled: end_item.events_and_contexts.extend(events_and_contexts) @@ -113,11 +123,11 @@ class _EventPeristenceQueue(object): def handle_queue(self, room_id, per_item_callback): """Attempts to handle the queue for a room if not already being handled. - The given callback will be invoked with for each item in the queue,1 + The given callback will be invoked with for each item in the queue, of type _EventPersistQueueItem. The per_item_callback will continuously be called with new items, unless the queue becomnes empty. The return value of the function will be given to the deferreds waiting on the item, - exceptions will be passed to the deferres as well. + exceptions will be passed to the deferreds as well. This function should therefore be called whenever anything is added to the queue. @@ -233,7 +243,7 @@ class EventsStore(SQLBaseStore): deferreds = [] for room_id, evs_ctxs in partitioned.iteritems(): - d = preserve_fn(self._event_persist_queue.add_to_queue)( + d = self._event_persist_queue.add_to_queue( room_id, evs_ctxs, backfilled=backfilled, ) @@ -242,7 +252,7 @@ class EventsStore(SQLBaseStore): for room_id in partitioned: self._maybe_start_persisting(room_id) - return preserve_context_over_deferred( + return make_deferred_yieldable( defer.gatherResults(deferreds, consumeErrors=True) ) @@ -267,7 +277,7 @@ class EventsStore(SQLBaseStore): self._maybe_start_persisting(event.room_id) - yield preserve_context_over_deferred(deferred) + yield make_deferred_yieldable(deferred) max_persisted_id = yield self._stream_id_gen.get_current_token() defer.returnValue((event.internal_metadata.stream_ordering, max_persisted_id)) @@ -1526,7 +1536,7 @@ class EventsStore(SQLBaseStore): if not allow_rejected: rows[:] = [r for r in rows if not r["rejects"]] - res = yield preserve_context_over_deferred(defer.gatherResults( + res = yield make_deferred_yieldable(defer.gatherResults( [ preserve_fn(self._get_event_from_row)( row["internal_metadata"], row["json"], row["redacts"], diff --git a/synapse/util/async.py b/synapse/util/async.py index 0fd5b4252..a0a903947 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -53,6 +53,11 @@ class ObservableDeferred(object): Cancelling or otherwise resolving an observer will not affect the original ObservableDeferred. + + NB that it does not attempt to do anything with logcontexts; in general + you should probably make_deferred_yieldable the deferreds + returned by `observe`, and ensure that the original deferred runs its + callbacks in the sentinel logcontext. """ __slots__ = ["_deferred", "_observers", "_result"] From a6245478c82d8bd2c9abea34b9ec4a94ccc5ed09 Mon Sep 17 00:00:00 2001 From: Krombel Date: Tue, 17 Oct 2017 12:45:33 +0200 Subject: [PATCH 207/223] fix thumbnailing (#2548) in commit 0e28281a the code for thumbnailing got refactored and the renaming of this variables was not done correctly. 
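Concretely: inside the per-thumbnail loop the requested dimensions are the t_width/t_height unpacked from the requirements dict, while r_width/r_height only exist in the earlier loop that builds those requirements. A condensed sketch of the corrected loop, shown in its eventual form (a later patch in this series also fixes the dispatch to test t_method rather than t_type; "thumbnailer" stands in for the real Thumbnailer instance, and the deferToThread plumbing is omitted):

    for (t_width, t_height, t_type), t_method in thumbnails.iteritems():
        if t_method == "crop":
            # crop to exactly the requested dimensions
            t_byte_source = thumbnailer.crop(t_width, t_height, t_type)
        elif t_method == "scale":
            # scale, preserving aspect ratio
            t_byte_source = thumbnailer.scale(t_width, t_height, t_type)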
Signed-Off-by: Matthias Kesler --- synapse/rest/media/v1/media_repository.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 515b3d3e7..057c925b7 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -490,12 +490,12 @@ class MediaRepository(object): if t_type == "crop": t_byte_source = yield make_deferred_yieldable(threads.deferToThread( thumbnailer.crop, - r_width, r_height, t_type, + t_width, t_height, t_type, )) else: t_byte_source = yield make_deferred_yieldable(threads.deferToThread( thumbnailer.scale, - r_width, r_height, t_type, + t_width, t_height, t_type, )) if not t_byte_source: From 7216c76654bbb57bc0ebc27498de7eda247f8ffb Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 17 Oct 2017 14:46:17 +0100 Subject: [PATCH 208/223] Improve error handling for missing files (#2551) `os.path.exists` doesn't allow us to distinguish between permissions errors and the path actually not existing, which repeatedly confuses people. It also means that we try to overwrite existing key files, which is super-confusing. (cf issues #2455, #2379). Use os.stat instead. Also, don't recomemnd the the use of --generate-config, which screws everything up if you're using debian (cf #2455). --- synapse/config/_base.py | 36 ++++++++++++++++++++++++++---------- synapse/config/key.py | 8 ++++---- synapse/config/tls.py | 6 +++--- 3 files changed, 33 insertions(+), 17 deletions(-) diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 1ab5593c6..fa105bce7 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -81,22 +81,38 @@ class Config(object): def abspath(file_path): return os.path.abspath(file_path) if file_path else file_path + @classmethod + def path_exists(cls, file_path): + """Check if a file exists + + Unlike os.path.exists, this throws an exception if there is an error + checking if the file exists (for example, if there is a perms error on + the parent dir). + + Returns: + bool: True if the file exists; False if not. + """ + try: + os.stat(file_path) + return True + except OSError as e: + if e.errno != errno.ENOENT: + raise e + return False + @classmethod def check_file(cls, file_path, config_name): if file_path is None: raise ConfigError( "Missing config for %s." - " You must specify a path for the config file. You can " - "do this with the -c or --config-path option. " - "Adding --generate-config along with --server-name " - " will generate a config file at the given path." % (config_name,) ) - if not os.path.exists(file_path): + try: + os.stat(file_path) + except OSError as e: raise ConfigError( - "File %s config for %s doesn't exist." - " Try running again with --generate-config" - % (file_path, config_name,) + "Error accessing file '%s' (config for %s): %s" + % (file_path, config_name, e.strerror) ) return cls.abspath(file_path) @@ -248,7 +264,7 @@ class Config(object): " -c CONFIG-FILE\"" ) (config_path,) = config_files - if not os.path.exists(config_path): + if not cls.path_exists(config_path): if config_args.keys_directory: config_dir_path = config_args.keys_directory else: @@ -261,7 +277,7 @@ class Config(object): "Must specify a server_name to a generate config for." " Pass -H server.name." 
) - if not os.path.exists(config_dir_path): + if not cls.path_exists(config_dir_path): os.makedirs(config_dir_path) with open(config_path, "wb") as config_file: config_bytes, config = obj.generate_config( diff --git a/synapse/config/key.py b/synapse/config/key.py index 6ee643793..4b8fc063d 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -118,10 +118,9 @@ class KeyConfig(Config): signing_keys = self.read_file(signing_key_path, "signing_key") try: return read_signing_keys(signing_keys.splitlines(True)) - except Exception: + except Exception as e: raise ConfigError( - "Error reading signing_key." - " Try running again with --generate-config" + "Error reading signing_key: %s" % (str(e)) ) def read_old_signing_keys(self, old_signing_keys): @@ -141,7 +140,8 @@ class KeyConfig(Config): def generate_files(self, config): signing_key_path = config["signing_key_path"] - if not os.path.exists(signing_key_path): + + if not self.path_exists(signing_key_path): with open(signing_key_path, "w") as signing_key_file: key_id = "a_" + random_string(4) write_signing_keys( diff --git a/synapse/config/tls.py b/synapse/config/tls.py index e081840a8..247f18f45 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -126,7 +126,7 @@ class TlsConfig(Config): tls_private_key_path = config["tls_private_key_path"] tls_dh_params_path = config["tls_dh_params_path"] - if not os.path.exists(tls_private_key_path): + if not self.path_exists(tls_private_key_path): with open(tls_private_key_path, "w") as private_key_file: tls_private_key = crypto.PKey() tls_private_key.generate_key(crypto.TYPE_RSA, 2048) @@ -141,7 +141,7 @@ class TlsConfig(Config): crypto.FILETYPE_PEM, private_key_pem ) - if not os.path.exists(tls_certificate_path): + if not self.path_exists(tls_certificate_path): with open(tls_certificate_path, "w") as certificate_file: cert = crypto.X509() subject = cert.get_subject() @@ -159,7 +159,7 @@ class TlsConfig(Config): certificate_file.write(cert_pem) - if not os.path.exists(tls_dh_params_path): + if not self.path_exists(tls_dh_params_path): if GENERATE_DH_PARAMS: subprocess.check_call([ "openssl", "dhparam", From c2bd177ea0e58d9831167273faa9bf9155466513 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 17 Oct 2017 17:05:42 +0100 Subject: [PATCH 209/223] Fix 500 error when fields missing from power_levels event If the users or events keys were missing from a power_levels event, then we would throw 500s when trying to auth them. 
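The failure mode is plain None-propagation: content.get("users") returns None when the key is absent, and the later .keys() call then raises deep inside event auth. A two-line illustration of the defaulting pattern the fix applies (Python 2, matching the dict-keys concatenation in the diff):

    content = {}                          # power_levels content missing "users"
    old_list = content.get("users")       # None -> old_list.keys() raises
    old_list = content.get("users", {})   # {}   -> set({}.keys() + ...) is fine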
--- synapse/event_auth.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 4096c606f..9e746a28b 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -470,14 +470,14 @@ def _check_power_levels(event, auth_events): ("invite", None), ] - old_list = current_state.content.get("users") + old_list = current_state.content.get("users", {}) for user in set(old_list.keys() + user_list.keys()): levels_to_check.append( (user, "users") ) - old_list = current_state.content.get("events") - new_list = event.content.get("events") + old_list = current_state.content.get("events", {}) + new_list = event.content.get("events", {}) for ev_id in set(old_list.keys() + new_list.keys()): levels_to_check.append( (ev_id, "events") From 74f99f227cb700e41fc6c309398c0e5a6a42439a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 17 Oct 2017 19:46:56 +0100 Subject: [PATCH 210/223] Doc some more dynamic Homeserver methods --- synapse/server.pyi | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/synapse/server.pyi b/synapse/server.pyi index 9570df553..e8c0386b7 100644 --- a/synapse/server.pyi +++ b/synapse/server.pyi @@ -1,4 +1,6 @@ import synapse.api.auth +import synapse.federation.transaction_queue +import synapse.federation.transport.client import synapse.handlers import synapse.handlers.auth import synapse.handlers.device @@ -27,3 +29,9 @@ class HomeServer(object): def get_state_handler(self) -> synapse.state.StateHandler: pass + + def get_federation_sender(self) -> synapse.federation.transaction_queue.TransactionQueue: + pass + + def get_federation_transport_client(self) -> synapse.federation.transport.client.TransportLayerClient: + pass From 582bd19ee9de4850010b58e0bb0e2a93f01b2f44 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 17 Oct 2017 20:46:55 +0100 Subject: [PATCH 211/223] Fix 500 error when we get an error handling a PDU FederationServer doesn't have a send_failure (and nor does its subclass, ReplicationLayer), so this was failing. I'm not really sure what the idea behind send_failure is, given (a) we don't do anything at the other end with it except log it, and (b) we also send back the failure via the transaction response. I suspect there's a whole lot of dead code around it, but for now I'm just removing the broken bit. 
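For reference, the per-PDU error still reaches the sending server, because each failure is recorded against its event ID in the transaction response. A stripped-down sketch of the surviving error path (handle_pdu is a hypothetical stand-in for the real per-PDU handler):

    pdu_results = {}
    for event_id, event in events_by_id.items():
        try:
            handle_pdu(event)              # hypothetical; may raise FederationError
            pdu_results[event_id] = {}
        except FederationError as e:
            logger.warn("Error handling PDU %s: %s", event_id, e)
            pdu_results[event_id] = {"error": str(e)}
        except Exception as e:
            pdu_results[event_id] = {"error": str(e)}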
--- synapse/federation/federation_server.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index a8034bddc..e15228e70 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -192,7 +192,6 @@ class FederationServer(FederationBase): pdu_results[event_id] = {} except FederationError as e: logger.warn("Error handling PDU %s: %s", event_id, e) - self.send_failure(e, transaction.origin) pdu_results[event_id] = {"error": str(e)} except Exception as e: pdu_results[event_id] = {"error": str(e)} From 161a862ffb250b5387f1a2c57e07e999ba00571e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 10:17:43 +0100 Subject: [PATCH 212/223] Fix typo --- synapse/groups/attestations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 5ef7a12cb..02e2e1758 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -126,7 +126,7 @@ class GroupAttestionRenewer(object): ) @defer.inlineCallbacks - def _renew_attestation(self, group_id, user_id): + def _renew_attestation(group_id, user_id): attestation = self.attestations.create_attestation(group_id, user_id) if self.hs.is_mine_id(group_id): From bd5718d0ad2a9380ae292507a1022a230f8b2011 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 10:27:18 +0100 Subject: [PATCH 213/223] Fix typo in thumbnail generation --- synapse/rest/media/v1/media_repository.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 057c925b7..6b50b45b1 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -487,16 +487,19 @@ class MediaRepository(object): ) # Generate the thumbnail - if t_type == "crop": + if t_method == "crop": t_byte_source = yield make_deferred_yieldable(threads.deferToThread( thumbnailer.crop, t_width, t_height, t_type, )) - else: + elif t_method == "scale": t_byte_source = yield make_deferred_yieldable(threads.deferToThread( thumbnailer.scale, t_width, t_height, t_type, )) + else: + logger.error("Unrecognized method: %r", t_method) + continue if not t_byte_source: continue From 9ab859f27b177bbcd4fe0b3eb1bef0ce649ea41e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 10:55:44 +0100 Subject: [PATCH 214/223] Fix typo in group attestation handling --- synapse/federation/transport/server.py | 2 +- synapse/groups/attestations.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 6a0bd8d22..09b97138c 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -835,7 +835,7 @@ class FederationGroupsRenewAttestaionServlet(BaseFederationServlet): def on_POST(self, origin, content, query, group_id, user_id): # We don't need to check auth here as we check the attestation signatures - new_content = yield self.handler.on_renew_group_attestation( + new_content = yield self.handler.on_renew_attestation( origin, content, group_id, user_id ) diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index 02e2e1758..b751cf5e4 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -90,6 +90,7 @@ class GroupAttestionRenewer(object): self.assestations = 
hs.get_groups_attestation_signing() self.transport_client = hs.get_federation_transport_client() self.is_mine_id = hs.is_mine_id + self.attestations = hs.get_groups_attestation_signing() self._renew_attestations_loop = self.clock.looping_call( self._renew_attestations, 30 * 60 * 1000, @@ -129,7 +130,7 @@ class GroupAttestionRenewer(object): def _renew_attestation(group_id, user_id): attestation = self.attestations.create_attestation(group_id, user_id) - if self.hs.is_mine_id(group_id): + if self.is_mine_id(group_id): destination = get_domain_from_id(user_id) else: destination = get_domain_from_id(group_id) From 011d03a0f6519676998fd32e6b553f7a043e40a3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 11:22:48 +0100 Subject: [PATCH 215/223] Fix typo --- synapse/federation/transport/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 09b97138c..f0778c65c 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -836,7 +836,7 @@ class FederationGroupsRenewAttestaionServlet(BaseFederationServlet): # We don't need to check auth here as we check the attestation signatures new_content = yield self.handler.on_renew_attestation( - origin, content, group_id, user_id + group_id, user_id, content ) defer.returnValue((200, new_content)) From 513c23bfd90b9386f59dea96e5f1ccc609da1c03 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 12:01:01 +0100 Subject: [PATCH 216/223] Enforce sensible group IDs --- synapse/groups/groups_server.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index a3a500b9d..e9b44c097 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -16,10 +16,11 @@ from twisted.internet import defer from synapse.api.errors import SynapseError -from synapse.types import UserID, get_domain_from_id, RoomID +from synapse.types import UserID, get_domain_from_id, RoomID, GroupID import logging +import urllib logger = logging.getLogger(__name__) @@ -697,6 +698,8 @@ class GroupsServerHandler(object): def create_group(self, group_id, user_id, content): group = yield self.check_group_is_ours(group_id) + _validate_group_id(group_id) + logger.info("Attempting to create group with ID: %r", group_id) if group: raise SynapseError(400, "Group already exists") @@ -773,3 +776,18 @@ def _parse_visibility_from_contents(content): is_public = True return is_public + + +def _validate_group_id(group_id): + """Validates the group ID is valid for creation on this home server + """ + localpart = GroupID.from_string(group_id).localpart + + if localpart.lower() != localpart: + raise SynapseError(400, "Group ID must be lower case") + + if urllib.quote(localpart.encode('utf-8')) != localpart: + raise SynapseError( + 400, + "Group ID can only contain characters a-z, 0-9, or '_-./'", + ) From 29bafe2f7e82e48b9aad03fb23a790b3719faf78 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 12:13:44 +0100 Subject: [PATCH 217/223] Add config to enable group creation --- synapse/config/homeserver.py | 3 ++- synapse/groups/groups_server.py | 12 ++++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py index 3f9d9d5f8..05e242aef 100644 --- a/synapse/config/homeserver.py +++ b/synapse/config/homeserver.py @@ -35,6 +35,7 @@ 
from .emailconfig import EmailConfig from .workers import WorkerConfig from .push import PushConfig from .spam_checker import SpamCheckerConfig +from .groups import GroupsConfig class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, @@ -43,7 +44,7 @@ class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig, AppServiceConfig, KeyConfig, SAML2Config, CasConfig, JWTConfig, PasswordConfig, EmailConfig, WorkerConfig, PasswordAuthProviderConfig, PushConfig, - SpamCheckerConfig,): + SpamCheckerConfig, GroupsConfig,): pass diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index e9b44c097..c19d733d7 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -704,10 +704,18 @@ class GroupsServerHandler(object): if group: raise SynapseError(400, "Group already exists") - # TODO: Add config to enforce that only server admins can create rooms is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id)) if not is_admin: - raise SynapseError(403, "Only server admin can create group on this server") + if not self.hs.config.enable_group_creation: + raise SynapseError(403, "Only server admin can create group on this server") + localpart = GroupID.from_string(group_id).localpart + if not localpart.startswith(self.hs.config.group_creation_prefix): + raise SynapseError( + 400, + "Can only create groups with prefix %r on this server" % ( + self.hs.config.group_creation_prefix, + ), + ) profile = content.get("profile", {}) name = profile.get("name") From ffd3f1a7838eb12a30abe40831275f360b528d1f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 12:17:30 +0100 Subject: [PATCH 218/223] Add missing file... --- synapse/config/groups.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 synapse/config/groups.py diff --git a/synapse/config/groups.py b/synapse/config/groups.py new file mode 100644 index 000000000..7683a3753 --- /dev/null +++ b/synapse/config/groups.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from synapse.util.module_loader import load_module + +from ._base import Config + +from distutils.util import strtobool + + +class GroupsConfig(Config): + def read_config(self, config): + self.enable_group_creation = config.get("enable_group_creation", False) + self.group_creation_prefix = config.get("group_creation_prefix", "") + + def default_config(self, **kwargs): + return """\ + # Whether to allow non server admins to create groups on this server + enable_group_creation: false + + # If enabled, non server admins can only create groups with local parts + # starting with this prefix + # group_creation_prefix: "unofficial/" + """ From c7d46510d7a700fda9730d90b010e1e1e596c58e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 13:36:06 +0100 Subject: [PATCH 219/223] Flake8 --- synapse/config/groups.py | 4 ---- synapse/groups/groups_server.py | 6 ++++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/synapse/config/groups.py b/synapse/config/groups.py index 7683a3753..997fa2881 100644 --- a/synapse/config/groups.py +++ b/synapse/config/groups.py @@ -13,12 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.util.module_loader import load_module - from ._base import Config -from distutils.util import strtobool - class GroupsConfig(Config): def read_config(self, config): diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py index c19d733d7..fc4edb7f0 100644 --- a/synapse/groups/groups_server.py +++ b/synapse/groups/groups_server.py @@ -707,7 +707,9 @@ class GroupsServerHandler(object): is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id)) if not is_admin: if not self.hs.config.enable_group_creation: - raise SynapseError(403, "Only server admin can create group on this server") + raise SynapseError( + 403, "Only server admin can create group on this server", + ) localpart = GroupID.from_string(group_id).localpart if not localpart.startswith(self.hs.config.group_creation_prefix): raise SynapseError( @@ -715,7 +717,7 @@ class GroupsServerHandler(object): "Can only create groups with prefix %r on this server" % ( self.hs.config.group_creation_prefix, ), - ) + ) profile = content.get("profile", {}) name = profile.get("name") From 0ef0aeceacb2c1298561ab52e68a046cc78028de Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 11:49:14 +0100 Subject: [PATCH 220/223] Bump version and changelog --- CHANGES.rst | 35 +++++++++++++++++++++++++++++++++++ synapse/__init__.py | 2 +- 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index f1529e79b..aaa4a2c21 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,38 @@ +Changes in synapse v0.24.0-rc1 (2017-10-19) +=========================================== + +Features: + +* Add Group Server (PR #2352, #2363, #2374, #2377, #2378, #2382, #2410, #2426, + #2430, #2454, #2471, #2472, #2544) +* Add support for channel notifications (PR #2501) +* Add basic implementation of backup media store (PR #2538) +* Add config option to auto-join new users to rooms (PR #2545) + + +Changes: + +* Make the spam checker a module (PR #2474) +* Delete expired url cache data (PR #2478) +* Ignore incoming events for rooms that we have left (PR #2490) +* Allow spam checker to reject invites too (PR #2492) +* Add room creation checks to spam checker (PR #2495) +* Spam checking: add the invitee to user_may_invite (PR #2502) +* Process events from federation for different rooms in parallel 
(PR #2520) +* Allow error strings from spam checker (PR #2531) + + +Bug fixes: + +* Fix handling SERVFAILs when doing AAAA lookups for federation (PR #2477) +* Fix incompatibility with newer versions of ujson (PR #2483) Thanks to + @jeremycline! +* Fix notification keywords that start/end with non-word chars (PR #2500) +* Fix stack overflow and logcontexts from linearizer (PR #2532) +* Fix 500 error when fields missing from power_levels event (PR #2552) +* Fix 500 error when we get an error handling a PDU (PR #2553) + + Changes in synapse v0.23.1 (2017-10-02) ======================================= diff --git a/synapse/__init__.py b/synapse/__init__.py index bee4aba62..c867d1cfd 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.23.1" +__version__ = "0.24.0" From d6237859f6cc220f7a1e0b1d63e60421d14eb07f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 14:19:52 +0100 Subject: [PATCH 221/223] Update changelog --- CHANGES.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.rst b/CHANGES.rst index aaa4a2c21..85830d832 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -20,6 +20,7 @@ Changes: * Spam checking: add the invitee to user_may_invite (PR #2502) * Process events from federation for different rooms in parallel (PR #2520) * Allow error strings from spam checker (PR #2531) +* Improve error handling for missing files in config (PR #2551) Bug fixes: From 6070647774d335d36de640aefce7a291a111d908 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Oct 2017 16:40:20 +0100 Subject: [PATCH 222/223] Correctly bump version --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index c867d1cfd..d2480271f 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.24.0" +__version__ = "0.24.0-rc1" From 13e16cf3025136e17ad4a7a06fafd683b3aa0456 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 23 Oct 2017 13:13:31 +0100 Subject: [PATCH 223/223] Bump version and changelog --- CHANGES.rst | 6 ++++++ synapse/__init__.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 85830d832..80518b7ba 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,9 @@ +Changes in synapse v0.24.0 (2017-10-23) +======================================= + +No changes since v0.24.0-rc1 + + Changes in synapse v0.24.0-rc1 (2017-10-19) =========================================== diff --git a/synapse/__init__.py b/synapse/__init__.py index d2480271f..c867d1cfd 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.24.0-rc1" +__version__ = "0.24.0"
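A final note on the group-creation gating added in patches 216-218: a group ID's localpart must be lower case and must survive URL quoting unchanged, which restricts it to a-z, 0-9 and '_-./'. A hedged restatement of that check in isolation (illustrative only, mirroring _validate_group_id with the GroupID parsing elided; Python 2 urllib assumed):

    import urllib

    # Illustrative restatement of the check, not the patch's exact code.
    def validate_group_localpart(localpart):
        if localpart.lower() != localpart:
            raise ValueError("Group ID must be lower case")
        # urllib.quote leaves a-z, 0-9, '_', '-', '.' and '/' untouched;
        # any character it would percent-escape is therefore rejected
        if urllib.quote(localpart.encode('utf-8')) != localpart:
            raise ValueError(
                "Group ID can only contain characters a-z, 0-9, or '_-./'"
            )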